// zig fmt: off
const Root = @import("root.zig");
const Store = @import("./store.zig");
const std = @import("std");
const json_utils = @import("./json_utils.zig");
const errors = @import("./errors.zig");
const stb_image = @import("../stb_image/root.zig");
const RateLimit = @import("./ratelimit.zig");

const FetchError = errors.FetchError;
const assert = std.debug.assert;
const json = std.json;
const Allocator = std.mem.Allocator;
const AuthToken = Root.AuthToken;
const log = std.log.scoped(.api);
const ServerURL = std.BoundedArray(u8, 256);

const Equipment = @import("./schemas/equipment.zig");
const Item = @import("./schemas/item.zig");
const Craft = @import("./schemas/craft.zig");
const Status = @import("./schemas/status.zig");
const Character = @import("./schemas/character.zig");
const Monster = @import("./schemas/monster.zig");
const Resource = @import("./schemas/resource.zig");
const Position = @import("./schemas/position.zig");
const Map = @import("./schemas/map.zig");
const GEOrder = @import("./schemas/ge_order.zig");
const MoveResult = @import("./schemas/move_result.zig");
const SkillUsageResult = @import("./schemas/skill_usage_result.zig");
const EquipResult = @import("./schemas/equip_result.zig");
const UnequipResult = EquipResult;
const GatherResult = SkillUsageResult;
const CraftResult = SkillUsageResult;
const Image = Store.Image;

const Server = @This();

// Upper bound for a single HTTP response body; also backs the per-request
// FixedBufferAllocator used by `fetchJson`/`getImage`/`generateToken`.
// TODO: Figure out a way to more accurately pick a good 'self.fetch_buffer' size
const max_response_size = 1024 * 1024 * 16;

/// Scratch buffer (owned; allocated in `init`, freed in `deinit`) that all
/// request/response arena allocations are carved out of.
fetch_buffer: []u8,
client: std.http.Client,
ratelimits: RateLimit.CategoryArray,
server_url: ServerURL,
server_uri: std.Uri,
token: ?AuthToken = null,
store: *Store,

/// Creates a client pointed at `Root.api_url`. `store` must outlive the Server.
pub fn init(allocator: Allocator, store: *Store) !Server {
    const response_buffer = try allocator.alloc(u8, max_response_size);
    errdefer allocator.free(response_buffer);
    const now = std.time.milliTimestamp();
    // Limits gotten from https://docs.artifactsmmo.com/api_guide/rate_limits
    var ratelimits = RateLimit.CategoryArray.initFill(RateLimit{});
    ratelimits.set(.account_creation, RateLimit.init(now, 50, null, null));
    ratelimits.set(.token, RateLimit.init(now, 50, null, null));
    ratelimits.set(.data, RateLimit.init(now, 7200, 200, 16));
    ratelimits.set(.actions, RateLimit.init(now, 7200, 200, 5));
    return Server{
        .client = .{ .allocator = allocator },
        .server_url = try ServerURL.fromSlice(Root.api_url),
        .server_uri = try std.Uri.parse(Root.api_url),
        .store = store,
        .fetch_buffer = response_buffer,
        .ratelimits = ratelimits,
    };
}

pub fn deinit(self: *Server) void {
    const allocator = self.client.allocator;
    self.client.deinit();
    allocator.free(self.fetch_buffer);
}

/// Points the client at a different server.
/// FIX: the original passed the `ServerURL` bounded array itself (not a slice)
/// to `std.Uri.parse`, and parsed from a stack-local copy even though
/// `std.Uri` keeps slices into its input. We now validate first, store the
/// URL, and parse from the stored copy so the Uri's component slices stay
/// valid for the Server's lifetime.
pub fn setURL(self: *Server, url: []const u8) !void {
    _ = try std.Uri.parse(url); // validate before mutating any state
    self.server_url = try ServerURL.fromSlice(url);
    self.server_uri = try std.Uri.parse(self.server_url.slice());
}

/// Sets (or clears, with `null`) the bearer token used for authenticated calls.
pub fn setToken(self: *Server, token: ?[]const u8) !void {
    if (token) |t| {
        self.token = try AuthToken.fromSlice(t);
    } else {
        self.token = null;
    }
}

const FetchJsonOptions = struct {
    const QueryValue = struct {
        key: []const u8,
        value: []const u8,
    };
    method: std.http.Method,
    path: []const u8,
    ratelimit: RateLimit.Category,
    payload: ?[]const u8 = null,
    query: ?[]const QueryValue = null,
    page: ?u64 = null,
    page_size: ?u64 = null,
    /// When enabled, will iterate over all available pages
    paginated: bool = false,
};

pub const FetchJsonResult = struct {
    status: std.http.Status,
    body: ?json.Value = null,
};

/// Joins `pairs` into a `k=v&k=v` query string inside `buffer`.
/// NOTE(review): values are not percent-encoded — callers currently only pass
/// server-safe codes/numbers; confirm before passing user-typed strings.
fn formatQueryValues(buffer: *std.ArrayList(u8), pairs: []const FetchJsonOptions.QueryValue) ![]u8 {
    buffer.clearRetainingCapacity();
    for (0.., pairs) |i, pair| {
        if (i > 0) {
            try buffer.appendSlice("&");
        }
        try buffer.appendSlice(pair.key);
        try buffer.appendSlice("=");
        try buffer.appendSlice(pair.value);
    }
    return buffer.items;
}

/// Number of bytes needed to format any value of integer type `T` in decimal.
fn maxIntBufferSize(comptime T: type) usize {
    const max_int_size = std.fmt.count("{}", .{ std.math.maxInt(T) });
    const min_int_size = std.fmt.count("{}", .{ std.math.minInt(T) });
    return @max(max_int_size, min_int_size);
}

/// Thin logging/metrics wrapper around `std.http.Client.fetch`.
/// Increments the rate-limit counters for `ratelimit` exactly once per request
/// (callers must NOT increment again). Token-category responses are never
/// logged so credentials don't end up in debug output.
fn fetch(self: *Server, ratelimit: RateLimit.Category, options: std.http.Client.FetchOptions) !std.http.Client.FetchResult {
    log.debug("+---- fetch -----", .{});
    log.debug("| endpoint {} {}", .{ options.method orelse .GET, options.location.uri });
    if (options.payload) |payload| {
        log.debug("| payload {s}", .{payload});
    }
    const started_at = std.time.nanoTimestamp();
    const result = try self.client.fetch(options);
    const duration_ns = std.time.nanoTimestamp() - started_at;
    const ratelimit_obj = self.ratelimits.getPtr(ratelimit);
    ratelimit_obj.increment_counters();
    log.debug("| duration {d:.3}s", .{ @as(f64, @floatFromInt(duration_ns)) / std.time.ns_per_s });
    log.debug("| status {}", .{result.status});
    if (ratelimit != .token) {
        const body = switch (options.response_storage) {
            .static => |body| body.items,
            .dynamic => |body| body.items,
            .ignore => null,
        };
        // Don't dump binary image payloads into the log.
        const png_header = [_]u8{ 137, 80, 78, 71, 13, 10, 26, 10 };
        if (body) |b| {
            if (std.mem.startsWith(u8, b, &png_header)) {
                log.debug("| response ", .{});
            } else {
                log.debug("| response {s}", .{b});
            }
        }
    }
    return result;
}

/// Performs a JSON API request, optionally walking every page of a paginated
/// endpoint, and returns the `data` payload (an object for single requests, a
/// merged array for paginated ones). All allocations come from
/// `self.fetch_buffer` via a FixedBufferAllocator, so the returned
/// `json.Value` is only valid until the next request.
///
/// FIXES vs. original:
/// - `authorization_header_buffer` was declared inside a nested block while
///   `authorization_header.override` kept a slice into it that was used after
///   the block ended (stack-use-after-scope). The buffer is now function-scoped.
/// - rate-limit counters were incremented here AND inside `fetch`, double
///   counting every request; `fetch` is now the single owner of that increment.
fn fetchJson(self: *Server, options: FetchJsonOptions) FetchError!FetchJsonResult {
    var uri = self.server_uri;
    uri.path = .{ .raw = options.path };
    var fbs = std.heap.FixedBufferAllocator.init(self.fetch_buffer);
    const allocator = fbs.allocator();
    var result_status: std.http.Status = .ok;
    var result_body: ?json.Value = null;
    var current_page: u64 = options.page orelse 1;
    var total_pages: u64 = 1;
    var fetch_results = std.ArrayList(json.Value).init(allocator);
    // Must outlive the whole request loop: the header value below keeps a
    // slice into this buffer. + 7 is room for the "Bearer " prefix.
    var authorization_header_buffer: [Root.max_auth_token_size + 7]u8 = undefined;
    var authorization_header: std.http.Client.Request.Headers.Value = .default;
    if (self.token) |token_buff| {
        const token = token_buff.slice();
        authorization_header = .{ .override = std.fmt.bufPrint(&authorization_header_buffer, "Bearer {s}", .{token}) catch return FetchError.OutOfMemory };
    }
    var query_buffer = std.ArrayList(u8).init(allocator);
    // 2 pagination parameters ("page", "size") + the largest caller filter set.
    const max_query_parameters = 2 + @max(
        0,
        6, // self.getItems (ItemOptions has six optional filters)
    );
    while (true) : (current_page += 1) {
        // NOTE: The limit on this array was hand picked. If the limit is hit, it should be increased.
        var query = std.BoundedArray(FetchJsonOptions.QueryValue, max_query_parameters).init(0) catch unreachable;
        if (options.query) |additional_query| {
            query.appendSlice(additional_query) catch return error.OutOfMemory;
        }
        const page_buffer_size = comptime maxIntBufferSize(@TypeOf(current_page));
        var page_buffer: [page_buffer_size]u8 = undefined;
        const page_size_buffer_size = comptime maxIntBufferSize(@TypeOf(options.page_size.?));
        var page_size_buffer: [page_size_buffer_size]u8 = undefined;
        if (options.paginated) {
            const page_str = std.fmt.bufPrint(&page_buffer, "{}", .{current_page}) catch unreachable;
            query.append(.{ .key = "page", .value = page_str }) catch return error.OutOfMemory;
            if (options.page_size) |page_size| {
                const page_size_str = std.fmt.bufPrint(&page_size_buffer, "{}", .{page_size}) catch unreachable;
                query.append(.{ .key = "size", .value = page_size_str }) catch return error.OutOfMemory;
            }
        }
        if (query.len > 0) {
            uri.query = .{ .raw = try formatQueryValues(&query_buffer, query.slice()) };
        }
        var response_storage = std.ArrayList(u8).init(allocator);
        var opts = std.http.Client.FetchOptions{
            .method = options.method,
            .location = .{ .uri = uri },
            .payload = options.payload,
            .response_storage = .{ .dynamic = &response_storage },
        };
        opts.headers.authorization = authorization_header;
        // `fetch` increments the rate-limit counters; do not increment again here.
        const result = self.fetch(options.ratelimit, opts) catch return FetchError.RequestFailed;
        const response_body = response_storage.items;
        const status = @intFromEnum(result.status);
        if (status == errors.NotAuthenticated.code.?) {
            return FetchError.NotAuthenticated;
        } else if (status == errors.ServerUnavailable.code.?) {
            return FetchError.ServerUnavailable;
        } else if (status == errors.InvalidPayload.code.?) {
            return FetchError.InvalidPayload;
        } else if (status == errors.TooManyRequests.code.?) {
            return FetchError.TooManyRequests;
        } else if (status == errors.FatalError.code.?) {
            return FetchError.FatalError;
        } else if (result.status != .ok) {
            // Let the caller decide what a non-fatal, non-ok status means.
            return FetchJsonResult{ .status = result.status };
        }
        const parsed = json.parseFromSliceLeaky(json.Value, allocator, response_body, .{ .allocate = .alloc_if_needed }) catch return FetchError.ParseFailed;
        if (parsed != json.Value.object) {
            return FetchError.ParseFailed;
        }
        result_status = result.status;
        if (options.paginated) {
            const total_i64 = json_utils.getInteger(parsed.object, "total") orelse return FetchError.ParseFailed;
            if (total_i64 < 0) return FetchError.ParseFailed;
            try fetch_results.ensureTotalCapacity(@intCast(total_i64));
            const total_pages_i64 = json_utils.getInteger(parsed.object, "pages") orelse return FetchError.ParseFailed;
            if (total_pages_i64 < 0) return FetchError.ParseFailed;
            total_pages = @intCast(total_pages_i64);
            const page_results = json_utils.getArray(parsed.object, "data") orelse return FetchError.ParseFailed;
            fetch_results.appendSlice(page_results.items) catch return FetchError.OutOfMemory;
            if (current_page >= total_pages) break;
        } else {
            result_body = parsed.object.get("data");
            break;
        }
    }
    if (options.paginated) {
        result_body = json.Value{ .array = fetch_results };
    }
    return FetchJsonResult{ .status = result_status, .body = result_body };
}

/// Maps a non-ok HTTP status through the caller-supplied `parseError`.
/// Enforces at comptime that `parseError` is provided exactly when `Error`
/// is a custom (non-FetchError) error set.
fn handleFetchError(
    status: std.http.Status,
    Error: type,
    parseError: ?fn (status: std.http.Status) ?Error,
) ?Error {
    if (status != .ok) {
        if (Error != FetchError) {
            if (parseError == null) {
                @compileError("`parseError` must be defined, if `Error` is not `FetchError`");
            }
            if (parseError.?(status)) |error_value| {
                return error_value;
            }
        } else {
            if (parseError != null) {
                @compileError("`parseError` must be null");
            }
        }
    }
    return null;
}

/// Fetches a single JSON object and parses it with `parseObject`;
/// returns `null` on 404. (Signature continues on the next source line.)
fn fetchOptionalObject(
    self: *Server,
    Error:
type,
    parseError: ?fn (status: std.http.Status) ?Error,
    Result: type,
    parseObject: fn (store: *Store, obj: json.ObjectMap) anyerror!Result,
    fetchOptions: FetchJsonOptions,
) Error!?Result {
    // Endpoint-specific errors first, then the generic status triage.
    const result = try self.fetchJson(fetchOptions);
    if (handleFetchError(result.status, Error, parseError)) |error_value| return error_value;
    if (result.status == .not_found) return null;
    if (result.status != .ok) return FetchError.RequestFailed;
    if (result.body == null) return FetchError.ParseFailed;
    const body = json_utils.asObject(result.body.?) orelse return FetchError.ParseFailed;
    return parseObject(self.store, body) catch return FetchError.ParseFailed;
}

/// Like `fetchOptionalObject`, but treats "missing" (404) as a request failure.
fn fetchObject(
    self: *Server,
    Error: type,
    parseError: ?fn (status: std.http.Status) ?Error,
    Object: type,
    parseObject: fn (store: *Store, obj: json.ObjectMap) anyerror!Object,
    fetchOptions: FetchJsonOptions,
) Error!Object {
    const result = try self.fetchOptionalObject(Error, parseError, Object, parseObject, fetchOptions);
    return result orelse return FetchError.RequestFailed;
}

/// Fetches a JSON array, parsing each element with `parseObject`.
/// Returns `null` on 404. Caller owns the returned list (deinit it).
fn fetchOptionalArray(
    self: *Server,
    allocator: Allocator,
    Error: type,
    parseError: ?fn (status: std.http.Status) ?Error,
    Object: type,
    parseObject: fn (store: *Store, obj: json.ObjectMap) anyerror!Object,
    fetchOptions: FetchJsonOptions,
) Error!?std.ArrayList(Object) {
    const result = try self.fetchJson(fetchOptions);
    if (handleFetchError(result.status, Error, parseError)) |error_value| return error_value;
    if (result.status == .not_found) return null;
    if (result.status != .ok) return FetchError.RequestFailed;
    if (result.body == null) return FetchError.ParseFailed;

    var parsed_items = std.ArrayList(Object).init(allocator);
    errdefer parsed_items.deinit();
    const raw_items = json_utils.asArray(result.body.?) orelse return FetchError.ParseFailed;
    for (raw_items.items) |raw_item| {
        const item_obj = json_utils.asObject(raw_item) orelse return FetchError.ParseFailed;
        const parsed = parseObject(self.store, item_obj) catch return FetchError.ParseFailed;
        parsed_items.append(parsed) catch return FetchError.OutOfMemory;
    }
    return parsed_items;
}

/// Like `fetchOptionalArray`, but treats "missing" (404) as a request failure.
fn fetchArray(
    self: *Server,
    allocator: Allocator,
    Error: type,
    parseError: ?fn (status: std.http.Status) ?Error,
    Object: type,
    parseObject: fn (store: *Store, body: json.ObjectMap) anyerror!Object,
    fetchOptions: FetchJsonOptions,
) Error!std.ArrayList(Object) {
    const result = try self.fetchArray_inner(allocator, Error, parseError, Object, parseObject, fetchOptions);
    return result;
}

// One-line helper so `fetchArray` reads as a thin adapter over the optional form.
fn fetchArray_inner(
    self: *Server,
    allocator: Allocator,
    Error: type,
    parseError: ?fn (status: std.http.Status) ?Error,
    Object: type,
    parseObject: fn (store: *Store, body: json.ObjectMap) anyerror!Object,
    fetchOptions: FetchJsonOptions,
) Error!std.ArrayList(Object) {
    const maybe = try self.fetchOptionalArray(allocator, Error, parseError, Object, parseObject, fetchOptions);
    return maybe orelse return FetchError.RequestFailed;
}

pub const PrefetchOptions = struct {
    resources: bool = true,
    maps: bool = true,
    monsters: bool = true,
    items: bool = true,
    images: bool = false,
};

/// Pulls the selected datasets into `self.store`, discarding the id lists.
pub fn prefetch(self: *Server, allocator: std.mem.Allocator, opts: PrefetchOptions) !void {
    // TODO: Create a version of `getResources`, `getMonsters`, `getItems`, etc..
    // which don't need an allocator to be passed.
    // This is for cases when you only care that everything will be saved into the store.
// Each dataset is fetched purely for its side effect of populating the store;
// the returned id lists are freed immediately.
if (opts.resources) {
    const resource_ids = try self.getResources(allocator, .{});
    defer resource_ids.deinit();
}
if (opts.maps) {
    const map_list: std.ArrayList(Map) = try self.getMaps(allocator, .{});
    defer map_list.deinit();
}
if (opts.monsters) {
    const monster_ids = try self.getMonsters(allocator, .{});
    defer monster_ids.deinit();
}
if (opts.items) {
    const item_ids = try self.getItems(allocator, .{});
    defer item_ids.deinit();
}
if (opts.images) {
    // Map tile skins: only fetch images the store doesn't already have.
    for (self.store.maps.items) |map| {
        const skin: []const u8 = map.skin.slice();
        if (self.store.images.getId(.map, skin) == null) {
            _ = try self.getImage(.map, skin);
        }
    }
    // Character skins are a fixed enum, so walk it at comptime.
    inline for (std.meta.fields(Character.Skin)) |field| {
        const skin: Character.Skin = @enumFromInt(field.value);
        const skin_name = skin.toString();
        if (self.store.images.getId(.character, skin_name) == null) {
            _ = try self.getImage(.character, skin_name);
        }
    }
}
}

/// Like `prefetch`, but backed by an on-disk cache keyed on the server version.
/// A failed/stale cache read falls through to a fresh prefetch + save
/// (best-effort: cache errors are deliberately swallowed).
pub fn prefetchCached(self: *Server, allocator: std.mem.Allocator, absolute_cache_path: []const u8, opts: PrefetchOptions) !void {
    const status: Status = try self.getStatus();
    const version = status.version.slice();
    if (std.fs.openFileAbsolute(absolute_cache_path, .{})) |cache_file| {
        defer cache_file.close();
        if (self.store.load(allocator, version, cache_file.reader())) {
            return; // Saved store was loaded successfully
        } else |_| {}
    } else |_| {}
    try self.prefetch(allocator, opts);
    const cache_file = try std.fs.createFileAbsolute(absolute_cache_path, .{});
    defer cache_file.close();
    try self.store.save(version, cache_file.writer());
}

// ------------------------- Endpoints ------------------------

// https://api.artifactsmmo.com/docs/#/operations/get_status__get
pub fn getStatus(self: *Server) FetchError!Status {
    return try self.fetchObject(
        FetchError,
        null,
        Status,
        Status.parse,
        .{ .method = .GET, .ratelimit = .data, .path = "/" },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/get_my_characters_my_characters_get
pub fn getMyCharacters(self: *Server, allocator: Allocator) FetchError!std.ArrayList(Store.Id) {
    return try self.fetchArray(
        allocator,
        FetchError,
        null,
        Store.Id,
        Character.parseAndAppend,
        .{ .method = .GET, .path = "/my/characters", .ratelimit = .data },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/get_character_characters__name__get
pub fn getCharacter(self: *Server, name: []const u8) FetchError!?Store.Id {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += "/characters/".len;
        count += Character.max_name_size;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/characters/{s}", .{name}) catch return FetchError.InvalidPayload;
    return try self.fetchOptionalObject(
        FetchError,
        null,
        Store.Id,
        Character.parseAndAppend,
        .{ .method = .GET, .path = path, .ratelimit = .data },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/create_character_characters_create_post
pub fn createCharacter(self: *Server, name: []const u8, skin: Character.Skin) errors.CreateCharacterError!Store.Id {
    var payload_buffer: [128]u8 = undefined;
    const payload = std.fmt.bufPrint(&payload_buffer, "{{ \"name\":\"{s}\",\"skin\":\"{s}\" }}", .{ name, skin.toString() }) catch return FetchError.InvalidPayload;
    return try self.fetchObject(
        errors.CreateCharacterError,
        errors.parseCreateCharacterError,
        Store.Id,
        Character.parseAndAppend,
        .{ .method = .POST, .path = "/characters/create", .ratelimit = .actions, .payload = payload },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/delete_character_characters_delete_post
/// Deletes the character server-side, then drops it from the local store.
pub fn deleteCharacter(self: *Server, name: []const u8) errors.DeleteCharacterError!void {
    var payload_buffer: [64]u8 = undefined;
    const payload = std.fmt.bufPrint(&payload_buffer, "{{ \"name\":\"{s}\" }}", .{name}) catch return FetchError.InvalidPayload;
    _ = try self.fetchObject(
        errors.DeleteCharacterError,
        errors.parseDeleteCharacterError,
        Character,
        Character.parse,
        .{ .method = .POST, .path = "/characters/delete", .ratelimit = .actions, .payload = payload },
    );
    if (self.store.characters.getId(name)) |id| {
        _ =
self.store.characters.remove(id);
    }
}

// https://api.artifactsmmo.com/docs/#/operations/get_item_items__code__get
pub fn getItem(self: *Server, code: []const u8) FetchError!?Store.Id {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += 7; // "/items/"
        count += Item.max_code_size;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/items/{s}", .{code}) catch return FetchError.InvalidPayload;
    return try self.fetchOptionalObject(
        FetchError,
        null,
        Store.Id,
        Item.parseAndAppend,
        .{ .method = .GET, .path = path, .ratelimit = .data },
    );
}

pub const ItemOptions = struct {
    craft_material: ?[]const u8 = null,
    craft_skill: ?Craft.Skill = null,
    min_level: ?u64 = null,
    max_level: ?u64 = null,
    name: ?[]const u8 = null,
    type: ?Item.Type = null,
};

// https://api.artifactsmmo.com/docs/#/operations/get_all_items_items_get
/// Fetches (all pages of) the item list, filtered by `opts`, appending each
/// item to the store and returning the ids.
/// FIX: the original declared `ItemOptions.craft_skill` but never put it in
/// the query string, so that filter was silently ignored; it is now sent.
pub fn getItems(self: *Server, allocator: Allocator, opts: ItemOptions) FetchError!std.ArrayList(Store.Id) {
    const min_level_buffer_size = comptime maxIntBufferSize(@TypeOf(opts.min_level.?));
    var min_level_buffer: [min_level_buffer_size]u8 = undefined;
    const max_level_buffer_size = comptime maxIntBufferSize(@TypeOf(opts.max_level.?));
    var max_level_buffer: [max_level_buffer_size]u8 = undefined;
    // One slot per field of `ItemOptions`.
    var query = std.BoundedArray(FetchJsonOptions.QueryValue, 6).init(0) catch unreachable;
    if (opts.craft_material) |craft_material| {
        query.append(.{ .key = "craft_material", .value = craft_material }) catch unreachable;
    }
    if (opts.craft_skill) |craft_skill| {
        query.append(.{ .key = "craft_skill", .value = craft_skill.toString() }) catch unreachable;
    }
    if (opts.min_level) |min_level| {
        const min_level_str = std.fmt.bufPrint(&min_level_buffer, "{}", .{min_level}) catch unreachable;
        query.append(.{ .key = "min_level", .value = min_level_str }) catch unreachable;
    }
    if (opts.max_level) |max_level| {
        const max_level_str = std.fmt.bufPrint(&max_level_buffer, "{}", .{max_level}) catch unreachable;
        query.append(.{ .key = "max_level", .value = max_level_str }) catch unreachable;
    }
    if (opts.name) |name| {
        query.append(.{ .key = "name", .value = name }) catch unreachable;
    }
    if (opts.type) |item_type| {
        query.append(.{ .key = "type", .value = Item.Type.toString(item_type) }) catch unreachable;
    }
    return try self.fetchArray(
        allocator,
        FetchError,
        null,
        Store.Id,
        Item.parseAndAppend,
        .{ .method = .GET, .path = "/items", .ratelimit = .data, .paginated = true, .page_size = 100, .query = query.slice() },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/get_monster_monsters__code__get
pub fn getMonster(self: *Server, code: []const u8) FetchError!?Store.Id {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += 10; // "/monsters/"
        count += Monster.max_code_size;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/monsters/{s}", .{code}) catch return FetchError.InvalidPayload;
    return try self.fetchOptionalObject(
        FetchError,
        null,
        Store.Id,
        Monster.parseAndAppend,
        .{ .method = .GET, .path = path, .ratelimit = .data },
    );
}

pub const MonsterOptions = struct {
    drop: ?[]const u8 = null,
    max_level: ?u64 = null,
    min_level: ?u64 = null,
};

// https://api.artifactsmmo.com/docs/#/operations/get_all_monsters_monsters_get
/// Fetches (all pages of) the monster list, filtered by `opts`.
pub fn getMonsters(self: *Server, allocator: Allocator, opts: MonsterOptions) FetchError!std.ArrayList(Store.Id) {
    const min_level_buffer_size = comptime maxIntBufferSize(@TypeOf(opts.min_level.?));
    var min_level_buffer: [min_level_buffer_size]u8 = undefined;
    const max_level_buffer_size = comptime maxIntBufferSize(@TypeOf(opts.max_level.?));
    var max_level_buffer: [max_level_buffer_size]u8 = undefined;
    // One slot per field of `MonsterOptions`.
    var query = std.BoundedArray(FetchJsonOptions.QueryValue, 3).init(0) catch unreachable;
    if (opts.drop) |drop| {
        query.append(.{ .key = "drop", .value = drop }) catch unreachable;
    }
    if (opts.min_level) |min_level| {
        const min_level_str = std.fmt.bufPrint(&min_level_buffer, "{}", .{min_level}) catch unreachable;
        query.append(.{ .key = "min_level", .value = min_level_str }) catch unreachable;
    }
    if (opts.max_level) |max_level| {
const max_level_str = std.fmt.bufPrint(&max_level_buffer, "{}", .{max_level}) catch unreachable;
        query.append(.{ .key = "max_level", .value = max_level_str }) catch unreachable;
    }
    return try self.fetchArray(
        allocator,
        FetchError,
        null,
        Store.Id,
        Monster.parseAndAppend,
        .{ .method = .GET, .path = "/monsters", .ratelimit = .data, .paginated = true, .page_size = 100, .query = query.slice() },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/get_resource_resources__code__get
pub fn getResource(self: *Server, code: []const u8) FetchError!?Store.Id {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += "/resources/".len;
        count += Resource.max_code_size;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/resources/{s}", .{code}) catch return FetchError.InvalidPayload;
    return try self.fetchOptionalObject(
        FetchError,
        null,
        Store.Id,
        Resource.parseAndAppend,
        .{ .method = .GET, .path = path, .ratelimit = .data },
    );
}

pub const ResourceOptions = struct {
    drop: ?[]const u8 = null,
    max_level: ?u64 = null,
    min_level: ?u64 = null,
    skill: ?Resource.Skill = null,
};

// https://api.artifactsmmo.com/docs/#/operations/get_all_resources_resources_get
/// Fetches (all pages of) the resource list, filtered by `opts`.
pub fn getResources(self: *Server, allocator: Allocator, opts: ResourceOptions) FetchError!std.ArrayList(Store.Id) {
    const min_level_buffer_size = comptime maxIntBufferSize(@TypeOf(opts.min_level.?));
    var min_level_buffer: [min_level_buffer_size]u8 = undefined;
    const max_level_buffer_size = comptime maxIntBufferSize(@TypeOf(opts.max_level.?));
    var max_level_buffer: [max_level_buffer_size]u8 = undefined;
    // One slot per field of `ResourceOptions`.
    var query = std.BoundedArray(FetchJsonOptions.QueryValue, 4).init(0) catch unreachable;
    if (opts.drop) |drop| {
        query.append(.{ .key = "drop", .value = drop }) catch unreachable;
    }
    if (opts.min_level) |min_level| {
        const min_level_str = std.fmt.bufPrint(&min_level_buffer, "{}", .{min_level}) catch unreachable;
        query.append(.{ .key = "min_level", .value = min_level_str }) catch unreachable;
    }
    if (opts.max_level) |max_level| {
        const max_level_str = std.fmt.bufPrint(&max_level_buffer, "{}", .{max_level}) catch unreachable;
        query.append(.{ .key = "max_level", .value = max_level_str }) catch unreachable;
    }
    if (opts.skill) |skill| {
        query.append(.{ .key = "skill", .value = skill.toString() }) catch unreachable;
    }
    return try self.fetchArray(
        allocator,
        FetchError,
        null,
        Store.Id,
        Resource.parseAndAppend,
        .{ .method = .GET, .path = "/resources", .ratelimit = .data, .paginated = true, .page_size = 100, .query = query.slice() },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/get_map_maps__x___y__get
/// Fetches the map tile at `position`, stores it, and returns a copy.
// NOTE(review): `Map.parseAndAppend` is declared here to yield a `Position`
// (unlike the `Store.Id` used elsewhere) which is then used as the store
// lookup key — looks intentional, but verify against Map.parseAndAppend.
pub fn getMap(self: *Server, position: Position) FetchError!?Map {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += "/maps/".len;
        count += maxIntBufferSize(@TypeOf(position.x));
        count += "/".len;
        count += maxIntBufferSize(@TypeOf(position.y));
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/maps/{}/{}", .{ position.x, position.y }) catch return FetchError.InvalidPayload;
    const parsed_position = try self.fetchOptionalObject(
        FetchError,
        null,
        Position,
        Map.parseAndAppend,
        .{ .method = .GET, .path = path, .ratelimit = .data },
    );
    if (parsed_position) |pos| {
        return self.store.getMap(pos).?.*;
    }
    return null;
}

pub const MapOptions = struct {
    code: ?[]const u8 = null,
    type: ?Map.Content.Type = null,
};

// https://api.artifactsmmo.com/docs/#/operations/get_all_maps_maps_get
/// Fetches (all pages of) the map list, filtered by `opts`.
pub fn getMaps(self: *Server, allocator: Allocator, opts: MapOptions) FetchError!std.ArrayList(Map) {
    // One slot per field of `MapOptions`.
    var query = std.BoundedArray(FetchJsonOptions.QueryValue, 2).init(0) catch unreachable;
    if (opts.code) |code| {
        query.append(.{ .key = "content_code", .value = code }) catch unreachable;
    }
    if (opts.type) |content_type| {
        query.append(.{ .key = "content_type", .value = content_type.toString() }) catch unreachable;
    }
    return try self.fetchArray(
        allocator,
        FetchError,
        null,
        Map,
Map.parseAndAppendObject,
        .{ .method = .GET, .path = "/maps", .ratelimit = .data, .paginated = true, .page_size = 100, .query = query.slice() },
    );
}

// https://docs.artifactsmmo.com/resources/images
/// Downloads a PNG from the image CDN, decodes it, and stores the RGBA pixels
/// in the image store, returning the new image id.
pub fn getImage(self: *Server, category: Image.Category, code: []const u8) FetchError!Store.Id {
    const category_path = switch (category) {
        .character => "characters",
        .item => "items",
        .monster => "monsters",
        .map => "maps",
        .resource => "resources",
        .effect => "effects",
    };
    const path_buff_size = comptime blk: {
        var count = 0;
        count += "/images/".len;
        count += "characters".len; // the longest category segment
        count += "/".len;
        count += Image.max_code_size;
        count += ".png".len;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/images/{s}/{s}.png", .{ category_path, code }) catch return FetchError.InvalidPayload;
    // Images live on a separate host from the API.
    var uri = Root.images_uri;
    uri.path = .{ .raw = path };
    var fbs = std.heap.FixedBufferAllocator.init(self.fetch_buffer);
    const scratch = fbs.allocator();
    var response_storage = std.ArrayList(u8).init(scratch);
    const opts = std.http.Client.FetchOptions{
        .method = .GET,
        .location = .{ .uri = uri },
        .response_storage = .{ .dynamic = &response_storage },
    };
    const result = self.fetch(.data, opts) catch return FetchError.RequestFailed;
    const response_body = response_storage.items;
    if (result.status != .ok) return FetchError.RequestFailed;
    const image = stb_image.load(response_body) catch return FetchError.FatalError;
    defer image.deinit();
    // Reserve a slot in the store, then copy the decoded pixels into it.
    const image_id = self.store.images.append(category, code, image.width, image.height) catch return FetchError.OutOfMemory;
    const stored_rgba = self.store.images.getRGBA(image_id).?;
    @memcpy(stored_rgba, image.rgba);
    return image_id;
}

// https://api.artifactsmmo.com/docs/#/operations/get_ge_sell_order_grandexchange_orders__id__get
pub fn getGEOrder(self: *Server, id: []const u8) FetchError!?Store.Id {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += "/grandexchange/orders/".len;
        count += GEOrder.max_id_size;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/grandexchange/orders/{s}", .{id}) catch return FetchError.InvalidPayload;
    return try self.fetchOptionalObject(
        FetchError,
        null,
        Store.Id,
        GEOrder.parseAndAppend,
        .{ .method = .GET, .path = path, .ratelimit = .data },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/action_move_my__name__action_move_post
/// Moves `character` to `position` and applies the result to the store.
pub fn move(self: *Server, character: []const u8, position: Position) errors.MoveError!MoveResult {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += "/my/".len;
        count += Character.max_name_size;
        count += "/action/move".len;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/my/{s}/action/move", .{character}) catch return FetchError.InvalidPayload;
    var payload_buffer: [64]u8 = undefined;
    const payload = std.fmt.bufPrint(&payload_buffer, "{{ \"x\":{}, \"y\":{} }}", .{ position.x, position.y }) catch return FetchError.InvalidPayload;
    return try self.fetchObject(
        errors.MoveError,
        errors.parseMoveError,
        MoveResult,
        MoveResult.parseAndUpdate,
        .{ .method = .POST, .path = path, .ratelimit = .actions, .payload = payload },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/action_gathering_my__name__action_gathering_post
/// Gathers at the character's current tile and applies the result to the store.
pub fn gather(self: *Server, character: []const u8) errors.GatherError!GatherResult {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += "/my/".len;
        count += Character.max_name_size;
        count += "/action/gathering".len;
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/my/{s}/action/gathering", .{character}) catch return FetchError.InvalidPayload;
    return try self.fetchObject(
        errors.GatherError,
        errors.parseGatherError,
        GatherResult,
        GatherResult.parseAndUpdate,
        .{ .method = .POST, .path = path, .ratelimit
= .actions });
}

// https://api.artifactsmmo.com/docs/#/operations/action_crafting_my__name__action_crafting_post
/// Crafts `quantity` of `item` at the character's current workshop.
pub fn craft(self: *Server, character: []const u8, item: []const u8, quantity: u64) errors.CraftError!CraftResult {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += 4; // "/my/"
        count += Character.max_name_size;
        count += 16; // "/action/crafting"
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/my/{s}/action/crafting", .{character}) catch return FetchError.InvalidPayload;
    var payload_buff: [256]u8 = undefined;
    const payload = std.fmt.bufPrint(&payload_buff, "{{ \"code\":\"{s}\", \"quantity\":{} }}", .{ item, quantity }) catch return FetchError.InvalidPayload;
    return try self.fetchObject(
        errors.CraftError,
        errors.parseCraftError,
        CraftResult,
        CraftResult.parseAndUpdate,
        .{ .method = .POST, .path = path, .payload = payload, .ratelimit = .actions },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/action_equip_item_my__name__action_equip_post
/// Equips `quantity` of `item` into `slot`.
pub fn equip(self: *Server, character: []const u8, slot: Equipment.SlotId, item: []const u8, quantity: u64) errors.EquipError!EquipResult {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += 4; // "/my/"
        count += Character.max_name_size;
        count += 13; // "/action/equip"
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/my/{s}/action/equip", .{character}) catch return FetchError.InvalidPayload;
    var payload_buff: [256]u8 = undefined;
    const payload = std.fmt.bufPrint(
        &payload_buff,
        "{{ \"slot\":\"{s}\", \"code\":\"{s}\", \"quantity\":{} }}",
        .{ slot.toString(), item, quantity },
    ) catch return FetchError.InvalidPayload;
    return try self.fetchObject(
        errors.EquipError,
        errors.parseEquipError,
        EquipResult,
        EquipResult.parseAndUpdate,
        .{ .method = .POST, .path = path, .payload = payload, .ratelimit = .actions },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/action_unequip_item_my__name__action_unequip_post
/// Unequips `quantity` from `slot`.
pub fn unequip(self: *Server, character: []const u8, slot: Equipment.SlotId, quantity: u64) errors.UnequipError!UnequipResult {
    const path_buff_size = comptime blk: {
        var count = 0;
        count += 4; // "/my/"
        count += Character.max_name_size;
        count += 15; // "/action/unequip"
        break :blk count;
    };
    var path_buff: [path_buff_size]u8 = undefined;
    const path = std.fmt.bufPrint(&path_buff, "/my/{s}/action/unequip", .{character}) catch return FetchError.InvalidPayload;
    var payload_buff: [256]u8 = undefined;
    const payload = std.fmt.bufPrint(
        &payload_buff,
        "{{ \"slot\":\"{s}\", \"quantity\":{} }}",
        .{ slot.toString(), quantity },
    ) catch return FetchError.InvalidPayload;
    return try self.fetchObject(
        errors.UnequipError,
        errors.parseUnequipError,
        UnequipResult,
        UnequipResult.parseAndUpdate,
        .{ .method = .POST, .path = path, .payload = payload, .ratelimit = .actions },
    );
}

// https://api.artifactsmmo.com/docs/#/operations/generate_token_token_post
/// Exchanges username/password (HTTP Basic) for a bearer token.
/// Does NOT store the token; call `setToken` with the result.
/// FIX: JSON parse failures are now mapped to `FetchError.ParseFailed` for
/// consistency with `fetchJson`, instead of leaking the raw std.json error.
pub fn generateToken(self: *Server, username: []const u8, password: []const u8) !AuthToken {
    const base64_encoder = std.base64.standard.Encoder;
    var credentials_buffer: [256]u8 = undefined;
    const credentials = try std.fmt.bufPrint(&credentials_buffer, "{s}:{s}", .{ username, password });
    const max_encoded_size = comptime base64_encoder.calcSize(credentials_buffer.len);
    const max_authorization_header_size = comptime blk: {
        var sum: usize = 0;
        sum += 6; // "Basic "
        sum += max_encoded_size;
        break :blk sum;
    };
    // Build "Basic <base64(user:pass)>" in place.
    var authorization_header_buffer: [max_authorization_header_size]u8 = undefined;
    @memcpy(authorization_header_buffer[0..6], "Basic ");
    const encoded_credentials = base64_encoder.encode(authorization_header_buffer[6..], credentials);
    const authorization_header = authorization_header_buffer[0..(6 + encoded_credentials.len)];
    var fbs = std.heap.FixedBufferAllocator.init(self.fetch_buffer);
    const allocator = fbs.allocator();
    var uri = self.server_uri;
    uri.path = .{ .raw = "/token" };
    var response_storage = std.ArrayList(u8).init(allocator);
    var opts = std.http.Client.FetchOptions{
        .method = .POST,
        .location = .{ .uri = uri },
        .response_storage = .{ .dynamic = &response_storage },
    };
    opts.headers.authorization = .{ .override = authorization_header };
    // `.token` category also suppresses response logging in `fetch`,
    // keeping credentials/tokens out of debug output.
    const result = self.fetch(.token, opts) catch return FetchError.RequestFailed;
    const response_body = response_storage.items;
    if (result.status != .ok) {
        return FetchError.RequestFailed;
    }
    const parsed = json.parseFromSliceLeaky(json.Value, allocator, response_body, .{ .allocate = .alloc_if_needed }) catch return FetchError.ParseFailed;
    if (parsed != json.Value.object) {
        return FetchError.ParseFailed;
    }
    const token = try json_utils.getStringRequired(parsed.object, "token");
    return try AuthToken.fromSlice(token);
}