
Commit

Deprecate data_version and introduce checksum for DataPackages. (ArchipelagoMW#684)



Co-authored-by: black-sliver <[email protected]>
2 people authored and FlySniper committed Nov 14, 2023
1 parent e471338 commit a81cdb1
Showing 20 changed files with 296 additions and 118 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -52,6 +52,7 @@ Output Logs/
/setup.ini
/installdelete.iss
/data/user.kv
/datapackage

# Byte-compiled / optimized / DLL files
__pycache__/
2 changes: 1 addition & 1 deletion BaseClasses.py
@@ -336,7 +336,7 @@ def get_player_name(self, player: int) -> str:
return self.player_name[player]

def get_file_safe_player_name(self, player: int) -> str:
return ''.join(c for c in self.get_player_name(player) if c not in '<>:"/\\|?*')
return Utils.get_file_safe_name(self.get_player_name(player))

def get_out_file_name_base(self, player: int) -> str:
""" the base name (without file extension) for each player's output file for a seed """
52 changes: 32 additions & 20 deletions CommonClient.py
@@ -136,7 +136,7 @@ class CommonContext:
items_handling: typing.Optional[int] = None
want_slot_data: bool = True # should slot_data be retrieved via Connect

# datapackage
# data package
# Contents in flux until connection to server is made, to download correct data for this multiworld.
item_names: typing.Dict[int, str] = Utils.KeyedDefaultDict(lambda code: f'Unknown item (ID:{code})')
location_names: typing.Dict[int, str] = Utils.KeyedDefaultDict(lambda code: f'Unknown location (ID:{code})')
@@ -223,7 +223,7 @@ def __init__(self, server_address: typing.Optional[str], password: typing.Option
self.watcher_event = asyncio.Event()

self.jsontotextparser = JSONtoTextParser(self)
self.update_datapackage(network_data_package)
self.update_data_package(network_data_package)

# execution
self.keep_alive_task = asyncio.create_task(keep_alive(self), name="Bouncy")
@@ -399,32 +399,40 @@ async def shutdown(self):
self.input_task.cancel()

# DataPackage
async def prepare_datapackage(self, relevant_games: typing.Set[str],
remote_datepackage_versions: typing.Dict[str, int]):
async def prepare_data_package(self, relevant_games: typing.Set[str],
remote_date_package_versions: typing.Dict[str, int],
remote_data_package_checksums: typing.Dict[str, str]):
"""Validate that all data is present for the current multiworld.
Download, assimilate and cache missing data from the server."""
# by documentation any game can use Archipelago locations/items -> always relevant
relevant_games.add("Archipelago")

cache_package = Utils.persistent_load().get("datapackage", {}).get("games", {})
needed_updates: typing.Set[str] = set()
for game in relevant_games:
if game not in remote_datepackage_versions:
if game not in remote_date_package_versions and game not in remote_data_package_checksums:
continue
remote_version: int = remote_datepackage_versions[game]

if remote_version == 0: # custom datapackage for this game
remote_version: int = remote_date_package_versions.get(game, 0)
remote_checksum: typing.Optional[str] = remote_data_package_checksums.get(game)

if remote_version == 0 and not remote_checksum: # custom data package and no checksum for this game
needed_updates.add(game)
continue

local_version: int = network_data_package["games"].get(game, {}).get("version", 0)
local_checksum: typing.Optional[str] = network_data_package["games"].get(game, {}).get("checksum")
# no action required if local version is new enough
if remote_version > local_version:
cache_version: int = cache_package.get(game, {}).get("version", 0)
if (not remote_checksum and (remote_version > local_version or remote_version == 0)) \
or remote_checksum != local_checksum:
cached_game = Utils.load_data_package_for_checksum(game, remote_checksum)
cache_version: int = cached_game.get("version", 0)
cache_checksum: typing.Optional[str] = cached_game.get("checksum")
# download remote version if cache is not new enough
if remote_version > cache_version:
if (not remote_checksum and (remote_version > cache_version or remote_version == 0)) \
or remote_checksum != cache_checksum:
needed_updates.add(game)
else:
self.update_game(cache_package[game])
self.update_game(cached_game)
if needed_updates:
await self.send_msgs([{"cmd": "GetDataPackage", "games": list(needed_updates)}])

@@ -434,15 +442,17 @@ def update_game(self, game_package: dict):
for location_name, location_id in game_package["location_name_to_id"].items():
self.location_names[location_id] = location_name

def update_datapackage(self, data_package: dict):
for game, gamedata in data_package["games"].items():
self.update_game(gamedata)
def update_data_package(self, data_package: dict):
for game, game_data in data_package["games"].items():
self.update_game(game_data)

def consume_network_datapackage(self, data_package: dict):
self.update_datapackage(data_package)
def consume_network_data_package(self, data_package: dict):
self.update_data_package(data_package)
current_cache = Utils.persistent_load().get("datapackage", {}).get("games", {})
current_cache.update(data_package["games"])
Utils.persistent_store("datapackage", "games", current_cache)
for game, game_data in data_package["games"].items():
Utils.store_data_package_for_checksum(game, game_data)
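
The DataPackage reply consumed here keeps its previous shape; each game entry now simply carries a checksum alongside its version, which is what store_data_package_for_checksum uses as the cache key. An abbreviated example reply (game name, IDs, and digest are made up):

data_package_reply = {
    "cmd": "DataPackage",
    "data": {
        "games": {
            "Example Game": {  # made-up game
                "version": 3,
                "checksum": "0123456789abcdef0123456789abcdef",  # made-up digest
                "item_name_to_id": {"Example Item": 9000},
                "location_name_to_id": {"Example Location": 9000},
            },
        },
    },
}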

# DeathLink hooks

@@ -661,14 +671,16 @@ async def process_server_cmd(ctx: CommonContext, args: dict):
current_team = network_player.team
logger.info(' %s (Player %d)' % (network_player.alias, network_player.slot))

# update datapackage
await ctx.prepare_datapackage(set(args["games"]), args["datapackage_versions"])
# update data package
data_package_versions = args.get("datapackage_versions", {})
data_package_checksums = args.get("datapackage_checksums", {})
await ctx.prepare_data_package(set(args["games"]), data_package_versions, data_package_checksums)

await ctx.server_auth(args['password'])

elif cmd == 'DataPackage':
logger.info("Got new ID/Name DataPackage")
ctx.consume_network_datapackage(args['data'])
ctx.consume_network_data_package(args['data'])

elif cmd == 'ConnectionRefused':
errors = args["errors"]
14 changes: 6 additions & 8 deletions Main.py
@@ -355,13 +355,11 @@ def precollect_hint(location):
for player in world.groups.get(location.item.player, {}).get("players", [])]):
precollect_hint(location)

# custom datapackage
datapackage = {}
for game_world in world.worlds.values():
if game_world.data_version == 0 and game_world.game not in datapackage:
datapackage[game_world.game] = worlds.network_data_package["games"][game_world.game]
datapackage[game_world.game]["item_name_groups"] = game_world.item_name_groups
datapackage[game_world.game]["location_name_groups"] = game_world.location_name_groups
# embedded data package
data_package = {
game_world.game: worlds.network_data_package["games"][game_world.game]
for game_world in world.worlds.values()
}

multidata = {
"slot_data": slot_data,
@@ -378,7 +376,7 @@ def precollect_hint(location):
"tags": ["AP"],
"minimum_versions": minimum_versions,
"seed_name": world.seed_name,
"datapackage": datapackage,
"datapackage": data_package,
}
AutoWorld.call_all(world, "modify_multidata", multidata)
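
Where the old code only embedded packages for worlds with data_version == 0 and bolted the name groups onto them, the generator now embeds every game's full network data package entry. A sketch of the value stored under multidata["datapackage"] (game name and values are placeholders); MultiServer._load later strips item_name_groups and location_name_groups back out of it:

data_package = {
    "Example Game": {
        "version": 3,                  # placeholder
        "checksum": "<hex digest>",    # placeholder
        "item_name_to_id": {"Example Item": 9000},
        "location_name_to_id": {"Example Location": 9000},
        "item_name_groups": {"Everything": ["Example Item"]},
        "location_name_groups": {"Everywhere": ["Example Location"]},
    },
    # ...one entry per game present in this multiworld
}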

45 changes: 29 additions & 16 deletions MultiServer.py
@@ -7,17 +7,20 @@
import logging
import zlib
import collections
import typing
import inspect
import weakref
import datetime
import threading
import random
import pickle
import functools
import hashlib
import inspect
import itertools
import time
import logging
import operator
import hashlib
import pickle
import random
import threading
import time
import typing
import weakref
import zlib

import ModuleUpdate

@@ -160,6 +163,7 @@ class Context:
stored_data_notification_clients: typing.Dict[str, typing.Set[Client]]
slot_info: typing.Dict[int, NetworkSlot]

checksums: typing.Dict[str, str]
item_names: typing.Dict[int, str] = Utils.KeyedDefaultDict(lambda code: f'Unknown item (ID:{code})')
item_name_groups: typing.Dict[str, typing.Dict[str, typing.Set[str]]]
location_names: typing.Dict[int, str] = Utils.KeyedDefaultDict(lambda code: f'Unknown location (ID:{code})')
@@ -233,6 +237,7 @@ def __init__(self, host: str, port: int, server_password: str, password: str, lo

# init empty to satisfy linter, I suppose
self.gamespackage = {}
self.checksums = {}
self.item_name_groups = {}
self.location_name_groups = {}
self.all_item_and_group_names = {}
@@ -241,7 +246,7 @@

self._load_game_data()

# Datapackage retrieval
# Data package retrieval
def _load_game_data(self):
import worlds
self.gamespackage = worlds.network_data_package["games"]
@@ -255,6 +260,7 @@ def _load_game_data(self):

def _init_game_data(self):
for game_name, game_package in self.gamespackage.items():
self.checksums[game_name] = game_package["checksum"]
for item_name, item_id in game_package["item_name_to_id"].items():
self.item_names[item_id] = item_name
for location_name, location_id in game_package["location_name_to_id"].items():
@@ -350,6 +356,7 @@ def notify_client_multiple(self, client: Client, texts: typing.List[str], additi
[{"cmd": "PrintJSON", "data": [{ "text": text }], **additional_arguments}
for text in texts]))


# loading

def load(self, multidatapath: str, use_embedded_server_options: bool = False):
@@ -366,7 +373,7 @@ def load(self, multidatapath: str, use_embedded_server_options: bool = False):
with open(multidatapath, 'rb') as f:
data = f.read()

self._load(self.decompress(data), use_embedded_server_options)
self._load(self.decompress(data), {}, use_embedded_server_options)
self.data_filename = multidatapath

@staticmethod
@@ -376,7 +383,8 @@ def decompress(data: bytes) -> dict:
raise Utils.VersionException("Incompatible multidata.")
return restricted_loads(zlib.decompress(data[1:]))

def _load(self, decoded_obj: dict, use_embedded_server_options: bool):
def _load(self, decoded_obj: dict, game_data_packages: typing.Dict[str, typing.Any],
use_embedded_server_options: bool):
self.read_data = {}
mdata_ver = decoded_obj["minimum_versions"]["server"]
if mdata_ver > Utils.version_tuple:
@@ -431,13 +439,15 @@ def _load(self, decoded_obj: dict, use_embedded_server_options: bool):
server_options = decoded_obj.get("server_options", {})
self._set_options(server_options)

# custom datapackage
# embedded data package
for game_name, data in decoded_obj.get("datapackage", {}).items():
logging.info(f"Loading custom datapackage for game {game_name}")
if game_name in game_data_packages:
data = game_data_packages[game_name]
logging.info(f"Loading embedded data package for game {game_name}")
self.gamespackage[game_name] = data
self.item_name_groups[game_name] = data["item_name_groups"]
self.location_name_groups[game_name] = data["location_name_groups"]
del data["item_name_groups"] # remove from datapackage, but keep in self.item_name_groups
del data["item_name_groups"] # remove from data package, but keep in self.item_name_groups
del data["location_name_groups"]
self._init_game_data()
for game_name, data in self.item_name_groups.items():
Expand Down Expand Up @@ -735,10 +745,11 @@ async def on_client_connected(ctx: Context, client: Client):
NetworkPlayer(team, slot,
ctx.name_aliases.get((team, slot), name), name)
)
games = {ctx.games[x] for x in range(1, len(ctx.games) + 1)}
await ctx.send_msgs(client, [{
'cmd': 'RoomInfo',
'password': bool(ctx.password),
'games': {ctx.games[x] for x in range(1, len(ctx.games) + 1)},
'games': games,
# tags are for additional features in the communication.
# Name them by feature or fork, as you feel is appropriate.
'tags': ctx.tags,
Expand All @@ -747,7 +758,9 @@ async def on_client_connected(ctx: Context, client: Client):
'hint_cost': ctx.hint_cost,
'location_check_points': ctx.location_check_points,
'datapackage_versions': {game: game_data["version"] for game, game_data
in ctx.gamespackage.items()},
in ctx.gamespackage.items() if game in games},
'datapackage_checksums': {game: game_data["checksum"] for game, game_data
in ctx.gamespackage.items() if game in games},
'seed_name': ctx.seed_name,
'time': time.time(),
}])
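
An abbreviated example of the resulting RoomInfo payload (all values illustrative); both mappings are now restricted to games actually present in the room:

room_info = {
    "cmd": "RoomInfo",
    "password": False,
    "games": ["Archipelago", "Example Game"],
    "datapackage_versions": {"Archipelago": 1, "Example Game": 3},  # illustrative
    "datapackage_checksums": {
        "Archipelago": "0123456789abcdef0123456789abcdef",   # made-up digest
        "Example Game": "fedcba9876543210fedcba9876543210",  # made-up digest
    },
    "seed_name": "1234567890123456789",
    "time": 1700000000.0,
    # tags, hint_cost, location_check_points, etc. unchanged
}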
51 changes: 51 additions & 0 deletions Utils.py
@@ -1,6 +1,7 @@
from __future__ import annotations

import asyncio
import json
import typing
import builtins
import os
@@ -142,6 +143,17 @@ def user_path(*path: str) -> str:
return os.path.join(user_path.cached_path, *path)


def cache_path(*path: str) -> str:
"""Returns path to a file in the user's Archipelago cache directory."""
if hasattr(cache_path, "cached_path"):
pass
else:
import appdirs
cache_path.cached_path = appdirs.user_cache_dir("Archipelago", False)

return os.path.join(cache_path.cached_path, *path)


def output_path(*path: str) -> str:
if hasattr(output_path, 'cached_path'):
return os.path.join(output_path.cached_path, *path)
@@ -385,6 +397,45 @@ def persistent_load() -> typing.Dict[str, dict]:
return storage


def get_file_safe_name(name: str) -> str:
return "".join(c for c in name if c not in '<>:"/\\|?*')


def load_data_package_for_checksum(game: str, checksum: typing.Optional[str]) -> Dict[str, Any]:
if checksum and game:
if checksum != get_file_safe_name(checksum):
raise ValueError(f"Bad symbols in checksum: {checksum}")
path = cache_path("datapackage", get_file_safe_name(game), f"{checksum}.json")
if os.path.exists(path):
try:
with open(path, "r", encoding="utf-8-sig") as f:
return json.load(f)
except Exception as e:
logging.debug(f"Could not load data package: {e}")

# fall back to old cache
cache = persistent_load().get("datapackage", {}).get("games", {}).get(game, {})
if cache.get("checksum") == checksum:
return cache

# cache does not match
return {}


def store_data_package_for_checksum(game: str, data: typing.Dict[str, Any]) -> None:
checksum = data.get("checksum")
if checksum and game:
if checksum != get_file_safe_name(checksum):
raise ValueError(f"Bad symbols in checksum: {checksum}")
game_folder = cache_path("datapackage", get_file_safe_name(game))
os.makedirs(game_folder, exist_ok=True)
try:
with open(os.path.join(game_folder, f"{checksum}.json"), "w", encoding="utf-8-sig") as f:
json.dump(data, f, ensure_ascii=False, separators=(",", ":"))
except Exception as e:
logging.debug(f"Could not store data package: {e}")


def get_adjuster_settings(game_name: str) -> typing.Dict[str, typing.Any]:
adjuster_settings = persistent_load().get("adjuster", {}).get(game_name, {})
return adjuster_settings
13 changes: 11 additions & 2 deletions WebHostLib/api/__init__.py
@@ -39,12 +39,21 @@ def get_datapackage():

@api_endpoints.route('/datapackage_version')
@cache.cached()
def get_datapackage_versions():
from worlds import network_data_package, AutoWorldRegister
from worlds import AutoWorldRegister

version_package = {game: world.data_version for game, world in AutoWorldRegister.world_types.items()}
return version_package


@api_endpoints.route('/datapackage_checksum')
@cache.cached()
def get_datapackage_checksums():
from worlds import network_data_package
version_package = {
game: game_data["checksum"] for game, game_data in network_data_package["games"].items()
}
return version_package
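
A rough sketch of how a client-side tool could use the new endpoint to detect a stale local data package. It assumes the requests library, the public archipelago.gg host, and the /api prefix used for the existing datapackage endpoints; none of these are part of this commit:

import requests

from worlds import network_data_package

response = requests.get("https://archipelago.gg/api/datapackage_checksum")
response.raise_for_status()
remote_checksums = response.json()  # {"Game Name": "<hex digest>", ...}

for game, remote_checksum in remote_checksums.items():
    local_checksum = network_data_package["games"].get(game, {}).get("checksum")
    if local_checksum != remote_checksum:
        print(f"{game}: local data package differs from the WebHost")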


from . import generate, user # trigger registration