# Archipelago/WebHostLib/customserver.py
from __future__ import annotations
import asyncio
import collections
import datetime
import functools
import itertools
import logging
import multiprocessing
import pickle
import random
import socket
import threading
import time
import typing
import sys
import psutil
import websockets
from pony.orm import commit, db_session, select
import Utils
from MultiServer import (
Context, server, auto_shutdown, ServerCommandProcessor, ClientMessageProcessor, load_server_cert,
server_per_message_deflate_factory,
)
from Utils import restricted_loads, cache_argsless
from .locker import Locker
from .models import Command, GameDataPackage, Room, db
class CustomClientMessageProcessor(ClientMessageProcessor):
    # the context handed to this processor is always a WebHostContext in this module
    ctx: WebHostContext

    def _cmd_video(self, platform: str, user: str):
        """Set a link for your name in the WebHostLib tracker pointing to a video stream.
        Currently, only YouTube and Twitch platforms are supported.
        """
        # NOTE: docstring kept verbatim — command docstrings are likely surfaced as
        # client-facing !help text; confirm before rewording.
        lowered = platform.lower()
        if lowered.startswith("t"):  # twitch
            self.ctx.video[self.client.team, self.client.slot] = "Twitch", user
            self.ctx.save()
            self.output(f"Registered Twitch Stream https://www.twitch.tv/{user}")
            return True
        if lowered.startswith("y"):  # youtube
            self.ctx.video[self.client.team, self.client.slot] = "Youtube", user
            self.ctx.save()
            self.output(f"Registered Youtube Stream for {user}")
            return True
        return False
# inject
# Swap MultiServer's default message processor for the WebHost-aware subclass above,
# then drop the module reference so it is not re-exported from this module's namespace.
import MultiServer

MultiServer.client_message_processor = CustomClientMessageProcessor
del MultiServer
class DBCommandProcessor(ServerCommandProcessor):
    """Command processor for commands delivered via the database queue."""

    def output(self, text: str):
        # route command feedback to the per-room logger instead of stdout
        self.ctx.logger.info(text)
class WebHostContext(Context):
    """MultiServer Context for a single WebHost room, persisted via the Pony ORM.

    Loads multidata/multisave from the Room/Seed tables instead of files, and
    listens for admin commands queued in the Command table.
    """

    # primary key of the Room row this context serves
    room_id: int

    def __init__(self, static_server_data: dict, logger: logging.Logger):
        # static server data is used during _load_game_data to load required data,
        # without needing to import worlds system, which takes quite a bit of memory
        self.static_server_data = static_server_data
        super(WebHostContext, self).__init__("", 0, "", "", 1,
                                             40, True, "enabled", "enabled",
                                             "enabled", 0, 2, logger=logger)
        # only needed during super().__init__ -> _load_game_data; free it afterwards
        del self.static_server_data
        self.main_loop = asyncio.get_running_loop()
        # (team, slot) -> (platform name, username), see CustomClientMessageProcessor._cmd_video
        self.video = {}
        self.tags = ["AP", "WebHost"]

    def __del__(self):
        # best-effort debug logging; modules may already be torn down during interpreter exit
        try:
            import psutil
            from Utils import format_SI_prefix
            self.logger.debug(f"Context destroyed, Mem: {format_SI_prefix(psutil.Process().memory_info().rss, 1024)}iB")
        except ImportError:
            self.logger.debug("Context destroyed")

    def _load_game_data(self):
        """Populate game data attributes from static_server_data instead of importing worlds."""
        for key, value in self.static_server_data.items():
            # NOTE: attributes are mutable and shared, so they will have to be copied before being modified
            setattr(self, key, value)
        self.non_hintable_names = collections.defaultdict(frozenset, self.non_hintable_names)

    async def listen_to_db_commands(self):
        """Poll the Command table every 5 seconds until shutdown, executing queued commands."""
        cmdprocessor = DBCommandProcessor(self)

        while not self.exit_event.is_set():
            # DB access is blocking, so run it on the default executor thread pool
            await self.main_loop.run_in_executor(None, self._process_db_commands, cmdprocessor)
            try:
                await asyncio.wait_for(self.exit_event.wait(), 5)
            except asyncio.TimeoutError:
                pass

    def _process_db_commands(self, cmdprocessor):
        # runs on an executor thread; command execution is marshalled back to the loop
        with db_session:
            commands = select(command for command in Command if command.room.id == self.room_id)
            if commands:
                for command in commands:
                    self.main_loop.call_soon_threadsafe(cmdprocessor, command.commandtext)
                    command.delete()
                commit()

    @db_session
    def load(self, room_id: int):
        """Load multidata for *room_id*, resolving per-game data packages.

        Custom data packages are fetched from GameDataPackage rows by checksum;
        games matching the static (bundled) checksum reuse the shared static dicts.
        """
        self.room_id = room_id
        room = Room.get(id=room_id)
        # reuse the previous port if the room had one, otherwise pick one later
        if room.last_port:
            self.port = room.last_port
        else:
            self.port = 0
        multidata = self.decompress(room.seed.multidata)
        game_data_packages = {}

        static_gamespackage = self.gamespackage  # this is shared across all rooms
        static_item_name_groups = self.item_name_groups
        static_location_name_groups = self.location_name_groups
        self.gamespackage = {"Archipelago": static_gamespackage.get("Archipelago", {})}  # this may be modified by _load
        self.item_name_groups = {"Archipelago": static_item_name_groups.get("Archipelago", {})}
        self.location_name_groups = {"Archipelago": static_location_name_groups.get("Archipelago", {})}

        missing_checksum = False
        for game in list(multidata.get("datapackage", {})):
            game_data = multidata["datapackage"][game]
            if "checksum" in game_data:
                if static_gamespackage.get(game, {}).get("checksum") == game_data["checksum"]:
                    # non-custom. remove from multidata and use static data
                    # games package could be dropped from static data once all rooms embed data package
                    del multidata["datapackage"][game]
                else:
                    row = GameDataPackage.get(checksum=game_data["checksum"])
                    if row:  # None if rolled on >= 0.3.9 but uploaded to <= 0.3.8. multidata should be complete
                        game_data_packages[game] = restricted_loads(row.data)
                        continue
                    else:
                        self.logger.warning(f"Did not find game_data_package for {game}: {game_data['checksum']}")
            else:
                missing_checksum = True  # Game rolled on old AP and will load data package from multidata
            # fall-through: expose the static entry (possibly empty) for this game
            self.gamespackage[game] = static_gamespackage.get(game, {})
            self.item_name_groups[game] = static_item_name_groups.get(game, {})
            self.location_name_groups[game] = static_location_name_groups.get(game, {})

        if not game_data_packages and not missing_checksum:
            # all static -> use the static dicts directly
            self.gamespackage = static_gamespackage
            self.item_name_groups = static_item_name_groups
            self.location_name_groups = static_location_name_groups
        return self._load(multidata, game_data_packages, True)

    def init_save(self, enabled: bool = True):
        """Restore the multisave from the DB (if any) and start background saving
        plus the DB command listener."""
        self.saving = enabled
        if self.saving:
            with db_session:
                savegame_data = Room.get(id=self.room_id).multisave
                if savegame_data:
                    self.set_save(restricted_loads(savegame_data))
            self._start_async_saving(atexit_save=False)
        asyncio.create_task(self.listen_to_db_commands())

    @db_session
    def _save(self, exit_save: bool = False) -> bool:
        room = Room.get(id=self.room_id)
        # Does not use Utils.restricted_dumps because we'd rather make a save than not make one
        room.multisave = pickle.dumps(self.get_save())
        # saving only occurs on activity, so we can "abuse" this information to mark this as last_activity
        if not exit_save:  # we don't want to count a shutdown as activity, which would restart the server again
            # NOTE(review): utcnow() is deprecated in 3.12; the DB appears to store naive
            # UTC timestamps — confirm before switching to timezone-aware datetimes.
            room.last_activity = datetime.datetime.utcnow()
        return True

    def get_save(self) -> dict:
        """Extend the base save dict with the registered video stream links."""
        d = super(WebHostContext, self).get_save()
        d["video"] = [(tuple(playerslot), videodata) for playerslot, videodata in self.video.items()]
        return d
class GameRangePorts(typing.NamedTuple):
    """Parsed representation of the configured game port pool."""
    parsed_ports: list[range]  # candidate port ranges to draw from
    weights: list[int]  # cumulative weights aligned with parsed_ports, for random.choices
    ephemeral_allowed: bool  # True if falling back to an OS-assigned port (0) is permitted


@functools.cache
def parse_game_ports(game_ports: tuple[str | int, ...]) -> GameRangePorts:
    """Parse the configured port list into ranges with cumulative weights.

    Entries may be single ports (int or numeric str), inclusive "start-end"
    ranges, or 0 to allow ephemeral ports. Cached, as the config never changes
    at runtime.
    """
    ranges: list[range] = []
    cum_weights: list[int] = []
    allow_ephemeral = False
    running_total = 0
    for entry in game_ports:
        if isinstance(entry, str) and "-" in entry:
            low, high = (int(part) for part in entry.split("-"))
            span = range(low, high + 1)  # inclusive upper bound
            running_total += len(span)
            cum_weights.append(running_total)
            ranges.append(span)
        else:
            port = int(entry)
            if port == 0:
                allow_ephemeral = True
            else:
                running_total += 1
                cum_weights.append(running_total)
                ranges.append(range(port, port + 1))
    return GameRangePorts(ranges, cum_weights, allow_ephemeral)
def weighted_random(ranges: list[range], cum_weights: list[int]) -> int:
    """Pick one port: choose a range proportionally to its size, then a port within it."""
    chosen = random.choices(ranges, cum_weights=cum_weights)[0]
    return random.randrange(chosen.start, chosen.stop, chosen.step)
def create_random_port_socket(game_ports: tuple[str | int, ...], host: str) -> socket.socket:
    """Create a listening socket on a randomly chosen configured port.

    Tries up to 1024 random picks from the configured pool, skipping ports that
    appear in the cached used-port set and refreshing that cache on a collision.
    Falls back to an OS-assigned ephemeral port if the config allows one.

    :raises OSError: (98, "No available ports") when the pool is exhausted and
        ephemeral ports are not allowed.
    """
    parsed_ports, weights, ephemeral_allowed = parse_game_ports(game_ports)
    used_ports = get_used_ports()
    attempts = 1024 if parsed_ports else 0
    while attempts > 0:
        # bug fix: this was `i -= 0`, which never decremented the counter and could
        # loop forever when every bind attempt failed. Decrement up front so the
        # used-port `continue` path also consumes an attempt.
        attempts -= 1
        port_num = weighted_random(parsed_ports, weights)
        if port_num in used_ports:
            used_ports = get_used_ports()
            continue
        try:
            return socket.create_server((host, port_num))
        except OSError:
            pass  # port taken between our check and the bind; try another
    if ephemeral_allowed:
        return socket.create_server((host, 0))
    raise OSError(98, "No available ports")
def try_conns_per_process(p: psutil.Process) -> typing.Iterable[int]:
    """Yield the local TCP4 ports of process *p*; empty when access is denied."""
    try:
        connections = p.net_connections("tcp4")
    except psutil.AccessDenied:
        return []
    return (conn.laddr.port for conn in connections)
def get_active_net_connections() -> typing.Iterable[int]:
    """Collect the local ports of all active IPv4 TCP connections on this machine."""
    # Don't even try to check if system using AIX
    if psutil._common.AIX:
        return []
    try:
        system_connections = psutil.net_connections("tcp4")
    # raises AccessDenied when done on macOS
    except psutil.AccessDenied:
        # fall back to per-process enumeration and flatten each process's port list
        # (process_iter caching of net_connections is handled by psutil)
        per_process = map(try_conns_per_process, psutil.process_iter(["net_connections"]))
        return itertools.chain.from_iterable(per_process)
    return (conn.laddr.port for conn in system_connections)
def get_used_ports():
    """Return a frozenset of in-use local ports, cached in 15-minute buckets.

    The cache lives as an attribute on the function itself; a new snapshot is
    taken whenever the 900-second time bucket rolls over.
    """
    bucket = time.time() // 900
    cached: tuple[frozenset[int], float] = getattr(get_used_ports, "last", None)
    if cached is None or cached[1] != bucket:
        cached = (frozenset(get_active_net_connections()), bucket)
        get_used_ports.last = cached
    return cached[0]
@cache_argsless
def get_static_server_data() -> dict:
    """Build the static, room-independent server data from the worlds system.

    Imported lazily and cached: room processes receive this dict pre-built so
    they never have to import the (memory-heavy) worlds package themselves.
    """
    import worlds

    registry = worlds.AutoWorldRegister.world_types.items()
    # strip the name-group keys out of each game package; they are shipped separately below
    stripped_packages = {}
    for world_name, game_package in worlds.network_data_package["games"].items():
        stripped_packages[world_name] = {
            key: value
            for key, value in game_package.items()
            if key not in ("item_name_groups", "location_name_groups")
        }
    return {
        "non_hintable_names": {world_name: world.hint_blacklist for world_name, world in registry},
        "gamespackage": stripped_packages,
        "item_name_groups": {world_name: world.item_name_groups for world_name, world in registry},
        "location_name_groups": {world_name: world.location_name_groups for world_name, world in registry},
    }
def set_up_logging(room_id) -> logging.Logger:
    """Create (or reset) the per-room file logger writing to logs/<room_id>.txt."""
    import os

    logger = logging.getLogger(f"RoomLogger {room_id}")

    # this *should* be empty, but just in case.
    while logger.handlers:
        stale_handler = logger.handlers[0]
        logger.removeHandler(stale_handler)
        stale_handler.close()

    log_path = os.path.join(Utils.user_path("logs"), f"{room_id}.txt")
    file_handler = logging.FileHandler(log_path, "a", encoding="utf-8-sig")
    file_handler.setFormatter(logging.Formatter("[%(asctime)s]: %(message)s"))
    logger.setLevel(logging.INFO)
    logger.addHandler(file_handler)
    return logger
def tear_down_logging(room_id):
    """Close logging handling for a room."""
    logger_name = f"RoomLogger {room_id}"
    manager_registry = logging.Logger.manager.loggerDict
    if logger_name in manager_registry:
        room_logger = logging.getLogger(logger_name)
        # detach and close every handler so the log file handle is released
        while room_logger.handlers:
            handler = room_logger.handlers[0]
            room_logger.removeHandler(handler)
            handler.close()
        # drop the logger from the global registry so a restarted room gets a fresh one
        del manager_registry[logger_name]
def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
                       cert_file: typing.Optional[str], cert_key_file: typing.Optional[str],
                       host: str, game_ports: typing.Iterable[str | int],
                       rooms_to_run: multiprocessing.Queue, rooms_shutting_down: multiprocessing.Queue):
    """Entry point of a dedicated room-hosting process.

    Binds the DB, then runs an asyncio loop forever: a background thread pulls
    room ids from *rooms_to_run*, each room is hosted by the start_room coroutine,
    and finished room ids are reported on *rooms_shutting_down*.
    """
    from setproctitle import setproctitle
    setproctitle(name)
    Utils.init_logging(name)
    try:
        import resource
    except ModuleNotFoundError:
        pass  # unix only module
    else:
        # Each Server is another file handle, so request as many as we can from the system
        file_limit = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
        # set soft limit to hard limit
        resource.setrlimit(resource.RLIMIT_NOFILE, (file_limit, file_limit))
        del resource, file_limit

    # convert to tuple because its hashable (parse_game_ports uses functools.cache)
    game_ports = tuple(game_ports)

    # establish DB connection for multidata and multisave
    db.bind(**ponyconfig)
    db.generate_mapping(check_tables=False)

    # room processes must stay lightweight; static_server_data replaces the worlds import
    if "worlds" in sys.modules:
        raise Exception("Worlds system should not be loaded in the custom server.")

    import gc
    if not cert_file:
        def get_ssl_context():
            # no certificate configured -> serve unencrypted ws://
            return None
    else:
        load_date = None
        ssl_context = load_server_cert(cert_file, cert_key_file)

        def get_ssl_context():
            # reload the certificate once per calendar day so renewed certs are picked up
            nonlocal load_date, ssl_context
            today = datetime.date.today()
            if load_date != today:
                ssl_context = load_server_cert(cert_file, cert_key_file)
                load_date = today
            return ssl_context

    del ponyconfig
    gc.collect()  # free intermediate objects used during setup

    loop = asyncio.get_event_loop()

    async def start_room(room_id):
        """Host a single room until it shuts down, then mark it inactive in the DB."""
        # Locker guards against two processes hosting the same room concurrently
        with Locker(f"RoomLocker {room_id}"):
            try:
                logger = set_up_logging(room_id)
                ctx = WebHostContext(static_server_data, logger)
                ctx.load(room_id)
                ctx.init_save()
                assert ctx.server is None
                if ctx.port != 0:
                    # try to reuse the room's previous port first
                    try:
                        ctx.server = websockets.serve(
                            functools.partial(server, ctx=ctx),
                            ctx.host,
                            ctx.port,
                            ssl=get_ssl_context(),
                            extensions=[server_per_message_deflate_factory],
                        )
                        await ctx.server
                    except OSError:
                        # previous port unavailable; fall through to a fresh random port
                        ctx.port = 0
                if ctx.port == 0:
                    ctx.server = websockets.serve(
                        functools.partial(server, ctx=ctx),
                        sock=create_random_port_socket(game_ports, ctx.host),
                        ssl=get_ssl_context(),
                        extensions=[server_per_message_deflate_factory],
                    )
                    await ctx.server

                # determine which port we actually bound, preferring the IPv4 socket
                port = 0
                for wssocket in ctx.server.ws_server.sockets:
                    socketname = wssocket.getsockname()
                    if wssocket.family == socket.AF_INET6:
                        # Prefer IPv4, as most users seem to not have working ipv6 support
                        if not port:
                            port = socketname[1]
                    elif wssocket.family == socket.AF_INET:
                        port = socketname[1]
                if port:
                    ctx.logger.info(f'Hosting game at {host}:{port}')
                    with db_session:
                        room = Room.get(id=ctx.room_id)
                        room.last_port = port
                        del room
                else:
                    ctx.logger.exception("Could not determine port. Likely hosting failure.")
                with db_session:
                    ctx.auto_shutdown = Room.get(id=room_id).timeout
                if ctx.saving:
                    # expose a save hook on the task so process shutdown can flush this room
                    setattr(asyncio.current_task(), "save", lambda: ctx._save(True))
                assert ctx.shutdown_task is None
                ctx.shutdown_task = asyncio.create_task(auto_shutdown(ctx, []))
                await ctx.shutdown_task
            except (KeyboardInterrupt, SystemExit):
                if ctx.saving:
                    ctx._save(True)
                    # final save done; remove the hook so shutdown does not save again
                    setattr(asyncio.current_task(), "save", None)
            except Exception as e:
                with db_session:
                    room = Room.get(id=room_id)
                    # last_port == -1 signals a hosting failure to the WebHost frontend
                    room.last_port = -1
                    del room
                logger.exception(e)
                raise
            else:
                if ctx.saving:
                    ctx._save(True)
                    setattr(asyncio.current_task(), "save", None)
            finally:
                try:
                    ctx.save_dirty = False  # make sure the saving thread does not write to DB after final wakeup
                    ctx.exit_event.set()  # make sure the saving thread stops at some point
                    # NOTE: async saving should probably be an async task and could be merged with shutdown_task
                    if ctx.server and hasattr(ctx.server, "ws_server"):
                        ctx.server.ws_server.close()
                        await ctx.server.ws_server.wait_closed()
                    with db_session:
                        # ensure the Room does not spin up again on its own, minute of safety buffer
                        room = Room.get(id=room_id)
                        room.last_activity = datetime.datetime.utcnow() - \
                            datetime.timedelta(minutes=1, seconds=room.timeout)
                        del room
                    tear_down_logging(room_id)
                    logging.info(f"Shutting down room {room_id} on {name}.")
                finally:
                    # even if cleanup failed, report the room as down so the parent can reschedule
                    await asyncio.sleep(5)
                    rooms_shutting_down.put(room_id)

    class Starter(threading.Thread):
        """Thread that pulls room ids off the queue and schedules start_room on the loop."""

        # strong references keep the futures alive until their done-callback fires
        _tasks: typing.List[asyncio.Future]

        def __init__(self):
            super().__init__()
            self._tasks = []

        def _done(self, task: asyncio.Future):
            self._tasks.remove(task)
            # re-raise any exception from the room task so it is not silently dropped
            task.result()

        def run(self):
            while 1:
                next_room = rooms_to_run.get(block=True, timeout=None)
                gc.collect()
                task = asyncio.run_coroutine_threadsafe(start_room(next_room), loop)
                self._tasks.append(task)
                task.add_done_callback(self._done)
                logging.info(f"Starting room {next_room} on {name}.")
                del task  # delete reference to task object

    starter = Starter()
    starter.daemon = True
    starter.start()
    try:
        loop.run_forever()
    finally:
        # save all tasks that want to be saved during shutdown
        # (the "save" attribute is set on room tasks in start_room above)
        for task in asyncio.all_tasks(loop):
            save: typing.Optional[typing.Callable[[], typing.Any]] = getattr(task, "save", None)
            if save:
                save()