mirror of
https://github.com/ArchipelagoMW/Archipelago.git
synced 2026-03-18 21:38:13 -07:00
Compare commits
164 Commits
plando-cou
...
NewSoupVi-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
89ccce7805 | ||
|
|
88a4a589a0 | ||
|
|
bead81b64b | ||
|
|
16d5b453a7 | ||
|
|
48906de873 | ||
|
|
9a64b8c5ce | ||
|
|
6ba2b7f8c3 | ||
|
|
6f7ca082f2 | ||
|
|
eb09be3594 | ||
|
|
9d654b7e3b | ||
|
|
8f7fcd4889 | ||
|
|
b85887241f | ||
|
|
5110676c76 | ||
|
|
0020e6c3d3 | ||
|
|
6e6fd0e9bc | ||
|
|
85c26f9740 | ||
|
|
9057ce0ce3 | ||
|
|
378cc91a4d | ||
|
|
cdde38fdc9 | ||
|
|
c34c00baa4 | ||
|
|
9bd535752e | ||
|
|
ecb22642af | ||
|
|
17ccfdc266 | ||
|
|
4633f12972 | ||
|
|
1f6c99635e | ||
|
|
4e92cac171 | ||
|
|
3b88630b0d | ||
|
|
e6d2d8f455 | ||
|
|
84c2d70d9a | ||
|
|
d408f7cabc | ||
|
|
72ae076ce7 | ||
|
|
277f21db7a | ||
|
|
9edd55961f | ||
|
|
9ad6959559 | ||
|
|
37a9d94865 | ||
|
|
e8f5bc1c96 | ||
|
|
8bb236411d | ||
|
|
332f955159 | ||
|
|
e7131eddc2 | ||
|
|
8c07a2c930 | ||
|
|
2fe51d087f | ||
|
|
b1f729a970 | ||
|
|
754e0a0de4 | ||
|
|
7abe7fe304 | ||
|
|
8a552e3639 | ||
|
|
743501addc | ||
|
|
6125e59ce3 | ||
|
|
1d8a0b2940 | ||
|
|
2a0ed7faa2 | ||
|
|
ad17c7fd21 | ||
|
|
4d17366662 | ||
|
|
5e2702090c | ||
|
|
f8d1e4edf3 | ||
|
|
04a3f78605 | ||
|
|
ea1e074083 | ||
|
|
199a6df65e | ||
|
|
c9ebf69e0d | ||
|
|
a36e6259f1 | ||
|
|
de4014f02c | ||
|
|
774457b362 | ||
|
|
7a8048a8fd | ||
|
|
fa49fef695 | ||
|
|
faac2540bf | ||
|
|
4e1eb78163 | ||
|
|
46829487d6 | ||
|
|
8fd021e757 | ||
|
|
a3af953683 | ||
|
|
f27da5cc78 | ||
|
|
23f0b720de | ||
|
|
f66d8e9a61 | ||
|
|
8499c2fd24 | ||
|
|
ea4c4dcc0c | ||
|
|
88e8e2408b | ||
|
|
e5815ae5a2 | ||
|
|
387f79ceae | ||
|
|
bae1259aba | ||
|
|
4ac1d91c16 | ||
|
|
81b8f3fc0e | ||
|
|
8541c87c97 | ||
|
|
0e4314ad1e | ||
|
|
6b44f217a3 | ||
|
|
76760e1bf3 | ||
|
|
d313a74266 | ||
|
|
a535ca31a8 | ||
|
|
da0bb80fb4 | ||
|
|
fb9026d12d | ||
|
|
4ae36ac727 | ||
|
|
ffab3a43fc | ||
|
|
e38d04c655 | ||
|
|
1923d6b1bc | ||
|
|
608a38f873 | ||
|
|
604ab79af9 | ||
|
|
4a43a6ae13 | ||
|
|
e9e0861eb7 | ||
|
|
477028a025 | ||
|
|
b90dcfb041 | ||
|
|
1790a389c7 | ||
|
|
deed9de3e7 | ||
|
|
9e748332dc | ||
|
|
749c2435ed | ||
|
|
6360609980 | ||
|
|
fed60ca61a | ||
|
|
f18f9e2dce | ||
|
|
e1b26bc76f | ||
|
|
2aada8f683 | ||
|
|
f9f386fa19 | ||
|
|
507a9a53ef | ||
|
|
c1ae637fa7 | ||
|
|
f967444ac2 | ||
|
|
c879307b8e | ||
|
|
c8ca3e643d | ||
|
|
9a648efa70 | ||
|
|
f45410c917 | ||
|
|
ec3f168a09 | ||
|
|
a9b35de7ee | ||
|
|
125d053b61 | ||
|
|
585cbf95a6 | ||
|
|
909565e5d9 | ||
|
|
a79423534c | ||
|
|
7a6fb5e35b | ||
|
|
6af34b66fb | ||
|
|
2974f7d11f | ||
|
|
edc0c89753 | ||
|
|
b1ff55dd06 | ||
|
|
f4b5422f66 | ||
|
|
d4ebace99f | ||
|
|
95e09c8e2a | ||
|
|
4623d59206 | ||
|
|
e68b1ad428 | ||
|
|
072e2ece15 | ||
|
|
11130037fe | ||
|
|
ba66ef14cc | ||
|
|
8aacc23882 | ||
|
|
03e5fd3dae | ||
|
|
da52598c08 | ||
|
|
52389731eb | ||
|
|
21864f6f95 | ||
|
|
00f8625280 | ||
|
|
c34e29c712 | ||
|
|
e0ae3359f1 | ||
|
|
c2666bacd7 | ||
|
|
4eefd9c3ce | ||
|
|
211456242e | ||
|
|
6f244c4661 | ||
|
|
47bf6d724b | ||
|
|
5c710ad032 | ||
|
|
dda5a05cbb | ||
|
|
e0a63e0290 | ||
|
|
9246659589 | ||
|
|
377cdb84b4 | ||
|
|
0e759f25fd | ||
|
|
b408bb4f6e | ||
|
|
1356479415 | ||
|
|
ec5b4e704f | ||
|
|
aa9e617510 | ||
|
|
ecb739ce96 | ||
|
|
3b72140435 | ||
|
|
27a6770569 | ||
|
|
2ff611167a | ||
|
|
e83e178b63 | ||
|
|
068a757373 | ||
|
|
0ad4527719 | ||
|
|
8c6327d024 | ||
|
|
aecbb2ab02 |
210
.dockerignore
Normal file
210
.dockerignore
Normal file
@@ -0,0 +1,210 @@
|
|||||||
|
.git
|
||||||
|
.github
|
||||||
|
.run
|
||||||
|
docs
|
||||||
|
test
|
||||||
|
typings
|
||||||
|
*Client.py
|
||||||
|
|
||||||
|
.idea
|
||||||
|
.vscode
|
||||||
|
|
||||||
|
*_Spoiler.txt
|
||||||
|
*.bmbp
|
||||||
|
*.apbp
|
||||||
|
*.apl2ac
|
||||||
|
*.apm3
|
||||||
|
*.apmc
|
||||||
|
*.apz5
|
||||||
|
*.aptloz
|
||||||
|
*.apemerald
|
||||||
|
*.pyc
|
||||||
|
*.pyd
|
||||||
|
*.sfc
|
||||||
|
*.z64
|
||||||
|
*.n64
|
||||||
|
*.nes
|
||||||
|
*.smc
|
||||||
|
*.sms
|
||||||
|
*.gb
|
||||||
|
*.gbc
|
||||||
|
*.gba
|
||||||
|
*.wixobj
|
||||||
|
*.lck
|
||||||
|
*.db3
|
||||||
|
*multidata
|
||||||
|
*multisave
|
||||||
|
*.archipelago
|
||||||
|
*.apsave
|
||||||
|
*.BIN
|
||||||
|
*.puml
|
||||||
|
|
||||||
|
setups
|
||||||
|
build
|
||||||
|
bundle/components.wxs
|
||||||
|
dist
|
||||||
|
/prof/
|
||||||
|
README.html
|
||||||
|
.vs/
|
||||||
|
EnemizerCLI/
|
||||||
|
/Players/
|
||||||
|
/SNI/
|
||||||
|
/sni-*/
|
||||||
|
/appimagetool*
|
||||||
|
/host.yaml
|
||||||
|
/options.yaml
|
||||||
|
/config.yaml
|
||||||
|
/logs/
|
||||||
|
_persistent_storage.yaml
|
||||||
|
mystery_result_*.yaml
|
||||||
|
*-errors.txt
|
||||||
|
success.txt
|
||||||
|
output/
|
||||||
|
Output Logs/
|
||||||
|
/factorio/
|
||||||
|
/Minecraft Forge Server/
|
||||||
|
/WebHostLib/static/generated
|
||||||
|
/freeze_requirements.txt
|
||||||
|
/Archipelago.zip
|
||||||
|
/setup.ini
|
||||||
|
/installdelete.iss
|
||||||
|
/data/user.kv
|
||||||
|
/datapackage
|
||||||
|
/custom_worlds
|
||||||
|
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
*.dll
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
share/python-wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
MANIFEST
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
installer.log
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.nox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*.cover
|
||||||
|
*.py,cover
|
||||||
|
.hypothesis/
|
||||||
|
.pytest_cache/
|
||||||
|
cover/
|
||||||
|
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
.pybuilder/
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# IPython
|
||||||
|
profile_default/
|
||||||
|
ipython_config.py
|
||||||
|
|
||||||
|
# vim editor
|
||||||
|
*.swp
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# Environments
|
||||||
|
.env
|
||||||
|
.venv*
|
||||||
|
env/
|
||||||
|
venv/
|
||||||
|
/venv*/
|
||||||
|
ENV/
|
||||||
|
env.bak/
|
||||||
|
venv.bak/
|
||||||
|
*.code-workspace
|
||||||
|
shell.nix
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
.spyproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# mkdocs documentation
|
||||||
|
/site
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.mypy_cache/
|
||||||
|
.dmypy.json
|
||||||
|
dmypy.json
|
||||||
|
|
||||||
|
# Pyre type checker
|
||||||
|
.pyre/
|
||||||
|
|
||||||
|
# pytype static type analyzer
|
||||||
|
.pytype/
|
||||||
|
|
||||||
|
# Cython debug symbols
|
||||||
|
cython_debug/
|
||||||
|
|
||||||
|
# Cython intermediates
|
||||||
|
_speedups.c
|
||||||
|
_speedups.cpp
|
||||||
|
_speedups.html
|
||||||
|
|
||||||
|
# minecraft server stuff
|
||||||
|
jdk*/
|
||||||
|
minecraft*/
|
||||||
|
minecraft_versions.json
|
||||||
|
!worlds/minecraft/
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
.python-version
|
||||||
|
|
||||||
|
#undertale stuff
|
||||||
|
/Undertale/
|
||||||
|
|
||||||
|
# OS General Files
|
||||||
|
.DS_Store
|
||||||
|
.AppleDouble
|
||||||
|
.LSOverride
|
||||||
|
Thumbs.db
|
||||||
|
[Dd]esktop.ini
|
||||||
2
.github/pyright-config.json
vendored
2
.github/pyright-config.json
vendored
@@ -29,7 +29,7 @@
|
|||||||
"reportMissingImports": true,
|
"reportMissingImports": true,
|
||||||
"reportMissingTypeStubs": true,
|
"reportMissingTypeStubs": true,
|
||||||
|
|
||||||
"pythonVersion": "3.10",
|
"pythonVersion": "3.11",
|
||||||
"pythonPlatform": "Windows",
|
"pythonPlatform": "Windows",
|
||||||
|
|
||||||
"executionEnvironments": [
|
"executionEnvironments": [
|
||||||
|
|||||||
2
.github/workflows/analyze-modified-files.yml
vendored
2
.github/workflows/analyze-modified-files.yml
vendored
@@ -53,7 +53,7 @@ jobs:
|
|||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v5
|
||||||
if: env.diff != ''
|
if: env.diff != ''
|
||||||
with:
|
with:
|
||||||
python-version: '3.10'
|
python-version: '3.11'
|
||||||
|
|
||||||
- name: "Install dependencies"
|
- name: "Install dependencies"
|
||||||
if: env.diff != ''
|
if: env.diff != ''
|
||||||
|
|||||||
18
.github/workflows/build.yml
vendored
18
.github/workflows/build.yml
vendored
@@ -19,7 +19,12 @@ on:
|
|||||||
|
|
||||||
env:
|
env:
|
||||||
ENEMIZER_VERSION: 7.1
|
ENEMIZER_VERSION: 7.1
|
||||||
APPIMAGETOOL_VERSION: 13
|
# NOTE: since appimage/appimagetool and appimage/type2-runtime does not have tags anymore,
|
||||||
|
# we check the sha256 and require manual intervention if it was updated.
|
||||||
|
APPIMAGETOOL_VERSION: continuous
|
||||||
|
APPIMAGETOOL_X86_64_HASH: '29348a20b80827cd261c28e95172ff828b69d43d4e4e18e3fd069e2c8693c94e'
|
||||||
|
APPIMAGE_RUNTIME_VERSION: continuous
|
||||||
|
APPIMAGE_RUNTIME_X86_64_HASH: 'e70ffa9b69b211574d0917adc482dd66f25a0083427b5945783965d55b0b0a8b'
|
||||||
|
|
||||||
permissions: # permissions required for attestation
|
permissions: # permissions required for attestation
|
||||||
id-token: 'write'
|
id-token: 'write'
|
||||||
@@ -98,7 +103,7 @@ jobs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cd build/exe*
|
cd build/exe*
|
||||||
cp Players/Templates/Clique.yaml Players/
|
cp Players/Templates/VVVVVV.yaml Players/
|
||||||
timeout 30 ./ArchipelagoGenerate
|
timeout 30 ./ArchipelagoGenerate
|
||||||
- name: Store 7z
|
- name: Store 7z
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
@@ -134,10 +139,13 @@ jobs:
|
|||||||
- name: Install build-time dependencies
|
- name: Install build-time dependencies
|
||||||
run: |
|
run: |
|
||||||
echo "PYTHON=python3.12" >> $GITHUB_ENV
|
echo "PYTHON=python3.12" >> $GITHUB_ENV
|
||||||
wget -nv https://github.com/AppImage/AppImageKit/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
|
wget -nv https://github.com/AppImage/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
|
||||||
|
echo "$APPIMAGETOOL_X86_64_HASH appimagetool-x86_64.AppImage" | sha256sum -c
|
||||||
|
wget -nv https://github.com/AppImage/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
|
||||||
|
echo "$APPIMAGE_RUNTIME_X86_64_HASH runtime-x86_64" | sha256sum -c
|
||||||
chmod a+rx appimagetool-x86_64.AppImage
|
chmod a+rx appimagetool-x86_64.AppImage
|
||||||
./appimagetool-x86_64.AppImage --appimage-extract
|
./appimagetool-x86_64.AppImage --appimage-extract
|
||||||
echo -e '#/bin/sh\n./squashfs-root/AppRun "$@"' > appimagetool
|
echo -e '#/bin/sh\n./squashfs-root/AppRun --runtime-file runtime-x86_64 "$@"' > appimagetool
|
||||||
chmod a+rx appimagetool
|
chmod a+rx appimagetool
|
||||||
- name: Download run-time dependencies
|
- name: Download run-time dependencies
|
||||||
run: |
|
run: |
|
||||||
@@ -189,7 +197,7 @@ jobs:
|
|||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cd build/exe*
|
cd build/exe*
|
||||||
cp Players/Templates/Clique.yaml Players/
|
cp Players/Templates/VVVVVV.yaml Players/
|
||||||
timeout 30 ./ArchipelagoGenerate
|
timeout 30 ./ArchipelagoGenerate
|
||||||
- name: Store AppImage
|
- name: Store AppImage
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
|
|||||||
14
.github/workflows/release.yml
vendored
14
.github/workflows/release.yml
vendored
@@ -9,7 +9,12 @@ on:
|
|||||||
|
|
||||||
env:
|
env:
|
||||||
ENEMIZER_VERSION: 7.1
|
ENEMIZER_VERSION: 7.1
|
||||||
APPIMAGETOOL_VERSION: 13
|
# NOTE: since appimage/appimagetool and appimage/type2-runtime does not have tags anymore,
|
||||||
|
# we check the sha256 and require manual intervention if it was updated.
|
||||||
|
APPIMAGETOOL_VERSION: continuous
|
||||||
|
APPIMAGETOOL_X86_64_HASH: '29348a20b80827cd261c28e95172ff828b69d43d4e4e18e3fd069e2c8693c94e'
|
||||||
|
APPIMAGE_RUNTIME_VERSION: continuous
|
||||||
|
APPIMAGE_RUNTIME_X86_64_HASH: 'e70ffa9b69b211574d0917adc482dd66f25a0083427b5945783965d55b0b0a8b'
|
||||||
|
|
||||||
permissions: # permissions required for attestation
|
permissions: # permissions required for attestation
|
||||||
id-token: 'write'
|
id-token: 'write'
|
||||||
@@ -122,10 +127,13 @@ jobs:
|
|||||||
- name: Install build-time dependencies
|
- name: Install build-time dependencies
|
||||||
run: |
|
run: |
|
||||||
echo "PYTHON=python3.12" >> $GITHUB_ENV
|
echo "PYTHON=python3.12" >> $GITHUB_ENV
|
||||||
wget -nv https://github.com/AppImage/AppImageKit/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
|
wget -nv https://github.com/AppImage/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
|
||||||
|
echo "$APPIMAGETOOL_X86_64_HASH appimagetool-x86_64.AppImage" | sha256sum -c
|
||||||
|
wget -nv https://github.com/AppImage/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
|
||||||
|
echo "$APPIMAGE_RUNTIME_X86_64_HASH runtime-x86_64" | sha256sum -c
|
||||||
chmod a+rx appimagetool-x86_64.AppImage
|
chmod a+rx appimagetool-x86_64.AppImage
|
||||||
./appimagetool-x86_64.AppImage --appimage-extract
|
./appimagetool-x86_64.AppImage --appimage-extract
|
||||||
echo -e '#/bin/sh\n./squashfs-root/AppRun "$@"' > appimagetool
|
echo -e '#/bin/sh\n./squashfs-root/AppRun --runtime-file runtime-x86_64 "$@"' > appimagetool
|
||||||
chmod a+rx appimagetool
|
chmod a+rx appimagetool
|
||||||
- name: Download run-time dependencies
|
- name: Download run-time dependencies
|
||||||
run: |
|
run: |
|
||||||
|
|||||||
11
.github/workflows/unittests.yml
vendored
11
.github/workflows/unittests.yml
vendored
@@ -8,18 +8,24 @@ on:
|
|||||||
paths:
|
paths:
|
||||||
- '**'
|
- '**'
|
||||||
- '!docs/**'
|
- '!docs/**'
|
||||||
|
- '!deploy/**'
|
||||||
- '!setup.py'
|
- '!setup.py'
|
||||||
|
- '!Dockerfile'
|
||||||
- '!*.iss'
|
- '!*.iss'
|
||||||
- '!.gitignore'
|
- '!.gitignore'
|
||||||
|
- '!.dockerignore'
|
||||||
- '!.github/workflows/**'
|
- '!.github/workflows/**'
|
||||||
- '.github/workflows/unittests.yml'
|
- '.github/workflows/unittests.yml'
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
- '**'
|
- '**'
|
||||||
- '!docs/**'
|
- '!docs/**'
|
||||||
|
- '!deploy/**'
|
||||||
- '!setup.py'
|
- '!setup.py'
|
||||||
|
- '!Dockerfile'
|
||||||
- '!*.iss'
|
- '!*.iss'
|
||||||
- '!.gitignore'
|
- '!.gitignore'
|
||||||
|
- '!.dockerignore'
|
||||||
- '!.github/workflows/**'
|
- '!.github/workflows/**'
|
||||||
- '.github/workflows/unittests.yml'
|
- '.github/workflows/unittests.yml'
|
||||||
|
|
||||||
@@ -33,11 +39,10 @@ jobs:
|
|||||||
matrix:
|
matrix:
|
||||||
os: [ubuntu-latest]
|
os: [ubuntu-latest]
|
||||||
python:
|
python:
|
||||||
- {version: '3.10'}
|
- {version: '3.11.2'} # Change to '3.11' around 2026-06-10
|
||||||
- {version: '3.11'}
|
|
||||||
- {version: '3.12'}
|
- {version: '3.12'}
|
||||||
include:
|
include:
|
||||||
- python: {version: '3.10'} # old compat
|
- python: {version: '3.11'} # old compat
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
- python: {version: '3.12'} # current
|
- python: {version: '3.12'} # current
|
||||||
os: windows-latest
|
os: windows-latest
|
||||||
|
|||||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -56,7 +56,6 @@ success.txt
|
|||||||
output/
|
output/
|
||||||
Output Logs/
|
Output Logs/
|
||||||
/factorio/
|
/factorio/
|
||||||
/Minecraft Forge Server/
|
|
||||||
/WebHostLib/static/generated
|
/WebHostLib/static/generated
|
||||||
/freeze_requirements.txt
|
/freeze_requirements.txt
|
||||||
/Archipelago.zip
|
/Archipelago.zip
|
||||||
@@ -184,12 +183,6 @@ _speedups.c
|
|||||||
_speedups.cpp
|
_speedups.cpp
|
||||||
_speedups.html
|
_speedups.html
|
||||||
|
|
||||||
# minecraft server stuff
|
|
||||||
jdk*/
|
|
||||||
minecraft*/
|
|
||||||
minecraft_versions.json
|
|
||||||
!worlds/minecraft/
|
|
||||||
|
|
||||||
# pyenv
|
# pyenv
|
||||||
.python-version
|
.python-version
|
||||||
|
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ from typing import List
|
|||||||
|
|
||||||
|
|
||||||
import Utils
|
import Utils
|
||||||
|
from settings import get_settings
|
||||||
from NetUtils import ClientStatus
|
from NetUtils import ClientStatus
|
||||||
from Utils import async_start
|
from Utils import async_start
|
||||||
from CommonClient import CommonContext, server_loop, gui_enabled, ClientCommandProcessor, logger, \
|
from CommonClient import CommonContext, server_loop, gui_enabled, ClientCommandProcessor, logger, \
|
||||||
@@ -80,8 +81,8 @@ class AdventureContext(CommonContext):
|
|||||||
self.local_item_locations = {}
|
self.local_item_locations = {}
|
||||||
self.dragon_speed_info = {}
|
self.dragon_speed_info = {}
|
||||||
|
|
||||||
options = Utils.get_settings()
|
options = get_settings().adventure_options
|
||||||
self.display_msgs = options["adventure_options"]["display_msgs"]
|
self.display_msgs = options.display_msgs
|
||||||
|
|
||||||
async def server_auth(self, password_requested: bool = False):
|
async def server_auth(self, password_requested: bool = False):
|
||||||
if password_requested and not self.password:
|
if password_requested and not self.password:
|
||||||
@@ -102,7 +103,7 @@ class AdventureContext(CommonContext):
|
|||||||
def on_package(self, cmd: str, args: dict):
|
def on_package(self, cmd: str, args: dict):
|
||||||
if cmd == 'Connected':
|
if cmd == 'Connected':
|
||||||
self.locations_array = None
|
self.locations_array = None
|
||||||
if Utils.get_settings()["adventure_options"].get("death_link", False):
|
if get_settings().adventure_options.as_dict().get("death_link", False):
|
||||||
self.set_deathlink = True
|
self.set_deathlink = True
|
||||||
async_start(self.get_freeincarnates_used())
|
async_start(self.get_freeincarnates_used())
|
||||||
elif cmd == "RoomInfo":
|
elif cmd == "RoomInfo":
|
||||||
@@ -406,6 +407,7 @@ async def atari_sync_task(ctx: AdventureContext):
|
|||||||
except ConnectionRefusedError:
|
except ConnectionRefusedError:
|
||||||
logger.debug("Connection Refused, Trying Again")
|
logger.debug("Connection Refused, Trying Again")
|
||||||
ctx.atari_status = CONNECTION_REFUSED_STATUS
|
ctx.atari_status = CONNECTION_REFUSED_STATUS
|
||||||
|
await asyncio.sleep(1)
|
||||||
continue
|
continue
|
||||||
except CancelledError:
|
except CancelledError:
|
||||||
pass
|
pass
|
||||||
@@ -415,8 +417,9 @@ async def atari_sync_task(ctx: AdventureContext):
|
|||||||
|
|
||||||
|
|
||||||
async def run_game(romfile):
|
async def run_game(romfile):
|
||||||
auto_start = Utils.get_settings()["adventure_options"].get("rom_start", True)
|
options = get_settings().adventure_options
|
||||||
rom_args = Utils.get_settings()["adventure_options"].get("rom_args")
|
auto_start = options.rom_start
|
||||||
|
rom_args = options.rom_args
|
||||||
if auto_start is True:
|
if auto_start is True:
|
||||||
import webbrowser
|
import webbrowser
|
||||||
webbrowser.open(romfile)
|
webbrowser.open(romfile)
|
||||||
|
|||||||
272
BaseClasses.py
272
BaseClasses.py
@@ -5,12 +5,13 @@ import functools
|
|||||||
import logging
|
import logging
|
||||||
import random
|
import random
|
||||||
import secrets
|
import secrets
|
||||||
|
import warnings
|
||||||
from argparse import Namespace
|
from argparse import Namespace
|
||||||
from collections import Counter, deque
|
from collections import Counter, deque, defaultdict
|
||||||
from collections.abc import Collection, MutableSequence
|
from collections.abc import Collection, MutableSequence
|
||||||
from enum import IntEnum, IntFlag
|
from enum import IntEnum, IntFlag
|
||||||
from typing import (AbstractSet, Any, Callable, ClassVar, Dict, Iterable, Iterator, List, Literal, Mapping, NamedTuple,
|
from typing import (AbstractSet, Any, Callable, ClassVar, Dict, Iterable, Iterator, List, Literal, Mapping, NamedTuple,
|
||||||
Optional, Protocol, Set, Tuple, Union, TYPE_CHECKING)
|
Optional, Protocol, Set, Tuple, Union, TYPE_CHECKING, Literal, overload)
|
||||||
import dataclasses
|
import dataclasses
|
||||||
|
|
||||||
from typing_extensions import NotRequired, TypedDict
|
from typing_extensions import NotRequired, TypedDict
|
||||||
@@ -153,17 +154,11 @@ class MultiWorld():
|
|||||||
self.algorithm = 'balanced'
|
self.algorithm = 'balanced'
|
||||||
self.groups = {}
|
self.groups = {}
|
||||||
self.regions = self.RegionManager(players)
|
self.regions = self.RegionManager(players)
|
||||||
self.shops = []
|
|
||||||
self.itempool = []
|
self.itempool = []
|
||||||
self.seed = None
|
self.seed = None
|
||||||
self.seed_name: str = "Unavailable"
|
self.seed_name: str = "Unavailable"
|
||||||
self.precollected_items = {player: [] for player in self.player_ids}
|
self.precollected_items = {player: [] for player in self.player_ids}
|
||||||
self.required_locations = []
|
self.required_locations = []
|
||||||
self.light_world_light_cone = False
|
|
||||||
self.dark_world_light_cone = False
|
|
||||||
self.rupoor_cost = 10
|
|
||||||
self.aga_randomness = True
|
|
||||||
self.save_and_quit_from_boss = True
|
|
||||||
self.custom = False
|
self.custom = False
|
||||||
self.customitemarray = []
|
self.customitemarray = []
|
||||||
self.shuffle_ganon = True
|
self.shuffle_ganon = True
|
||||||
@@ -182,7 +177,7 @@ class MultiWorld():
|
|||||||
set_player_attr('completion_condition', lambda state: True)
|
set_player_attr('completion_condition', lambda state: True)
|
||||||
self.worlds = {}
|
self.worlds = {}
|
||||||
self.per_slot_randoms = Utils.DeprecateDict("Using per_slot_randoms is now deprecated. Please use the "
|
self.per_slot_randoms = Utils.DeprecateDict("Using per_slot_randoms is now deprecated. Please use the "
|
||||||
"world's random object instead (usually self.random)")
|
"world's random object instead (usually self.random)", True)
|
||||||
self.plando_options = PlandoOptions.none
|
self.plando_options = PlandoOptions.none
|
||||||
|
|
||||||
def get_all_ids(self) -> Tuple[int, ...]:
|
def get_all_ids(self) -> Tuple[int, ...]:
|
||||||
@@ -227,17 +222,8 @@ class MultiWorld():
|
|||||||
self.seed_name = name if name else str(self.seed)
|
self.seed_name = name if name else str(self.seed)
|
||||||
|
|
||||||
def set_options(self, args: Namespace) -> None:
|
def set_options(self, args: Namespace) -> None:
|
||||||
# TODO - remove this section once all worlds use options dataclasses
|
|
||||||
from worlds import AutoWorld
|
from worlds import AutoWorld
|
||||||
|
|
||||||
all_keys: Set[str] = {key for player in self.player_ids for key in
|
|
||||||
AutoWorld.AutoWorldRegister.world_types[self.game[player]].options_dataclass.type_hints}
|
|
||||||
for option_key in all_keys:
|
|
||||||
option = Utils.DeprecateDict(f"Getting options from multiworld is now deprecated. "
|
|
||||||
f"Please use `self.options.{option_key}` instead.", True)
|
|
||||||
option.update(getattr(args, option_key, {}))
|
|
||||||
setattr(self, option_key, option)
|
|
||||||
|
|
||||||
for player in self.player_ids:
|
for player in self.player_ids:
|
||||||
world_type = AutoWorld.AutoWorldRegister.world_types[self.game[player]]
|
world_type = AutoWorld.AutoWorldRegister.world_types[self.game[player]]
|
||||||
self.worlds[player] = world_type(self, player)
|
self.worlds[player] = world_type(self, player)
|
||||||
@@ -438,12 +424,27 @@ class MultiWorld():
|
|||||||
def get_location(self, location_name: str, player: int) -> Location:
|
def get_location(self, location_name: str, player: int) -> Location:
|
||||||
return self.regions.location_cache[player][location_name]
|
return self.regions.location_cache[player][location_name]
|
||||||
|
|
||||||
def get_all_state(self, use_cache: bool, allow_partial_entrances: bool = False,
|
def get_all_state(self, use_cache: bool | None = None, allow_partial_entrances: bool = False,
|
||||||
collect_pre_fill_items: bool = True, perform_sweep: bool = True) -> CollectionState:
|
collect_pre_fill_items: bool = True, perform_sweep: bool = True) -> CollectionState:
|
||||||
cached = getattr(self, "_all_state", None)
|
"""
|
||||||
if use_cache and cached:
|
Creates a new CollectionState, and collects all precollected items, all items in the multiworld itempool, those
|
||||||
return cached.copy()
|
specified in each worlds' `get_pre_fill_items()`, and then sweeps the multiworld collecting any other items
|
||||||
|
it is able to reach, building as complete of a completed game state as possible.
|
||||||
|
|
||||||
|
:param use_cache: Deprecated and unused.
|
||||||
|
:param allow_partial_entrances: Whether the CollectionState should allow for disconnected entrances while
|
||||||
|
sweeping, such as before entrance randomization is complete.
|
||||||
|
:param collect_pre_fill_items: Whether the items in each worlds' `get_pre_fill_items()` should be added to this
|
||||||
|
state.
|
||||||
|
:param perform_sweep: Whether this state should perform a sweep for reachable locations, collecting any placed
|
||||||
|
items it can.
|
||||||
|
|
||||||
|
:return: The completed CollectionState.
|
||||||
|
"""
|
||||||
|
if __debug__ and use_cache is not None:
|
||||||
|
# TODO swap to Utils.deprecate when we want this to crash on source and warn on frozen
|
||||||
|
warnings.warn("multiworld.get_all_state no longer caches all_state and this argument will be removed.",
|
||||||
|
DeprecationWarning)
|
||||||
ret = CollectionState(self, allow_partial_entrances)
|
ret = CollectionState(self, allow_partial_entrances)
|
||||||
|
|
||||||
for item in self.itempool:
|
for item in self.itempool:
|
||||||
@@ -456,8 +457,6 @@ class MultiWorld():
|
|||||||
if perform_sweep:
|
if perform_sweep:
|
||||||
ret.sweep_for_advancements()
|
ret.sweep_for_advancements()
|
||||||
|
|
||||||
if use_cache:
|
|
||||||
self._all_state = ret
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
def get_items(self) -> List[Item]:
|
def get_items(self) -> List[Item]:
|
||||||
@@ -571,26 +570,9 @@ class MultiWorld():
|
|||||||
if self.has_beaten_game(state):
|
if self.has_beaten_game(state):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
base_locations = self.get_locations() if locations is None else locations
|
for _ in state.sweep_for_advancements(locations,
|
||||||
prog_locations = {location for location in base_locations if location.item
|
yield_each_sweep=True,
|
||||||
and location.item.advancement and location not in state.locations_checked}
|
checked_locations=state.locations_checked):
|
||||||
|
|
||||||
while prog_locations:
|
|
||||||
sphere: Set[Location] = set()
|
|
||||||
# build up spheres of collection radius.
|
|
||||||
# Everything in each sphere is independent from each other in dependencies and only depends on lower spheres
|
|
||||||
for location in prog_locations:
|
|
||||||
if location.can_reach(state):
|
|
||||||
sphere.add(location)
|
|
||||||
|
|
||||||
if not sphere:
|
|
||||||
# ran out of places and did not finish yet, quit
|
|
||||||
return False
|
|
||||||
|
|
||||||
for location in sphere:
|
|
||||||
state.collect(location.item, True, location)
|
|
||||||
prog_locations -= sphere
|
|
||||||
|
|
||||||
if self.has_beaten_game(state):
|
if self.has_beaten_game(state):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -706,6 +688,12 @@ class MultiWorld():
|
|||||||
sphere.append(locations.pop(n))
|
sphere.append(locations.pop(n))
|
||||||
|
|
||||||
if not sphere:
|
if not sphere:
|
||||||
|
if __debug__:
|
||||||
|
from Fill import FillError
|
||||||
|
raise FillError(
|
||||||
|
f"Could not access required locations for accessibility check. Missing: {locations}",
|
||||||
|
multiworld=self,
|
||||||
|
)
|
||||||
# ran out of places and did not finish yet, quit
|
# ran out of places and did not finish yet, quit
|
||||||
logging.warning(f"Could not access required locations for accessibility check."
|
logging.warning(f"Could not access required locations for accessibility check."
|
||||||
f" Missing: {locations}")
|
f" Missing: {locations}")
|
||||||
@@ -869,20 +857,133 @@ class CollectionState():
|
|||||||
"Please switch over to sweep_for_advancements.")
|
"Please switch over to sweep_for_advancements.")
|
||||||
return self.sweep_for_advancements(locations)
|
return self.sweep_for_advancements(locations)
|
||||||
|
|
||||||
def sweep_for_advancements(self, locations: Optional[Iterable[Location]] = None) -> None:
|
def _sweep_for_advancements_impl(self, advancements_per_player: List[Tuple[int, List[Location]]],
|
||||||
if locations is None:
|
yield_each_sweep: bool) -> Iterator[None]:
|
||||||
locations = self.multiworld.get_filled_locations()
|
"""
|
||||||
reachable_advancements = True
|
The implementation for sweep_for_advancements is separated here because it returns a generator due to the use
|
||||||
# since the loop has a good chance to run more than once, only filter the advancements once
|
of a yield statement.
|
||||||
locations = {location for location in locations if location.advancement and location not in self.advancements}
|
"""
|
||||||
|
all_players = {player for player, _ in advancements_per_player}
|
||||||
|
players_to_check = all_players
|
||||||
|
# As an optimization, it is assumed that each player's world only logically depends on itself. However, worlds
|
||||||
|
# are allowed to logically depend on other worlds, so once there are no more players that should be checked
|
||||||
|
# under this assumption, an extra sweep iteration is performed that checks every player, to confirm that the
|
||||||
|
# sweep is finished.
|
||||||
|
checking_if_finished = False
|
||||||
|
while players_to_check:
|
||||||
|
next_advancements_per_player: List[Tuple[int, List[Location]]] = []
|
||||||
|
next_players_to_check = set()
|
||||||
|
|
||||||
while reachable_advancements:
|
for player, locations in advancements_per_player:
|
||||||
reachable_advancements = {location for location in locations if location.can_reach(self)}
|
if player not in players_to_check:
|
||||||
locations -= reachable_advancements
|
next_advancements_per_player.append((player, locations))
|
||||||
for advancement in reachable_advancements:
|
continue
|
||||||
self.advancements.add(advancement)
|
|
||||||
assert isinstance(advancement.item, Item), "tried to collect Event with no Item"
|
# Accessibility of each location is checked first because a player's region accessibility cache becomes
|
||||||
self.collect(advancement.item, True, advancement)
|
# stale whenever one of their own items is collected into the state.
|
||||||
|
reachable_locations: List[Location] = []
|
||||||
|
unreachable_locations: List[Location] = []
|
||||||
|
for location in locations:
|
||||||
|
if location.can_reach(self):
|
||||||
|
# Locations containing items that do not belong to `player` could be collected immediately
|
||||||
|
# because they won't stale `player`'s region accessibility cache, but, for simplicity, all the
|
||||||
|
# items at reachable locations are collected in a single loop.
|
||||||
|
reachable_locations.append(location)
|
||||||
|
else:
|
||||||
|
unreachable_locations.append(location)
|
||||||
|
if unreachable_locations:
|
||||||
|
next_advancements_per_player.append((player, unreachable_locations))
|
||||||
|
|
||||||
|
# A previous player's locations processed in the current `while players_to_check` iteration could have
|
||||||
|
# collected items belonging to `player`, but now that all of `player`'s reachable locations have been
|
||||||
|
# found, it can be assumed that `player` will not gain any more reachable locations until another one of
|
||||||
|
# their items is collected.
|
||||||
|
# It would be clearer to not add players to `next_players_to_check` in the first place if they have yet
|
||||||
|
# to be processed in the current `while players_to_check` iteration, but checking if a player should be
|
||||||
|
# added to `next_players_to_check` would need to be run once for every item that is collected, so it is
|
||||||
|
# more performant to instead discard `player` from `next_players_to_check` once their locations have
|
||||||
|
# been processed.
|
||||||
|
next_players_to_check.discard(player)
|
||||||
|
|
||||||
|
# Collect the items from the reachable locations.
|
||||||
|
for advancement in reachable_locations:
|
||||||
|
self.advancements.add(advancement)
|
||||||
|
item = advancement.item
|
||||||
|
assert isinstance(item, Item), "tried to collect advancement Location with no Item"
|
||||||
|
if self.collect(item, True, advancement):
|
||||||
|
# The player the item belongs to may be able to reach additional locations in the next sweep
|
||||||
|
# iteration.
|
||||||
|
next_players_to_check.add(item.player)
|
||||||
|
|
||||||
|
if not next_players_to_check:
|
||||||
|
if not checking_if_finished:
|
||||||
|
# It is assumed that each player's world only logically depends on itself, which may not be the
|
||||||
|
# case, so confirm that the sweep is finished by doing an extra iteration that checks every player.
|
||||||
|
checking_if_finished = True
|
||||||
|
next_players_to_check = all_players
|
||||||
|
else:
|
||||||
|
checking_if_finished = False
|
||||||
|
|
||||||
|
players_to_check = next_players_to_check
|
||||||
|
advancements_per_player = next_advancements_per_player
|
||||||
|
|
||||||
|
if yield_each_sweep:
|
||||||
|
yield
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sweep_for_advancements(self, locations: Optional[Iterable[Location]] = None, *,
|
||||||
|
yield_each_sweep: Literal[True],
|
||||||
|
checked_locations: Optional[Set[Location]] = None) -> Iterator[None]: ...
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def sweep_for_advancements(self, locations: Optional[Iterable[Location]] = None,
|
||||||
|
yield_each_sweep: Literal[False] = False,
|
||||||
|
checked_locations: Optional[Set[Location]] = None) -> None: ...
|
||||||
|
|
||||||
|
def sweep_for_advancements(self, locations: Optional[Iterable[Location]] = None, yield_each_sweep: bool = False,
|
||||||
|
checked_locations: Optional[Set[Location]] = None) -> Optional[Iterator[None]]:
|
||||||
|
"""
|
||||||
|
Sweep through the locations that contain uncollected advancement items, collecting the items into the state
|
||||||
|
until there are no more reachable locations that contain uncollected advancement items.
|
||||||
|
|
||||||
|
:param locations: The locations to sweep through, defaulting to all locations in the multiworld.
|
||||||
|
:param yield_each_sweep: When True, return a generator that yields at the end of each sweep iteration.
|
||||||
|
:param checked_locations: Optional override of locations to filter out from the locations argument, defaults to
|
||||||
|
self.advancements when None.
|
||||||
|
"""
|
||||||
|
if checked_locations is None:
|
||||||
|
checked_locations = self.advancements
|
||||||
|
|
||||||
|
# Since the sweep loop usually performs many iterations, the locations are filtered in advance.
|
||||||
|
# A list of tuples is used, instead of a dictionary, because it is faster to iterate.
|
||||||
|
advancements_per_player: List[Tuple[int, List[Location]]]
|
||||||
|
if locations is None:
|
||||||
|
# `location.advancement` can only be True for filled locations, so unfilled locations are filtered out.
|
||||||
|
advancements_per_player = []
|
||||||
|
for player, locations_dict in self.multiworld.regions.location_cache.items():
|
||||||
|
filtered_locations = [location for location in locations_dict.values()
|
||||||
|
if location.advancement and location not in checked_locations]
|
||||||
|
if filtered_locations:
|
||||||
|
advancements_per_player.append((player, filtered_locations))
|
||||||
|
else:
|
||||||
|
# Filter and separate the locations into a list for each player.
|
||||||
|
advancements_per_player_dict: Dict[int, List[Location]] = defaultdict(list)
|
||||||
|
for location in locations:
|
||||||
|
if location.advancement and location not in checked_locations:
|
||||||
|
advancements_per_player_dict[location.player].append(location)
|
||||||
|
# Convert to a list of tuples.
|
||||||
|
advancements_per_player = list(advancements_per_player_dict.items())
|
||||||
|
del advancements_per_player_dict
|
||||||
|
|
||||||
|
if yield_each_sweep:
|
||||||
|
# Return a generator that will yield at the end of each sweep iteration.
|
||||||
|
return self._sweep_for_advancements_impl(advancements_per_player, True)
|
||||||
|
else:
|
||||||
|
# Create the generator, but tell it not to yield anything, so it will run to completion in zero iterations
|
||||||
|
# once started, then start and exhaust the generator by attempting to iterate it.
|
||||||
|
for _ in self._sweep_for_advancements_impl(advancements_per_player, False):
|
||||||
|
assert False, "Generator yielded when it should have run to completion without yielding"
|
||||||
|
return None
|
||||||
|
|
||||||
# item name related
|
# item name related
|
||||||
def has(self, item: str, player: int, count: int = 1) -> bool:
|
def has(self, item: str, player: int, count: int = 1) -> bool:
|
||||||
@@ -1150,13 +1251,13 @@ class Region:
|
|||||||
self.region_manager = region_manager
|
self.region_manager = region_manager
|
||||||
|
|
||||||
def __getitem__(self, index: int) -> Location:
|
def __getitem__(self, index: int) -> Location:
|
||||||
return self._list.__getitem__(index)
|
return self._list[index]
|
||||||
|
|
||||||
def __setitem__(self, index: int, value: Location) -> None:
|
def __setitem__(self, index: int, value: Location) -> None:
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
return self._list.__len__()
|
return len(self._list)
|
||||||
|
|
||||||
def __iter__(self):
|
def __iter__(self):
|
||||||
return iter(self._list)
|
return iter(self._list)
|
||||||
@@ -1170,8 +1271,8 @@ class Region:
|
|||||||
|
|
||||||
class LocationRegister(Register):
|
class LocationRegister(Register):
|
||||||
def __delitem__(self, index: int) -> None:
|
def __delitem__(self, index: int) -> None:
|
||||||
location: Location = self._list.__getitem__(index)
|
location: Location = self._list[index]
|
||||||
self._list.__delitem__(index)
|
del self._list[index]
|
||||||
del(self.region_manager.location_cache[location.player][location.name])
|
del(self.region_manager.location_cache[location.player][location.name])
|
||||||
|
|
||||||
def insert(self, index: int, value: Location) -> None:
|
def insert(self, index: int, value: Location) -> None:
|
||||||
@@ -1182,8 +1283,8 @@ class Region:
|
|||||||
|
|
||||||
class EntranceRegister(Register):
|
class EntranceRegister(Register):
|
||||||
def __delitem__(self, index: int) -> None:
|
def __delitem__(self, index: int) -> None:
|
||||||
entrance: Entrance = self._list.__getitem__(index)
|
entrance: Entrance = self._list[index]
|
||||||
self._list.__delitem__(index)
|
del self._list[index]
|
||||||
del(self.region_manager.entrance_cache[entrance.player][entrance.name])
|
del(self.region_manager.entrance_cache[entrance.player][entrance.name])
|
||||||
|
|
||||||
def insert(self, index: int, value: Entrance) -> None:
|
def insert(self, index: int, value: Entrance) -> None:
|
||||||
@@ -1337,8 +1438,8 @@ class Region:
|
|||||||
Connects current region to regions in exit dictionary. Passed region names must exist first.
|
Connects current region to regions in exit dictionary. Passed region names must exist first.
|
||||||
|
|
||||||
:param exits: exits from the region. format is {"connecting_region": "exit_name"}. if a non dict is provided,
|
:param exits: exits from the region. format is {"connecting_region": "exit_name"}. if a non dict is provided,
|
||||||
created entrances will be named "self.name -> connecting_region"
|
created entrances will be named "self.name -> connecting_region"
|
||||||
:param rules: rules for the exits from this region. format is {"connecting_region", rule}
|
:param rules: rules for the exits from this region. format is {"connecting_region": rule}
|
||||||
"""
|
"""
|
||||||
if not isinstance(exits, Dict):
|
if not isinstance(exits, Dict):
|
||||||
exits = dict.fromkeys(exits)
|
exits = dict.fromkeys(exits)
|
||||||
@@ -1430,31 +1531,47 @@ class Location:
|
|||||||
|
|
||||||
|
|
||||||
class ItemClassification(IntFlag):
|
class ItemClassification(IntFlag):
|
||||||
filler = 0b0000
|
filler = 0b00000
|
||||||
""" aka trash, as in filler items like ammo, currency etc """
|
""" aka trash, as in filler items like ammo, currency etc """
|
||||||
|
|
||||||
progression = 0b0001
|
progression = 0b00001
|
||||||
""" Item that is logically relevant.
|
""" Item that is logically relevant.
|
||||||
Protects this item from being placed on excluded or unreachable locations. """
|
Protects this item from being placed on excluded or unreachable locations. """
|
||||||
|
|
||||||
useful = 0b0010
|
useful = 0b00010
|
||||||
""" Item that is especially useful.
|
""" Item that is especially useful.
|
||||||
Protects this item from being placed on excluded or unreachable locations.
|
Protects this item from being placed on excluded or unreachable locations.
|
||||||
When combined with another flag like "progression", it means "an especially useful progression item". """
|
When combined with another flag like "progression", it means "an especially useful progression item". """
|
||||||
|
|
||||||
trap = 0b0100
|
trap = 0b00100
|
||||||
""" Item that is detrimental in some way. """
|
""" Item that is detrimental in some way. """
|
||||||
|
|
||||||
skip_balancing = 0b1000
|
skip_balancing = 0b01000
|
||||||
""" should technically never occur on its own
|
""" should technically never occur on its own
|
||||||
Item that is logically relevant, but progression balancing should not touch.
|
Item that is logically relevant, but progression balancing should not touch.
|
||||||
Typically currency or other counted items. """
|
|
||||||
|
Possible reasons for why an item should not be pulled ahead by progression balancing:
|
||||||
|
1. This item is quite insignificant, so pulling it earlier doesn't help (currency/etc.)
|
||||||
|
2. It is important for the player experience that this item is evenly distributed in the seed (e.g. goal items) """
|
||||||
|
|
||||||
progression_skip_balancing = 0b1001 # only progression gets balanced
|
deprioritized = 0b10000
|
||||||
|
""" Should technically never occur on its own.
|
||||||
|
Will not be considered for priority locations,
|
||||||
|
unless Priority Locations Fill runs out of regular progression items before filling all priority locations.
|
||||||
|
|
||||||
|
Should be used for items that would feel bad for the player to find on a priority location.
|
||||||
|
Usually, these are items that are plentiful or insignificant. """
|
||||||
|
|
||||||
|
progression_deprioritized_skip_balancing = 0b11001
|
||||||
|
""" Since a common case of both skip_balancing and deprioritized is "insignificant progression",
|
||||||
|
these items often want both flags. """
|
||||||
|
|
||||||
|
progression_skip_balancing = 0b01001 # only progression gets balanced
|
||||||
|
progression_deprioritized = 0b10001 # only progression can be placed during priority fill
|
||||||
|
|
||||||
def as_flag(self) -> int:
|
def as_flag(self) -> int:
|
||||||
"""As Network API flag int."""
|
"""As Network API flag int."""
|
||||||
return int(self & 0b0111)
|
return int(self & 0b00111)
|
||||||
|
|
||||||
|
|
||||||
class Item:
|
class Item:
|
||||||
@@ -1498,6 +1615,10 @@ class Item:
|
|||||||
def trap(self) -> bool:
|
def trap(self) -> bool:
|
||||||
return ItemClassification.trap in self.classification
|
return ItemClassification.trap in self.classification
|
||||||
|
|
||||||
|
@property
|
||||||
|
def deprioritized(self) -> bool:
|
||||||
|
return ItemClassification.deprioritized in self.classification
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def filler(self) -> bool:
|
def filler(self) -> bool:
|
||||||
return not (self.advancement or self.useful or self.trap)
|
return not (self.advancement or self.useful or self.trap)
|
||||||
@@ -1778,7 +1899,8 @@ class Spoiler:
|
|||||||
if self.unreachables:
|
if self.unreachables:
|
||||||
outfile.write('\n\nUnreachable Progression Items:\n\n')
|
outfile.write('\n\nUnreachable Progression Items:\n\n')
|
||||||
outfile.write(
|
outfile.write(
|
||||||
'\n'.join(['%s: %s' % (unreachable.item, unreachable) for unreachable in self.unreachables]))
|
'\n'.join(['%s: %s' % (unreachable.item, unreachable)
|
||||||
|
for unreachable in sorted(self.unreachables)]))
|
||||||
|
|
||||||
if self.paths:
|
if self.paths:
|
||||||
outfile.write('\n\nPaths:\n\n')
|
outfile.write('\n\nPaths:\n\n')
|
||||||
@@ -1805,7 +1927,7 @@ class Tutorial(NamedTuple):
|
|||||||
description: str
|
description: str
|
||||||
language: str
|
language: str
|
||||||
file_name: str
|
file_name: str
|
||||||
link: str
|
link: str # unused
|
||||||
authors: List[str]
|
authors: List[str]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
167
CommonClient.py
167
CommonClient.py
@@ -21,7 +21,7 @@ import Utils
|
|||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
Utils.init_logging("TextClient", exception_logger="Client")
|
Utils.init_logging("TextClient", exception_logger="Client")
|
||||||
|
|
||||||
from MultiServer import CommandProcessor
|
from MultiServer import CommandProcessor, mark_raw
|
||||||
from NetUtils import (Endpoint, decode, NetworkItem, encode, JSONtoTextParser, ClientStatus, Permission, NetworkSlot,
|
from NetUtils import (Endpoint, decode, NetworkItem, encode, JSONtoTextParser, ClientStatus, Permission, NetworkSlot,
|
||||||
RawJSONtoTextParser, add_json_text, add_json_location, add_json_item, JSONTypes, HintStatus, SlotType)
|
RawJSONtoTextParser, add_json_text, add_json_location, add_json_item, JSONTypes, HintStatus, SlotType)
|
||||||
from Utils import Version, stream_input, async_start
|
from Utils import Version, stream_input, async_start
|
||||||
@@ -99,6 +99,17 @@ class ClientCommandProcessor(CommandProcessor):
|
|||||||
self.ctx.on_print_json({"data": parts, "cmd": "PrintJSON"})
|
self.ctx.on_print_json({"data": parts, "cmd": "PrintJSON"})
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
def get_current_datapackage(self) -> dict[str, typing.Any]:
|
||||||
|
"""
|
||||||
|
Return datapackage for current game if known.
|
||||||
|
|
||||||
|
:return: The datapackage for the currently registered game. If not found, an empty dictionary will be returned.
|
||||||
|
"""
|
||||||
|
if not self.ctx.game:
|
||||||
|
return {}
|
||||||
|
checksum = self.ctx.checksums[self.ctx.game]
|
||||||
|
return Utils.load_data_package_for_checksum(self.ctx.game, checksum)
|
||||||
|
|
||||||
def _cmd_missing(self, filter_text = "") -> bool:
|
def _cmd_missing(self, filter_text = "") -> bool:
|
||||||
"""List all missing location checks, from your local game state.
|
"""List all missing location checks, from your local game state.
|
||||||
Can be given text, which will be used as filter."""
|
Can be given text, which will be used as filter."""
|
||||||
@@ -107,7 +118,9 @@ class ClientCommandProcessor(CommandProcessor):
|
|||||||
return False
|
return False
|
||||||
count = 0
|
count = 0
|
||||||
checked_count = 0
|
checked_count = 0
|
||||||
for location, location_id in AutoWorldRegister.world_types[self.ctx.game].location_name_to_id.items():
|
|
||||||
|
lookup = self.get_current_datapackage().get("location_name_to_id", {})
|
||||||
|
for location, location_id in lookup.items():
|
||||||
if filter_text and filter_text not in location:
|
if filter_text and filter_text not in location:
|
||||||
continue
|
continue
|
||||||
if location_id < 0:
|
if location_id < 0:
|
||||||
@@ -128,43 +141,91 @@ class ClientCommandProcessor(CommandProcessor):
|
|||||||
self.output("No missing location checks found.")
|
self.output("No missing location checks found.")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def _cmd_items(self):
|
def output_datapackage_part(self, key: str, name: str) -> bool:
|
||||||
|
"""
|
||||||
|
Helper to digest a specific section of this game's datapackage.
|
||||||
|
|
||||||
|
:param key: The dictionary key in the datapackage.
|
||||||
|
:param name: Printed to the user as context for the part.
|
||||||
|
|
||||||
|
:return: Whether the process was successful.
|
||||||
|
"""
|
||||||
|
if not self.ctx.game:
|
||||||
|
self.output(f"No game set, cannot determine {name}.")
|
||||||
|
return False
|
||||||
|
|
||||||
|
lookup = self.get_current_datapackage().get(key)
|
||||||
|
if lookup is None:
|
||||||
|
self.output("datapackage not yet loaded, try again")
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.output(f"{name} for {self.ctx.game}")
|
||||||
|
for key in lookup:
|
||||||
|
self.output(key)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _cmd_items(self) -> bool:
|
||||||
"""List all item names for the currently running game."""
|
"""List all item names for the currently running game."""
|
||||||
if not self.ctx.game:
|
return self.output_datapackage_part("item_name_to_id", "Item Names")
|
||||||
self.output("No game set, cannot determine existing items.")
|
|
||||||
return False
|
|
||||||
self.output(f"Item Names for {self.ctx.game}")
|
|
||||||
for item_name in AutoWorldRegister.world_types[self.ctx.game].item_name_to_id:
|
|
||||||
self.output(item_name)
|
|
||||||
|
|
||||||
def _cmd_item_groups(self):
|
def _cmd_locations(self) -> bool:
|
||||||
"""List all item group names for the currently running game."""
|
|
||||||
if not self.ctx.game:
|
|
||||||
self.output("No game set, cannot determine existing item groups.")
|
|
||||||
return False
|
|
||||||
self.output(f"Item Group Names for {self.ctx.game}")
|
|
||||||
for group_name in AutoWorldRegister.world_types[self.ctx.game].item_name_groups:
|
|
||||||
self.output(group_name)
|
|
||||||
|
|
||||||
def _cmd_locations(self):
|
|
||||||
"""List all location names for the currently running game."""
|
"""List all location names for the currently running game."""
|
||||||
if not self.ctx.game:
|
return self.output_datapackage_part("location_name_to_id", "Location Names")
|
||||||
self.output("No game set, cannot determine existing locations.")
|
|
||||||
return False
|
|
||||||
self.output(f"Location Names for {self.ctx.game}")
|
|
||||||
for location_name in AutoWorldRegister.world_types[self.ctx.game].location_name_to_id:
|
|
||||||
self.output(location_name)
|
|
||||||
|
|
||||||
def _cmd_location_groups(self):
|
def output_group_part(self, group_key: typing.Literal["item_name_groups", "location_name_groups"],
|
||||||
"""List all location group names for the currently running game."""
|
filter_key: str,
|
||||||
if not self.ctx.game:
|
name: str) -> bool:
|
||||||
self.output("No game set, cannot determine existing location groups.")
|
"""
|
||||||
return False
|
Logs an item or location group from the player's game's datapackage.
|
||||||
self.output(f"Location Group Names for {self.ctx.game}")
|
|
||||||
for group_name in AutoWorldRegister.world_types[self.ctx.game].location_name_groups:
|
|
||||||
self.output(group_name)
|
|
||||||
|
|
||||||
def _cmd_ready(self):
|
:param group_key: Either Item or Location group to be processed.
|
||||||
|
:param filter_key: Which group key to filter to. If an empty string is passed will log all item/location groups.
|
||||||
|
:param name: Printed to the user as context for the part.
|
||||||
|
|
||||||
|
:return: Whether the process was successful.
|
||||||
|
"""
|
||||||
|
if not self.ctx.game:
|
||||||
|
self.output(f"No game set, cannot determine existing {name} Groups.")
|
||||||
|
return False
|
||||||
|
lookup = Utils.persistent_load().get("groups_by_checksum", {}).get(self.ctx.checksums[self.ctx.game], {})\
|
||||||
|
.get(self.ctx.game, {}).get(group_key, {})
|
||||||
|
if lookup is None:
|
||||||
|
self.output("datapackage not yet loaded, try again")
|
||||||
|
return False
|
||||||
|
|
||||||
|
if filter_key:
|
||||||
|
if filter_key not in lookup:
|
||||||
|
self.output(f"Unknown {name} Group {filter_key}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
self.output(f"{name}s for {name} Group \"{filter_key}\"")
|
||||||
|
for entry in lookup[filter_key]:
|
||||||
|
self.output(entry)
|
||||||
|
else:
|
||||||
|
self.output(f"{name} Groups for {self.ctx.game}")
|
||||||
|
for group in lookup:
|
||||||
|
self.output(group)
|
||||||
|
return True
|
||||||
|
|
||||||
|
@mark_raw
|
||||||
|
def _cmd_item_groups(self, key: str = "") -> bool:
|
||||||
|
"""
|
||||||
|
List all item group names for the currently running game.
|
||||||
|
|
||||||
|
:param key: Which item group to filter to. Will log all groups if empty.
|
||||||
|
"""
|
||||||
|
return self.output_group_part("item_name_groups", key, "Item")
|
||||||
|
|
||||||
|
@mark_raw
|
||||||
|
def _cmd_location_groups(self, key: str = "") -> bool:
|
||||||
|
"""
|
||||||
|
List all location group names for the currently running game.
|
||||||
|
|
||||||
|
:param key: Which item group to filter to. Will log all groups if empty.
|
||||||
|
"""
|
||||||
|
return self.output_group_part("location_name_groups", key, "Location")
|
||||||
|
|
||||||
|
def _cmd_ready(self) -> bool:
|
||||||
"""Send ready status to server."""
|
"""Send ready status to server."""
|
||||||
self.ctx.ready = not self.ctx.ready
|
self.ctx.ready = not self.ctx.ready
|
||||||
if self.ctx.ready:
|
if self.ctx.ready:
|
||||||
@@ -174,6 +235,7 @@ class ClientCommandProcessor(CommandProcessor):
|
|||||||
state = ClientStatus.CLIENT_CONNECTED
|
state = ClientStatus.CLIENT_CONNECTED
|
||||||
self.output("Unreadied.")
|
self.output("Unreadied.")
|
||||||
async_start(self.ctx.send_msgs([{"cmd": "StatusUpdate", "status": state}]), name="send StatusUpdate")
|
async_start(self.ctx.send_msgs([{"cmd": "StatusUpdate", "status": state}]), name="send StatusUpdate")
|
||||||
|
return True
|
||||||
|
|
||||||
def default(self, raw: str):
|
def default(self, raw: str):
|
||||||
"""The default message parser to be used when parsing any messages that do not match a command"""
|
"""The default message parser to be used when parsing any messages that do not match a command"""
|
||||||
@@ -201,6 +263,7 @@ class CommonContext:
|
|||||||
|
|
||||||
# noinspection PyTypeChecker
|
# noinspection PyTypeChecker
|
||||||
def __getitem__(self, key: str) -> typing.Mapping[int, str]:
|
def __getitem__(self, key: str) -> typing.Mapping[int, str]:
|
||||||
|
assert isinstance(key, str), f"ctx.{self.lookup_type}_names used with an id, use the lookup_in_ helpers instead"
|
||||||
return self._game_store[key]
|
return self._game_store[key]
|
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
@@ -210,7 +273,7 @@ class CommonContext:
|
|||||||
return iter(self._game_store)
|
return iter(self._game_store)
|
||||||
|
|
||||||
def __repr__(self) -> str:
|
def __repr__(self) -> str:
|
||||||
return self._game_store.__repr__()
|
return repr(self._game_store)
|
||||||
|
|
||||||
def lookup_in_game(self, code: int, game_name: typing.Optional[str] = None) -> str:
|
def lookup_in_game(self, code: int, game_name: typing.Optional[str] = None) -> str:
|
||||||
"""Returns the name for an item/location id in the context of a specific game or own game if `game` is
|
"""Returns the name for an item/location id in the context of a specific game or own game if `game` is
|
||||||
@@ -378,6 +441,8 @@ class CommonContext:
|
|||||||
|
|
||||||
self.jsontotextparser = JSONtoTextParser(self)
|
self.jsontotextparser = JSONtoTextParser(self)
|
||||||
self.rawjsontotextparser = RawJSONtoTextParser(self)
|
self.rawjsontotextparser = RawJSONtoTextParser(self)
|
||||||
|
if self.game:
|
||||||
|
self.checksums[self.game] = network_data_package["games"][self.game]["checksum"]
|
||||||
self.update_data_package(network_data_package)
|
self.update_data_package(network_data_package)
|
||||||
|
|
||||||
# execution
|
# execution
|
||||||
@@ -637,6 +702,24 @@ class CommonContext:
|
|||||||
for game, game_data in data_package["games"].items():
|
for game, game_data in data_package["games"].items():
|
||||||
Utils.store_data_package_for_checksum(game, game_data)
|
Utils.store_data_package_for_checksum(game, game_data)
|
||||||
|
|
||||||
|
def consume_network_item_groups(self):
|
||||||
|
data = {"item_name_groups": self.stored_data[f"_read_item_name_groups_{self.game}"]}
|
||||||
|
current_cache = Utils.persistent_load().get("groups_by_checksum", {}).get(self.checksums[self.game], {})
|
||||||
|
if self.game in current_cache:
|
||||||
|
current_cache[self.game].update(data)
|
||||||
|
else:
|
||||||
|
current_cache[self.game] = data
|
||||||
|
Utils.persistent_store("groups_by_checksum", self.checksums[self.game], current_cache)
|
||||||
|
|
||||||
|
def consume_network_location_groups(self):
|
||||||
|
data = {"location_name_groups": self.stored_data[f"_read_location_name_groups_{self.game}"]}
|
||||||
|
current_cache = Utils.persistent_load().get("groups_by_checksum", {}).get(self.checksums[self.game], {})
|
||||||
|
if self.game in current_cache:
|
||||||
|
current_cache[self.game].update(data)
|
||||||
|
else:
|
||||||
|
current_cache[self.game] = data
|
||||||
|
Utils.persistent_store("groups_by_checksum", self.checksums[self.game], current_cache)
|
||||||
|
|
||||||
# data storage
|
# data storage
|
||||||
|
|
||||||
def set_notify(self, *keys: str) -> None:
|
def set_notify(self, *keys: str) -> None:
|
||||||
@@ -937,6 +1020,12 @@ async def process_server_cmd(ctx: CommonContext, args: dict):
|
|||||||
ctx.hint_points = args.get("hint_points", 0)
|
ctx.hint_points = args.get("hint_points", 0)
|
||||||
ctx.consume_players_package(args["players"])
|
ctx.consume_players_package(args["players"])
|
||||||
ctx.stored_data_notification_keys.add(f"_read_hints_{ctx.team}_{ctx.slot}")
|
ctx.stored_data_notification_keys.add(f"_read_hints_{ctx.team}_{ctx.slot}")
|
||||||
|
if ctx.game:
|
||||||
|
game = ctx.game
|
||||||
|
else:
|
||||||
|
game = ctx.slot_info[ctx.slot][1]
|
||||||
|
ctx.stored_data_notification_keys.add(f"_read_item_name_groups_{game}")
|
||||||
|
ctx.stored_data_notification_keys.add(f"_read_location_name_groups_{game}")
|
||||||
msgs = []
|
msgs = []
|
||||||
if ctx.locations_checked:
|
if ctx.locations_checked:
|
||||||
msgs.append({"cmd": "LocationChecks",
|
msgs.append({"cmd": "LocationChecks",
|
||||||
@@ -1017,11 +1106,19 @@ async def process_server_cmd(ctx: CommonContext, args: dict):
|
|||||||
ctx.stored_data.update(args["keys"])
|
ctx.stored_data.update(args["keys"])
|
||||||
if ctx.ui and f"_read_hints_{ctx.team}_{ctx.slot}" in args["keys"]:
|
if ctx.ui and f"_read_hints_{ctx.team}_{ctx.slot}" in args["keys"]:
|
||||||
ctx.ui.update_hints()
|
ctx.ui.update_hints()
|
||||||
|
if f"_read_item_name_groups_{ctx.game}" in args["keys"]:
|
||||||
|
ctx.consume_network_item_groups()
|
||||||
|
if f"_read_location_name_groups_{ctx.game}" in args["keys"]:
|
||||||
|
ctx.consume_network_location_groups()
|
||||||
|
|
||||||
elif cmd == "SetReply":
|
elif cmd == "SetReply":
|
||||||
ctx.stored_data[args["key"]] = args["value"]
|
ctx.stored_data[args["key"]] = args["value"]
|
||||||
if ctx.ui and f"_read_hints_{ctx.team}_{ctx.slot}" == args["key"]:
|
if ctx.ui and f"_read_hints_{ctx.team}_{ctx.slot}" == args["key"]:
|
||||||
ctx.ui.update_hints()
|
ctx.ui.update_hints()
|
||||||
|
elif f"_read_item_name_groups_{ctx.game}" == args["key"]:
|
||||||
|
ctx.consume_network_item_groups()
|
||||||
|
elif f"_read_location_name_groups_{ctx.game}" == args["key"]:
|
||||||
|
ctx.consume_network_location_groups()
|
||||||
elif args["key"].startswith("EnergyLink"):
|
elif args["key"].startswith("EnergyLink"):
|
||||||
ctx.current_energy_link_value = args["value"]
|
ctx.current_energy_link_value = args["value"]
|
||||||
if ctx.ui:
|
if ctx.ui:
|
||||||
|
|||||||
100
Dockerfile
Normal file
100
Dockerfile
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
# hadolint global ignore=SC1090,SC1091
|
||||||
|
|
||||||
|
# Source
|
||||||
|
FROM scratch AS release
|
||||||
|
WORKDIR /release
|
||||||
|
ADD https://github.com/Ijwu/Enemizer/releases/latest/download/ubuntu.16.04-x64.zip Enemizer.zip
|
||||||
|
|
||||||
|
# Enemizer
|
||||||
|
FROM alpine:3.21 AS enemizer
|
||||||
|
ARG TARGETARCH
|
||||||
|
WORKDIR /release
|
||||||
|
COPY --from=release /release/Enemizer.zip .
|
||||||
|
|
||||||
|
# No release for arm architecture. Skip.
|
||||||
|
RUN if [ "$TARGETARCH" = "amd64" ]; then \
|
||||||
|
apk add unzip=6.0-r15 --no-cache && \
|
||||||
|
unzip -u Enemizer.zip -d EnemizerCLI && \
|
||||||
|
chmod -R 777 EnemizerCLI; \
|
||||||
|
else touch EnemizerCLI; fi
|
||||||
|
|
||||||
|
# Cython builder stage
|
||||||
|
FROM python:3.12 AS cython-builder
|
||||||
|
|
||||||
|
WORKDIR /build
|
||||||
|
|
||||||
|
# Copy and install requirements first (better caching)
|
||||||
|
COPY requirements.txt WebHostLib/requirements.txt
|
||||||
|
|
||||||
|
RUN pip install --no-cache-dir -r \
|
||||||
|
WebHostLib/requirements.txt \
|
||||||
|
"setuptools>=75,<81"
|
||||||
|
|
||||||
|
COPY _speedups.pyx .
|
||||||
|
COPY intset.h .
|
||||||
|
|
||||||
|
RUN cythonize -b -i _speedups.pyx
|
||||||
|
|
||||||
|
# Archipelago
|
||||||
|
FROM python:3.12-slim-bookworm AS archipelago
|
||||||
|
ARG TARGETARCH
|
||||||
|
ENV VIRTUAL_ENV=/opt/venv
|
||||||
|
ENV PYTHONUNBUFFERED=1
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install requirements
|
||||||
|
# hadolint ignore=DL3008
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
git \
|
||||||
|
gcc=4:12.2.0-3 \
|
||||||
|
libc6-dev \
|
||||||
|
libtk8.6=8.6.13-2 \
|
||||||
|
g++=4:12.2.0-3 \
|
||||||
|
curl && \
|
||||||
|
apt-get clean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Create and activate venv
|
||||||
|
RUN python -m venv $VIRTUAL_ENV; \
|
||||||
|
. $VIRTUAL_ENV/bin/activate
|
||||||
|
|
||||||
|
# Copy and install requirements first (better caching)
|
||||||
|
COPY WebHostLib/requirements.txt WebHostLib/requirements.txt
|
||||||
|
|
||||||
|
RUN pip install --no-cache-dir -r \
|
||||||
|
WebHostLib/requirements.txt \
|
||||||
|
gunicorn==23.0.0
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
COPY --from=cython-builder /build/*.so ./
|
||||||
|
|
||||||
|
# Run ModuleUpdate
|
||||||
|
RUN python ModuleUpdate.py -y
|
||||||
|
|
||||||
|
# Purge unneeded packages
|
||||||
|
RUN apt-get purge -y \
|
||||||
|
git \
|
||||||
|
gcc \
|
||||||
|
libc6-dev \
|
||||||
|
g++ && \
|
||||||
|
apt-get autoremove -y
|
||||||
|
|
||||||
|
# Copy necessary components
|
||||||
|
COPY --from=enemizer /release/EnemizerCLI /tmp/EnemizerCLI
|
||||||
|
|
||||||
|
# No release for arm architecture. Skip.
|
||||||
|
RUN if [ "$TARGETARCH" = "amd64" ]; then \
|
||||||
|
cp -r /tmp/EnemizerCLI EnemizerCLI; \
|
||||||
|
fi; \
|
||||||
|
rm -rf /tmp/EnemizerCLI
|
||||||
|
|
||||||
|
# Define health check
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --start-period=40s --retries=3 \
|
||||||
|
CMD curl -f http://localhost:${PORT:-80} || exit 1
|
||||||
|
|
||||||
|
# Ensure no runtime ModuleUpdate.
|
||||||
|
ENV SKIP_REQUIREMENTS_UPDATE=true
|
||||||
|
|
||||||
|
ENTRYPOINT [ "python", "WebHost.py" ]
|
||||||
102
Fill.py
102
Fill.py
@@ -116,6 +116,13 @@ def fill_restrictive(multiworld: MultiWorld, base_state: CollectionState, locati
|
|||||||
else:
|
else:
|
||||||
# we filled all reachable spots.
|
# we filled all reachable spots.
|
||||||
if swap:
|
if swap:
|
||||||
|
# Keep a cache of previous safe swap states that might be usable to sweep from to produce the next
|
||||||
|
# swap state, instead of sweeping from `base_state` each time.
|
||||||
|
previous_safe_swap_state_cache: typing.Deque[CollectionState] = deque()
|
||||||
|
# Almost never are more than 2 states needed. The rare cases that do are usually highly restrictive
|
||||||
|
# single_player_placement=True pre-fills which can go through more than 10 states in some seeds.
|
||||||
|
max_swap_base_state_cache_length = 3
|
||||||
|
|
||||||
# try swapping this item with previously placed items in a safe way then in an unsafe way
|
# try swapping this item with previously placed items in a safe way then in an unsafe way
|
||||||
swap_attempts = ((i, location, unsafe)
|
swap_attempts = ((i, location, unsafe)
|
||||||
for unsafe in (False, True)
|
for unsafe in (False, True)
|
||||||
@@ -130,9 +137,30 @@ def fill_restrictive(multiworld: MultiWorld, base_state: CollectionState, locati
|
|||||||
|
|
||||||
location.item = None
|
location.item = None
|
||||||
placed_item.location = None
|
placed_item.location = None
|
||||||
swap_state = sweep_from_pool(base_state, [placed_item, *item_pool] if unsafe else item_pool,
|
|
||||||
multiworld.get_filled_locations(item.player)
|
for previous_safe_swap_state in previous_safe_swap_state_cache:
|
||||||
if single_player_placement else None)
|
# If a state has already checked the location of the swap, then it cannot be used.
|
||||||
|
if location not in previous_safe_swap_state.advancements:
|
||||||
|
# Previous swap states will have collected all items in `item_pool`, so the new
|
||||||
|
# `swap_state` can skip having to collect them again.
|
||||||
|
# Previous swap states will also have already checked many locations, making the sweep
|
||||||
|
# faster.
|
||||||
|
swap_state = sweep_from_pool(previous_safe_swap_state, (placed_item,) if unsafe else (),
|
||||||
|
multiworld.get_filled_locations(item.player)
|
||||||
|
if single_player_placement else None)
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
# No previous swap_state was usable as a base state to sweep from, so create a new one.
|
||||||
|
swap_state = sweep_from_pool(base_state, [placed_item, *item_pool] if unsafe else item_pool,
|
||||||
|
multiworld.get_filled_locations(item.player)
|
||||||
|
if single_player_placement else None)
|
||||||
|
# Unsafe states should not be added to the cache because they have collected `placed_item`.
|
||||||
|
if not unsafe:
|
||||||
|
if len(previous_safe_swap_state_cache) >= max_swap_base_state_cache_length:
|
||||||
|
# Remove the oldest cached state.
|
||||||
|
previous_safe_swap_state_cache.pop()
|
||||||
|
# Add the new state to the start of the cache.
|
||||||
|
previous_safe_swap_state_cache.appendleft(swap_state)
|
||||||
# unsafe means swap_state assumes we can somehow collect placed_item before item_to_place
|
# unsafe means swap_state assumes we can somehow collect placed_item before item_to_place
|
||||||
# by continuing to swap, which is not guaranteed. This is unsafe because there is no mechanic
|
# by continuing to swap, which is not guaranteed. This is unsafe because there is no mechanic
|
||||||
# to clean that up later, so there is a chance generation fails.
|
# to clean that up later, so there is a chance generation fails.
|
||||||
@@ -330,7 +358,12 @@ def fast_fill(multiworld: MultiWorld,
|
|||||||
return item_pool[placing:], fill_locations[placing:]
|
return item_pool[placing:], fill_locations[placing:]
|
||||||
|
|
||||||
|
|
||||||
def accessibility_corrections(multiworld: MultiWorld, state: CollectionState, locations, pool=[]):
|
def accessibility_corrections(multiworld: MultiWorld,
|
||||||
|
state: CollectionState,
|
||||||
|
locations: list[Location],
|
||||||
|
pool: list[Item] | None = None) -> None:
|
||||||
|
if pool is None:
|
||||||
|
pool = []
|
||||||
maximum_exploration_state = sweep_from_pool(state, pool)
|
maximum_exploration_state = sweep_from_pool(state, pool)
|
||||||
minimal_players = {player for player in multiworld.player_ids if
|
minimal_players = {player for player in multiworld.player_ids if
|
||||||
multiworld.worlds[player].options.accessibility == "minimal"}
|
multiworld.worlds[player].options.accessibility == "minimal"}
|
||||||
@@ -450,6 +483,12 @@ def distribute_early_items(multiworld: MultiWorld,
|
|||||||
|
|
||||||
def distribute_items_restrictive(multiworld: MultiWorld,
|
def distribute_items_restrictive(multiworld: MultiWorld,
|
||||||
panic_method: typing.Literal["swap", "raise", "start_inventory"] = "swap") -> None:
|
panic_method: typing.Literal["swap", "raise", "start_inventory"] = "swap") -> None:
|
||||||
|
assert all(item.location is None for item in multiworld.itempool), (
|
||||||
|
"At the start of distribute_items_restrictive, "
|
||||||
|
"there are items in the multiworld itempool that are already placed on locations:\n"
|
||||||
|
f"{[(item.location, item) for item in multiworld.itempool if item.location is not None]}"
|
||||||
|
)
|
||||||
|
|
||||||
fill_locations = sorted(multiworld.get_unfilled_locations())
|
fill_locations = sorted(multiworld.get_unfilled_locations())
|
||||||
multiworld.random.shuffle(fill_locations)
|
multiworld.random.shuffle(fill_locations)
|
||||||
# get items to distribute
|
# get items to distribute
|
||||||
@@ -492,18 +531,48 @@ def distribute_items_restrictive(multiworld: MultiWorld,
|
|||||||
single_player = multiworld.players == 1 and not multiworld.groups
|
single_player = multiworld.players == 1 and not multiworld.groups
|
||||||
|
|
||||||
if prioritylocations:
|
if prioritylocations:
|
||||||
|
regular_progression = []
|
||||||
|
deprioritized_progression = []
|
||||||
|
for item in progitempool:
|
||||||
|
if item.deprioritized:
|
||||||
|
deprioritized_progression.append(item)
|
||||||
|
else:
|
||||||
|
regular_progression.append(item)
|
||||||
|
|
||||||
# "priority fill"
|
# "priority fill"
|
||||||
maximum_exploration_state = sweep_from_pool(multiworld.state)
|
# try without deprioritized items in the mix at all. This means they need to be collected into state first.
|
||||||
fill_restrictive(multiworld, maximum_exploration_state, prioritylocations, progitempool,
|
priority_fill_state = sweep_from_pool(multiworld.state, deprioritized_progression)
|
||||||
|
fill_restrictive(multiworld, priority_fill_state, prioritylocations, regular_progression,
|
||||||
single_player_placement=single_player, swap=False, on_place=mark_for_locking,
|
single_player_placement=single_player, swap=False, on_place=mark_for_locking,
|
||||||
name="Priority", one_item_per_player=True, allow_partial=True)
|
name="Priority", one_item_per_player=True, allow_partial=True)
|
||||||
|
|
||||||
if prioritylocations:
|
if prioritylocations and regular_progression:
|
||||||
# retry with one_item_per_player off because some priority fills can fail to fill with that optimization
|
# retry with one_item_per_player off because some priority fills can fail to fill with that optimization
|
||||||
maximum_exploration_state = sweep_from_pool(multiworld.state)
|
# deprioritized items are still not in the mix, so they need to be collected into state first.
|
||||||
fill_restrictive(multiworld, maximum_exploration_state, prioritylocations, progitempool,
|
priority_retry_state = sweep_from_pool(multiworld.state, deprioritized_progression)
|
||||||
single_player_placement=single_player, swap=False, on_place=mark_for_locking,
|
fill_restrictive(multiworld, priority_retry_state, prioritylocations, regular_progression,
|
||||||
name="Priority Retry", one_item_per_player=False)
|
single_player_placement=single_player, swap=False, on_place=mark_for_locking,
|
||||||
|
name="Priority Retry", one_item_per_player=False, allow_partial=True)
|
||||||
|
|
||||||
|
if prioritylocations and deprioritized_progression:
|
||||||
|
# There are no more regular progression items that can be placed on any priority locations.
|
||||||
|
# We'd still prefer to place deprioritized progression items on priority locations over filler items.
|
||||||
|
# Since we're leaving out the remaining regular progression now, we need to collect it into state first.
|
||||||
|
priority_retry_2_state = sweep_from_pool(multiworld.state, regular_progression)
|
||||||
|
fill_restrictive(multiworld, priority_retry_2_state, prioritylocations, deprioritized_progression,
|
||||||
|
single_player_placement=single_player, swap=False, on_place=mark_for_locking,
|
||||||
|
name="Priority Retry 2", one_item_per_player=True, allow_partial=True)
|
||||||
|
|
||||||
|
if prioritylocations and deprioritized_progression:
|
||||||
|
# retry with deprioritized items AND without one_item_per_player optimisation
|
||||||
|
# Since we're leaving out the remaining regular progression now, we need to collect it into state first.
|
||||||
|
priority_retry_3_state = sweep_from_pool(multiworld.state, regular_progression)
|
||||||
|
fill_restrictive(multiworld, priority_retry_3_state, prioritylocations, deprioritized_progression,
|
||||||
|
single_player_placement=single_player, swap=False, on_place=mark_for_locking,
|
||||||
|
name="Priority Retry 3", one_item_per_player=False)
|
||||||
|
|
||||||
|
# restore original order of progitempool
|
||||||
|
progitempool[:] = [item for item in progitempool if not item.location]
|
||||||
accessibility_corrections(multiworld, multiworld.state, prioritylocations, progitempool)
|
accessibility_corrections(multiworld, multiworld.state, prioritylocations, progitempool)
|
||||||
defaultlocations = prioritylocations + defaultlocations
|
defaultlocations = prioritylocations + defaultlocations
|
||||||
|
|
||||||
@@ -890,7 +959,7 @@ def parse_planned_blocks(multiworld: MultiWorld) -> dict[int, list[PlandoItemBlo
|
|||||||
worlds = set()
|
worlds = set()
|
||||||
for listed_world in target_world:
|
for listed_world in target_world:
|
||||||
if listed_world not in world_name_lookup:
|
if listed_world not in world_name_lookup:
|
||||||
failed(f"Cannot place item to {target_world}'s world as that world does not exist.",
|
failed(f"Cannot place item to {listed_world}'s world as that world does not exist.",
|
||||||
block.force)
|
block.force)
|
||||||
continue
|
continue
|
||||||
worlds.add(world_name_lookup[listed_world])
|
worlds.add(world_name_lookup[listed_world])
|
||||||
@@ -923,9 +992,9 @@ def parse_planned_blocks(multiworld: MultiWorld) -> dict[int, list[PlandoItemBlo
|
|||||||
if isinstance(locations, str):
|
if isinstance(locations, str):
|
||||||
locations = [locations]
|
locations = [locations]
|
||||||
|
|
||||||
locations_from_groups: list[str] = []
|
|
||||||
resolved_locations: list[Location] = []
|
resolved_locations: list[Location] = []
|
||||||
for target_player in worlds:
|
for target_player in worlds:
|
||||||
|
locations_from_groups: list[str] = []
|
||||||
world_locations = multiworld.get_unfilled_locations(target_player)
|
world_locations = multiworld.get_unfilled_locations(target_player)
|
||||||
for group in multiworld.worlds[target_player].location_name_groups:
|
for group in multiworld.worlds[target_player].location_name_groups:
|
||||||
if group in locations:
|
if group in locations:
|
||||||
@@ -937,13 +1006,16 @@ def parse_planned_blocks(multiworld: MultiWorld) -> dict[int, list[PlandoItemBlo
|
|||||||
|
|
||||||
count = block.count
|
count = block.count
|
||||||
if not count:
|
if not count:
|
||||||
count = len(new_block.items)
|
count = (min(len(new_block.items), len(new_block.resolved_locations))
|
||||||
|
if new_block.resolved_locations else len(new_block.items))
|
||||||
if isinstance(count, int):
|
if isinstance(count, int):
|
||||||
count = {"min": count, "max": count}
|
count = {"min": count, "max": count}
|
||||||
if "min" not in count:
|
if "min" not in count:
|
||||||
count["min"] = 0
|
count["min"] = 0
|
||||||
if "max" not in count:
|
if "max" not in count:
|
||||||
count["max"] = len(new_block.items)
|
count["max"] = (min(len(new_block.items), len(new_block.resolved_locations))
|
||||||
|
if new_block.resolved_locations else len(new_block.items))
|
||||||
|
|
||||||
|
|
||||||
new_block.count = count
|
new_block.count = count
|
||||||
plando_blocks[player].append(new_block)
|
plando_blocks[player].append(new_block)
|
||||||
|
|||||||
@@ -32,6 +32,7 @@ GAME_ALTTP = "A Link to the Past"
|
|||||||
WINDOW_MIN_HEIGHT = 525
|
WINDOW_MIN_HEIGHT = 525
|
||||||
WINDOW_MIN_WIDTH = 425
|
WINDOW_MIN_WIDTH = 425
|
||||||
|
|
||||||
|
|
||||||
class AdjusterWorld(object):
|
class AdjusterWorld(object):
|
||||||
class AdjusterSubWorld(object):
|
class AdjusterSubWorld(object):
|
||||||
def __init__(self, random):
|
def __init__(self, random):
|
||||||
@@ -40,7 +41,6 @@ class AdjusterWorld(object):
|
|||||||
def __init__(self, sprite_pool):
|
def __init__(self, sprite_pool):
|
||||||
import random
|
import random
|
||||||
self.sprite_pool = {1: sprite_pool}
|
self.sprite_pool = {1: sprite_pool}
|
||||||
self.per_slot_randoms = {1: random}
|
|
||||||
self.worlds = {1: self.AdjusterSubWorld(random)}
|
self.worlds = {1: self.AdjusterSubWorld(random)}
|
||||||
|
|
||||||
|
|
||||||
@@ -49,6 +49,7 @@ class ArgumentDefaultsHelpFormatter(argparse.RawTextHelpFormatter):
|
|||||||
def _get_help_string(self, action):
|
def _get_help_string(self, action):
|
||||||
return textwrap.dedent(action.help)
|
return textwrap.dedent(action.help)
|
||||||
|
|
||||||
|
|
||||||
# See argparse.BooleanOptionalAction
|
# See argparse.BooleanOptionalAction
|
||||||
class BooleanOptionalActionWithDisable(argparse.Action):
|
class BooleanOptionalActionWithDisable(argparse.Action):
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
@@ -364,10 +365,10 @@ def run_sprite_update():
|
|||||||
logging.info("Done updating sprites")
|
logging.info("Done updating sprites")
|
||||||
|
|
||||||
|
|
||||||
def update_sprites(task, on_finish=None):
|
def update_sprites(task, on_finish=None, repository_url: str = "https://alttpr.com/sprites"):
|
||||||
resultmessage = ""
|
resultmessage = ""
|
||||||
successful = True
|
successful = True
|
||||||
sprite_dir = user_path("data", "sprites", "alttpr")
|
sprite_dir = user_path("data", "sprites", "alttp", "remote")
|
||||||
os.makedirs(sprite_dir, exist_ok=True)
|
os.makedirs(sprite_dir, exist_ok=True)
|
||||||
ctx = get_cert_none_ssl_context()
|
ctx = get_cert_none_ssl_context()
|
||||||
|
|
||||||
@@ -377,11 +378,11 @@ def update_sprites(task, on_finish=None):
|
|||||||
on_finish(successful, resultmessage)
|
on_finish(successful, resultmessage)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
task.update_status("Downloading alttpr sprites list")
|
task.update_status("Downloading remote sprites list")
|
||||||
with urlopen('https://alttpr.com/sprites', context=ctx) as response:
|
with urlopen(repository_url, context=ctx) as response:
|
||||||
sprites_arr = json.loads(response.read().decode("utf-8"))
|
sprites_arr = json.loads(response.read().decode("utf-8"))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
resultmessage = "Error getting list of alttpr sprites. Sprites not updated.\n\n%s: %s" % (type(e).__name__, e)
|
resultmessage = "Error getting list of remote sprites. Sprites not updated.\n\n%s: %s" % (type(e).__name__, e)
|
||||||
successful = False
|
successful = False
|
||||||
task.queue_event(finished)
|
task.queue_event(finished)
|
||||||
return
|
return
|
||||||
@@ -389,13 +390,13 @@ def update_sprites(task, on_finish=None):
|
|||||||
try:
|
try:
|
||||||
task.update_status("Determining needed sprites")
|
task.update_status("Determining needed sprites")
|
||||||
current_sprites = [os.path.basename(file) for file in glob(sprite_dir + '/*')]
|
current_sprites = [os.path.basename(file) for file in glob(sprite_dir + '/*')]
|
||||||
alttpr_sprites = [(sprite['file'], os.path.basename(urlparse(sprite['file']).path))
|
remote_sprites = [(sprite['file'], os.path.basename(urlparse(sprite['file']).path))
|
||||||
for sprite in sprites_arr if sprite["author"] != "Nintendo"]
|
for sprite in sprites_arr if sprite["author"] != "Nintendo"]
|
||||||
needed_sprites = [(sprite_url, filename) for (sprite_url, filename) in alttpr_sprites if
|
needed_sprites = [(sprite_url, filename) for (sprite_url, filename) in remote_sprites if
|
||||||
filename not in current_sprites]
|
filename not in current_sprites]
|
||||||
|
|
||||||
alttpr_filenames = [filename for (_, filename) in alttpr_sprites]
|
remote_filenames = [filename for (_, filename) in remote_sprites]
|
||||||
obsolete_sprites = [sprite for sprite in current_sprites if sprite not in alttpr_filenames]
|
obsolete_sprites = [sprite for sprite in current_sprites if sprite not in remote_filenames]
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
resultmessage = "Error Determining which sprites to update. Sprites not updated.\n\n%s: %s" % (
|
resultmessage = "Error Determining which sprites to update. Sprites not updated.\n\n%s: %s" % (
|
||||||
type(e).__name__, e)
|
type(e).__name__, e)
|
||||||
@@ -447,7 +448,7 @@ def update_sprites(task, on_finish=None):
|
|||||||
successful = False
|
successful = False
|
||||||
|
|
||||||
if successful:
|
if successful:
|
||||||
resultmessage = "alttpr sprites updated successfully"
|
resultmessage = "Remote sprites updated successfully"
|
||||||
|
|
||||||
task.queue_event(finished)
|
task.queue_event(finished)
|
||||||
|
|
||||||
@@ -868,7 +869,7 @@ class SpriteSelector():
|
|||||||
def open_custom_sprite_dir(_evt):
|
def open_custom_sprite_dir(_evt):
|
||||||
open_file(self.custom_sprite_dir)
|
open_file(self.custom_sprite_dir)
|
||||||
|
|
||||||
alttpr_frametitle = Label(self.window, text='ALTTPR Sprites')
|
remote_frametitle = Label(self.window, text='Remote Sprites')
|
||||||
|
|
||||||
custom_frametitle = Frame(self.window)
|
custom_frametitle = Frame(self.window)
|
||||||
title_text = Label(custom_frametitle, text="Custom Sprites")
|
title_text = Label(custom_frametitle, text="Custom Sprites")
|
||||||
@@ -877,8 +878,8 @@ class SpriteSelector():
|
|||||||
title_link.pack(side=LEFT)
|
title_link.pack(side=LEFT)
|
||||||
title_link.bind("<Button-1>", open_custom_sprite_dir)
|
title_link.bind("<Button-1>", open_custom_sprite_dir)
|
||||||
|
|
||||||
self.icon_section(alttpr_frametitle, self.alttpr_sprite_dir,
|
self.icon_section(remote_frametitle, self.remote_sprite_dir,
|
||||||
'ALTTPR sprites not found. Click "Update alttpr sprites" to download them.')
|
'Remote sprites not found. Click "Update remote sprites" to download them.')
|
||||||
self.icon_section(custom_frametitle, self.custom_sprite_dir,
|
self.icon_section(custom_frametitle, self.custom_sprite_dir,
|
||||||
'Put sprites in the custom sprites folder (see open link above) to have them appear here.')
|
'Put sprites in the custom sprites folder (see open link above) to have them appear here.')
|
||||||
if not randomOnEvent:
|
if not randomOnEvent:
|
||||||
@@ -891,11 +892,18 @@ class SpriteSelector():
|
|||||||
button = Button(frame, text="Browse for file...", command=self.browse_for_sprite)
|
button = Button(frame, text="Browse for file...", command=self.browse_for_sprite)
|
||||||
button.pack(side=RIGHT, padx=(5, 0))
|
button.pack(side=RIGHT, padx=(5, 0))
|
||||||
|
|
||||||
button = Button(frame, text="Update alttpr sprites", command=self.update_alttpr_sprites)
|
button = Button(frame, text="Update remote sprites", command=self.update_remote_sprites)
|
||||||
button.pack(side=RIGHT, padx=(5, 0))
|
button.pack(side=RIGHT, padx=(5, 0))
|
||||||
|
|
||||||
|
repository_label = Label(frame, text='Sprite Repository:')
|
||||||
|
self.repository_url = StringVar(frame, "https://alttpr.com/sprites")
|
||||||
|
repository_entry = Entry(frame, textvariable=self.repository_url)
|
||||||
|
|
||||||
|
repository_entry.pack(side=RIGHT, expand=True, fill=BOTH, pady=1)
|
||||||
|
repository_label.pack(side=RIGHT, expand=False, padx=(0, 5))
|
||||||
|
|
||||||
button = Button(frame, text="Do not adjust sprite",command=self.use_default_sprite)
|
button = Button(frame, text="Do not adjust sprite",command=self.use_default_sprite)
|
||||||
button.pack(side=LEFT,padx=(0,5))
|
button.pack(side=LEFT, padx=(0, 5))
|
||||||
|
|
||||||
button = Button(frame, text="Default Link sprite", command=self.use_default_link_sprite)
|
button = Button(frame, text="Default Link sprite", command=self.use_default_link_sprite)
|
||||||
button.pack(side=LEFT, padx=(0, 5))
|
button.pack(side=LEFT, padx=(0, 5))
|
||||||
@@ -1055,7 +1063,7 @@ class SpriteSelector():
|
|||||||
for i, button in enumerate(frame.buttons):
|
for i, button in enumerate(frame.buttons):
|
||||||
button.grid(row=i // self.spritesPerRow, column=i % self.spritesPerRow)
|
button.grid(row=i // self.spritesPerRow, column=i % self.spritesPerRow)
|
||||||
|
|
||||||
def update_alttpr_sprites(self):
|
def update_remote_sprites(self):
|
||||||
# need to wrap in try catch. We don't want errors getting the json or downloading the files to break us.
|
# need to wrap in try catch. We don't want errors getting the json or downloading the files to break us.
|
||||||
self.window.destroy()
|
self.window.destroy()
|
||||||
self.parent.update()
|
self.parent.update()
|
||||||
@@ -1068,7 +1076,8 @@ class SpriteSelector():
|
|||||||
messagebox.showerror("Sprite Updater", resultmessage)
|
messagebox.showerror("Sprite Updater", resultmessage)
|
||||||
SpriteSelector(self.parent, self.callback, self.adjuster)
|
SpriteSelector(self.parent, self.callback, self.adjuster)
|
||||||
|
|
||||||
BackgroundTaskProgress(self.parent, update_sprites, "Updating Sprites", on_finish)
|
BackgroundTaskProgress(self.parent, update_sprites, "Updating Sprites",
|
||||||
|
on_finish, self.repository_url.get())
|
||||||
|
|
||||||
def browse_for_sprite(self):
|
def browse_for_sprite(self):
|
||||||
sprite = filedialog.askopenfilename(
|
sprite = filedialog.askopenfilename(
|
||||||
@@ -1158,12 +1167,13 @@ class SpriteSelector():
|
|||||||
os.makedirs(self.custom_sprite_dir)
|
os.makedirs(self.custom_sprite_dir)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def alttpr_sprite_dir(self):
|
def remote_sprite_dir(self):
|
||||||
return user_path("data", "sprites", "alttpr")
|
return user_path("data", "sprites", "alttp", "remote")
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def custom_sprite_dir(self):
|
def custom_sprite_dir(self):
|
||||||
return user_path("data", "sprites", "custom")
|
return user_path("data", "sprites", "alttp", "custom")
|
||||||
|
|
||||||
|
|
||||||
def get_image_for_sprite(sprite, gif_only: bool = False):
|
def get_image_for_sprite(sprite, gif_only: bool = False):
|
||||||
if not sprite.valid:
|
if not sprite.valid:
|
||||||
|
|||||||
@@ -286,16 +286,14 @@ async def gba_sync_task(ctx: MMBN3Context):
|
|||||||
except ConnectionRefusedError:
|
except ConnectionRefusedError:
|
||||||
logger.debug("Connection Refused, Trying Again")
|
logger.debug("Connection Refused, Trying Again")
|
||||||
ctx.gba_status = CONNECTION_REFUSED_STATUS
|
ctx.gba_status = CONNECTION_REFUSED_STATUS
|
||||||
|
await asyncio.sleep(1)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
||||||
async def run_game(romfile):
|
async def run_game(romfile):
|
||||||
options = Utils.get_options().get("mmbn3_options", None)
|
from worlds.mmbn3 import MMBN3World
|
||||||
if options is None:
|
auto_start = MMBN3World.settings.rom_start
|
||||||
auto_start = True
|
if auto_start is True:
|
||||||
else:
|
|
||||||
auto_start = options.get("rom_start", True)
|
|
||||||
if auto_start:
|
|
||||||
import webbrowser
|
import webbrowser
|
||||||
webbrowser.open(romfile)
|
webbrowser.open(romfile)
|
||||||
elif os.path.isfile(auto_start):
|
elif os.path.isfile(auto_start):
|
||||||
|
|||||||
53
Main.py
53
Main.py
@@ -1,10 +1,11 @@
|
|||||||
import collections
|
import collections
|
||||||
|
from collections.abc import Mapping
|
||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import pickle
|
|
||||||
import tempfile
|
import tempfile
|
||||||
import time
|
import time
|
||||||
|
from typing import Any
|
||||||
import zipfile
|
import zipfile
|
||||||
import zlib
|
import zlib
|
||||||
|
|
||||||
@@ -12,8 +13,9 @@ import worlds
|
|||||||
from BaseClasses import CollectionState, Item, Location, LocationProgressType, MultiWorld
|
from BaseClasses import CollectionState, Item, Location, LocationProgressType, MultiWorld
|
||||||
from Fill import FillError, balance_multiworld_progression, distribute_items_restrictive, flood_items, \
|
from Fill import FillError, balance_multiworld_progression, distribute_items_restrictive, flood_items, \
|
||||||
parse_planned_blocks, distribute_planned_blocks, resolve_early_locations_for_planned
|
parse_planned_blocks, distribute_planned_blocks, resolve_early_locations_for_planned
|
||||||
|
from NetUtils import convert_to_base_types
|
||||||
from Options import StartInventoryPool
|
from Options import StartInventoryPool
|
||||||
from Utils import __version__, output_path, version_tuple
|
from Utils import __version__, output_path, restricted_dumps, version_tuple
|
||||||
from settings import get_settings
|
from settings import get_settings
|
||||||
from worlds import AutoWorld
|
from worlds import AutoWorld
|
||||||
from worlds.generic.Rules import exclusion_rules, locality_rules
|
from worlds.generic.Rules import exclusion_rules, locality_rules
|
||||||
@@ -92,6 +94,15 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
del local_early
|
del local_early
|
||||||
del early
|
del early
|
||||||
|
|
||||||
|
# items can't be both local and non-local, prefer local
|
||||||
|
multiworld.worlds[player].options.non_local_items.value -= multiworld.worlds[player].options.local_items.value
|
||||||
|
multiworld.worlds[player].options.non_local_items.value -= set(multiworld.local_early_items[player])
|
||||||
|
|
||||||
|
# Clear non-applicable local and non-local items.
|
||||||
|
if multiworld.players == 1:
|
||||||
|
multiworld.worlds[1].options.non_local_items.value = set()
|
||||||
|
multiworld.worlds[1].options.local_items.value = set()
|
||||||
|
|
||||||
logger.info('Creating MultiWorld.')
|
logger.info('Creating MultiWorld.')
|
||||||
AutoWorld.call_all(multiworld, "create_regions")
|
AutoWorld.call_all(multiworld, "create_regions")
|
||||||
|
|
||||||
@@ -99,12 +110,6 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
AutoWorld.call_all(multiworld, "create_items")
|
AutoWorld.call_all(multiworld, "create_items")
|
||||||
|
|
||||||
logger.info('Calculating Access Rules.')
|
logger.info('Calculating Access Rules.')
|
||||||
|
|
||||||
for player in multiworld.player_ids:
|
|
||||||
# items can't be both local and non-local, prefer local
|
|
||||||
multiworld.worlds[player].options.non_local_items.value -= multiworld.worlds[player].options.local_items.value
|
|
||||||
multiworld.worlds[player].options.non_local_items.value -= set(multiworld.local_early_items[player])
|
|
||||||
|
|
||||||
AutoWorld.call_all(multiworld, "set_rules")
|
AutoWorld.call_all(multiworld, "set_rules")
|
||||||
|
|
||||||
for player in multiworld.player_ids:
|
for player in multiworld.player_ids:
|
||||||
@@ -125,11 +130,9 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
multiworld.worlds[player].options.priority_locations.value -= world_excluded_locations
|
multiworld.worlds[player].options.priority_locations.value -= world_excluded_locations
|
||||||
|
|
||||||
# Set local and non-local item rules.
|
# Set local and non-local item rules.
|
||||||
|
# This function is called so late because worlds might otherwise overwrite item_rules which are how locality works
|
||||||
if multiworld.players > 1:
|
if multiworld.players > 1:
|
||||||
locality_rules(multiworld)
|
locality_rules(multiworld)
|
||||||
else:
|
|
||||||
multiworld.worlds[1].options.non_local_items.value = set()
|
|
||||||
multiworld.worlds[1].options.local_items.value = set()
|
|
||||||
|
|
||||||
multiworld.plando_item_blocks = parse_planned_blocks(multiworld)
|
multiworld.plando_item_blocks = parse_planned_blocks(multiworld)
|
||||||
|
|
||||||
@@ -173,7 +176,7 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
|
|
||||||
multiworld.link_items()
|
multiworld.link_items()
|
||||||
|
|
||||||
if any(multiworld.item_links.values()):
|
if any(world.options.item_links for world in multiworld.worlds.values()):
|
||||||
multiworld._all_state = None
|
multiworld._all_state = None
|
||||||
|
|
||||||
logger.info("Running Item Plando.")
|
logger.info("Running Item Plando.")
|
||||||
@@ -238,11 +241,13 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
def write_multidata():
|
def write_multidata():
|
||||||
import NetUtils
|
import NetUtils
|
||||||
from NetUtils import HintStatus
|
from NetUtils import HintStatus
|
||||||
slot_data = {}
|
slot_data: dict[int, Mapping[str, Any]] = {}
|
||||||
client_versions = {}
|
client_versions: dict[int, tuple[int, int, int]] = {}
|
||||||
games = {}
|
games: dict[int, str] = {}
|
||||||
minimum_versions = {"server": AutoWorld.World.required_server_version, "clients": client_versions}
|
minimum_versions: NetUtils.MinimumVersions = {
|
||||||
slot_info = {}
|
"server": AutoWorld.World.required_server_version, "clients": client_versions
|
||||||
|
}
|
||||||
|
slot_info: dict[int, NetUtils.NetworkSlot] = {}
|
||||||
names = [[name for player, name in sorted(multiworld.player_name.items())]]
|
names = [[name for player, name in sorted(multiworld.player_name.items())]]
|
||||||
for slot in multiworld.player_ids:
|
for slot in multiworld.player_ids:
|
||||||
player_world: AutoWorld.World = multiworld.worlds[slot]
|
player_world: AutoWorld.World = multiworld.worlds[slot]
|
||||||
@@ -257,7 +262,9 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
group_members=sorted(group["players"]))
|
group_members=sorted(group["players"]))
|
||||||
precollected_items = {player: [item.code for item in world_precollected if type(item.code) == int]
|
precollected_items = {player: [item.code for item in world_precollected if type(item.code) == int]
|
||||||
for player, world_precollected in multiworld.precollected_items.items()}
|
for player, world_precollected in multiworld.precollected_items.items()}
|
||||||
precollected_hints = {player: set() for player in range(1, multiworld.players + 1 + len(multiworld.groups))}
|
precollected_hints: dict[int, set[NetUtils.Hint]] = {
|
||||||
|
player: set() for player in range(1, multiworld.players + 1 + len(multiworld.groups))
|
||||||
|
}
|
||||||
|
|
||||||
for slot in multiworld.player_ids:
|
for slot in multiworld.player_ids:
|
||||||
slot_data[slot] = multiworld.worlds[slot].fill_slot_data()
|
slot_data[slot] = multiworld.worlds[slot].fill_slot_data()
|
||||||
@@ -314,7 +321,7 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
if current_sphere:
|
if current_sphere:
|
||||||
spheres.append(dict(current_sphere))
|
spheres.append(dict(current_sphere))
|
||||||
|
|
||||||
multidata = {
|
multidata: NetUtils.MultiData | bytes = {
|
||||||
"slot_data": slot_data,
|
"slot_data": slot_data,
|
||||||
"slot_info": slot_info,
|
"slot_info": slot_info,
|
||||||
"connect_names": {name: (0, player) for player, name in multiworld.player_name.items()},
|
"connect_names": {name: (0, player) for player, name in multiworld.player_name.items()},
|
||||||
@@ -324,7 +331,7 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
"er_hint_data": er_hint_data,
|
"er_hint_data": er_hint_data,
|
||||||
"precollected_items": precollected_items,
|
"precollected_items": precollected_items,
|
||||||
"precollected_hints": precollected_hints,
|
"precollected_hints": precollected_hints,
|
||||||
"version": tuple(version_tuple),
|
"version": (version_tuple.major, version_tuple.minor, version_tuple.build),
|
||||||
"tags": ["AP"],
|
"tags": ["AP"],
|
||||||
"minimum_versions": minimum_versions,
|
"minimum_versions": minimum_versions,
|
||||||
"seed_name": multiworld.seed_name,
|
"seed_name": multiworld.seed_name,
|
||||||
@@ -332,9 +339,13 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
|
|||||||
"datapackage": data_package,
|
"datapackage": data_package,
|
||||||
"race_mode": int(multiworld.is_race),
|
"race_mode": int(multiworld.is_race),
|
||||||
}
|
}
|
||||||
|
# TODO: change to `"version": version_tuple` after getting better serialization
|
||||||
AutoWorld.call_all(multiworld, "modify_multidata", multidata)
|
AutoWorld.call_all(multiworld, "modify_multidata", multidata)
|
||||||
|
|
||||||
multidata = zlib.compress(pickle.dumps(multidata), 9)
|
for key in ("slot_data", "er_hint_data"):
|
||||||
|
multidata[key] = convert_to_base_types(multidata[key])
|
||||||
|
|
||||||
|
multidata = zlib.compress(restricted_dumps(multidata), 9)
|
||||||
|
|
||||||
with open(os.path.join(temp_dir, f'{outfilebase}.archipelago'), 'wb') as f:
|
with open(os.path.join(temp_dir, f'{outfilebase}.archipelago'), 'wb') as f:
|
||||||
f.write(bytes([3])) # version of format
|
f.write(bytes([3])) # version of format
|
||||||
|
|||||||
@@ -1,347 +0,0 @@
|
|||||||
import argparse
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
import atexit
|
|
||||||
import shutil
|
|
||||||
from subprocess import Popen
|
|
||||||
from shutil import copyfile
|
|
||||||
from time import strftime
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
import Utils
|
|
||||||
from Utils import is_windows
|
|
||||||
from settings import get_settings
|
|
||||||
|
|
||||||
atexit.register(input, "Press enter to exit.")
|
|
||||||
|
|
||||||
# 1 or more digits followed by m or g, then optional b
|
|
||||||
max_heap_re = re.compile(r"^\d+[mMgG][bB]?$")
|
|
||||||
|
|
||||||
|
|
||||||
def prompt_yes_no(prompt):
|
|
||||||
yes_inputs = {'yes', 'ye', 'y'}
|
|
||||||
no_inputs = {'no', 'n'}
|
|
||||||
while True:
|
|
||||||
choice = input(prompt + " [y/n] ").lower()
|
|
||||||
if choice in yes_inputs:
|
|
||||||
return True
|
|
||||||
elif choice in no_inputs:
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
print('Please respond with "y" or "n".')
|
|
||||||
|
|
||||||
|
|
||||||
def find_ap_randomizer_jar(forge_dir):
|
|
||||||
"""Create mods folder if needed; find AP randomizer jar; return None if not found."""
|
|
||||||
mods_dir = os.path.join(forge_dir, 'mods')
|
|
||||||
if os.path.isdir(mods_dir):
|
|
||||||
for entry in os.scandir(mods_dir):
|
|
||||||
if entry.name.startswith("aprandomizer") and entry.name.endswith(".jar"):
|
|
||||||
logging.info(f"Found AP randomizer mod: {entry.name}")
|
|
||||||
return entry.name
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
os.mkdir(mods_dir)
|
|
||||||
logging.info(f"Created mods folder in {forge_dir}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def replace_apmc_files(forge_dir, apmc_file):
|
|
||||||
"""Create APData folder if needed; clean .apmc files from APData; copy given .apmc into directory."""
|
|
||||||
if apmc_file is None:
|
|
||||||
return
|
|
||||||
apdata_dir = os.path.join(forge_dir, 'APData')
|
|
||||||
copy_apmc = True
|
|
||||||
if not os.path.isdir(apdata_dir):
|
|
||||||
os.mkdir(apdata_dir)
|
|
||||||
logging.info(f"Created APData folder in {forge_dir}")
|
|
||||||
for entry in os.scandir(apdata_dir):
|
|
||||||
if entry.name.endswith(".apmc") and entry.is_file():
|
|
||||||
if not os.path.samefile(apmc_file, entry.path):
|
|
||||||
os.remove(entry.path)
|
|
||||||
logging.info(f"Removed {entry.name} in {apdata_dir}")
|
|
||||||
else: # apmc already in apdata
|
|
||||||
copy_apmc = False
|
|
||||||
if copy_apmc:
|
|
||||||
copyfile(apmc_file, os.path.join(apdata_dir, os.path.basename(apmc_file)))
|
|
||||||
logging.info(f"Copied {os.path.basename(apmc_file)} to {apdata_dir}")
|
|
||||||
|
|
||||||
|
|
||||||
def read_apmc_file(apmc_file):
|
|
||||||
from base64 import b64decode
|
|
||||||
|
|
||||||
with open(apmc_file, 'r') as f:
|
|
||||||
return json.loads(b64decode(f.read()))
|
|
||||||
|
|
||||||
|
|
||||||
def update_mod(forge_dir, url: str):
|
|
||||||
"""Check mod version, download new mod from GitHub releases page if needed. """
|
|
||||||
ap_randomizer = find_ap_randomizer_jar(forge_dir)
|
|
||||||
os.path.basename(url)
|
|
||||||
if ap_randomizer is not None:
|
|
||||||
logging.info(f"Your current mod is {ap_randomizer}.")
|
|
||||||
else:
|
|
||||||
logging.info(f"You do not have the AP randomizer mod installed.")
|
|
||||||
|
|
||||||
if ap_randomizer != os.path.basename(url):
|
|
||||||
logging.info(f"A new release of the Minecraft AP randomizer mod was found: "
|
|
||||||
f"{os.path.basename(url)}")
|
|
||||||
if prompt_yes_no("Would you like to update?"):
|
|
||||||
old_ap_mod = os.path.join(forge_dir, 'mods', ap_randomizer) if ap_randomizer is not None else None
|
|
||||||
new_ap_mod = os.path.join(forge_dir, 'mods', os.path.basename(url))
|
|
||||||
logging.info("Downloading AP randomizer mod. This may take a moment...")
|
|
||||||
apmod_resp = requests.get(url)
|
|
||||||
if apmod_resp.status_code == 200:
|
|
||||||
with open(new_ap_mod, 'wb') as f:
|
|
||||||
f.write(apmod_resp.content)
|
|
||||||
logging.info(f"Wrote new mod file to {new_ap_mod}")
|
|
||||||
if old_ap_mod is not None:
|
|
||||||
os.remove(old_ap_mod)
|
|
||||||
logging.info(f"Removed old mod file from {old_ap_mod}")
|
|
||||||
else:
|
|
||||||
logging.error(f"Error retrieving the randomizer mod (status code {apmod_resp.status_code}).")
|
|
||||||
logging.error(f"Please report this issue on the Archipelago Discord server.")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def check_eula(forge_dir):
|
|
||||||
"""Check if the EULA is agreed to, and prompt the user to read and agree if necessary."""
|
|
||||||
eula_path = os.path.join(forge_dir, "eula.txt")
|
|
||||||
if not os.path.isfile(eula_path):
|
|
||||||
# Create eula.txt
|
|
||||||
with open(eula_path, 'w') as f:
|
|
||||||
f.write("#By changing the setting below to TRUE you are indicating your agreement to our EULA (https://account.mojang.com/documents/minecraft_eula).\n")
|
|
||||||
f.write(f"#{strftime('%a %b %d %X %Z %Y')}\n")
|
|
||||||
f.write("eula=false\n")
|
|
||||||
with open(eula_path, 'r+') as f:
|
|
||||||
text = f.read()
|
|
||||||
if 'false' in text:
|
|
||||||
# Prompt user to agree to the EULA
|
|
||||||
logging.info("You need to agree to the Minecraft EULA in order to run the server.")
|
|
||||||
logging.info("The EULA can be found at https://account.mojang.com/documents/minecraft_eula")
|
|
||||||
if prompt_yes_no("Do you agree to the EULA?"):
|
|
||||||
f.seek(0)
|
|
||||||
f.write(text.replace('false', 'true'))
|
|
||||||
f.truncate()
|
|
||||||
logging.info(f"Set {eula_path} to true")
|
|
||||||
else:
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
|
|
||||||
def find_jdk_dir(version: str) -> str:
|
|
||||||
"""get the specified versions jdk directory"""
|
|
||||||
for entry in os.listdir():
|
|
||||||
if os.path.isdir(entry) and entry.startswith(f"jdk{version}"):
|
|
||||||
return os.path.abspath(entry)
|
|
||||||
|
|
||||||
|
|
||||||
def find_jdk(version: str) -> str:
|
|
||||||
"""get the java exe location"""
|
|
||||||
|
|
||||||
if is_windows:
|
|
||||||
jdk = find_jdk_dir(version)
|
|
||||||
jdk_exe = os.path.join(jdk, "bin", "java.exe")
|
|
||||||
if os.path.isfile(jdk_exe):
|
|
||||||
return jdk_exe
|
|
||||||
else:
|
|
||||||
jdk_exe = shutil.which(options.java)
|
|
||||||
if not jdk_exe:
|
|
||||||
jdk_exe = shutil.which("java") # try to fall back to system java
|
|
||||||
if not jdk_exe:
|
|
||||||
raise Exception("Could not find Java. Is Java installed on the system?")
|
|
||||||
return jdk_exe
|
|
||||||
|
|
||||||
|
|
||||||
def download_java(java: str):
|
|
||||||
"""Download Corretto (Amazon JDK)"""
|
|
||||||
|
|
||||||
jdk = find_jdk_dir(java)
|
|
||||||
if jdk is not None:
|
|
||||||
print(f"Removing old JDK...")
|
|
||||||
from shutil import rmtree
|
|
||||||
rmtree(jdk)
|
|
||||||
|
|
||||||
print(f"Downloading Java...")
|
|
||||||
jdk_url = f"https://corretto.aws/downloads/latest/amazon-corretto-{java}-x64-windows-jdk.zip"
|
|
||||||
resp = requests.get(jdk_url)
|
|
||||||
if resp.status_code == 200: # OK
|
|
||||||
print(f"Extracting...")
|
|
||||||
import zipfile
|
|
||||||
from io import BytesIO
|
|
||||||
with zipfile.ZipFile(BytesIO(resp.content)) as zf:
|
|
||||||
zf.extractall()
|
|
||||||
else:
|
|
||||||
print(f"Error downloading Java (status code {resp.status_code}).")
|
|
||||||
print(f"If this was not expected, please report this issue on the Archipelago Discord server.")
|
|
||||||
if not prompt_yes_no("Continue anyways?"):
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
|
|
||||||
def install_forge(directory: str, forge_version: str, java_version: str):
|
|
||||||
"""download and install forge"""
|
|
||||||
|
|
||||||
java_exe = find_jdk(java_version)
|
|
||||||
if java_exe is not None:
|
|
||||||
print(f"Downloading Forge {forge_version}...")
|
|
||||||
forge_url = f"https://maven.minecraftforge.net/net/minecraftforge/forge/{forge_version}/forge-{forge_version}-installer.jar"
|
|
||||||
resp = requests.get(forge_url)
|
|
||||||
if resp.status_code == 200: # OK
|
|
||||||
forge_install_jar = os.path.join(directory, "forge_install.jar")
|
|
||||||
if not os.path.exists(directory):
|
|
||||||
os.mkdir(directory)
|
|
||||||
with open(forge_install_jar, 'wb') as f:
|
|
||||||
f.write(resp.content)
|
|
||||||
print(f"Installing Forge...")
|
|
||||||
install_process = Popen([java_exe, "-jar", forge_install_jar, "--installServer", directory])
|
|
||||||
install_process.wait()
|
|
||||||
os.remove(forge_install_jar)
|
|
||||||
|
|
||||||
|
|
||||||
def run_forge_server(forge_dir: str, java_version: str, heap_arg: str) -> Popen:
|
|
||||||
"""Run the Forge server."""
|
|
||||||
|
|
||||||
java_exe = find_jdk(java_version)
|
|
||||||
if not os.path.isfile(java_exe):
|
|
||||||
java_exe = "java" # try to fall back on java in the PATH
|
|
||||||
|
|
||||||
heap_arg = max_heap_re.match(heap_arg).group()
|
|
||||||
if heap_arg[-1] in ['b', 'B']:
|
|
||||||
heap_arg = heap_arg[:-1]
|
|
||||||
heap_arg = "-Xmx" + heap_arg
|
|
||||||
|
|
||||||
os_args = "win_args.txt" if is_windows else "unix_args.txt"
|
|
||||||
args_file = os.path.join(forge_dir, "libraries", "net", "minecraftforge", "forge", forge_version, os_args)
|
|
||||||
forge_args = []
|
|
||||||
with open(args_file) as argfile:
|
|
||||||
for line in argfile:
|
|
||||||
forge_args.extend(line.strip().split(" "))
|
|
||||||
|
|
||||||
args = [java_exe, heap_arg, *forge_args, "-nogui"]
|
|
||||||
logging.info(f"Running Forge server: {args}")
|
|
||||||
os.chdir(forge_dir)
|
|
||||||
return Popen(args)
|
|
||||||
|
|
||||||
|
|
||||||
def get_minecraft_versions(version, release_channel="release"):
|
|
||||||
version_file_endpoint = "https://raw.githubusercontent.com/KonoTyran/Minecraft_AP_Randomizer/master/versions/minecraft_versions.json"
|
|
||||||
resp = requests.get(version_file_endpoint)
|
|
||||||
local = False
|
|
||||||
if resp.status_code == 200: # OK
|
|
||||||
try:
|
|
||||||
data = resp.json()
|
|
||||||
except requests.exceptions.JSONDecodeError:
|
|
||||||
logging.warning(f"Unable to fetch version update file, using local version. (status code {resp.status_code}).")
|
|
||||||
local = True
|
|
||||||
else:
|
|
||||||
logging.warning(f"Unable to fetch version update file, using local version. (status code {resp.status_code}).")
|
|
||||||
local = True
|
|
||||||
|
|
||||||
if local:
|
|
||||||
with open(Utils.user_path("minecraft_versions.json"), 'r') as f:
|
|
||||||
data = json.load(f)
|
|
||||||
else:
|
|
||||||
with open(Utils.user_path("minecraft_versions.json"), 'w') as f:
|
|
||||||
json.dump(data, f)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if version:
|
|
||||||
return next(filter(lambda entry: entry["version"] == version, data[release_channel]))
|
|
||||||
else:
|
|
||||||
return resp.json()[release_channel][0]
|
|
||||||
except (StopIteration, KeyError):
|
|
||||||
logging.error(f"No compatible mod version found for client version {version} on \"{release_channel}\" channel.")
|
|
||||||
if release_channel != "release":
|
|
||||||
logging.error("Consider switching \"release_channel\" to \"release\" in your Host.yaml file")
|
|
||||||
else:
|
|
||||||
logging.error("No suitable mod found on the \"release\" channel. Please Contact us on discord to report this error.")
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
|
|
||||||
def is_correct_forge(forge_dir) -> bool:
|
|
||||||
if os.path.isdir(os.path.join(forge_dir, "libraries", "net", "minecraftforge", "forge", forge_version)):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
Utils.init_logging("MinecraftClient")
|
|
||||||
parser = argparse.ArgumentParser()
|
|
||||||
parser.add_argument("apmc_file", default=None, nargs='?', help="Path to an Archipelago Minecraft data file (.apmc)")
|
|
||||||
parser.add_argument('--install', '-i', dest='install', default=False, action='store_true',
|
|
||||||
help="Download and install Java and the Forge server. Does not launch the client afterwards.")
|
|
||||||
parser.add_argument('--release_channel', '-r', dest="channel", type=str, action='store',
|
|
||||||
help="Specify release channel to use.")
|
|
||||||
parser.add_argument('--java', '-j', metavar='17', dest='java', type=str, default=False, action='store',
|
|
||||||
help="specify java version.")
|
|
||||||
parser.add_argument('--forge', '-f', metavar='1.18.2-40.1.0', dest='forge', type=str, default=False, action='store',
|
|
||||||
help="specify forge version. (Minecraft Version-Forge Version)")
|
|
||||||
parser.add_argument('--version', '-v', metavar='9', dest='data_version', type=int, action='store',
|
|
||||||
help="specify Mod data version to download.")
|
|
||||||
|
|
||||||
args = parser.parse_args()
|
|
||||||
apmc_file = os.path.abspath(args.apmc_file) if args.apmc_file else None
|
|
||||||
|
|
||||||
# Change to executable's working directory
|
|
||||||
os.chdir(os.path.abspath(os.path.dirname(sys.argv[0])))
|
|
||||||
|
|
||||||
options = get_settings().minecraft_options
|
|
||||||
channel = args.channel or options.release_channel
|
|
||||||
apmc_data = None
|
|
||||||
data_version = args.data_version or None
|
|
||||||
|
|
||||||
if apmc_file is None and not args.install:
|
|
||||||
apmc_file = Utils.open_filename('Select APMC file', (('APMC File', ('.apmc',)),))
|
|
||||||
|
|
||||||
if apmc_file is not None and data_version is None:
|
|
||||||
apmc_data = read_apmc_file(apmc_file)
|
|
||||||
data_version = apmc_data.get('client_version', '')
|
|
||||||
|
|
||||||
versions = get_minecraft_versions(data_version, channel)
|
|
||||||
|
|
||||||
forge_dir = options.forge_directory
|
|
||||||
max_heap = options.max_heap_size
|
|
||||||
forge_version = args.forge or versions["forge"]
|
|
||||||
java_version = args.java or versions["java"]
|
|
||||||
mod_url = versions["url"]
|
|
||||||
java_dir = find_jdk_dir(java_version)
|
|
||||||
|
|
||||||
if args.install:
|
|
||||||
if is_windows:
|
|
||||||
print("Installing Java")
|
|
||||||
download_java(java_version)
|
|
||||||
if not is_correct_forge(forge_dir):
|
|
||||||
print("Installing Minecraft Forge")
|
|
||||||
install_forge(forge_dir, forge_version, java_version)
|
|
||||||
else:
|
|
||||||
print("Correct Forge version already found, skipping install.")
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
if apmc_data is None:
|
|
||||||
raise FileNotFoundError(f"APMC file does not exist or is inaccessible at the given location ({apmc_file})")
|
|
||||||
|
|
||||||
if is_windows:
|
|
||||||
if java_dir is None or not os.path.isdir(java_dir):
|
|
||||||
if prompt_yes_no("Did not find java directory. Download and install java now?"):
|
|
||||||
download_java(java_version)
|
|
||||||
java_dir = find_jdk_dir(java_version)
|
|
||||||
if java_dir is None or not os.path.isdir(java_dir):
|
|
||||||
raise NotADirectoryError(f"Path {java_dir} does not exist or could not be accessed.")
|
|
||||||
|
|
||||||
if not is_correct_forge(forge_dir):
|
|
||||||
if prompt_yes_no(f"Did not find forge version {forge_version} download and install it now?"):
|
|
||||||
install_forge(forge_dir, forge_version, java_version)
|
|
||||||
if not os.path.isdir(forge_dir):
|
|
||||||
raise NotADirectoryError(f"Path {forge_dir} does not exist or could not be accessed.")
|
|
||||||
|
|
||||||
if not max_heap_re.match(max_heap):
|
|
||||||
raise Exception(f"Max heap size {max_heap} in incorrect format. Use a number followed by M or G, e.g. 512M or 2G.")
|
|
||||||
|
|
||||||
update_mod(forge_dir, mod_url)
|
|
||||||
replace_apmc_files(forge_dir, apmc_file)
|
|
||||||
check_eula(forge_dir)
|
|
||||||
server_process = run_forge_server(forge_dir, java_version, max_heap)
|
|
||||||
server_process.wait()
|
|
||||||
@@ -5,18 +5,22 @@ import multiprocessing
|
|||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
|
|
||||||
if sys.platform in ("win32", "darwin") and sys.version_info < (3, 10, 11):
|
if sys.platform in ("win32", "darwin") and sys.version_info < (3, 11, 9):
|
||||||
# Official micro version updates. This should match the number in docs/running from source.md.
|
# Official micro version updates. This should match the number in docs/running from source.md.
|
||||||
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. Official 3.10.15+ is supported.")
|
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. Official 3.11.9+ is supported.")
|
||||||
elif sys.platform in ("win32", "darwin") and sys.version_info < (3, 10, 15):
|
elif sys.platform in ("win32", "darwin") and sys.version_info < (3, 11, 13):
|
||||||
# There are known security issues, but no easy way to install fixed versions on Windows for testing.
|
# There are known security issues, but no easy way to install fixed versions on Windows for testing.
|
||||||
warnings.warn(f"Python Version {sys.version_info} has security issues. Don't use in production.")
|
warnings.warn(f"Python Version {sys.version_info} has security issues. Don't use in production.")
|
||||||
elif sys.version_info < (3, 10, 1):
|
elif sys.version_info < (3, 11, 0):
|
||||||
# Other platforms may get security backports instead of micro updates, so the number is unreliable.
|
# Other platforms may get security backports instead of micro updates, so the number is unreliable.
|
||||||
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. 3.10.1+ is supported.")
|
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. 3.11.0+ is supported.")
|
||||||
|
|
||||||
# don't run update if environment is frozen/compiled or if not the parent process (skip in subprocess)
|
# don't run update if environment is frozen/compiled or if not the parent process (skip in subprocess)
|
||||||
_skip_update = bool(getattr(sys, "frozen", False) or multiprocessing.parent_process())
|
_skip_update = bool(
|
||||||
|
getattr(sys, "frozen", False) or
|
||||||
|
multiprocessing.parent_process() or
|
||||||
|
os.environ.get("SKIP_REQUIREMENTS_UPDATE", "").lower() in ("1", "true", "yes")
|
||||||
|
)
|
||||||
update_ran = _skip_update
|
update_ran = _skip_update
|
||||||
|
|
||||||
|
|
||||||
@@ -70,11 +74,11 @@ def update_command():
|
|||||||
def install_pkg_resources(yes=False):
|
def install_pkg_resources(yes=False):
|
||||||
try:
|
try:
|
||||||
import pkg_resources # noqa: F401
|
import pkg_resources # noqa: F401
|
||||||
except ImportError:
|
except (AttributeError, ImportError):
|
||||||
check_pip()
|
check_pip()
|
||||||
if not yes:
|
if not yes:
|
||||||
confirm("pkg_resources not found, press enter to install it")
|
confirm("pkg_resources not found, press enter to install it")
|
||||||
subprocess.call([sys.executable, "-m", "pip", "install", "--upgrade", "setuptools"])
|
subprocess.call([sys.executable, "-m", "pip", "install", "--upgrade", "setuptools>=75,<81"])
|
||||||
|
|
||||||
|
|
||||||
def update(yes: bool = False, force: bool = False) -> None:
|
def update(yes: bool = False, force: bool = False) -> None:
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ import NetUtils
|
|||||||
import Utils
|
import Utils
|
||||||
from Utils import version_tuple, restricted_loads, Version, async_start, get_intended_text
|
from Utils import version_tuple, restricted_loads, Version, async_start, get_intended_text
|
||||||
from NetUtils import Endpoint, ClientStatus, NetworkItem, decode, encode, NetworkPlayer, Permission, NetworkSlot, \
|
from NetUtils import Endpoint, ClientStatus, NetworkItem, decode, encode, NetworkPlayer, Permission, NetworkSlot, \
|
||||||
SlotType, LocationStore, Hint, HintStatus
|
SlotType, LocationStore, MultiData, Hint, HintStatus
|
||||||
from BaseClasses import ItemClassification
|
from BaseClasses import ItemClassification
|
||||||
|
|
||||||
|
|
||||||
@@ -445,7 +445,7 @@ class Context:
|
|||||||
raise Utils.VersionException("Incompatible multidata.")
|
raise Utils.VersionException("Incompatible multidata.")
|
||||||
return restricted_loads(zlib.decompress(data[1:]))
|
return restricted_loads(zlib.decompress(data[1:]))
|
||||||
|
|
||||||
def _load(self, decoded_obj: dict, game_data_packages: typing.Dict[str, typing.Any],
|
def _load(self, decoded_obj: MultiData, game_data_packages: typing.Dict[str, typing.Any],
|
||||||
use_embedded_server_options: bool):
|
use_embedded_server_options: bool):
|
||||||
|
|
||||||
self.read_data = {}
|
self.read_data = {}
|
||||||
@@ -546,6 +546,7 @@ class Context:
|
|||||||
|
|
||||||
def _save(self, exit_save: bool = False) -> bool:
|
def _save(self, exit_save: bool = False) -> bool:
|
||||||
try:
|
try:
|
||||||
|
# Does not use Utils.restricted_dumps because we'd rather make a save than not make one
|
||||||
encoded_save = pickle.dumps(self.get_save())
|
encoded_save = pickle.dumps(self.get_save())
|
||||||
with open(self.save_filename, "wb") as f:
|
with open(self.save_filename, "wb") as f:
|
||||||
f.write(zlib.compress(encoded_save))
|
f.write(zlib.compress(encoded_save))
|
||||||
@@ -752,7 +753,7 @@ class Context:
|
|||||||
return self.player_names[team, slot]
|
return self.player_names[team, slot]
|
||||||
|
|
||||||
def notify_hints(self, team: int, hints: typing.List[Hint], only_new: bool = False,
|
def notify_hints(self, team: int, hints: typing.List[Hint], only_new: bool = False,
|
||||||
recipients: typing.Sequence[int] = None):
|
persist_even_if_found: bool = False, recipients: typing.Sequence[int] = None):
|
||||||
"""Send and remember hints."""
|
"""Send and remember hints."""
|
||||||
if only_new:
|
if only_new:
|
||||||
hints = [hint for hint in hints if hint not in self.hints[team, hint.finding_player]]
|
hints = [hint for hint in hints if hint not in self.hints[team, hint.finding_player]]
|
||||||
@@ -767,8 +768,9 @@ class Context:
|
|||||||
if not hint.local and data not in concerns[hint.finding_player]:
|
if not hint.local and data not in concerns[hint.finding_player]:
|
||||||
concerns[hint.finding_player].append(data)
|
concerns[hint.finding_player].append(data)
|
||||||
|
|
||||||
# only remember hints that were not already found at the time of creation
|
# For !hint use cases, only hints that were not already found at the time of creation should be remembered
|
||||||
if not hint.found:
|
# For LocationScouts use-cases, all hints should be remembered
|
||||||
|
if not hint.found or persist_even_if_found:
|
||||||
# since hints are bidirectional, finding player and receiving player,
|
# since hints are bidirectional, finding player and receiving player,
|
||||||
# we can check once if hint already exists
|
# we can check once if hint already exists
|
||||||
if hint not in self.hints[team, hint.finding_player]:
|
if hint not in self.hints[team, hint.finding_player]:
|
||||||
@@ -1946,10 +1948,52 @@ async def process_client_cmd(ctx: Context, client: Client, args: dict):
|
|||||||
hints.extend(collect_hint_location_id(ctx, client.team, client.slot, location,
|
hints.extend(collect_hint_location_id(ctx, client.team, client.slot, location,
|
||||||
HintStatus.HINT_UNSPECIFIED))
|
HintStatus.HINT_UNSPECIFIED))
|
||||||
locs.append(NetworkItem(target_item, location, target_player, flags))
|
locs.append(NetworkItem(target_item, location, target_player, flags))
|
||||||
ctx.notify_hints(client.team, hints, only_new=create_as_hint == 2)
|
ctx.notify_hints(client.team, hints, only_new=create_as_hint == 2, persist_even_if_found=True)
|
||||||
if locs and create_as_hint:
|
if locs and create_as_hint:
|
||||||
ctx.save()
|
ctx.save()
|
||||||
await ctx.send_msgs(client, [{'cmd': 'LocationInfo', 'locations': locs}])
|
await ctx.send_msgs(client, [{'cmd': 'LocationInfo', 'locations': locs}])
|
||||||
|
|
||||||
|
elif cmd == 'CreateHints':
|
||||||
|
location_player = args.get("player", client.slot)
|
||||||
|
locations = args["locations"]
|
||||||
|
status = args.get("status", HintStatus.HINT_UNSPECIFIED)
|
||||||
|
|
||||||
|
if not locations:
|
||||||
|
await ctx.send_msgs(client, [{"cmd": "InvalidPacket", "type": "arguments",
|
||||||
|
"text": "CreateHints: No locations specified.", "original_cmd": cmd}])
|
||||||
|
|
||||||
|
hints = []
|
||||||
|
|
||||||
|
for location in locations:
|
||||||
|
if location_player != client.slot and location not in ctx.locations[location_player]:
|
||||||
|
error_text = (
|
||||||
|
"CreateHints: One or more of the locations do not exist for the specified off-world player. "
|
||||||
|
"Please refrain from hinting other slot's locations that you don't know contain your items."
|
||||||
|
)
|
||||||
|
await ctx.send_msgs(client, [{"cmd": "InvalidPacket", "type": "arguments",
|
||||||
|
"text": error_text, "original_cmd": cmd}])
|
||||||
|
return
|
||||||
|
|
||||||
|
target_item, item_player, flags = ctx.locations[location_player][location]
|
||||||
|
|
||||||
|
if client.slot not in ctx.slot_set(item_player):
|
||||||
|
if status != HintStatus.HINT_UNSPECIFIED:
|
||||||
|
error_text = 'CreateHints: Must use "unspecified"/None status for items from other players.'
|
||||||
|
await ctx.send_msgs(client, [{"cmd": "InvalidPacket", "type": "arguments",
|
||||||
|
"text": error_text, "original_cmd": cmd}])
|
||||||
|
return
|
||||||
|
|
||||||
|
if client.slot != location_player:
|
||||||
|
error_text = "CreateHints: Can only create hints for own items or own locations."
|
||||||
|
await ctx.send_msgs(client, [{"cmd": "InvalidPacket", "type": "arguments",
|
||||||
|
"text": error_text, "original_cmd": cmd}])
|
||||||
|
return
|
||||||
|
|
||||||
|
hints += collect_hint_location_id(ctx, client.team, location_player, location, status)
|
||||||
|
|
||||||
|
# As of writing this code, only_new=True does not update status for existing hints
|
||||||
|
ctx.notify_hints(client.team, hints, only_new=True, persist_even_if_found=True)
|
||||||
|
ctx.save()
|
||||||
|
|
||||||
elif cmd == 'UpdateHint':
|
elif cmd == 'UpdateHint':
|
||||||
location = args["location"]
|
location = args["location"]
|
||||||
|
|||||||
60
NetUtils.py
60
NetUtils.py
@@ -1,5 +1,6 @@
|
|||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections.abc import Mapping, Sequence
|
||||||
import typing
|
import typing
|
||||||
import enum
|
import enum
|
||||||
import warnings
|
import warnings
|
||||||
@@ -83,7 +84,7 @@ class NetworkSlot(typing.NamedTuple):
|
|||||||
name: str
|
name: str
|
||||||
game: str
|
game: str
|
||||||
type: SlotType
|
type: SlotType
|
||||||
group_members: typing.Union[typing.List[int], typing.Tuple] = () # only populated if type == group
|
group_members: Sequence[int] = () # only populated if type == group
|
||||||
|
|
||||||
|
|
||||||
class NetworkItem(typing.NamedTuple):
|
class NetworkItem(typing.NamedTuple):
|
||||||
@@ -106,6 +107,27 @@ def _scan_for_TypedTuples(obj: typing.Any) -> typing.Any:
|
|||||||
return obj
|
return obj
|
||||||
|
|
||||||
|
|
||||||
|
_base_types = str | int | bool | float | None | tuple["_base_types", ...] | dict["_base_types", "base_types"]
|
||||||
|
|
||||||
|
|
||||||
|
def convert_to_base_types(obj: typing.Any) -> _base_types:
|
||||||
|
if isinstance(obj, (tuple, list, set, frozenset)):
|
||||||
|
return tuple(convert_to_base_types(o) for o in obj)
|
||||||
|
elif isinstance(obj, dict):
|
||||||
|
return {convert_to_base_types(key): convert_to_base_types(value) for key, value in obj.items()}
|
||||||
|
elif obj is None or type(obj) in (str, int, float, bool):
|
||||||
|
return obj
|
||||||
|
# unwrap simple types to their base, such as StrEnum
|
||||||
|
elif isinstance(obj, str):
|
||||||
|
return str(obj)
|
||||||
|
elif isinstance(obj, int):
|
||||||
|
return int(obj)
|
||||||
|
elif isinstance(obj, float):
|
||||||
|
return float(obj)
|
||||||
|
else:
|
||||||
|
raise Exception(f"Cannot handle {type(obj)}")
|
||||||
|
|
||||||
|
|
||||||
_encode = JSONEncoder(
|
_encode = JSONEncoder(
|
||||||
ensure_ascii=False,
|
ensure_ascii=False,
|
||||||
check_circular=False,
|
check_circular=False,
|
||||||
@@ -450,6 +472,42 @@ class _LocationStore(dict, typing.MutableMapping[int, typing.Dict[int, typing.Tu
|
|||||||
location_id not in checked])
|
location_id not in checked])
|
||||||
|
|
||||||
|
|
||||||
|
class MinimumVersions(typing.TypedDict):
|
||||||
|
server: tuple[int, int, int]
|
||||||
|
clients: dict[int, tuple[int, int, int]]
|
||||||
|
|
||||||
|
|
||||||
|
class GamesPackage(typing.TypedDict, total=False):
|
||||||
|
item_name_groups: dict[str, list[str]]
|
||||||
|
item_name_to_id: dict[str, int]
|
||||||
|
location_name_groups: dict[str, list[str]]
|
||||||
|
location_name_to_id: dict[str, int]
|
||||||
|
checksum: str
|
||||||
|
|
||||||
|
|
||||||
|
class DataPackage(typing.TypedDict):
|
||||||
|
games: dict[str, GamesPackage]
|
||||||
|
|
||||||
|
|
||||||
|
class MultiData(typing.TypedDict):
|
||||||
|
slot_data: dict[int, Mapping[str, typing.Any]]
|
||||||
|
slot_info: dict[int, NetworkSlot]
|
||||||
|
connect_names: dict[str, tuple[int, int]]
|
||||||
|
locations: dict[int, dict[int, tuple[int, int, int]]]
|
||||||
|
checks_in_area: dict[int, dict[str, int | list[int]]]
|
||||||
|
server_options: dict[str, object]
|
||||||
|
er_hint_data: dict[int, dict[int, str]]
|
||||||
|
precollected_items: dict[int, list[int]]
|
||||||
|
precollected_hints: dict[int, set[Hint]]
|
||||||
|
version: tuple[int, int, int]
|
||||||
|
tags: list[str]
|
||||||
|
minimum_versions: MinimumVersions
|
||||||
|
seed_name: str
|
||||||
|
spheres: list[dict[int, set[int]]]
|
||||||
|
datapackage: dict[str, GamesPackage]
|
||||||
|
race_mode: int
|
||||||
|
|
||||||
|
|
||||||
if typing.TYPE_CHECKING: # type-check with pure python implementation until we have a typing stub
|
if typing.TYPE_CHECKING: # type-check with pure python implementation until we have a typing stub
|
||||||
LocationStore = _LocationStore
|
LocationStore = _LocationStore
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -277,6 +277,7 @@ async def n64_sync_task(ctx: OoTContext):
|
|||||||
except ConnectionRefusedError:
|
except ConnectionRefusedError:
|
||||||
logger.debug("Connection Refused, Trying Again")
|
logger.debug("Connection Refused, Trying Again")
|
||||||
ctx.n64_status = CONNECTION_REFUSED_STATUS
|
ctx.n64_status = CONNECTION_REFUSED_STATUS
|
||||||
|
await asyncio.sleep(1)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
41
Options.py
41
Options.py
@@ -494,6 +494,30 @@ class Choice(NumericOption):
|
|||||||
else:
|
else:
|
||||||
raise TypeError(f"Can't compare {self.__class__.__name__} with {other.__class__.__name__}")
|
raise TypeError(f"Can't compare {self.__class__.__name__} with {other.__class__.__name__}")
|
||||||
|
|
||||||
|
def __lt__(self, other: typing.Union[Choice, int, str]):
|
||||||
|
if isinstance(other, str):
|
||||||
|
assert other in self.options, f"compared against an unknown string. {self} < {other}"
|
||||||
|
other = self.options[other]
|
||||||
|
return super(Choice, self).__lt__(other)
|
||||||
|
|
||||||
|
def __gt__(self, other: typing.Union[Choice, int, str]):
|
||||||
|
if isinstance(other, str):
|
||||||
|
assert other in self.options, f"compared against an unknown string. {self} > {other}"
|
||||||
|
other = self.options[other]
|
||||||
|
return super(Choice, self).__gt__(other)
|
||||||
|
|
||||||
|
def __le__(self, other: typing.Union[Choice, int, str]):
|
||||||
|
if isinstance(other, str):
|
||||||
|
assert other in self.options, f"compared against an unknown string. {self} <= {other}"
|
||||||
|
other = self.options[other]
|
||||||
|
return super(Choice, self).__le__(other)
|
||||||
|
|
||||||
|
def __ge__(self, other: typing.Union[Choice, int, str]):
|
||||||
|
if isinstance(other, str):
|
||||||
|
assert other in self.options, f"compared against an unknown string. {self} >= {other}"
|
||||||
|
other = self.options[other]
|
||||||
|
return super(Choice, self).__ge__(other)
|
||||||
|
|
||||||
__hash__ = Option.__hash__ # see https://docs.python.org/3/reference/datamodel.html#object.__hash__
|
__hash__ = Option.__hash__ # see https://docs.python.org/3/reference/datamodel.html#object.__hash__
|
||||||
|
|
||||||
|
|
||||||
@@ -865,13 +889,13 @@ class OptionDict(Option[typing.Dict[str, typing.Any]], VerifyKeys, typing.Mappin
|
|||||||
return ", ".join(f"{key}: {v}" for key, v in value.items())
|
return ", ".join(f"{key}: {v}" for key, v in value.items())
|
||||||
|
|
||||||
def __getitem__(self, item: str) -> typing.Any:
|
def __getitem__(self, item: str) -> typing.Any:
|
||||||
return self.value.__getitem__(item)
|
return self.value[item]
|
||||||
|
|
||||||
def __iter__(self) -> typing.Iterator[str]:
|
def __iter__(self) -> typing.Iterator[str]:
|
||||||
return self.value.__iter__()
|
return iter(self.value)
|
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
return self.value.__len__()
|
return len(self.value)
|
||||||
|
|
||||||
# __getitem__ fallback fails for Counters, so we define this explicitly
|
# __getitem__ fallback fails for Counters, so we define this explicitly
|
||||||
def __contains__(self, item) -> bool:
|
def __contains__(self, item) -> bool:
|
||||||
@@ -1067,10 +1091,10 @@ class PlandoTexts(Option[typing.List[PlandoText]], VerifyKeys):
|
|||||||
yield from self.value
|
yield from self.value
|
||||||
|
|
||||||
def __getitem__(self, index: typing.SupportsIndex) -> PlandoText:
|
def __getitem__(self, index: typing.SupportsIndex) -> PlandoText:
|
||||||
return self.value.__getitem__(index)
|
return self.value[index]
|
||||||
|
|
||||||
def __len__(self) -> int:
|
def __len__(self) -> int:
|
||||||
return self.value.__len__()
|
return len(self.value)
|
||||||
|
|
||||||
|
|
||||||
class ConnectionsMeta(AssembleOptions):
|
class ConnectionsMeta(AssembleOptions):
|
||||||
@@ -1094,7 +1118,7 @@ class PlandoConnection(typing.NamedTuple):
|
|||||||
|
|
||||||
entrance: str
|
entrance: str
|
||||||
exit: str
|
exit: str
|
||||||
direction: typing.Literal["entrance", "exit", "both"] # TODO: convert Direction to StrEnum once 3.8 is dropped
|
direction: typing.Literal["entrance", "exit", "both"] # TODO: convert Direction to StrEnum once 3.10 is dropped
|
||||||
percentage: int = 100
|
percentage: int = 100
|
||||||
|
|
||||||
|
|
||||||
@@ -1217,7 +1241,7 @@ class PlandoConnections(Option[typing.List[PlandoConnection]], metaclass=Connect
|
|||||||
connection.exit) for connection in value])
|
connection.exit) for connection in value])
|
||||||
|
|
||||||
def __getitem__(self, index: typing.SupportsIndex) -> PlandoConnection:
|
def __getitem__(self, index: typing.SupportsIndex) -> PlandoConnection:
|
||||||
return self.value.__getitem__(index)
|
return self.value[index]
|
||||||
|
|
||||||
def __iter__(self) -> typing.Iterator[PlandoConnection]:
|
def __iter__(self) -> typing.Iterator[PlandoConnection]:
|
||||||
yield from self.value
|
yield from self.value
|
||||||
@@ -1315,6 +1339,7 @@ class CommonOptions(metaclass=OptionsMetaProperty):
|
|||||||
will be returned as a sorted list.
|
will be returned as a sorted list.
|
||||||
"""
|
"""
|
||||||
assert option_names, "options.as_dict() was used without any option names."
|
assert option_names, "options.as_dict() was used without any option names."
|
||||||
|
assert len(option_names) < len(self.__class__.type_hints), "Specify only options you need."
|
||||||
option_results = {}
|
option_results = {}
|
||||||
for option_name in option_names:
|
for option_name in option_names:
|
||||||
if option_name not in type(self).type_hints:
|
if option_name not in type(self).type_hints:
|
||||||
@@ -1643,7 +1668,7 @@ class OptionGroup(typing.NamedTuple):
|
|||||||
|
|
||||||
|
|
||||||
item_and_loc_options = [LocalItems, NonLocalItems, StartInventory, StartInventoryPool, StartHints,
|
item_and_loc_options = [LocalItems, NonLocalItems, StartInventory, StartInventoryPool, StartHints,
|
||||||
StartLocationHints, ExcludeLocations, PriorityLocations, ItemLinks]
|
StartLocationHints, ExcludeLocations, PriorityLocations, ItemLinks, PlandoItems]
|
||||||
"""
|
"""
|
||||||
Options that are always populated in "Item & Location Options" Option Group. Cannot be moved to another group.
|
Options that are always populated in "Item & Location Options" Option Group. Cannot be moved to another group.
|
||||||
If desired, a custom "Item & Location Options" Option Group can be defined, but only for adding additional options to
|
If desired, a custom "Item & Location Options" Option Group can be defined, but only for adding additional options to
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ Currently, the following games are supported:
|
|||||||
|
|
||||||
* The Legend of Zelda: A Link to the Past
|
* The Legend of Zelda: A Link to the Past
|
||||||
* Factorio
|
* Factorio
|
||||||
* Minecraft
|
|
||||||
* Subnautica
|
* Subnautica
|
||||||
* Risk of Rain 2
|
* Risk of Rain 2
|
||||||
* The Legend of Zelda: Ocarina of Time
|
* The Legend of Zelda: Ocarina of Time
|
||||||
@@ -15,7 +14,6 @@ Currently, the following games are supported:
|
|||||||
* Super Metroid
|
* Super Metroid
|
||||||
* Secret of Evermore
|
* Secret of Evermore
|
||||||
* Final Fantasy
|
* Final Fantasy
|
||||||
* Rogue Legacy
|
|
||||||
* VVVVVV
|
* VVVVVV
|
||||||
* Raft
|
* Raft
|
||||||
* Super Mario 64
|
* Super Mario 64
|
||||||
@@ -42,7 +40,6 @@ Currently, the following games are supported:
|
|||||||
* The Messenger
|
* The Messenger
|
||||||
* Kingdom Hearts 2
|
* Kingdom Hearts 2
|
||||||
* The Legend of Zelda: Link's Awakening DX
|
* The Legend of Zelda: Link's Awakening DX
|
||||||
* Clique
|
|
||||||
* Adventure
|
* Adventure
|
||||||
* DLC Quest
|
* DLC Quest
|
||||||
* Noita
|
* Noita
|
||||||
@@ -83,6 +80,7 @@ Currently, the following games are supported:
|
|||||||
* Jak and Daxter: The Precursor Legacy
|
* Jak and Daxter: The Precursor Legacy
|
||||||
* Super Mario Land 2: 6 Golden Coins
|
* Super Mario Land 2: 6 Golden Coins
|
||||||
* shapez
|
* shapez
|
||||||
|
* Paint
|
||||||
|
|
||||||
For setup and instructions check out our [tutorials page](https://archipelago.gg/tutorial/).
|
For setup and instructions check out our [tutorials page](https://archipelago.gg/tutorial/).
|
||||||
Downloads can be found at [Releases](https://github.com/ArchipelagoMW/Archipelago/releases), including compiled
|
Downloads can be found at [Releases](https://github.com/ArchipelagoMW/Archipelago/releases), including compiled
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ from json import loads, dumps
|
|||||||
from CommonClient import CommonContext, server_loop, ClientCommandProcessor, gui_enabled, get_base_parser
|
from CommonClient import CommonContext, server_loop, ClientCommandProcessor, gui_enabled, get_base_parser
|
||||||
|
|
||||||
import Utils
|
import Utils
|
||||||
|
from settings import Settings
|
||||||
from Utils import async_start
|
from Utils import async_start
|
||||||
from MultiServer import mark_raw
|
from MultiServer import mark_raw
|
||||||
if typing.TYPE_CHECKING:
|
if typing.TYPE_CHECKING:
|
||||||
@@ -285,7 +286,7 @@ class SNESState(enum.IntEnum):
|
|||||||
|
|
||||||
|
|
||||||
def launch_sni() -> None:
|
def launch_sni() -> None:
|
||||||
sni_path = Utils.get_settings()["sni_options"]["sni_path"]
|
sni_path = Settings.sni_options.sni_path
|
||||||
|
|
||||||
if not os.path.isdir(sni_path):
|
if not os.path.isdir(sni_path):
|
||||||
sni_path = Utils.local_path(sni_path)
|
sni_path = Utils.local_path(sni_path)
|
||||||
@@ -668,8 +669,7 @@ async def game_watcher(ctx: SNIContext) -> None:
|
|||||||
|
|
||||||
|
|
||||||
async def run_game(romfile: str) -> None:
|
async def run_game(romfile: str) -> None:
|
||||||
auto_start = typing.cast(typing.Union[bool, str],
|
auto_start = Settings.sni_options.snes_rom_start
|
||||||
Utils.get_settings()["sni_options"].get("snes_rom_start", True))
|
|
||||||
if auto_start is True:
|
if auto_start is True:
|
||||||
import webbrowser
|
import webbrowser
|
||||||
webbrowser.open(romfile)
|
webbrowser.open(romfile)
|
||||||
|
|||||||
47
Utils.py
47
Utils.py
@@ -47,7 +47,7 @@ class Version(typing.NamedTuple):
|
|||||||
return ".".join(str(item) for item in self)
|
return ".".join(str(item) for item in self)
|
||||||
|
|
||||||
|
|
||||||
__version__ = "0.6.2"
|
__version__ = "0.6.4"
|
||||||
version_tuple = tuplize_version(__version__)
|
version_tuple = tuplize_version(__version__)
|
||||||
|
|
||||||
is_linux = sys.platform.startswith("linux")
|
is_linux = sys.platform.startswith("linux")
|
||||||
@@ -166,6 +166,10 @@ def home_path(*path: str) -> str:
|
|||||||
os.symlink(home_path.cached_path, legacy_home_path)
|
os.symlink(home_path.cached_path, legacy_home_path)
|
||||||
else:
|
else:
|
||||||
os.makedirs(home_path.cached_path, 0o700, exist_ok=True)
|
os.makedirs(home_path.cached_path, 0o700, exist_ok=True)
|
||||||
|
elif sys.platform == 'darwin':
|
||||||
|
import platformdirs
|
||||||
|
home_path.cached_path = platformdirs.user_data_dir("Archipelago", False)
|
||||||
|
os.makedirs(home_path.cached_path, 0o700, exist_ok=True)
|
||||||
else:
|
else:
|
||||||
# not implemented
|
# not implemented
|
||||||
home_path.cached_path = local_path() # this will generate the same exceptions we got previously
|
home_path.cached_path = local_path() # this will generate the same exceptions we got previously
|
||||||
@@ -177,7 +181,7 @@ def user_path(*path: str) -> str:
|
|||||||
"""Returns either local_path or home_path based on write permissions."""
|
"""Returns either local_path or home_path based on write permissions."""
|
||||||
if hasattr(user_path, "cached_path"):
|
if hasattr(user_path, "cached_path"):
|
||||||
pass
|
pass
|
||||||
elif os.access(local_path(), os.W_OK):
|
elif os.access(local_path(), os.W_OK) and not (is_macos and is_frozen()):
|
||||||
user_path.cached_path = local_path()
|
user_path.cached_path = local_path()
|
||||||
else:
|
else:
|
||||||
user_path.cached_path = home_path()
|
user_path.cached_path = home_path()
|
||||||
@@ -409,13 +413,26 @@ def get_adjuster_settings(game_name: str) -> Namespace:
|
|||||||
|
|
||||||
@cache_argsless
|
@cache_argsless
|
||||||
def get_unique_identifier():
|
def get_unique_identifier():
|
||||||
uuid = persistent_load().get("client", {}).get("uuid", None)
|
common_path = cache_path("common.json")
|
||||||
|
try:
|
||||||
|
with open(common_path) as f:
|
||||||
|
common_file = json.load(f)
|
||||||
|
uuid = common_file.get("uuid", None)
|
||||||
|
except FileNotFoundError:
|
||||||
|
common_file = {}
|
||||||
|
uuid = None
|
||||||
|
|
||||||
if uuid:
|
if uuid:
|
||||||
return uuid
|
return uuid
|
||||||
|
|
||||||
import uuid
|
from uuid import uuid4
|
||||||
uuid = uuid.getnode()
|
uuid = str(uuid4())
|
||||||
persistent_store("client", "uuid", uuid)
|
common_file["uuid"] = uuid
|
||||||
|
|
||||||
|
cache_folder = os.path.dirname(common_path)
|
||||||
|
os.makedirs(cache_folder, exist_ok=True)
|
||||||
|
with open(common_path, "w") as f:
|
||||||
|
json.dump(common_file, f, separators=(",", ":"))
|
||||||
return uuid
|
return uuid
|
||||||
|
|
||||||
|
|
||||||
@@ -438,6 +455,7 @@ class RestrictedUnpickler(pickle.Unpickler):
|
|||||||
if module == "builtins" and name in safe_builtins:
|
if module == "builtins" and name in safe_builtins:
|
||||||
return getattr(builtins, name)
|
return getattr(builtins, name)
|
||||||
# used by OptionCounter
|
# used by OptionCounter
|
||||||
|
# necessary because the actual Options class instances are pickled when transfered to WebHost generation pool
|
||||||
if module == "collections" and name == "Counter":
|
if module == "collections" and name == "Counter":
|
||||||
return collections.Counter
|
return collections.Counter
|
||||||
# used by MultiServer -> savegame/multidata
|
# used by MultiServer -> savegame/multidata
|
||||||
@@ -468,6 +486,18 @@ def restricted_loads(s: bytes) -> Any:
|
|||||||
return RestrictedUnpickler(io.BytesIO(s)).load()
|
return RestrictedUnpickler(io.BytesIO(s)).load()
|
||||||
|
|
||||||
|
|
||||||
|
def restricted_dumps(obj: Any) -> bytes:
|
||||||
|
"""Helper function analogous to pickle.dumps()."""
|
||||||
|
s = pickle.dumps(obj)
|
||||||
|
# Assert that the string can be successfully loaded by restricted_loads
|
||||||
|
try:
|
||||||
|
restricted_loads(s)
|
||||||
|
except pickle.UnpicklingError as e:
|
||||||
|
raise pickle.PicklingError(e) from e
|
||||||
|
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
class ByValue:
|
class ByValue:
|
||||||
"""
|
"""
|
||||||
Mixin for enums to pickle value instead of name (restores pre-3.11 behavior). Use as left-most parent.
|
Mixin for enums to pickle value instead of name (restores pre-3.11 behavior). Use as left-most parent.
|
||||||
@@ -873,7 +903,7 @@ def async_start(co: Coroutine[None, None, typing.Any], name: Optional[str] = Non
|
|||||||
Use this to start a task when you don't keep a reference to it or immediately await it,
|
Use this to start a task when you don't keep a reference to it or immediately await it,
|
||||||
to prevent early garbage collection. "fire-and-forget"
|
to prevent early garbage collection. "fire-and-forget"
|
||||||
"""
|
"""
|
||||||
# https://docs.python.org/3.10/library/asyncio-task.html#asyncio.create_task
|
# https://docs.python.org/3.11/library/asyncio-task.html#asyncio.create_task
|
||||||
# Python docs:
|
# Python docs:
|
||||||
# ```
|
# ```
|
||||||
# Important: Save a reference to the result of [asyncio.create_task],
|
# Important: Save a reference to the result of [asyncio.create_task],
|
||||||
@@ -926,8 +956,7 @@ def _extend_freeze_support() -> None:
|
|||||||
# Handle the first process that MP will create
|
# Handle the first process that MP will create
|
||||||
if (
|
if (
|
||||||
len(sys.argv) >= 2 and sys.argv[-2] == '-c' and sys.argv[-1].startswith((
|
len(sys.argv) >= 2 and sys.argv[-2] == '-c' and sys.argv[-1].startswith((
|
||||||
'from multiprocessing.semaphore_tracker import main', # Py<3.8
|
'from multiprocessing.resource_tracker import main',
|
||||||
'from multiprocessing.resource_tracker import main', # Py>=3.8
|
|
||||||
'from multiprocessing.forkserver import main'
|
'from multiprocessing.forkserver import main'
|
||||||
)) and set(sys.argv[1:-2]) == set(_args_from_interpreter_flags())
|
)) and set(sys.argv[1:-2]) == set(_args_from_interpreter_flags())
|
||||||
):
|
):
|
||||||
|
|||||||
48
WebHost.py
48
WebHost.py
@@ -54,16 +54,15 @@ def get_app() -> "Flask":
|
|||||||
return app
|
return app
|
||||||
|
|
||||||
|
|
||||||
def create_ordered_tutorials_file() -> typing.List[typing.Dict[str, typing.Any]]:
|
def copy_tutorials_files_to_static() -> None:
|
||||||
import json
|
|
||||||
import shutil
|
import shutil
|
||||||
import zipfile
|
import zipfile
|
||||||
|
from werkzeug.utils import secure_filename
|
||||||
|
|
||||||
zfile: zipfile.ZipInfo
|
zfile: zipfile.ZipInfo
|
||||||
|
|
||||||
from worlds.AutoWorld import AutoWorldRegister
|
from worlds.AutoWorld import AutoWorldRegister
|
||||||
worlds = {}
|
worlds = {}
|
||||||
data = []
|
|
||||||
for game, world in AutoWorldRegister.world_types.items():
|
for game, world in AutoWorldRegister.world_types.items():
|
||||||
if hasattr(world.web, 'tutorials') and (not world.hidden or game == 'Archipelago'):
|
if hasattr(world.web, 'tutorials') and (not world.hidden or game == 'Archipelago'):
|
||||||
worlds[game] = world
|
worlds[game] = world
|
||||||
@@ -72,7 +71,7 @@ def create_ordered_tutorials_file() -> typing.List[typing.Dict[str, typing.Any]]
|
|||||||
shutil.rmtree(base_target_path, ignore_errors=True)
|
shutil.rmtree(base_target_path, ignore_errors=True)
|
||||||
for game, world in worlds.items():
|
for game, world in worlds.items():
|
||||||
# copy files from world's docs folder to the generated folder
|
# copy files from world's docs folder to the generated folder
|
||||||
target_path = os.path.join(base_target_path, get_file_safe_name(game))
|
target_path = os.path.join(base_target_path, secure_filename(game))
|
||||||
os.makedirs(target_path, exist_ok=True)
|
os.makedirs(target_path, exist_ok=True)
|
||||||
|
|
||||||
if world.zip_path:
|
if world.zip_path:
|
||||||
@@ -85,45 +84,14 @@ def create_ordered_tutorials_file() -> typing.List[typing.Dict[str, typing.Any]]
|
|||||||
for zfile in zf.infolist():
|
for zfile in zf.infolist():
|
||||||
if not zfile.is_dir() and "/docs/" in zfile.filename:
|
if not zfile.is_dir() and "/docs/" in zfile.filename:
|
||||||
zfile.filename = os.path.basename(zfile.filename)
|
zfile.filename = os.path.basename(zfile.filename)
|
||||||
zf.extract(zfile, target_path)
|
with open(os.path.join(target_path, secure_filename(zfile.filename)), "wb") as f:
|
||||||
|
f.write(zf.read(zfile))
|
||||||
else:
|
else:
|
||||||
source_path = Utils.local_path(os.path.dirname(world.__file__), "docs")
|
source_path = Utils.local_path(os.path.dirname(world.__file__), "docs")
|
||||||
files = os.listdir(source_path)
|
files = os.listdir(source_path)
|
||||||
for file in files:
|
for file in files:
|
||||||
shutil.copyfile(Utils.local_path(source_path, file), Utils.local_path(target_path, file))
|
shutil.copyfile(Utils.local_path(source_path, file),
|
||||||
|
Utils.local_path(target_path, secure_filename(file)))
|
||||||
# build a json tutorial dict per game
|
|
||||||
game_data = {'gameTitle': game, 'tutorials': []}
|
|
||||||
for tutorial in world.web.tutorials:
|
|
||||||
# build dict for the json file
|
|
||||||
current_tutorial = {
|
|
||||||
'name': tutorial.tutorial_name,
|
|
||||||
'description': tutorial.description,
|
|
||||||
'files': [{
|
|
||||||
'language': tutorial.language,
|
|
||||||
'filename': game + '/' + tutorial.file_name,
|
|
||||||
'link': f'{game}/{tutorial.link}',
|
|
||||||
'authors': tutorial.authors
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
|
|
||||||
# check if the name of the current guide exists already
|
|
||||||
for guide in game_data['tutorials']:
|
|
||||||
if guide and tutorial.tutorial_name == guide['name']:
|
|
||||||
guide['files'].append(current_tutorial['files'][0])
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
game_data['tutorials'].append(current_tutorial)
|
|
||||||
|
|
||||||
data.append(game_data)
|
|
||||||
with open(Utils.local_path("WebHostLib", "static", "generated", "tutorials.json"), 'w', encoding='utf-8-sig') as json_target:
|
|
||||||
generic_data = {}
|
|
||||||
for games in data:
|
|
||||||
if 'Archipelago' in games['gameTitle']:
|
|
||||||
generic_data = data.pop(data.index(games))
|
|
||||||
sorted_data = [generic_data] + Utils.title_sorted(data, key=lambda entry: entry["gameTitle"])
|
|
||||||
json.dump(sorted_data, json_target, indent=2, ensure_ascii=False)
|
|
||||||
return sorted_data
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
@@ -142,7 +110,7 @@ if __name__ == "__main__":
|
|||||||
logging.warning("Could not update LttP sprites.")
|
logging.warning("Could not update LttP sprites.")
|
||||||
app = get_app()
|
app = get_app()
|
||||||
create_options_files()
|
create_options_files()
|
||||||
create_ordered_tutorials_file()
|
copy_tutorials_files_to_static()
|
||||||
if app.config["SELFLAUNCH"]:
|
if app.config["SELFLAUNCH"]:
|
||||||
autohost(app.config)
|
autohost(app.config)
|
||||||
if app.config["SELFGEN"]:
|
if app.config["SELFGEN"]:
|
||||||
|
|||||||
@@ -61,30 +61,43 @@ cache = Cache()
|
|||||||
Compress(app)
|
Compress(app)
|
||||||
|
|
||||||
|
|
||||||
|
def to_python(value):
|
||||||
|
return uuid.UUID(bytes=base64.urlsafe_b64decode(value + '=='))
|
||||||
|
|
||||||
|
|
||||||
|
def to_url(value):
|
||||||
|
return base64.urlsafe_b64encode(value.bytes).rstrip(b'=').decode('ascii')
|
||||||
|
|
||||||
|
|
||||||
class B64UUIDConverter(BaseConverter):
|
class B64UUIDConverter(BaseConverter):
|
||||||
|
|
||||||
def to_python(self, value):
|
def to_python(self, value):
|
||||||
return uuid.UUID(bytes=base64.urlsafe_b64decode(value + '=='))
|
return to_python(value)
|
||||||
|
|
||||||
def to_url(self, value):
|
def to_url(self, value):
|
||||||
return base64.urlsafe_b64encode(value.bytes).rstrip(b'=').decode('ascii')
|
return to_url(value)
|
||||||
|
|
||||||
|
|
||||||
# short UUID
|
# short UUID
|
||||||
app.url_map.converters["suuid"] = B64UUIDConverter
|
app.url_map.converters["suuid"] = B64UUIDConverter
|
||||||
app.jinja_env.filters['suuid'] = lambda value: base64.urlsafe_b64encode(value.bytes).rstrip(b'=').decode('ascii')
|
app.jinja_env.filters["suuid"] = to_url
|
||||||
app.jinja_env.filters["title_sorted"] = title_sorted
|
app.jinja_env.filters["title_sorted"] = title_sorted
|
||||||
|
|
||||||
|
|
||||||
def register():
|
def register():
|
||||||
"""Import submodules, triggering their registering on flask routing.
|
"""Import submodules, triggering their registering on flask routing.
|
||||||
Note: initializes worlds subsystem."""
|
Note: initializes worlds subsystem."""
|
||||||
|
import importlib
|
||||||
|
|
||||||
|
from werkzeug.utils import find_modules
|
||||||
# has automatic patch integration
|
# has automatic patch integration
|
||||||
import worlds.Files
|
import worlds.Files
|
||||||
app.jinja_env.filters['is_applayercontainer'] = worlds.Files.is_ap_player_container
|
app.jinja_env.filters['is_applayercontainer'] = worlds.Files.is_ap_player_container
|
||||||
|
|
||||||
from WebHostLib.customserver import run_server_process
|
from WebHostLib.customserver import run_server_process
|
||||||
# to trigger app routing picking up on it
|
|
||||||
from . import tracker, upload, landing, check, generate, downloads, api, stats, misc, robots, options, session
|
|
||||||
|
|
||||||
|
for module in find_modules("WebHostLib", include_packages=True):
|
||||||
|
importlib.import_module(module)
|
||||||
|
|
||||||
|
from . import api
|
||||||
app.register_blueprint(api.api_endpoints)
|
app.register_blueprint(api.api_endpoints)
|
||||||
|
|||||||
@@ -11,5 +11,5 @@ api_endpoints = Blueprint('api', __name__, url_prefix="/api")
|
|||||||
def get_players(seed: Seed) -> List[Tuple[str, str]]:
|
def get_players(seed: Seed) -> List[Tuple[str, str]]:
|
||||||
return [(slot.player_name, slot.game) for slot in seed.slots.order_by(Slot.player_id)]
|
return [(slot.player_name, slot.game) for slot in seed.slots.order_by(Slot.player_id)]
|
||||||
|
|
||||||
|
# trigger endpoint registration
|
||||||
from . import datapackage, generate, room, user # trigger registration
|
from . import datapackage, generate, room, tracker, user
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
import json
|
import json
|
||||||
import pickle
|
|
||||||
from uuid import UUID
|
from uuid import UUID
|
||||||
|
|
||||||
from flask import request, session, url_for
|
from flask import request, session, url_for
|
||||||
from markupsafe import Markup
|
from markupsafe import Markup
|
||||||
from pony.orm import commit
|
from pony.orm import commit
|
||||||
|
|
||||||
|
from Utils import restricted_dumps
|
||||||
from WebHostLib import app
|
from WebHostLib import app
|
||||||
from WebHostLib.check import get_yaml_data, roll_options
|
from WebHostLib.check import get_yaml_data, roll_options
|
||||||
from WebHostLib.generate import get_meta
|
from WebHostLib.generate import get_meta
|
||||||
@@ -56,7 +56,7 @@ def generate_api():
|
|||||||
"detail": results}, 400
|
"detail": results}, 400
|
||||||
else:
|
else:
|
||||||
gen = Generation(
|
gen = Generation(
|
||||||
options=pickle.dumps({name: vars(options) for name, options in gen_options.items()}),
|
options=restricted_dumps({name: vars(options) for name, options in gen_options.items()}),
|
||||||
# convert to json compatible
|
# convert to json compatible
|
||||||
meta=json.dumps(meta), state=STATE_QUEUED,
|
meta=json.dumps(meta), state=STATE_QUEUED,
|
||||||
owner=session["_id"])
|
owner=session["_id"])
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ from uuid import UUID
|
|||||||
|
|
||||||
from flask import abort, url_for
|
from flask import abort, url_for
|
||||||
|
|
||||||
|
from WebHostLib import to_url
|
||||||
import worlds.Files
|
import worlds.Files
|
||||||
from . import api_endpoints, get_players
|
from . import api_endpoints, get_players
|
||||||
from ..models import Room
|
from ..models import Room
|
||||||
@@ -33,7 +34,7 @@ def room_info(room_id: UUID) -> Dict[str, Any]:
|
|||||||
downloads.append(slot_download)
|
downloads.append(slot_download)
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"tracker": room.tracker,
|
"tracker": to_url(room.tracker),
|
||||||
"players": get_players(room.seed),
|
"players": get_players(room.seed),
|
||||||
"last_port": room.last_port,
|
"last_port": room.last_port,
|
||||||
"last_activity": room.last_activity,
|
"last_activity": room.last_activity,
|
||||||
|
|||||||
230
WebHostLib/api/tracker.py
Normal file
230
WebHostLib/api/tracker.py
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Any, TypedDict
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from flask import abort
|
||||||
|
|
||||||
|
from NetUtils import ClientStatus, Hint, NetworkItem, SlotType
|
||||||
|
from WebHostLib import cache
|
||||||
|
from WebHostLib.api import api_endpoints
|
||||||
|
from WebHostLib.models import Room
|
||||||
|
from WebHostLib.tracker import TrackerData
|
||||||
|
|
||||||
|
|
||||||
|
@api_endpoints.route("/tracker/<suuid:tracker>")
|
||||||
|
@cache.memoize(timeout=60)
|
||||||
|
def tracker_data(tracker: UUID) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Outputs json data to <root_path>/api/tracker/<id of current session tracker>.
|
||||||
|
|
||||||
|
:param tracker: UUID of current session tracker.
|
||||||
|
|
||||||
|
:return: Tracking data for all players in the room. Typing and docstrings describe the format of each value.
|
||||||
|
"""
|
||||||
|
room: Room | None = Room.get(tracker=tracker)
|
||||||
|
if not room:
|
||||||
|
abort(404)
|
||||||
|
|
||||||
|
tracker_data = TrackerData(room)
|
||||||
|
|
||||||
|
all_players: dict[int, list[int]] = tracker_data.get_all_players()
|
||||||
|
|
||||||
|
class PlayerAlias(TypedDict):
|
||||||
|
player: int
|
||||||
|
name: str | None
|
||||||
|
|
||||||
|
player_aliases: list[dict[str, int | list[PlayerAlias]]] = []
|
||||||
|
"""Slot aliases of all players."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
team_player_aliases: list[PlayerAlias] = []
|
||||||
|
team_aliases = {"team": team, "players": team_player_aliases}
|
||||||
|
player_aliases.append(team_aliases)
|
||||||
|
for player in players:
|
||||||
|
team_player_aliases.append({"player": player, "alias": tracker_data.get_player_alias(team, player)})
|
||||||
|
|
||||||
|
class PlayerItemsReceived(TypedDict):
|
||||||
|
player: int
|
||||||
|
items: list[NetworkItem]
|
||||||
|
|
||||||
|
player_items_received: list[dict[str, int | list[PlayerItemsReceived]]] = []
|
||||||
|
"""Items received by each player."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
player_received_items: list[PlayerItemsReceived] = []
|
||||||
|
team_items_received = {"team": team, "players": player_received_items}
|
||||||
|
player_items_received.append(team_items_received)
|
||||||
|
for player in players:
|
||||||
|
player_received_items.append(
|
||||||
|
{"player": player, "items": tracker_data.get_player_received_items(team, player)})
|
||||||
|
|
||||||
|
class PlayerChecksDone(TypedDict):
|
||||||
|
player: int
|
||||||
|
locations: list[int]
|
||||||
|
|
||||||
|
player_checks_done: list[dict[str, int | list[PlayerChecksDone]]] = []
|
||||||
|
"""ID of all locations checked by each player."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
per_player_checks: list[PlayerChecksDone] = []
|
||||||
|
team_checks_done = {"team": team, "players": per_player_checks}
|
||||||
|
player_checks_done.append(team_checks_done)
|
||||||
|
for player in players:
|
||||||
|
per_player_checks.append(
|
||||||
|
{"player": player, "locations": sorted(tracker_data.get_player_checked_locations(team, player))})
|
||||||
|
|
||||||
|
total_checks_done: list[dict[str, int]] = [
|
||||||
|
{"team": team, "checks_done": checks_done}
|
||||||
|
for team, checks_done in tracker_data.get_team_locations_checked_count().items()
|
||||||
|
]
|
||||||
|
"""Total number of locations checked for the entire multiworld per team."""
|
||||||
|
|
||||||
|
class PlayerHints(TypedDict):
|
||||||
|
player: int
|
||||||
|
hints: list[Hint]
|
||||||
|
|
||||||
|
hints: list[dict[str, int | list[PlayerHints]]] = []
|
||||||
|
"""Hints that all players have used or received."""
|
||||||
|
for team, players in tracker_data.get_all_slots().items():
|
||||||
|
per_player_hints: list[PlayerHints] = []
|
||||||
|
team_hints = {"team": team, "players": per_player_hints}
|
||||||
|
hints.append(team_hints)
|
||||||
|
for player in players:
|
||||||
|
player_hints = sorted(tracker_data.get_player_hints(team, player))
|
||||||
|
per_player_hints.append({"player": player, "hints": player_hints})
|
||||||
|
slot_info = tracker_data.get_slot_info(team, player)
|
||||||
|
# this assumes groups are always after players
|
||||||
|
if slot_info.type != SlotType.group:
|
||||||
|
continue
|
||||||
|
for member in slot_info.group_members:
|
||||||
|
team_hints[member]["hints"] += player_hints
|
||||||
|
|
||||||
|
class PlayerTimer(TypedDict):
|
||||||
|
player: int
|
||||||
|
time: datetime | None
|
||||||
|
|
||||||
|
activity_timers: list[dict[str, int | list[PlayerTimer]]] = []
|
||||||
|
"""Time of last activity per player. Returned as RFC 1123 format and null if no connection has been made."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
player_timers: list[PlayerTimer] = []
|
||||||
|
team_timers = {"team": team, "players": player_timers}
|
||||||
|
activity_timers.append(team_timers)
|
||||||
|
for player in players:
|
||||||
|
player_timers.append({"player": player, "time": None})
|
||||||
|
|
||||||
|
client_activity_timers: tuple[tuple[int, int], float] = tracker_data._multisave.get("client_activity_timers", ())
|
||||||
|
for (team, player), timestamp in client_activity_timers:
|
||||||
|
# use index since we can rely on order
|
||||||
|
activity_timers[team]["player_timers"][player - 1]["time"] = datetime.fromtimestamp(timestamp, timezone.utc)
|
||||||
|
|
||||||
|
connection_timers: list[dict[str, int | list[PlayerTimer]]] = []
|
||||||
|
"""Time of last connection per player. Returned as RFC 1123 format and null if no connection has been made."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
player_timers: list[PlayerTimer] = []
|
||||||
|
team_connection_timers = {"team": team, "players": player_timers}
|
||||||
|
connection_timers.append(team_connection_timers)
|
||||||
|
for player in players:
|
||||||
|
player_timers.append({"player": player, "time": None})
|
||||||
|
|
||||||
|
client_connection_timers: tuple[tuple[int, int], float] = tracker_data._multisave.get(
|
||||||
|
"client_connection_timers", ())
|
||||||
|
for (team, player), timestamp in client_connection_timers:
|
||||||
|
connection_timers[team]["players"][player - 1]["time"] = datetime.fromtimestamp(timestamp, timezone.utc)
|
||||||
|
|
||||||
|
class PlayerStatus(TypedDict):
|
||||||
|
player: int
|
||||||
|
status: ClientStatus
|
||||||
|
|
||||||
|
player_status: list[dict[str, int | list[PlayerStatus]]] = []
|
||||||
|
"""The current client status for each player."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
player_statuses: list[PlayerStatus] = []
|
||||||
|
team_status = {"team": team, "players": player_statuses}
|
||||||
|
player_status.append(team_status)
|
||||||
|
for player in players:
|
||||||
|
player_statuses.append({"player": player, "status": tracker_data.get_player_client_status(team, player)})
|
||||||
|
|
||||||
|
return {
|
||||||
|
**get_static_tracker_data(room),
|
||||||
|
"aliases": player_aliases,
|
||||||
|
"player_items_received": player_items_received,
|
||||||
|
"player_checks_done": player_checks_done,
|
||||||
|
"total_checks_done": total_checks_done,
|
||||||
|
"hints": hints,
|
||||||
|
"activity_timers": activity_timers,
|
||||||
|
"connection_timers": connection_timers,
|
||||||
|
"player_status": player_status,
|
||||||
|
"datapackage": tracker_data._multidata["datapackage"],
|
||||||
|
}
|
||||||
|
|
||||||
|
@cache.memoize()
|
||||||
|
def get_static_tracker_data(room: Room) -> dict[str, Any]:
|
||||||
|
"""
|
||||||
|
Builds and caches the static data for this active session tracker, so that it doesn't need to be recalculated.
|
||||||
|
"""
|
||||||
|
|
||||||
|
tracker_data = TrackerData(room)
|
||||||
|
|
||||||
|
all_players: dict[int, list[int]] = tracker_data.get_all_players()
|
||||||
|
|
||||||
|
class PlayerGroups(TypedDict):
|
||||||
|
slot: int
|
||||||
|
name: str
|
||||||
|
members: list[int]
|
||||||
|
|
||||||
|
groups: list[dict[str, int | list[PlayerGroups]]] = []
|
||||||
|
"""The Slot ID of groups and the IDs of the group's members."""
|
||||||
|
for team, players in tracker_data.get_all_slots().items():
|
||||||
|
groups_in_team: list[PlayerGroups] = []
|
||||||
|
team_groups = {"team": team, "groups": groups_in_team}
|
||||||
|
groups.append(team_groups)
|
||||||
|
for player in players:
|
||||||
|
slot_info = tracker_data.get_slot_info(team, player)
|
||||||
|
if slot_info.type != SlotType.group or not slot_info.group_members:
|
||||||
|
continue
|
||||||
|
groups_in_team.append(
|
||||||
|
{
|
||||||
|
"slot": player,
|
||||||
|
"name": slot_info.name,
|
||||||
|
"members": list(slot_info.group_members),
|
||||||
|
})
|
||||||
|
class PlayerName(TypedDict):
|
||||||
|
player: int
|
||||||
|
name: str
|
||||||
|
|
||||||
|
player_names: list[dict[str, str | list[PlayerName]]] = []
|
||||||
|
"""Slot names of all players."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
per_team_player_names: list[PlayerName] = []
|
||||||
|
team_names = {"team": team, "players": per_team_player_names}
|
||||||
|
player_names.append(team_names)
|
||||||
|
for player in players:
|
||||||
|
per_team_player_names.append({"player": player, "name": tracker_data.get_player_name(team, player)})
|
||||||
|
|
||||||
|
class PlayerGame(TypedDict):
|
||||||
|
player: int
|
||||||
|
game: str
|
||||||
|
|
||||||
|
games: list[dict[str, int | list[PlayerGame]]] = []
|
||||||
|
"""The game each player is playing."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
player_games: list[PlayerGame] = []
|
||||||
|
team_games = {"team": team, "players": player_games}
|
||||||
|
games.append(team_games)
|
||||||
|
for player in players:
|
||||||
|
player_games.append({"player": player, "game": tracker_data.get_player_game(team, player)})
|
||||||
|
|
||||||
|
class PlayerSlotData(TypedDict):
|
||||||
|
player: int
|
||||||
|
slot_data: dict[str, Any]
|
||||||
|
|
||||||
|
slot_data: list[dict[str, int | list[PlayerSlotData]]] = []
|
||||||
|
"""Slot data for each player."""
|
||||||
|
for team, players in all_players.items():
|
||||||
|
player_slot_data: list[PlayerSlotData] = []
|
||||||
|
team_slot_data = {"team": team, "players": player_slot_data}
|
||||||
|
slot_data.append(team_slot_data)
|
||||||
|
for player in players:
|
||||||
|
player_slot_data.append({"player": player, "slot_data": tracker_data.get_slot_data(team, player)})
|
||||||
|
|
||||||
|
return {
|
||||||
|
"groups": groups,
|
||||||
|
"slot_data": slot_data,
|
||||||
|
}
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
from flask import session, jsonify
|
from flask import session, jsonify
|
||||||
from pony.orm import select
|
from pony.orm import select
|
||||||
|
|
||||||
|
from WebHostLib import to_url
|
||||||
from WebHostLib.models import Room, Seed
|
from WebHostLib.models import Room, Seed
|
||||||
from . import api_endpoints, get_players
|
from . import api_endpoints, get_players
|
||||||
|
|
||||||
@@ -10,13 +11,13 @@ def get_rooms():
|
|||||||
response = []
|
response = []
|
||||||
for room in select(room for room in Room if room.owner == session["_id"]):
|
for room in select(room for room in Room if room.owner == session["_id"]):
|
||||||
response.append({
|
response.append({
|
||||||
"room_id": room.id,
|
"room_id": to_url(room.id),
|
||||||
"seed_id": room.seed.id,
|
"seed_id": to_url(room.seed.id),
|
||||||
"creation_time": room.creation_time,
|
"creation_time": room.creation_time,
|
||||||
"last_activity": room.last_activity,
|
"last_activity": room.last_activity,
|
||||||
"last_port": room.last_port,
|
"last_port": room.last_port,
|
||||||
"timeout": room.timeout,
|
"timeout": room.timeout,
|
||||||
"tracker": room.tracker,
|
"tracker": to_url(room.tracker),
|
||||||
})
|
})
|
||||||
return jsonify(response)
|
return jsonify(response)
|
||||||
|
|
||||||
@@ -26,7 +27,7 @@ def get_seeds():
|
|||||||
response = []
|
response = []
|
||||||
for seed in select(seed for seed in Seed if seed.owner == session["_id"]):
|
for seed in select(seed for seed in Seed if seed.owner == session["_id"]):
|
||||||
response.append({
|
response.append({
|
||||||
"seed_id": seed.id,
|
"seed_id": to_url(seed.id),
|
||||||
"creation_time": seed.creation_time,
|
"creation_time": seed.creation_time,
|
||||||
"players": get_players(seed),
|
"players": get_players(seed),
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -164,9 +164,6 @@ def autogen(config: dict):
|
|||||||
Thread(target=keep_running, name="AP_Autogen").start()
|
Thread(target=keep_running, name="AP_Autogen").start()
|
||||||
|
|
||||||
|
|
||||||
multiworlds: typing.Dict[type(Room.id), MultiworldInstance] = {}
|
|
||||||
|
|
||||||
|
|
||||||
class MultiworldInstance():
|
class MultiworldInstance():
|
||||||
def __init__(self, config: dict, id: int):
|
def __init__(self, config: dict, id: int):
|
||||||
self.room_ids = set()
|
self.room_ids = set()
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import os
|
import os
|
||||||
import zipfile
|
import zipfile
|
||||||
import base64
|
import base64
|
||||||
from typing import Union, Dict, Set, Tuple
|
from collections.abc import Set
|
||||||
|
|
||||||
from flask import request, flash, redirect, url_for, render_template
|
from flask import request, flash, redirect, url_for, render_template
|
||||||
from markupsafe import Markup
|
from markupsafe import Markup
|
||||||
@@ -43,7 +43,7 @@ def mysterycheck():
|
|||||||
return redirect(url_for("check"), 301)
|
return redirect(url_for("check"), 301)
|
||||||
|
|
||||||
|
|
||||||
def get_yaml_data(files) -> Union[Dict[str, str], str, Markup]:
|
def get_yaml_data(files) -> dict[str, str] | str | Markup:
|
||||||
options = {}
|
options = {}
|
||||||
for uploaded_file in files:
|
for uploaded_file in files:
|
||||||
if banned_file(uploaded_file.filename):
|
if banned_file(uploaded_file.filename):
|
||||||
@@ -84,12 +84,12 @@ def get_yaml_data(files) -> Union[Dict[str, str], str, Markup]:
|
|||||||
return options
|
return options
|
||||||
|
|
||||||
|
|
||||||
def roll_options(options: Dict[str, Union[dict, str]],
|
def roll_options(options: dict[str, dict | str],
|
||||||
plando_options: Set[str] = frozenset({"bosses", "items", "connections", "texts"})) -> \
|
plando_options: Set[str] = frozenset({"bosses", "items", "connections", "texts"})) -> \
|
||||||
Tuple[Dict[str, Union[str, bool]], Dict[str, dict]]:
|
tuple[dict[str, str | bool], dict[str, dict]]:
|
||||||
plando_options = PlandoOptions.from_set(set(plando_options))
|
plando_options = PlandoOptions.from_set(set(plando_options))
|
||||||
results = {}
|
results: dict[str, str | bool] = {}
|
||||||
rolled_results = {}
|
rolled_results: dict[str, dict] = {}
|
||||||
for filename, text in options.items():
|
for filename, text in options.items():
|
||||||
try:
|
try:
|
||||||
if type(text) is dict:
|
if type(text) is dict:
|
||||||
|
|||||||
@@ -129,7 +129,7 @@ class WebHostContext(Context):
|
|||||||
else:
|
else:
|
||||||
row = GameDataPackage.get(checksum=game_data["checksum"])
|
row = GameDataPackage.get(checksum=game_data["checksum"])
|
||||||
if row: # None if rolled on >= 0.3.9 but uploaded to <= 0.3.8. multidata should be complete
|
if row: # None if rolled on >= 0.3.9 but uploaded to <= 0.3.8. multidata should be complete
|
||||||
game_data_packages[game] = Utils.restricted_loads(row.data)
|
game_data_packages[game] = restricted_loads(row.data)
|
||||||
continue
|
continue
|
||||||
else:
|
else:
|
||||||
self.logger.warning(f"Did not find game_data_package for {game}: {game_data['checksum']}")
|
self.logger.warning(f"Did not find game_data_package for {game}: {game_data['checksum']}")
|
||||||
@@ -159,6 +159,7 @@ class WebHostContext(Context):
|
|||||||
@db_session
|
@db_session
|
||||||
def _save(self, exit_save: bool = False) -> bool:
|
def _save(self, exit_save: bool = False) -> bool:
|
||||||
room = Room.get(id=self.room_id)
|
room = Room.get(id=self.room_id)
|
||||||
|
# Does not use Utils.restricted_dumps because we'd rather make a save than not make one
|
||||||
room.multisave = pickle.dumps(self.get_save())
|
room.multisave = pickle.dumps(self.get_save())
|
||||||
# saving only occurs on activity, so we can "abuse" this information to mark this as last_activity
|
# saving only occurs on activity, so we can "abuse" this information to mark this as last_activity
|
||||||
if not exit_save: # we don't want to count a shutdown as activity, which would restart the server again
|
if not exit_save: # we don't want to count a shutdown as activity, which would restart the server again
|
||||||
|
|||||||
@@ -61,12 +61,7 @@ def download_slot_file(room_id, player_id: int):
|
|||||||
else:
|
else:
|
||||||
import io
|
import io
|
||||||
|
|
||||||
if slot_data.game == "Minecraft":
|
if slot_data.game == "Factorio":
|
||||||
from worlds.minecraft import mc_update_output
|
|
||||||
fname = f"AP_{app.jinja_env.filters['suuid'](room_id)}_P{slot_data.player_id}_{slot_data.player_name}.apmc"
|
|
||||||
data = mc_update_output(slot_data.data, server=app.config['HOST_ADDRESS'], port=room.last_port)
|
|
||||||
return send_file(io.BytesIO(data), as_attachment=True, download_name=fname)
|
|
||||||
elif slot_data.game == "Factorio":
|
|
||||||
with zipfile.ZipFile(io.BytesIO(slot_data.data)) as zf:
|
with zipfile.ZipFile(io.BytesIO(slot_data.data)) as zf:
|
||||||
for name in zf.namelist():
|
for name in zf.namelist():
|
||||||
if name.endswith("info.json"):
|
if name.endswith("info.json"):
|
||||||
|
|||||||
@@ -1,12 +1,12 @@
|
|||||||
import concurrent.futures
|
import concurrent.futures
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
import pickle
|
|
||||||
import random
|
import random
|
||||||
import tempfile
|
import tempfile
|
||||||
import zipfile
|
import zipfile
|
||||||
from collections import Counter
|
from collections import Counter
|
||||||
from typing import Any, Dict, List, Optional, Union, Set
|
from pickle import PicklingError
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from flask import flash, redirect, render_template, request, session, url_for
|
from flask import flash, redirect, render_template, request, session, url_for
|
||||||
from pony.orm import commit, db_session
|
from pony.orm import commit, db_session
|
||||||
@@ -14,7 +14,7 @@ from pony.orm import commit, db_session
|
|||||||
from BaseClasses import get_seed, seeddigits
|
from BaseClasses import get_seed, seeddigits
|
||||||
from Generate import PlandoOptions, handle_name
|
from Generate import PlandoOptions, handle_name
|
||||||
from Main import main as ERmain
|
from Main import main as ERmain
|
||||||
from Utils import __version__
|
from Utils import __version__, restricted_dumps
|
||||||
from WebHostLib import app
|
from WebHostLib import app
|
||||||
from settings import ServerOptions, GeneratorOptions
|
from settings import ServerOptions, GeneratorOptions
|
||||||
from worlds.alttp.EntranceRandomizer import parse_arguments
|
from worlds.alttp.EntranceRandomizer import parse_arguments
|
||||||
@@ -23,8 +23,8 @@ from .models import Generation, STATE_ERROR, STATE_QUEUED, Seed, UUID
|
|||||||
from .upload import upload_zip_to_db
|
from .upload import upload_zip_to_db
|
||||||
|
|
||||||
|
|
||||||
def get_meta(options_source: dict, race: bool = False) -> Dict[str, Union[List[str], Dict[str, Any]]]:
|
def get_meta(options_source: dict, race: bool = False) -> dict[str, list[str] | dict[str, Any]]:
|
||||||
plando_options: Set[str] = set()
|
plando_options: set[str] = set()
|
||||||
for substr in ("bosses", "items", "connections", "texts"):
|
for substr in ("bosses", "items", "connections", "texts"):
|
||||||
if options_source.get(f"plando_{substr}", substr in GeneratorOptions.plando_options):
|
if options_source.get(f"plando_{substr}", substr in GeneratorOptions.plando_options):
|
||||||
plando_options.add(substr)
|
plando_options.add(substr)
|
||||||
@@ -73,7 +73,7 @@ def generate(race=False):
|
|||||||
return render_template("generate.html", race=race, version=__version__)
|
return render_template("generate.html", race=race, version=__version__)
|
||||||
|
|
||||||
|
|
||||||
def start_generation(options: Dict[str, Union[dict, str]], meta: Dict[str, Any]):
|
def start_generation(options: dict[str, dict | str], meta: dict[str, Any]):
|
||||||
results, gen_options = roll_options(options, set(meta["plando_options"]))
|
results, gen_options = roll_options(options, set(meta["plando_options"]))
|
||||||
|
|
||||||
if any(type(result) == str for result in results.values()):
|
if any(type(result) == str for result in results.values()):
|
||||||
@@ -83,12 +83,18 @@ def start_generation(options: Dict[str, Union[dict, str]], meta: Dict[str, Any])
|
|||||||
f"If you have a larger group, please generate it yourself and upload it.")
|
f"If you have a larger group, please generate it yourself and upload it.")
|
||||||
return redirect(url_for(request.endpoint, **(request.view_args or {})))
|
return redirect(url_for(request.endpoint, **(request.view_args or {})))
|
||||||
elif len(gen_options) >= app.config["JOB_THRESHOLD"]:
|
elif len(gen_options) >= app.config["JOB_THRESHOLD"]:
|
||||||
gen = Generation(
|
try:
|
||||||
options=pickle.dumps({name: vars(options) for name, options in gen_options.items()}),
|
gen = Generation(
|
||||||
# convert to json compatible
|
options=restricted_dumps({name: vars(options) for name, options in gen_options.items()}),
|
||||||
meta=json.dumps(meta),
|
# convert to json compatible
|
||||||
state=STATE_QUEUED,
|
meta=json.dumps(meta),
|
||||||
owner=session["_id"])
|
state=STATE_QUEUED,
|
||||||
|
owner=session["_id"])
|
||||||
|
except PicklingError as e:
|
||||||
|
from .autolauncher import handle_generation_failure
|
||||||
|
handle_generation_failure(e)
|
||||||
|
return render_template("seedError.html", seed_error=("PicklingError: " + str(e)))
|
||||||
|
|
||||||
commit()
|
commit()
|
||||||
|
|
||||||
return redirect(url_for("wait_seed", seed=gen.id))
|
return redirect(url_for("wait_seed", seed=gen.id))
|
||||||
@@ -104,9 +110,9 @@ def start_generation(options: Dict[str, Union[dict, str]], meta: Dict[str, Any])
|
|||||||
return redirect(url_for("view_seed", seed=seed_id))
|
return redirect(url_for("view_seed", seed=seed_id))
|
||||||
|
|
||||||
|
|
||||||
def gen_game(gen_options: dict, meta: Optional[Dict[str, Any]] = None, owner=None, sid=None):
|
def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None, sid=None):
|
||||||
if not meta:
|
if meta is None:
|
||||||
meta: Dict[str, Any] = {}
|
meta = {}
|
||||||
|
|
||||||
meta.setdefault("server_options", {}).setdefault("hint_cost", 10)
|
meta.setdefault("server_options", {}).setdefault("hint_cost", 10)
|
||||||
race = meta.setdefault("generator_options", {}).setdefault("race", False)
|
race = meta.setdefault("generator_options", {}).setdefault("race", False)
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ def update_sprites_lttp():
|
|||||||
from LttPAdjuster import update_sprites
|
from LttPAdjuster import update_sprites
|
||||||
|
|
||||||
# Target directories
|
# Target directories
|
||||||
input_dir = user_path("data", "sprites", "alttpr")
|
input_dir = user_path("data", "sprites", "alttp", "remote")
|
||||||
output_dir = local_path("WebHostLib", "static", "generated") # TODO: move to user_path
|
output_dir = local_path("WebHostLib", "static", "generated") # TODO: move to user_path
|
||||||
|
|
||||||
os.makedirs(os.path.join(output_dir, "sprites"), exist_ok=True)
|
os.makedirs(os.path.join(output_dir, "sprites"), exist_ok=True)
|
||||||
|
|||||||
@@ -7,17 +7,69 @@ from flask import request, redirect, url_for, render_template, Response, session
|
|||||||
from pony.orm import count, commit, db_session
|
from pony.orm import count, commit, db_session
|
||||||
from werkzeug.utils import secure_filename
|
from werkzeug.utils import secure_filename
|
||||||
|
|
||||||
from worlds.AutoWorld import AutoWorldRegister
|
from worlds.AutoWorld import AutoWorldRegister, World
|
||||||
from . import app, cache
|
from . import app, cache
|
||||||
from .models import Seed, Room, Command, UUID, uuid4
|
from .models import Seed, Room, Command, UUID, uuid4
|
||||||
|
from Utils import title_sorted
|
||||||
|
|
||||||
|
|
||||||
def get_world_theme(game_name: str):
|
def get_world_theme(game_name: str) -> str:
|
||||||
if game_name in AutoWorldRegister.world_types:
|
if game_name in AutoWorldRegister.world_types:
|
||||||
return AutoWorldRegister.world_types[game_name].web.theme
|
return AutoWorldRegister.world_types[game_name].web.theme
|
||||||
return 'grass'
|
return 'grass'
|
||||||
|
|
||||||
|
|
||||||
|
def get_visible_worlds() -> dict[str, type(World)]:
|
||||||
|
worlds = {}
|
||||||
|
for game, world in AutoWorldRegister.world_types.items():
|
||||||
|
if not world.hidden:
|
||||||
|
worlds[game] = world
|
||||||
|
return worlds
|
||||||
|
|
||||||
|
|
||||||
|
def render_markdown(path: str) -> str:
|
||||||
|
import mistune
|
||||||
|
from collections import Counter
|
||||||
|
|
||||||
|
markdown = mistune.create_markdown(
|
||||||
|
escape=False,
|
||||||
|
plugins=[
|
||||||
|
"strikethrough",
|
||||||
|
"footnotes",
|
||||||
|
"table",
|
||||||
|
"speedup",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
heading_id_count: Counter[str] = Counter()
|
||||||
|
|
||||||
|
def heading_id(text: str) -> str:
|
||||||
|
nonlocal heading_id_count
|
||||||
|
import re # there is no good way to do this without regex
|
||||||
|
|
||||||
|
s = re.sub(r"[^\w\- ]", "", text.lower()).replace(" ", "-").strip("-")
|
||||||
|
n = heading_id_count[s]
|
||||||
|
heading_id_count[s] += 1
|
||||||
|
if n > 0:
|
||||||
|
s += f"-{n}"
|
||||||
|
return s
|
||||||
|
|
||||||
|
def id_hook(_: mistune.Markdown, state: mistune.BlockState) -> None:
|
||||||
|
for tok in state.tokens:
|
||||||
|
if tok["type"] == "heading" and tok["attrs"]["level"] < 4:
|
||||||
|
text = tok["text"]
|
||||||
|
assert isinstance(text, str)
|
||||||
|
unique_id = heading_id(text)
|
||||||
|
tok["attrs"]["id"] = unique_id
|
||||||
|
tok["text"] = f"<a href=\"#{unique_id}\">{text}</a>" # make header link to itself
|
||||||
|
|
||||||
|
markdown.before_render_hooks.append(id_hook)
|
||||||
|
|
||||||
|
with open(path, encoding="utf-8-sig") as f:
|
||||||
|
document = f.read()
|
||||||
|
return markdown(document)
|
||||||
|
|
||||||
|
|
||||||
@app.errorhandler(404)
|
@app.errorhandler(404)
|
||||||
@app.errorhandler(jinja2.exceptions.TemplateNotFound)
|
@app.errorhandler(jinja2.exceptions.TemplateNotFound)
|
||||||
def page_not_found(err):
|
def page_not_found(err):
|
||||||
@@ -31,83 +83,103 @@ def start_playing():
|
|||||||
return render_template(f"startPlaying.html")
|
return render_template(f"startPlaying.html")
|
||||||
|
|
||||||
|
|
||||||
# Game Info Pages
|
|
||||||
@app.route('/games/<string:game>/info/<string:lang>')
|
@app.route('/games/<string:game>/info/<string:lang>')
|
||||||
@cache.cached()
|
@cache.cached()
|
||||||
def game_info(game, lang):
|
def game_info(game, lang):
|
||||||
|
"""Game Info Pages"""
|
||||||
try:
|
try:
|
||||||
world = AutoWorldRegister.world_types[game]
|
theme = get_world_theme(game)
|
||||||
if lang not in world.web.game_info_languages:
|
secure_game_name = secure_filename(game)
|
||||||
raise KeyError("Sorry, this game's info page is not available in that language yet.")
|
lang = secure_filename(lang)
|
||||||
except KeyError:
|
document = render_markdown(os.path.join(
|
||||||
|
app.static_folder, "generated", "docs",
|
||||||
|
secure_game_name, f"{lang}_{secure_game_name}.md"
|
||||||
|
))
|
||||||
|
return render_template(
|
||||||
|
"markdown_document.html",
|
||||||
|
title=f"{game} Guide",
|
||||||
|
html_from_markdown=document,
|
||||||
|
theme=theme,
|
||||||
|
)
|
||||||
|
except FileNotFoundError:
|
||||||
return abort(404)
|
return abort(404)
|
||||||
return render_template('gameInfo.html', game=game, lang=lang, theme=get_world_theme(game))
|
|
||||||
|
|
||||||
|
|
||||||
# List of supported games
|
|
||||||
@app.route('/games')
|
@app.route('/games')
|
||||||
@cache.cached()
|
@cache.cached()
|
||||||
def games():
|
def games():
|
||||||
worlds = {}
|
"""List of supported games"""
|
||||||
for game, world in AutoWorldRegister.world_types.items():
|
return render_template("supportedGames.html", worlds=get_visible_worlds())
|
||||||
if not world.hidden:
|
|
||||||
worlds[game] = world
|
|
||||||
return render_template("supportedGames.html", worlds=worlds)
|
@app.route('/tutorial/<string:game>/<string:file>')
|
||||||
|
@cache.cached()
|
||||||
|
def tutorial(game: str, file: str):
|
||||||
|
try:
|
||||||
|
theme = get_world_theme(game)
|
||||||
|
secure_game_name = secure_filename(game)
|
||||||
|
file = secure_filename(file)
|
||||||
|
document = render_markdown(os.path.join(
|
||||||
|
app.static_folder, "generated", "docs",
|
||||||
|
secure_game_name, file+".md"
|
||||||
|
))
|
||||||
|
return render_template(
|
||||||
|
"markdown_document.html",
|
||||||
|
title=f"{game} Guide",
|
||||||
|
html_from_markdown=document,
|
||||||
|
theme=theme,
|
||||||
|
)
|
||||||
|
except FileNotFoundError:
|
||||||
|
return abort(404)
|
||||||
|
|
||||||
|
|
||||||
@app.route('/tutorial/<string:game>/<string:file>/<string:lang>')
|
@app.route('/tutorial/<string:game>/<string:file>/<string:lang>')
|
||||||
@cache.cached()
|
def tutorial_redirect(game: str, file: str, lang: str):
|
||||||
def tutorial(game, file, lang):
|
"""
|
||||||
try:
|
Permanent redirect old tutorial URLs to new ones to keep search engines happy.
|
||||||
world = AutoWorldRegister.world_types[game]
|
e.g. /tutorial/Archipelago/setup/en -> /tutorial/Archipelago/setup_en
|
||||||
if lang not in [tut.link.split("/")[1] for tut in world.web.tutorials]:
|
"""
|
||||||
raise KeyError("Sorry, the tutorial is not available in that language yet.")
|
return redirect(url_for("tutorial", game=game, file=f"{file}_{lang}"), code=301)
|
||||||
except KeyError:
|
|
||||||
return abort(404)
|
|
||||||
return render_template("tutorial.html", game=game, file=file, lang=lang, theme=get_world_theme(game))
|
|
||||||
|
|
||||||
|
|
||||||
@app.route('/tutorial/')
|
@app.route('/tutorial/')
|
||||||
@cache.cached()
|
@cache.cached()
|
||||||
def tutorial_landing():
|
def tutorial_landing():
|
||||||
return render_template("tutorialLanding.html")
|
tutorials = {}
|
||||||
|
worlds = AutoWorldRegister.world_types
|
||||||
|
for world_name, world_type in worlds.items():
|
||||||
|
current_world = tutorials[world_name] = {}
|
||||||
|
for tutorial in world_type.web.tutorials:
|
||||||
|
current_tutorial = current_world.setdefault(tutorial.tutorial_name, {
|
||||||
|
"description": tutorial.description, "files": {}})
|
||||||
|
current_tutorial["files"][secure_filename(tutorial.file_name).rsplit(".", 1)[0]] = {
|
||||||
|
"authors": tutorial.authors,
|
||||||
|
"language": tutorial.language
|
||||||
|
}
|
||||||
|
tutorials = {world_name: tutorials for world_name, tutorials in title_sorted(
|
||||||
|
tutorials.items(), key=lambda element: "\x00" if element[0] == "Archipelago" else worlds[element[0]].game)}
|
||||||
|
return render_template("tutorialLanding.html", worlds=worlds, tutorials=tutorials)
|
||||||
|
|
||||||
|
|
||||||
@app.route('/faq/<string:lang>/')
|
@app.route('/faq/<string:lang>/')
|
||||||
@cache.cached()
|
@cache.cached()
|
||||||
def faq(lang: str):
|
def faq(lang: str):
|
||||||
import markdown
|
document = render_markdown(os.path.join(app.static_folder, "assets", "faq", secure_filename(lang)+".md"))
|
||||||
with open(os.path.join(app.static_folder, "assets", "faq", secure_filename(lang)+".md")) as f:
|
|
||||||
document = f.read()
|
|
||||||
return render_template(
|
return render_template(
|
||||||
"markdown_document.html",
|
"markdown_document.html",
|
||||||
title="Frequently Asked Questions",
|
title="Frequently Asked Questions",
|
||||||
html_from_markdown=markdown.markdown(
|
html_from_markdown=document,
|
||||||
document,
|
|
||||||
extensions=["toc", "mdx_breakless_lists"],
|
|
||||||
extension_configs={
|
|
||||||
"toc": {"anchorlink": True}
|
|
||||||
}
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@app.route('/glossary/<string:lang>/')
|
@app.route('/glossary/<string:lang>/')
|
||||||
@cache.cached()
|
@cache.cached()
|
||||||
def glossary(lang: str):
|
def glossary(lang: str):
|
||||||
import markdown
|
document = render_markdown(os.path.join(app.static_folder, "assets", "glossary", secure_filename(lang)+".md"))
|
||||||
with open(os.path.join(app.static_folder, "assets", "glossary", secure_filename(lang)+".md")) as f:
|
|
||||||
document = f.read()
|
|
||||||
return render_template(
|
return render_template(
|
||||||
"markdown_document.html",
|
"markdown_document.html",
|
||||||
title="Glossary",
|
title="Glossary",
|
||||||
html_from_markdown=markdown.markdown(
|
html_from_markdown=document,
|
||||||
document,
|
|
||||||
extensions=["toc", "mdx_breakless_lists"],
|
|
||||||
extension_configs={
|
|
||||||
"toc": {"anchorlink": True}
|
|
||||||
}
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -7,6 +7,5 @@ Flask-Compress>=1.17
|
|||||||
Flask-Limiter>=3.12
|
Flask-Limiter>=3.12
|
||||||
bokeh>=3.6.3
|
bokeh>=3.6.3
|
||||||
markupsafe>=3.0.2
|
markupsafe>=3.0.2
|
||||||
Markdown>=3.7
|
|
||||||
mdx-breakless-lists>=1.0.1
|
|
||||||
setproctitle>=1.3.5
|
setproctitle>=1.3.5
|
||||||
|
mistune>=3.1.3
|
||||||
|
|||||||
@@ -1,45 +0,0 @@
|
|||||||
window.addEventListener('load', () => {
|
|
||||||
const gameInfo = document.getElementById('game-info');
|
|
||||||
new Promise((resolve, reject) => {
|
|
||||||
const ajax = new XMLHttpRequest();
|
|
||||||
ajax.onreadystatechange = () => {
|
|
||||||
if (ajax.readyState !== 4) { return; }
|
|
||||||
if (ajax.status === 404) {
|
|
||||||
reject("Sorry, this game's info page is not available in that language yet.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (ajax.status !== 200) {
|
|
||||||
reject("Something went wrong while loading the info page.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
resolve(ajax.responseText);
|
|
||||||
};
|
|
||||||
ajax.open('GET', `${window.location.origin}/static/generated/docs/${gameInfo.getAttribute('data-game')}/` +
|
|
||||||
`${gameInfo.getAttribute('data-lang')}_${gameInfo.getAttribute('data-game')}.md`, true);
|
|
||||||
ajax.send();
|
|
||||||
}).then((results) => {
|
|
||||||
// Populate page with HTML generated from markdown
|
|
||||||
showdown.setOption('tables', true);
|
|
||||||
showdown.setOption('strikethrough', true);
|
|
||||||
showdown.setOption('literalMidWordUnderscores', true);
|
|
||||||
gameInfo.innerHTML += (new showdown.Converter()).makeHtml(results);
|
|
||||||
|
|
||||||
// Reset the id of all header divs to something nicer
|
|
||||||
for (const header of document.querySelectorAll('h1, h2, h3, h4, h5, h6')) {
|
|
||||||
const headerId = header.innerText.replace(/\s+/g, '-').toLowerCase();
|
|
||||||
header.setAttribute('id', headerId);
|
|
||||||
header.addEventListener('click', () => {
|
|
||||||
window.location.hash = `#${headerId}`;
|
|
||||||
header.scrollIntoView();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Manually scroll the user to the appropriate header if anchor navigation is used
|
|
||||||
document.fonts.ready.finally(() => {
|
|
||||||
if (window.location.hash) {
|
|
||||||
const scrollTarget = document.getElementById(window.location.hash.substring(1));
|
|
||||||
scrollTarget?.scrollIntoView();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,49 +0,0 @@
|
|||||||
window.addEventListener('load', () => {
|
|
||||||
// Reload tracker every 15 seconds
|
|
||||||
const url = window.location;
|
|
||||||
setInterval(() => {
|
|
||||||
const ajax = new XMLHttpRequest();
|
|
||||||
ajax.onreadystatechange = () => {
|
|
||||||
if (ajax.readyState !== 4) { return; }
|
|
||||||
|
|
||||||
// Create a fake DOM using the returned HTML
|
|
||||||
const domParser = new DOMParser();
|
|
||||||
const fakeDOM = domParser.parseFromString(ajax.responseText, 'text/html');
|
|
||||||
|
|
||||||
// Update item tracker
|
|
||||||
document.getElementById('inventory-table').innerHTML = fakeDOM.getElementById('inventory-table').innerHTML;
|
|
||||||
// Update only counters in the location-table
|
|
||||||
let counters = document.getElementsByClassName('counter');
|
|
||||||
const fakeCounters = fakeDOM.getElementsByClassName('counter');
|
|
||||||
for (let i = 0; i < counters.length; i++) {
|
|
||||||
counters[i].innerHTML = fakeCounters[i].innerHTML;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
ajax.open('GET', url);
|
|
||||||
ajax.send();
|
|
||||||
}, 15000)
|
|
||||||
|
|
||||||
// Collapsible advancement sections
|
|
||||||
const categories = document.getElementsByClassName("location-category");
|
|
||||||
for (let i = 0; i < categories.length; i++) {
|
|
||||||
let hide_id = categories[i].id.split('-')[0];
|
|
||||||
if (hide_id == 'Total') {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
categories[i].addEventListener('click', function() {
|
|
||||||
// Toggle the advancement list
|
|
||||||
document.getElementById(hide_id).classList.toggle("hide");
|
|
||||||
// Change text of the header
|
|
||||||
const tab_header = document.getElementById(hide_id+'-header').children[0];
|
|
||||||
const orig_text = tab_header.innerHTML;
|
|
||||||
let new_text;
|
|
||||||
if (orig_text.includes("▼")) {
|
|
||||||
new_text = orig_text.replace("▼", "▲");
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
new_text = orig_text.replace("▲", "▼");
|
|
||||||
}
|
|
||||||
tab_header.innerHTML = new_text;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
@@ -1,52 +0,0 @@
|
|||||||
window.addEventListener('load', () => {
|
|
||||||
const tutorialWrapper = document.getElementById('tutorial-wrapper');
|
|
||||||
new Promise((resolve, reject) => {
|
|
||||||
const ajax = new XMLHttpRequest();
|
|
||||||
ajax.onreadystatechange = () => {
|
|
||||||
if (ajax.readyState !== 4) { return; }
|
|
||||||
if (ajax.status === 404) {
|
|
||||||
reject("Sorry, the tutorial is not available in that language yet.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (ajax.status !== 200) {
|
|
||||||
reject("Something went wrong while loading the tutorial.");
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
resolve(ajax.responseText);
|
|
||||||
};
|
|
||||||
ajax.open('GET', `${window.location.origin}/static/generated/docs/` +
|
|
||||||
`${tutorialWrapper.getAttribute('data-game')}/${tutorialWrapper.getAttribute('data-file')}_` +
|
|
||||||
`${tutorialWrapper.getAttribute('data-lang')}.md`, true);
|
|
||||||
ajax.send();
|
|
||||||
}).then((results) => {
|
|
||||||
// Populate page with HTML generated from markdown
|
|
||||||
showdown.setOption('tables', true);
|
|
||||||
showdown.setOption('strikethrough', true);
|
|
||||||
showdown.setOption('literalMidWordUnderscores', true);
|
|
||||||
showdown.setOption('disableForced4SpacesIndentedSublists', true);
|
|
||||||
tutorialWrapper.innerHTML += (new showdown.Converter()).makeHtml(results);
|
|
||||||
|
|
||||||
const title = document.querySelector('h1')
|
|
||||||
if (title) {
|
|
||||||
document.title = title.textContent;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reset the id of all header divs to something nicer
|
|
||||||
for (const header of document.querySelectorAll('h1, h2, h3, h4, h5, h6')) {
|
|
||||||
const headerId = header.innerText.replace(/\s+/g, '-').toLowerCase();
|
|
||||||
header.setAttribute('id', headerId);
|
|
||||||
header.addEventListener('click', () => {
|
|
||||||
window.location.hash = `#${headerId}`;
|
|
||||||
header.scrollIntoView();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Manually scroll the user to the appropriate header if anchor navigation is used
|
|
||||||
document.fonts.ready.finally(() => {
|
|
||||||
if (window.location.hash) {
|
|
||||||
const scrollTarget = document.getElementById(window.location.hash.substring(1));
|
|
||||||
scrollTarget?.scrollIntoView();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,81 +0,0 @@
|
|||||||
const showError = () => {
|
|
||||||
const tutorial = document.getElementById('tutorial-landing');
|
|
||||||
document.getElementById('page-title').innerText = 'This page is out of logic!';
|
|
||||||
tutorial.removeChild(document.getElementById('loading'));
|
|
||||||
const userMessage = document.createElement('h3');
|
|
||||||
const homepageLink = document.createElement('a');
|
|
||||||
homepageLink.innerText = 'Click here';
|
|
||||||
homepageLink.setAttribute('href', '/');
|
|
||||||
userMessage.append(homepageLink);
|
|
||||||
userMessage.append(' to go back to safety!');
|
|
||||||
tutorial.append(userMessage);
|
|
||||||
};
|
|
||||||
|
|
||||||
window.addEventListener('load', () => {
|
|
||||||
const ajax = new XMLHttpRequest();
|
|
||||||
ajax.onreadystatechange = () => {
|
|
||||||
if (ajax.readyState !== 4) { return; }
|
|
||||||
const tutorialDiv = document.getElementById('tutorial-landing');
|
|
||||||
if (ajax.status !== 200) { return showError(); }
|
|
||||||
|
|
||||||
try {
|
|
||||||
const games = JSON.parse(ajax.responseText);
|
|
||||||
games.forEach((game) => {
|
|
||||||
const gameTitle = document.createElement('h2');
|
|
||||||
gameTitle.innerText = game.gameTitle;
|
|
||||||
gameTitle.id = `${encodeURIComponent(game.gameTitle)}`;
|
|
||||||
tutorialDiv.appendChild(gameTitle);
|
|
||||||
|
|
||||||
game.tutorials.forEach((tutorial) => {
|
|
||||||
const tutorialName = document.createElement('h3');
|
|
||||||
tutorialName.innerText = tutorial.name;
|
|
||||||
tutorialDiv.appendChild(tutorialName);
|
|
||||||
|
|
||||||
const tutorialDescription = document.createElement('p');
|
|
||||||
tutorialDescription.innerText = tutorial.description;
|
|
||||||
tutorialDiv.appendChild(tutorialDescription);
|
|
||||||
|
|
||||||
const intro = document.createElement('p');
|
|
||||||
intro.innerText = 'This guide is available in the following languages:';
|
|
||||||
tutorialDiv.appendChild(intro);
|
|
||||||
|
|
||||||
const fileList = document.createElement('ul');
|
|
||||||
tutorial.files.forEach((file) => {
|
|
||||||
const listItem = document.createElement('li');
|
|
||||||
const anchor = document.createElement('a');
|
|
||||||
anchor.innerText = file.language;
|
|
||||||
anchor.setAttribute('href', `${window.location.origin}/tutorial/${file.link}`);
|
|
||||||
listItem.appendChild(anchor);
|
|
||||||
|
|
||||||
listItem.append(' by ');
|
|
||||||
for (let author of file.authors) {
|
|
||||||
listItem.append(author);
|
|
||||||
if (file.authors.indexOf(author) !== (file.authors.length -1)) {
|
|
||||||
listItem.append(', ');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fileList.appendChild(listItem);
|
|
||||||
});
|
|
||||||
tutorialDiv.appendChild(fileList);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
tutorialDiv.removeChild(document.getElementById('loading'));
|
|
||||||
} catch (error) {
|
|
||||||
showError();
|
|
||||||
console.error(error);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if we are on an anchor when coming in, and scroll to it.
|
|
||||||
const hash = window.location.hash;
|
|
||||||
if (hash) {
|
|
||||||
const offset = 128; // To account for navbar banner at top of page.
|
|
||||||
window.scrollTo(0, 0);
|
|
||||||
const rect = document.getElementById(hash.slice(1)).getBoundingClientRect();
|
|
||||||
window.scrollTo(rect.left, rect.top - offset);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
ajax.open('GET', `${window.location.origin}/static/generated/tutorials.json`, true);
|
|
||||||
ajax.send();
|
|
||||||
});
|
|
||||||
@@ -28,7 +28,6 @@
|
|||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-family: LondrinaSolid-Regular, sans-serif;
|
font-family: LondrinaSolid-Regular, sans-serif;
|
||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
width: 100%;
|
width: 100%;
|
||||||
text-shadow: 1px 1px 4px #000000;
|
text-shadow: 1px 1px 4px #000000;
|
||||||
}
|
}
|
||||||
@@ -37,7 +36,6 @@
|
|||||||
font-size: 38px;
|
font-size: 38px;
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-family: LondrinaSolid-Light, sans-serif;
|
font-family: LondrinaSolid-Light, sans-serif;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
width: 100%;
|
width: 100%;
|
||||||
margin-top: 20px;
|
margin-top: 20px;
|
||||||
margin-bottom: 0.5rem;
|
margin-bottom: 0.5rem;
|
||||||
@@ -50,7 +48,6 @@
|
|||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
text-align: left;
|
text-align: left;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
width: 100%;
|
width: 100%;
|
||||||
margin-bottom: 0.5rem;
|
margin-bottom: 0.5rem;
|
||||||
}
|
}
|
||||||
@@ -59,7 +56,6 @@
|
|||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
font-size: 24px;
|
font-size: 24px;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
margin-bottom: 24px;
|
margin-bottom: 24px;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -67,14 +63,12 @@
|
|||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
font-size: 22px;
|
font-size: 22px;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.markdown h6, .markdown details summary.h6{
|
.markdown h6, .markdown details summary.h6{
|
||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
font-size: 20px;
|
font-size: 20px;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.markdown h4, .markdown h5, .markdown h6{
|
.markdown h4, .markdown h5, .markdown h6{
|
||||||
|
|||||||
@@ -1,102 +0,0 @@
|
|||||||
#player-tracker-wrapper{
|
|
||||||
margin: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
#inventory-table{
|
|
||||||
border-top: 2px solid #000000;
|
|
||||||
border-left: 2px solid #000000;
|
|
||||||
border-right: 2px solid #000000;
|
|
||||||
border-top-left-radius: 4px;
|
|
||||||
border-top-right-radius: 4px;
|
|
||||||
padding: 3px 3px 10px;
|
|
||||||
width: 384px;
|
|
||||||
background-color: #42b149;
|
|
||||||
}
|
|
||||||
|
|
||||||
#inventory-table td{
|
|
||||||
width: 40px;
|
|
||||||
height: 40px;
|
|
||||||
text-align: center;
|
|
||||||
vertical-align: middle;
|
|
||||||
}
|
|
||||||
|
|
||||||
#inventory-table img{
|
|
||||||
height: 100%;
|
|
||||||
max-width: 40px;
|
|
||||||
max-height: 40px;
|
|
||||||
filter: grayscale(100%) contrast(75%) brightness(30%);
|
|
||||||
}
|
|
||||||
|
|
||||||
#inventory-table img.acquired{
|
|
||||||
filter: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
#inventory-table div.counted-item {
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
#inventory-table div.item-count {
|
|
||||||
position: absolute;
|
|
||||||
color: white;
|
|
||||||
font-family: "Minecraftia", monospace;
|
|
||||||
font-weight: bold;
|
|
||||||
bottom: 0;
|
|
||||||
right: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table{
|
|
||||||
width: 384px;
|
|
||||||
border-left: 2px solid #000000;
|
|
||||||
border-right: 2px solid #000000;
|
|
||||||
border-bottom: 2px solid #000000;
|
|
||||||
border-bottom-left-radius: 4px;
|
|
||||||
border-bottom-right-radius: 4px;
|
|
||||||
background-color: #42b149;
|
|
||||||
padding: 0 3px 3px;
|
|
||||||
font-family: "Minecraftia", monospace;
|
|
||||||
font-size: 14px;
|
|
||||||
cursor: default;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table th{
|
|
||||||
vertical-align: middle;
|
|
||||||
text-align: left;
|
|
||||||
padding-right: 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td{
|
|
||||||
padding-top: 2px;
|
|
||||||
padding-bottom: 2px;
|
|
||||||
line-height: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td.counter {
|
|
||||||
text-align: right;
|
|
||||||
font-size: 14px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td.toggle-arrow {
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table tr#Total-header {
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table img{
|
|
||||||
height: 100%;
|
|
||||||
max-width: 30px;
|
|
||||||
max-height: 30px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table tbody.locations {
|
|
||||||
font-size: 12px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td.location-name {
|
|
||||||
padding-left: 16px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.hide {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,3 @@
|
|||||||
import typing
|
|
||||||
from collections import Counter, defaultdict
|
from collections import Counter, defaultdict
|
||||||
from colorsys import hsv_to_rgb
|
from colorsys import hsv_to_rgb
|
||||||
from datetime import datetime, timedelta, date
|
from datetime import datetime, timedelta, date
|
||||||
@@ -18,21 +17,23 @@ from .models import Room
|
|||||||
PLOT_WIDTH = 600
|
PLOT_WIDTH = 600
|
||||||
|
|
||||||
|
|
||||||
def get_db_data(known_games: typing.Set[str]) -> typing.Tuple[typing.Counter[str],
|
def get_db_data(known_games: set[str]) -> tuple[Counter[str], defaultdict[date, dict[str, int]]]:
|
||||||
typing.DefaultDict[datetime.date, typing.Dict[str, int]]]:
|
games_played: defaultdict[date, dict[str, int]] = defaultdict(Counter)
|
||||||
games_played = defaultdict(Counter)
|
total_games: Counter[str] = Counter()
|
||||||
total_games = Counter()
|
|
||||||
cutoff = date.today() - timedelta(days=30)
|
cutoff = date.today() - timedelta(days=30)
|
||||||
room: Room
|
room: Room
|
||||||
for room in select(room for room in Room if room.creation_time >= cutoff):
|
for room in select(room for room in Room if room.creation_time >= cutoff):
|
||||||
for slot in room.seed.slots:
|
for slot in room.seed.slots:
|
||||||
if slot.game in known_games:
|
if slot.game in known_games:
|
||||||
total_games[slot.game] += 1
|
current_game = slot.game
|
||||||
games_played[room.creation_time.date()][slot.game] += 1
|
else:
|
||||||
|
current_game = "Other"
|
||||||
|
total_games[current_game] += 1
|
||||||
|
games_played[room.creation_time.date()][current_game] += 1
|
||||||
return total_games, games_played
|
return total_games, games_played
|
||||||
|
|
||||||
|
|
||||||
def get_color_palette(colors_needed: int) -> typing.List[RGB]:
|
def get_color_palette(colors_needed: int) -> list[RGB]:
|
||||||
colors = []
|
colors = []
|
||||||
# colors_needed +1 to prevent first and last color being too close to each other
|
# colors_needed +1 to prevent first and last color being too close to each other
|
||||||
colors_needed += 1
|
colors_needed += 1
|
||||||
@@ -47,8 +48,7 @@ def get_color_palette(colors_needed: int) -> typing.List[RGB]:
|
|||||||
return colors
|
return colors
|
||||||
|
|
||||||
|
|
||||||
def create_game_played_figure(all_games_data: typing.Dict[datetime.date, typing.Dict[str, int]],
|
def create_game_played_figure(all_games_data: dict[date, dict[str, int]], game: str, color: RGB) -> figure:
|
||||||
game: str, color: RGB) -> figure:
|
|
||||||
occurences = []
|
occurences = []
|
||||||
days = [day for day, game_data in all_games_data.items() if game_data[game]]
|
days = [day for day, game_data in all_games_data.items() if game_data[game]]
|
||||||
for day in days:
|
for day in days:
|
||||||
@@ -84,7 +84,7 @@ def stats():
|
|||||||
days = sorted(games_played)
|
days = sorted(games_played)
|
||||||
|
|
||||||
color_palette = get_color_palette(len(total_games))
|
color_palette = get_color_palette(len(total_games))
|
||||||
game_to_color: typing.Dict[str, RGB] = {game: color for game, color in zip(total_games, color_palette)}
|
game_to_color: dict[str, RGB] = {game: color for game, color in zip(total_games, color_palette)}
|
||||||
|
|
||||||
for game in sorted(total_games):
|
for game in sorted(total_games):
|
||||||
occurences = []
|
occurences = []
|
||||||
|
|||||||
@@ -1,17 +0,0 @@
|
|||||||
{% extends 'pageWrapper.html' %}
|
|
||||||
|
|
||||||
{% block head %}
|
|
||||||
<title>{{ game }} Info</title>
|
|
||||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/markdown.css") }}" />
|
|
||||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/showdown/1.9.1/showdown.min.js"
|
|
||||||
integrity="sha512-L03kznCrNOfVxOUovR6ESfCz9Gfny7gihUX/huVbQB9zjODtYpxaVtIaAkpetoiyV2eqWbvxMH9fiSv5enX7bw=="
|
|
||||||
crossorigin="anonymous"></script>
|
|
||||||
<script type="application/ecmascript" src="{{ url_for('static', filename="assets/gameInfo.js") }}"></script>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block body %}
|
|
||||||
{% include 'header/'+theme+'Header.html' %}
|
|
||||||
<div id="game-info" class="markdown" data-lang="{{ lang }}" data-game="{{ game | get_file_safe_name }}">
|
|
||||||
<!-- Populated my JS / MD -->
|
|
||||||
</div>
|
|
||||||
{% endblock %}
|
|
||||||
@@ -26,15 +26,15 @@
|
|||||||
<td>{{ patch.game }}</td>
|
<td>{{ patch.game }}</td>
|
||||||
<td>
|
<td>
|
||||||
{% if patch.data %}
|
{% if patch.data %}
|
||||||
{% if patch.game == "Minecraft" %}
|
{% if patch.game == "VVVVVV" and room.seed.slots|length == 1 %}
|
||||||
<a href="{{ url_for("download_slot_file", room_id=room.id, player_id=patch.player_id) }}" download>
|
|
||||||
Download APMC File...</a>
|
|
||||||
{% elif patch.game == "VVVVVV" and room.seed.slots|length == 1 %}
|
|
||||||
<a href="{{ url_for("download_slot_file", room_id=room.id, player_id=patch.player_id) }}" download>
|
<a href="{{ url_for("download_slot_file", room_id=room.id, player_id=patch.player_id) }}" download>
|
||||||
Download APV6 File...</a>
|
Download APV6 File...</a>
|
||||||
{% elif patch.game == "Super Mario 64" and room.seed.slots|length == 1 %}
|
{% elif patch.game == "Super Mario 64" and room.seed.slots|length == 1 %}
|
||||||
<a href="{{ url_for("download_slot_file", room_id=room.id, player_id=patch.player_id) }}" download>
|
<a href="{{ url_for("download_slot_file", room_id=room.id, player_id=patch.player_id) }}" download>
|
||||||
Download APSM64EX File...</a>
|
Download APSM64EX File...</a>
|
||||||
|
{% elif patch.game == "Factorio" %}
|
||||||
|
<a href="{{ url_for("download_slot_file", room_id=room.id, player_id=patch.player_id) }}" download>
|
||||||
|
Download Factorio Mod...</a>
|
||||||
{% elif patch.game | is_applayercontainer(patch.data, patch.player_id) %}
|
{% elif patch.game | is_applayercontainer(patch.data, patch.player_id) %}
|
||||||
<a href="{{ url_for("download_patch", patch_id=patch.id, room_id=room.id) }}" download>
|
<a href="{{ url_for("download_patch", patch_id=patch.id, room_id=room.id) }}" download>
|
||||||
Download Patch File...</a>
|
Download Patch File...</a>
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
{% extends 'pageWrapper.html' %}
|
{% extends 'pageWrapper.html' %}
|
||||||
|
|
||||||
{% block head %}
|
{% block head %}
|
||||||
{% include 'header/grassHeader.html' %}
|
{% set theme_name = theme|default("grass", true) %}
|
||||||
|
{% include "header/"+theme_name+"Header.html" %}
|
||||||
<title>{{ title }}</title>
|
<title>{{ title }}</title>
|
||||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/markdown.css") }}" />
|
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/markdown.css") }}" />
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -11,32 +11,32 @@
|
|||||||
<h1>Site Map</h1>
|
<h1>Site Map</h1>
|
||||||
<h2>Base Pages</h2>
|
<h2>Base Pages</h2>
|
||||||
<ul>
|
<ul>
|
||||||
<li><a href="/discord">Discord Link</a></li>
|
<li><a href="{{ url_for('discord') }}">Discord Link</a></li>
|
||||||
<li><a href="/faq/en">F.A.Q. Page</a></li>
|
<li><a href="{{ url_for('faq', lang='en') }}">F.A.Q. Page</a></li>
|
||||||
<li><a href="/favicon.ico">Favicon</a></li>
|
<li><a href="{{ url_for('favicon') }}">Favicon</a></li>
|
||||||
<li><a href="/generate">Generate Game Page</a></li>
|
<li><a href="{{ url_for('generate') }}">Generate Game Page</a></li>
|
||||||
<li><a href="/">Homepage</a></li>
|
<li><a href="{{ url_for('landing') }}">Homepage</a></li>
|
||||||
<li><a href="/uploads">Host Game Page</a></li>
|
<li><a href="{{ url_for('uploads') }}">Host Game Page</a></li>
|
||||||
<li><a href="/datapackage">Raw Data Package</a></li>
|
<li><a href="{{ url_for('get_datapackage') }}">Raw Data Package</a></li>
|
||||||
<li><a href="{{ url_for('check')}}">Settings Validator</a></li>
|
<li><a href="{{ url_for('check') }}">Settings Validator</a></li>
|
||||||
<li><a href="/sitemap">Site Map</a></li>
|
<li><a href="{{ url_for('get_sitemap') }}">Site Map</a></li>
|
||||||
<li><a href="/start-playing">Start Playing</a></li>
|
<li><a href="{{ url_for('start_playing') }}">Start Playing</a></li>
|
||||||
<li><a href="/games">Supported Games Page</a></li>
|
<li><a href="{{ url_for('games') }}">Supported Games Page</a></li>
|
||||||
<li><a href="/tutorial">Tutorials Page</a></li>
|
<li><a href="{{ url_for('tutorial_landing') }}">Tutorials Page</a></li>
|
||||||
<li><a href="/user-content">User Content</a></li>
|
<li><a href="{{ url_for('user_content') }}">User Content</a></li>
|
||||||
<li><a href="{{url_for('stats')}}">Game Statistics</a></li>
|
<li><a href="{{ url_for('stats') }}">Game Statistics</a></li>
|
||||||
<li><a href="/glossary/en">Glossary</a></li>
|
<li><a href="{{ url_for('glossary', lang='en') }}">Glossary</a></li>
|
||||||
<li><a href="{{url_for("show_session")}}">Session / Login</a></li>
|
<li><a href="{{ url_for('show_session') }}">Session / Login</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
<h2>Tutorials</h2>
|
<h2>Tutorials</h2>
|
||||||
<ul>
|
<ul>
|
||||||
<li><a href="/tutorial/Archipelago/setup/en">Multiworld Setup Tutorial</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='setup_en') }}">Multiworld Setup Tutorial</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/mac/en">Setup Guide for Mac</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='mac_en') }}">Setup Guide for Mac</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/commands/en">Server and Client Commands</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='commands_en') }}">Server and Client Commands</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/advanced_settings/en">Advanced YAML Guide</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='advanced_settings_en') }}">Advanced YAML Guide</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/triggers/en">Triggers Guide</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='triggers_en') }}">Triggers Guide</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/plando/en">Plando Guide</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='plando_en') }}">Plando Guide</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
<h2>Game Info Pages</h2>
|
<h2>Game Info Pages</h2>
|
||||||
|
|||||||
@@ -1,84 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<title>{{ player_name }}'s Tracker</title>
|
|
||||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='styles/minecraftTracker.css') }}"/>
|
|
||||||
<script type="application/ecmascript" src="{{ url_for('static', filename='assets/minecraftTracker.js') }}"></script>
|
|
||||||
<link rel="stylesheet" media="screen" href="https://fontlibrary.org//face/minecraftia" type="text/css"/>
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
{# TODO: Replace this with a proper wrapper for each tracker when developing TrackerAPI. #}
|
|
||||||
<div style="margin-bottom: 0.5rem">
|
|
||||||
<a href="{{ url_for("get_generic_game_tracker", tracker=room.tracker, tracked_team=team, tracked_player=player) }}">Switch To Generic Tracker</a>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div id="player-tracker-wrapper" data-tracker="{{ room.tracker|suuid }}">
|
|
||||||
<table id="inventory-table">
|
|
||||||
<tr>
|
|
||||||
<td><img src="{{ tools_url }}" class="{{ 'acquired' }}" title="Progressive Tools" /></td>
|
|
||||||
<td><img src="{{ weapons_url }}" class="{{ 'acquired' }}" title="Progressive Weapons" /></td>
|
|
||||||
<td><img src="{{ armor_url }}" class="{{ 'acquired' }}" title="Progressive Armor" /></td>
|
|
||||||
<td><img src="{{ resource_crafting_url }}" class="{{ 'acquired' if 'Progressive Resource Crafting' in acquired_items }}"
|
|
||||||
title="Progressive Resource Crafting" /></td>
|
|
||||||
<td><img src="{{ icons['Brewing Stand'] }}" class="{{ 'acquired' if 'Brewing' in acquired_items }}" title="Brewing" /></td>
|
|
||||||
<td>
|
|
||||||
<div class="counted-item">
|
|
||||||
<img src="{{ icons['Ender Pearl'] }}" class="{{ 'acquired' if '3 Ender Pearls' in acquired_items }}" title="Ender Pearls" />
|
|
||||||
<div class="item-count">{{ pearls_count }}</div>
|
|
||||||
</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><img src="{{ icons['Bucket'] }}" class="{{ 'acquired' if 'Bucket' in acquired_items }}" title="Bucket" /></td>
|
|
||||||
<td><img src="{{ icons['Bow'] }}" class="{{ 'acquired' if 'Archery' in acquired_items }}" title="Archery" /></td>
|
|
||||||
<td><img src="{{ icons['Shield'] }}" class="{{ 'acquired' if 'Shield' in acquired_items }}" title="Shield" /></td>
|
|
||||||
<td><img src="{{ icons['Red Bed'] }}" class="{{ 'acquired' if 'Bed' in acquired_items }}" title="Bed" /></td>
|
|
||||||
<td><img src="{{ icons['Water Bottle'] }}" class="{{ 'acquired' if 'Bottles' in acquired_items }}" title="Bottles" /></td>
|
|
||||||
<td>
|
|
||||||
<div class="counted-item">
|
|
||||||
<img src="{{ icons['Netherite Scrap'] }}" class="{{ 'acquired' if '8 Netherite Scrap' in acquired_items }}" title="Netherite Scrap" />
|
|
||||||
<div class="item-count">{{ scrap_count }}</div>
|
|
||||||
</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><img src="{{ icons['Flint and Steel'] }}" class="{{ 'acquired' if 'Flint and Steel' in acquired_items }}" title="Flint and Steel" /></td>
|
|
||||||
<td><img src="{{ icons['Enchanting Table'] }}" class="{{ 'acquired' if 'Enchanting' in acquired_items }}" title="Enchanting" /></td>
|
|
||||||
<td><img src="{{ icons['Fishing Rod'] }}" class="{{ 'acquired' if 'Fishing Rod' in acquired_items }}" title="Fishing Rod" /></td>
|
|
||||||
<td><img src="{{ icons['Campfire'] }}" class="{{ 'acquired' if 'Campfire' in acquired_items }}" title="Campfire" /></td>
|
|
||||||
<td><img src="{{ icons['Spyglass'] }}" class="{{ 'acquired' if 'Spyglass' in acquired_items }}" title="Spyglass" /></td>
|
|
||||||
<td>
|
|
||||||
<div class="counted-item">
|
|
||||||
<img src="{{ icons['Dragon Egg Shard'] }}" class="{{ 'acquired' if 'Dragon Egg Shard' in acquired_items }}" title="Dragon Egg Shard" />
|
|
||||||
<div class="item-count">{{ shard_count }}</div>
|
|
||||||
</div>
|
|
||||||
</td>
|
|
||||||
</tr>
|
|
||||||
<tr>
|
|
||||||
<td><img src="{{ icons['Lead'] }}" class="{{ 'acquired' if 'Lead' in acquired_items }}" title="Lead" /></td>
|
|
||||||
<td><img src="{{ icons['Saddle'] }}" class="{{ 'acquired' if 'Saddle' in acquired_items }}" title="Saddle" /></td>
|
|
||||||
<td><img src="{{ icons['Channeling Book'] }}" class="{{ 'acquired' if 'Channeling Book' in acquired_items }}" title="Channeling Book" /></td>
|
|
||||||
<td><img src="{{ icons['Silk Touch Book'] }}" class="{{ 'acquired' if 'Silk Touch Book' in acquired_items }}" title="Silk Touch Book" /></td>
|
|
||||||
<td><img src="{{ icons['Piercing IV Book'] }}" class="{{ 'acquired' if 'Piercing IV Book' in acquired_items }}" title="Piercing IV Book" /></td>
|
|
||||||
</tr>
|
|
||||||
</table>
|
|
||||||
<table id="location-table">
|
|
||||||
{% for area in checks_done %}
|
|
||||||
<tr class="location-category" id="{{area}}-header">
|
|
||||||
<td>{{ area }} {{'▼' if area != 'Total'}}</td>
|
|
||||||
<td class="counter">{{ checks_done[area] }} / {{ checks_in_area[area] }}</td>
|
|
||||||
</tr>
|
|
||||||
<tbody class="locations hide" id="{{area}}">
|
|
||||||
{% for location in location_info[area] %}
|
|
||||||
<tr>
|
|
||||||
<td class="location-name">{{ location }}</td>
|
|
||||||
<td class="counter">{{ '✔' if location_info[area][location] else '' }}</td>
|
|
||||||
</tr>
|
|
||||||
{% endfor %}
|
|
||||||
</tbody>
|
|
||||||
{% endfor %}
|
|
||||||
</table>
|
|
||||||
</div>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
@@ -1,17 +0,0 @@
|
|||||||
{% extends 'pageWrapper.html' %}
|
|
||||||
|
|
||||||
{% block head %}
|
|
||||||
{% include 'header/'+theme+'Header.html' %}
|
|
||||||
<title>Archipelago</title>
|
|
||||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/markdown.css") }}" />
|
|
||||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/showdown/1.9.1/showdown.min.js"
|
|
||||||
integrity="sha512-L03kznCrNOfVxOUovR6ESfCz9Gfny7gihUX/huVbQB9zjODtYpxaVtIaAkpetoiyV2eqWbvxMH9fiSv5enX7bw=="
|
|
||||||
crossorigin="anonymous"></script>
|
|
||||||
<script type="application/ecmascript" src="{{ url_for('static', filename="assets/tutorial.js") }}"></script>
|
|
||||||
{% endblock %}
|
|
||||||
|
|
||||||
{% block body %}
|
|
||||||
<div id="tutorial-wrapper" class="markdown" data-game="{{ game | get_file_safe_name }}" data-file="{{ file | get_file_safe_name }}" data-lang="{{ lang }}">
|
|
||||||
<!-- Content generated by JavaScript -->
|
|
||||||
</div>
|
|
||||||
{% endblock %}
|
|
||||||
@@ -3,14 +3,32 @@
|
|||||||
{% block head %}
|
{% block head %}
|
||||||
{% include 'header/grassHeader.html' %}
|
{% include 'header/grassHeader.html' %}
|
||||||
<title>Archipelago Guides</title>
|
<title>Archipelago Guides</title>
|
||||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/markdown.css") }}" />
|
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/markdown.css") }}"/>
|
||||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/tutorialLanding.css") }}" />
|
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/tutorialLanding.css") }}"/>
|
||||||
<script type="application/ecmascript" src="{{ url_for('static', filename="assets/tutorialLanding.js") }}"></script>
|
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block body %}
|
{% block body %}
|
||||||
<div id="tutorial-landing" class="markdown" data-game="{{ game }}" data-file="{{ file }}" data-lang="{{ lang }}">
|
<div id="tutorial-landing" class="markdown">
|
||||||
<h1 id="page-title">Archipelago Guides</h1>
|
<h1>Archipelago Guides</h1>
|
||||||
<p id="loading">Loading...</p>
|
{% for world_name, world_type in worlds.items() %}
|
||||||
|
<h2 id="{{ world_type.game | urlencode }}">{{ world_type.game }}</h2>
|
||||||
|
{% for tutorial_name, tutorial_data in tutorials[world_name].items() %}
|
||||||
|
<h3>{{ tutorial_name }}</h3>
|
||||||
|
<p>{{ tutorial_data.description }}</p>
|
||||||
|
<p>This guide is available in the following languages:</p>
|
||||||
|
<ul>
|
||||||
|
{% for file_name, file_data in tutorial_data.files.items() %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ url_for("tutorial", game=world_name, file=file_name) }}">{{ file_data.language }}</a>
|
||||||
|
by
|
||||||
|
{% for author in file_data.authors %}
|
||||||
|
{{ author }}
|
||||||
|
{% if not loop.last %}, {% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
</li>
|
||||||
|
{% endfor %}
|
||||||
|
</ul>
|
||||||
|
{% endfor %}
|
||||||
|
{% endfor %}
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -706,127 +706,6 @@ if "A Link to the Past" in network_data_package["games"]:
|
|||||||
_multiworld_trackers["A Link to the Past"] = render_ALinkToThePast_multiworld_tracker
|
_multiworld_trackers["A Link to the Past"] = render_ALinkToThePast_multiworld_tracker
|
||||||
_player_trackers["A Link to the Past"] = render_ALinkToThePast_tracker
|
_player_trackers["A Link to the Past"] = render_ALinkToThePast_tracker
|
||||||
|
|
||||||
if "Minecraft" in network_data_package["games"]:
|
|
||||||
def render_Minecraft_tracker(tracker_data: TrackerData, team: int, player: int) -> str:
|
|
||||||
icons = {
|
|
||||||
"Wooden Pickaxe": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/d/d2/Wooden_Pickaxe_JE3_BE3.png",
|
|
||||||
"Stone Pickaxe": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/c/c4/Stone_Pickaxe_JE2_BE2.png",
|
|
||||||
"Iron Pickaxe": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/d/d1/Iron_Pickaxe_JE3_BE2.png",
|
|
||||||
"Diamond Pickaxe": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/e/e7/Diamond_Pickaxe_JE3_BE3.png",
|
|
||||||
"Wooden Sword": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/d/d5/Wooden_Sword_JE2_BE2.png",
|
|
||||||
"Stone Sword": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/b/b1/Stone_Sword_JE2_BE2.png",
|
|
||||||
"Iron Sword": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/8/8e/Iron_Sword_JE2_BE2.png",
|
|
||||||
"Diamond Sword": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/4/44/Diamond_Sword_JE3_BE3.png",
|
|
||||||
"Leather Tunic": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/b/b7/Leather_Tunic_JE4_BE2.png",
|
|
||||||
"Iron Chestplate": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/3/31/Iron_Chestplate_JE2_BE2.png",
|
|
||||||
"Diamond Chestplate": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/e/e0/Diamond_Chestplate_JE3_BE2.png",
|
|
||||||
"Iron Ingot": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/f/fc/Iron_Ingot_JE3_BE2.png",
|
|
||||||
"Block of Iron": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/7/7e/Block_of_Iron_JE4_BE3.png",
|
|
||||||
"Brewing Stand": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/b/b3/Brewing_Stand_%28empty%29_JE10.png",
|
|
||||||
"Ender Pearl": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/f/f6/Ender_Pearl_JE3_BE2.png",
|
|
||||||
"Bucket": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/f/fc/Bucket_JE2_BE2.png",
|
|
||||||
"Bow": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/a/ab/Bow_%28Pull_2%29_JE1_BE1.png",
|
|
||||||
"Shield": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/c/c6/Shield_JE2_BE1.png",
|
|
||||||
"Red Bed": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/6/6a/Red_Bed_%28N%29.png",
|
|
||||||
"Netherite Scrap": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/3/33/Netherite_Scrap_JE2_BE1.png",
|
|
||||||
"Flint and Steel": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/9/94/Flint_and_Steel_JE4_BE2.png",
|
|
||||||
"Enchanting Table": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/3/31/Enchanting_Table.gif",
|
|
||||||
"Fishing Rod": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/7/7f/Fishing_Rod_JE2_BE2.png",
|
|
||||||
"Campfire": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/9/91/Campfire_JE2_BE2.gif",
|
|
||||||
"Water Bottle": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/7/75/Water_Bottle_JE2_BE2.png",
|
|
||||||
"Spyglass": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/c/c1/Spyglass_JE2_BE1.png",
|
|
||||||
"Dragon Egg Shard": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/3/38/Dragon_Egg_JE4.png",
|
|
||||||
"Lead": "https://static.wikia.nocookie.net/minecraft_gamepedia/images/1/1f/Lead_JE2_BE2.png",
|
|
||||||
"Saddle": "https://i.imgur.com/2QtDyR0.png",
|
|
||||||
"Channeling Book": "https://i.imgur.com/J3WsYZw.png",
|
|
||||||
"Silk Touch Book": "https://i.imgur.com/iqERxHQ.png",
|
|
||||||
"Piercing IV Book": "https://i.imgur.com/OzJptGz.png",
|
|
||||||
}
|
|
||||||
|
|
||||||
minecraft_location_ids = {
|
|
||||||
"Story": [42073, 42023, 42027, 42039, 42002, 42009, 42010, 42070,
|
|
||||||
42041, 42049, 42004, 42031, 42025, 42029, 42051, 42077],
|
|
||||||
"Nether": [42017, 42044, 42069, 42058, 42034, 42060, 42066, 42076, 42064, 42071, 42021,
|
|
||||||
42062, 42008, 42061, 42033, 42011, 42006, 42019, 42000, 42040, 42001, 42015, 42104, 42014],
|
|
||||||
"The End": [42052, 42005, 42012, 42032, 42030, 42042, 42018, 42038, 42046],
|
|
||||||
"Adventure": [42047, 42050, 42096, 42097, 42098, 42059, 42055, 42072, 42003, 42109, 42035, 42016, 42020,
|
|
||||||
42048, 42054, 42068, 42043, 42106, 42074, 42075, 42024, 42026, 42037, 42045, 42056, 42105,
|
|
||||||
42099, 42103, 42110, 42100],
|
|
||||||
"Husbandry": [42065, 42067, 42078, 42022, 42113, 42107, 42007, 42079, 42013, 42028, 42036, 42108, 42111,
|
|
||||||
42112,
|
|
||||||
42057, 42063, 42053, 42102, 42101, 42092, 42093, 42094, 42095],
|
|
||||||
"Archipelago": [42080, 42081, 42082, 42083, 42084, 42085, 42086, 42087, 42088, 42089, 42090, 42091],
|
|
||||||
}
|
|
||||||
|
|
||||||
display_data = {}
|
|
||||||
|
|
||||||
# Determine display for progressive items
|
|
||||||
progressive_items = {
|
|
||||||
"Progressive Tools": 45013,
|
|
||||||
"Progressive Weapons": 45012,
|
|
||||||
"Progressive Armor": 45014,
|
|
||||||
"Progressive Resource Crafting": 45001
|
|
||||||
}
|
|
||||||
progressive_names = {
|
|
||||||
"Progressive Tools": ["Wooden Pickaxe", "Stone Pickaxe", "Iron Pickaxe", "Diamond Pickaxe"],
|
|
||||||
"Progressive Weapons": ["Wooden Sword", "Stone Sword", "Iron Sword", "Diamond Sword"],
|
|
||||||
"Progressive Armor": ["Leather Tunic", "Iron Chestplate", "Diamond Chestplate"],
|
|
||||||
"Progressive Resource Crafting": ["Iron Ingot", "Iron Ingot", "Block of Iron"]
|
|
||||||
}
|
|
||||||
|
|
||||||
inventory = tracker_data.get_player_inventory_counts(team, player)
|
|
||||||
for item_name, item_id in progressive_items.items():
|
|
||||||
level = min(inventory[item_id], len(progressive_names[item_name]) - 1)
|
|
||||||
display_name = progressive_names[item_name][level]
|
|
||||||
base_name = item_name.split(maxsplit=1)[1].lower().replace(" ", "_")
|
|
||||||
display_data[base_name + "_url"] = icons[display_name]
|
|
||||||
|
|
||||||
# Multi-items
|
|
||||||
multi_items = {
|
|
||||||
"3 Ender Pearls": 45029,
|
|
||||||
"8 Netherite Scrap": 45015,
|
|
||||||
"Dragon Egg Shard": 45043
|
|
||||||
}
|
|
||||||
for item_name, item_id in multi_items.items():
|
|
||||||
base_name = item_name.split()[-1].lower()
|
|
||||||
count = inventory[item_id]
|
|
||||||
if count >= 0:
|
|
||||||
display_data[base_name + "_count"] = count
|
|
||||||
|
|
||||||
# Victory condition
|
|
||||||
game_state = tracker_data.get_player_client_status(team, player)
|
|
||||||
display_data["game_finished"] = game_state == 30
|
|
||||||
|
|
||||||
# Turn location IDs into advancement tab counts
|
|
||||||
checked_locations = tracker_data.get_player_checked_locations(team, player)
|
|
||||||
lookup_name = lambda id: tracker_data.location_id_to_name["Minecraft"][id]
|
|
||||||
location_info = {tab_name: {lookup_name(id): (id in checked_locations) for id in tab_locations}
|
|
||||||
for tab_name, tab_locations in minecraft_location_ids.items()}
|
|
||||||
checks_done = {tab_name: len([id for id in tab_locations if id in checked_locations])
|
|
||||||
for tab_name, tab_locations in minecraft_location_ids.items()}
|
|
||||||
checks_done["Total"] = len(checked_locations)
|
|
||||||
checks_in_area = {tab_name: len(tab_locations) for tab_name, tab_locations in minecraft_location_ids.items()}
|
|
||||||
checks_in_area["Total"] = sum(checks_in_area.values())
|
|
||||||
|
|
||||||
lookup_any_item_id_to_name = tracker_data.item_id_to_name["Minecraft"]
|
|
||||||
return render_template(
|
|
||||||
"tracker__Minecraft.html",
|
|
||||||
inventory=inventory,
|
|
||||||
icons=icons,
|
|
||||||
acquired_items={lookup_any_item_id_to_name[id] for id, count in inventory.items() if count > 0},
|
|
||||||
player=player,
|
|
||||||
team=team,
|
|
||||||
room=tracker_data.room,
|
|
||||||
player_name=tracker_data.get_player_name(team, player),
|
|
||||||
saving_second=tracker_data.get_room_saving_second(),
|
|
||||||
checks_done=checks_done,
|
|
||||||
checks_in_area=checks_in_area,
|
|
||||||
location_info=location_info,
|
|
||||||
**display_data,
|
|
||||||
)
|
|
||||||
|
|
||||||
_player_trackers["Minecraft"] = render_Minecraft_tracker
|
|
||||||
|
|
||||||
if "Ocarina of Time" in network_data_package["games"]:
|
if "Ocarina of Time" in network_data_package["games"]:
|
||||||
def render_OcarinaOfTime_tracker(tracker_data: TrackerData, team: int, player: int) -> str:
|
def render_OcarinaOfTime_tracker(tracker_data: TrackerData, team: int, player: int) -> str:
|
||||||
icons = {
|
icons = {
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import base64
|
|
||||||
import json
|
import json
|
||||||
import pickle
|
import pickle
|
||||||
import typing
|
import typing
|
||||||
@@ -14,9 +13,8 @@ from pony.orm.core import TransactionIntegrityError
|
|||||||
import schema
|
import schema
|
||||||
|
|
||||||
import MultiServer
|
import MultiServer
|
||||||
from NetUtils import SlotType
|
from NetUtils import GamesPackage, SlotType
|
||||||
from Utils import VersionException, __version__
|
from Utils import VersionException, __version__
|
||||||
from worlds import GamesPackage
|
|
||||||
from worlds.Files import AutoPatchRegister
|
from worlds.Files import AutoPatchRegister
|
||||||
from worlds.AutoWorld import data_package_checksum
|
from worlds.AutoWorld import data_package_checksum
|
||||||
from . import app
|
from . import app
|
||||||
@@ -135,11 +133,6 @@ def upload_zip_to_db(zfile: zipfile.ZipFile, owner=None, meta={"race": False}, s
|
|||||||
flash("Could not load multidata. File may be corrupted or incompatible.")
|
flash("Could not load multidata. File may be corrupted or incompatible.")
|
||||||
multidata = None
|
multidata = None
|
||||||
|
|
||||||
# Minecraft
|
|
||||||
elif file.filename.endswith(".apmc"):
|
|
||||||
data = zfile.open(file, "r").read()
|
|
||||||
metadata = json.loads(base64.b64decode(data).decode("utf-8"))
|
|
||||||
files[metadata["player_id"]] = data
|
|
||||||
|
|
||||||
# Factorio
|
# Factorio
|
||||||
elif file.filename.endswith(".zip"):
|
elif file.filename.endswith(".zip"):
|
||||||
|
|||||||
@@ -333,6 +333,7 @@ async def nes_sync_task(ctx: ZeldaContext):
|
|||||||
except ConnectionRefusedError:
|
except ConnectionRefusedError:
|
||||||
logger.debug("Connection Refused, Trying Again")
|
logger.debug("Connection Refused, Trying Again")
|
||||||
ctx.nes_status = CONNECTION_REFUSED_STATUS
|
ctx.nes_status = CONNECTION_REFUSED_STATUS
|
||||||
|
await asyncio.sleep(1)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -24,9 +24,20 @@
|
|||||||
<BaseButton>:
|
<BaseButton>:
|
||||||
ripple_color: app.theme_cls.primaryColor
|
ripple_color: app.theme_cls.primaryColor
|
||||||
ripple_duration_in_fast: 0.2
|
ripple_duration_in_fast: 0.2
|
||||||
<MDTabsItemBase>:
|
<MDNavigationItemBase>:
|
||||||
ripple_color: app.theme_cls.primaryColor
|
on_release: app.screens.switch_screens(self)
|
||||||
ripple_duration_in_fast: 0.2
|
|
||||||
|
MDNavigationItemLabel:
|
||||||
|
text: root.text
|
||||||
|
theme_text_color: "Custom"
|
||||||
|
text_color_active: self.theme_cls.primaryColor
|
||||||
|
text_color_normal: 1, 1, 1, 1
|
||||||
|
# indicator is on icon only for some reason
|
||||||
|
canvas.before:
|
||||||
|
Color:
|
||||||
|
rgba: self.theme_cls.secondaryContainerColor if root.active else self.theme_cls.transparentColor
|
||||||
|
Rectangle:
|
||||||
|
size: root.size
|
||||||
<TooltipLabel>:
|
<TooltipLabel>:
|
||||||
adaptive_height: True
|
adaptive_height: True
|
||||||
theme_font_size: "Custom"
|
theme_font_size: "Custom"
|
||||||
|
|||||||
@@ -477,7 +477,7 @@ function main()
|
|||||||
elseif (curstate == STATE_UNINITIALIZED) then
|
elseif (curstate == STATE_UNINITIALIZED) then
|
||||||
-- If we're uninitialized, attempt to make the connection.
|
-- If we're uninitialized, attempt to make the connection.
|
||||||
if (frame % 120 == 0) then
|
if (frame % 120 == 0) then
|
||||||
server:settimeout(2)
|
server:settimeout(120)
|
||||||
local client, timeout = server:accept()
|
local client, timeout = server:accept()
|
||||||
if timeout == nil then
|
if timeout == nil then
|
||||||
print('Initial Connection Made')
|
print('Initial Connection Made')
|
||||||
|
|||||||
BIN
data/mcicon.ico
BIN
data/mcicon.ico
Binary file not shown.
|
Before Width: | Height: | Size: 2.6 KiB |
@@ -46,7 +46,9 @@ requires:
|
|||||||
|
|
||||||
{{ yaml_dump(game) }}:
|
{{ yaml_dump(game) }}:
|
||||||
{%- for group_name, group_options in option_groups.items() %}
|
{%- for group_name, group_options in option_groups.items() %}
|
||||||
# {{ group_name }}
|
##{% for _ in group_name %}#{% endfor %}##
|
||||||
|
# {{ group_name }} #
|
||||||
|
##{% for _ in group_name %}#{% endfor %}##
|
||||||
|
|
||||||
{%- for option_key, option in group_options.items() %}
|
{%- for option_key, option in group_options.items() %}
|
||||||
{{ option_key }}:
|
{{ option_key }}:
|
||||||
|
|||||||
61
deploy/docker-compose.yml
Normal file
61
deploy/docker-compose.yml
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
services:
|
||||||
|
multiworld:
|
||||||
|
# Build only once. Web service uses the same image build
|
||||||
|
build:
|
||||||
|
context: ..
|
||||||
|
# Name image for use in web service
|
||||||
|
image: archipelago-base
|
||||||
|
# Use locally-built image
|
||||||
|
pull_policy: never
|
||||||
|
# Launch main process without website hosting (config override)
|
||||||
|
entrypoint: python WebHost.py --config_override selflaunch.yaml
|
||||||
|
volumes:
|
||||||
|
# Mount application volume
|
||||||
|
- app_volume:/app
|
||||||
|
|
||||||
|
# Mount configs
|
||||||
|
- ./example_config.yaml:/app/config.yaml
|
||||||
|
- ./example_selflaunch.yaml:/app/selflaunch.yaml
|
||||||
|
|
||||||
|
# Expose on host network for access to dynamically mapped ports
|
||||||
|
network_mode: host
|
||||||
|
|
||||||
|
# No Healthcheck in place yet for multiworld
|
||||||
|
healthcheck:
|
||||||
|
test: ["NONE"]
|
||||||
|
web:
|
||||||
|
# Use image build by multiworld service
|
||||||
|
image: archipelago-base
|
||||||
|
# Use locally-built image
|
||||||
|
pull_policy: never
|
||||||
|
# Launch gunicorn targeting WebHost application
|
||||||
|
entrypoint: gunicorn -c gunicorn.conf.py
|
||||||
|
volumes:
|
||||||
|
# Mount application volume
|
||||||
|
- app_volume:/app
|
||||||
|
|
||||||
|
# Mount configs
|
||||||
|
- ./example_config.yaml:/app/config.yaml
|
||||||
|
- ./example_gunicorn.conf.py:/app/gunicorn.conf.py
|
||||||
|
environment:
|
||||||
|
# Bind gunicorn on 8000
|
||||||
|
- PORT=8000
|
||||||
|
|
||||||
|
nginx:
|
||||||
|
image: nginx:stable-alpine
|
||||||
|
volumes:
|
||||||
|
# Mount application volume
|
||||||
|
- app_volume:/app
|
||||||
|
|
||||||
|
# Mount config
|
||||||
|
- ./example_nginx.conf:/etc/nginx/nginx.conf
|
||||||
|
ports:
|
||||||
|
# Nginx listening internally on port 80 -- mapped to 8080 on host
|
||||||
|
- 8080:80
|
||||||
|
depends_on:
|
||||||
|
- web
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
# Share application directory amongst multiworld and web services
|
||||||
|
# (for access to log files and the like), and nginx (for static files)
|
||||||
|
app_volume:
|
||||||
10
deploy/example_config.yaml
Normal file
10
deploy/example_config.yaml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
# Refer to ../docs/webhost configuration sample.yaml
|
||||||
|
|
||||||
|
# We'll be hosting VIA gunicorn
|
||||||
|
SELFHOST: false
|
||||||
|
# We'll start a separate process for rooms and generators
|
||||||
|
SELFLAUNCH: false
|
||||||
|
|
||||||
|
# Host Address. This is the address encoded into the patch that will be used for client auto-connect.
|
||||||
|
# Set as your local IP (192.168.x.x) to serve over LAN.
|
||||||
|
HOST_ADDRESS: localhost
|
||||||
19
deploy/example_gunicorn.conf.py
Normal file
19
deploy/example_gunicorn.conf.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
workers = 2
|
||||||
|
threads = 2
|
||||||
|
wsgi_app = "WebHost:get_app()"
|
||||||
|
accesslog = "-"
|
||||||
|
access_log_format = (
|
||||||
|
'%({x-forwarded-for}i)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
|
||||||
|
)
|
||||||
|
worker_class = "gthread" # "sync" | "gthread"
|
||||||
|
forwarded_allow_ips = "*"
|
||||||
|
loglevel = "info"
|
||||||
|
|
||||||
|
"""
|
||||||
|
You can programatically set values.
|
||||||
|
For example, set number of workers to half of the cpu count:
|
||||||
|
|
||||||
|
import multiprocessing
|
||||||
|
|
||||||
|
workers = multiprocessing.cpu_count() / 2
|
||||||
|
"""
|
||||||
64
deploy/example_nginx.conf
Normal file
64
deploy/example_nginx.conf
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
worker_processes 1;
|
||||||
|
|
||||||
|
user nobody nogroup;
|
||||||
|
# 'user nobody nobody;' for systems with 'nobody' as a group instead
|
||||||
|
error_log /var/log/nginx/error.log warn;
|
||||||
|
pid /var/run/nginx.pid;
|
||||||
|
|
||||||
|
events {
|
||||||
|
worker_connections 1024; # increase if you have lots of clients
|
||||||
|
accept_mutex off; # set to 'on' if nginx worker_processes > 1
|
||||||
|
# 'use epoll;' to enable for Linux 2.6+
|
||||||
|
# 'use kqueue;' to enable for FreeBSD, OSX
|
||||||
|
use epoll;
|
||||||
|
}
|
||||||
|
|
||||||
|
http {
|
||||||
|
include mime.types;
|
||||||
|
# fallback in case we can't determine a type
|
||||||
|
default_type application/octet-stream;
|
||||||
|
access_log /var/log/nginx/access.log combined;
|
||||||
|
sendfile on;
|
||||||
|
|
||||||
|
upstream app_server {
|
||||||
|
# fail_timeout=0 means we always retry an upstream even if it failed
|
||||||
|
# to return a good HTTP response
|
||||||
|
|
||||||
|
# for UNIX domain socket setups
|
||||||
|
# server unix:/tmp/gunicorn.sock fail_timeout=0;
|
||||||
|
|
||||||
|
# for a TCP configuration
|
||||||
|
server web:8000 fail_timeout=0;
|
||||||
|
}
|
||||||
|
|
||||||
|
server {
|
||||||
|
# use 'listen 80 deferred;' for Linux
|
||||||
|
# use 'listen 80 accept_filter=httpready;' for FreeBSD
|
||||||
|
listen 80 deferred;
|
||||||
|
client_max_body_size 4G;
|
||||||
|
|
||||||
|
# set the correct host(s) for your site
|
||||||
|
# server_name example.com www.example.com;
|
||||||
|
|
||||||
|
keepalive_timeout 5;
|
||||||
|
|
||||||
|
# path for static files
|
||||||
|
root /app/WebHostLib;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
# checks for static file, if not found proxy to app
|
||||||
|
try_files $uri @proxy_to_app;
|
||||||
|
}
|
||||||
|
|
||||||
|
location @proxy_to_app {
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_set_header X-Forwarded-Proto $scheme;
|
||||||
|
proxy_set_header Host $http_host;
|
||||||
|
# we don't want nginx trying to do something clever with
|
||||||
|
# redirects, we set the Host: header above already.
|
||||||
|
proxy_redirect off;
|
||||||
|
|
||||||
|
proxy_pass http://app_server;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
13
deploy/example_selflaunch.yaml
Normal file
13
deploy/example_selflaunch.yaml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Refer to ../docs/webhost configuration sample.yaml
|
||||||
|
|
||||||
|
# We'll be hosting VIA gunicorn
|
||||||
|
SELFHOST: false
|
||||||
|
# Start room and generator processes
|
||||||
|
SELFLAUNCH: true
|
||||||
|
JOB_THRESHOLD: 0
|
||||||
|
|
||||||
|
# Maximum concurrent world gens
|
||||||
|
GENERATORS: 3
|
||||||
|
|
||||||
|
# Rooms will be spread across multiple processes
|
||||||
|
HOSTERS: 4
|
||||||
@@ -48,9 +48,6 @@
|
|||||||
# Civilization VI
|
# Civilization VI
|
||||||
/worlds/civ6/ @hesto2
|
/worlds/civ6/ @hesto2
|
||||||
|
|
||||||
# Clique
|
|
||||||
/worlds/clique/ @ThePhar
|
|
||||||
|
|
||||||
# Dark Souls III
|
# Dark Souls III
|
||||||
/worlds/dark_souls_3/ @Marechal-L @nex3
|
/worlds/dark_souls_3/ @Marechal-L @nex3
|
||||||
|
|
||||||
@@ -121,9 +118,6 @@
|
|||||||
# The Messenger
|
# The Messenger
|
||||||
/worlds/messenger/ @alwaysintreble
|
/worlds/messenger/ @alwaysintreble
|
||||||
|
|
||||||
# Minecraft
|
|
||||||
/worlds/minecraft/ @KonoTyran @espeon65536
|
|
||||||
|
|
||||||
# Mega Man 2
|
# Mega Man 2
|
||||||
/worlds/mm2/ @Silvris
|
/worlds/mm2/ @Silvris
|
||||||
|
|
||||||
@@ -142,6 +136,9 @@
|
|||||||
# Overcooked! 2
|
# Overcooked! 2
|
||||||
/worlds/overcooked2/ @toasterparty
|
/worlds/overcooked2/ @toasterparty
|
||||||
|
|
||||||
|
# Paint
|
||||||
|
/worlds/paint/ @MarioManTAW
|
||||||
|
|
||||||
# Pokemon Emerald
|
# Pokemon Emerald
|
||||||
/worlds/pokemon_emerald/ @Zunawe
|
/worlds/pokemon_emerald/ @Zunawe
|
||||||
|
|
||||||
@@ -151,9 +148,6 @@
|
|||||||
# Raft
|
# Raft
|
||||||
/worlds/raft/ @SunnyBat
|
/worlds/raft/ @SunnyBat
|
||||||
|
|
||||||
# Rogue Legacy
|
|
||||||
/worlds/rogue_legacy/ @ThePhar
|
|
||||||
|
|
||||||
# Risk of Rain 2
|
# Risk of Rain 2
|
||||||
/worlds/ror2/ @kindasneaki
|
/worlds/ror2/ @kindasneaki
|
||||||
|
|
||||||
@@ -206,7 +200,7 @@
|
|||||||
/worlds/timespinner/ @Jarno458
|
/worlds/timespinner/ @Jarno458
|
||||||
|
|
||||||
# The Legend of Zelda (1)
|
# The Legend of Zelda (1)
|
||||||
/worlds/tloz/ @Rosalie-A @t3hf1gm3nt
|
/worlds/tloz/ @Rosalie-A
|
||||||
|
|
||||||
# TUNIC
|
# TUNIC
|
||||||
/worlds/tunic/ @silent-destroyer @ScipioWright
|
/worlds/tunic/ @silent-destroyer @ScipioWright
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ game contributions:
|
|||||||
* **Do not introduce unit test failures/regressions.**
|
* **Do not introduce unit test failures/regressions.**
|
||||||
Archipelago supports multiple versions of Python. You may need to download older Python versions to fully test
|
Archipelago supports multiple versions of Python. You may need to download older Python versions to fully test
|
||||||
your changes. Currently, the oldest supported version
|
your changes. Currently, the oldest supported version
|
||||||
is [Python 3.10](https://www.python.org/downloads/release/python-31015/).
|
is [Python 3.11](https://www.python.org/downloads/release/python-31113/).
|
||||||
It is recommended that automated github actions are turned on in your fork to have github run unit tests after
|
It is recommended that automated github actions are turned on in your fork to have github run unit tests after
|
||||||
pushing.
|
pushing.
|
||||||
You can turn them on here:
|
You can turn them on here:
|
||||||
|
|||||||
92
docs/deploy using containers.md
Normal file
92
docs/deploy using containers.md
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
# Deploy Using Containers
|
||||||
|
|
||||||
|
If you just want to play and there is a compiled version available on the [Archipelago releases page](https://github.com/ArchipelagoMW/Archipelago/releases), use that version.
|
||||||
|
To build the full Archipelago software stack, refer to [Running From Source](running%20from%20source.md).
|
||||||
|
Follow these steps to build and deploy a containerized instance of the web host software, optionally integrating [Gunicorn](https://gunicorn.org/) WSGI HTTP Server running behind the [nginx](https://nginx.org/) reverse proxy.
|
||||||
|
|
||||||
|
|
||||||
|
## Building the Container Image
|
||||||
|
|
||||||
|
What you'll need:
|
||||||
|
* A container runtime engine such as:
|
||||||
|
* [Docker](https://www.docker.com/) (Version 23.0 or later)
|
||||||
|
* [Podman](https://podman.io/) (version 4.0 or later)
|
||||||
|
* For running with rootless podman, you need to ensure all ports used are usable rootless, by default ports less than 1024 are root only. See [the official tutorial](https://github.com/containers/podman/blob/main/docs/tutorials/rootless_tutorial.md) for details.
|
||||||
|
* The Docker Buildx plugin (for Docker), as the Dockerfile uses `$TARGETARCH` for architecture detection. Follow [Docker's guide](https://docs.docker.com/build/buildx/install/). Verify with `docker buildx version`.
|
||||||
|
|
||||||
|
Starting from the root repository directory, the standalone Archipelago image can be built and run with the command:
|
||||||
|
`docker build -t archipelago .`
|
||||||
|
Or:
|
||||||
|
`podman build -t archipelago .`
|
||||||
|
|
||||||
|
It is recommended to tag the image using `-t` to more easily identify the image and run it.
|
||||||
|
|
||||||
|
|
||||||
|
## Running the Container
|
||||||
|
|
||||||
|
Running the container can be performed using:
|
||||||
|
`docker run --network host archipelago`
|
||||||
|
Or:
|
||||||
|
`podman run --network host archipelago`
|
||||||
|
|
||||||
|
The Archipelago web host requires access to multiple ports in order to host game servers simultaneously. To simplify configuration for this purpose, specify `--network host`.
|
||||||
|
|
||||||
|
Given the default configuration, the website will be accessible at the hostname/IP address (localhost if run locally) of the machine being deployed to, at port 80. It can be configured by creating a YAML file and mapping a volume to the container when running initially:
|
||||||
|
`docker run archipelago --network host -v /path/to/config.yaml:/app/config.yaml`
|
||||||
|
See `docs/webhost configuration sample.yaml` for example.
|
||||||
|
|
||||||
|
|
||||||
|
## Using Docker Compose
|
||||||
|
|
||||||
|
An example [docker compose](../deploy/docker-compose.yml) file can be found in [deploy](../deploy), along with example configuration files used by the services it orchestrates. Using these files as-is will spin up two separate archipelago containers with special modifications to their runtime arguments, in addition to deploying an `nginx` reverse proxy container.
|
||||||
|
|
||||||
|
To deploy in this manner, from the ["deploy"](../deploy) directory, run:
|
||||||
|
`docker compose up -d`
|
||||||
|
|
||||||
|
### Services
|
||||||
|
|
||||||
|
The `docker-compose.yaml` file defines three services:
|
||||||
|
* multiworld:
|
||||||
|
* Executes the main `WebHost` process, using the [example config](../deploy/example_config.yaml), and overriding with a secondary [selflaunch example config](../deploy/example_selflaunch.yaml). This is because we do not want to launch the website through this service.
|
||||||
|
* web:
|
||||||
|
* Executes `gunicorn` using its [example config](../deploy/example_gunicorn.conf.py), which will bind it to the `WebHost` application, in effect launching it.
|
||||||
|
* We mount the main [config](../deploy/example_config.yaml) without an override to specify that we are launching the website through this service.
|
||||||
|
* No ports are exposed through to the host.
|
||||||
|
* nginx:
|
||||||
|
* Serves as a reverse proxy with `web` as its upstream.
|
||||||
|
* Directs all HTTP traffic from port 80 to the upstream service.
|
||||||
|
* Exposed to the host on port 8080. This is where we can reach the website.
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
|
||||||
|
As these are examples, they can be copied and modified. For instance setting the value of `HOST_ADDRESS` in [example config](../deploy/example_config.yaml) to host machines local IP address, will expose the service to its local area network.
|
||||||
|
|
||||||
|
The configuration files may be modified to handle for machine-specific optimizations, such as:
|
||||||
|
* Web pages responding too slowly
|
||||||
|
* Edit [the gunicorn config](../deploy/example_gunicorn.conf.py) to increase thread and/or worker count.
|
||||||
|
* Game generation stalls
|
||||||
|
* Increase the generator count in [selflaunch config](../deploy/example_selflaunch.yaml)
|
||||||
|
* Gameplay lags
|
||||||
|
* Increase the hoster count in [selflaunch config](../deploy/example_selflaunch.yaml)
|
||||||
|
|
||||||
|
Changes made to `docker-compose.yaml` can be applied by running `docker compose up -d`, while those made to other files are applied by running `docker compose restart`.
|
||||||
|
|
||||||
|
|
||||||
|
## Windows
|
||||||
|
|
||||||
|
It is possible to carry out these deployment steps on Windows under [Windows Subsystem for Linux](https://learn.microsoft.com/en-us/windows/wsl/install).
|
||||||
|
|
||||||
|
|
||||||
|
## Optional: A Link to the Past Enemizer
|
||||||
|
|
||||||
|
Only required to generate seeds that include A Link to the Past with certain options enabled. You will receive an
|
||||||
|
error if it is required.
|
||||||
|
Enemizer can be enabled on `x86_64` platform architecture, and is included in the image build process. Enemizer requires a version 1.0 Japanese "Zelda no Densetsu" `.sfc` rom file to be placed in the application directory:
|
||||||
|
`docker run archipelago -v "/path/to/zelda.sfc:/app/Zelda no Densetsu - Kamigami no Triforce (Japan).sfc"`.
|
||||||
|
Enemizer is not currently available for `aarch64`.
|
||||||
|
|
||||||
|
|
||||||
|
## Optional: Git
|
||||||
|
|
||||||
|
Building the image requires a local copy of the ArchipelagoMW source code.
|
||||||
|
Refer to [Running From Source](running%20from%20source.md#optional-git).
|
||||||
@@ -117,12 +117,6 @@ flowchart LR
|
|||||||
%% Java Based Games
|
%% Java Based Games
|
||||||
subgraph Java
|
subgraph Java
|
||||||
JM[Mod with Archipelago.MultiClient.Java]
|
JM[Mod with Archipelago.MultiClient.Java]
|
||||||
subgraph Minecraft
|
|
||||||
MCS[Minecraft Forge Server]
|
|
||||||
JMC[Any Java Minecraft Clients]
|
|
||||||
MCS <-- TCP --> JMC
|
|
||||||
end
|
|
||||||
JM <-- Forge Mod Loader --> MCS
|
|
||||||
end
|
end
|
||||||
AS <-- WebSockets --> JM
|
AS <-- WebSockets --> JM
|
||||||
|
|
||||||
@@ -131,10 +125,8 @@ flowchart LR
|
|||||||
NM[Mod with Archipelago.MultiClient.Net]
|
NM[Mod with Archipelago.MultiClient.Net]
|
||||||
subgraph FNA/XNA
|
subgraph FNA/XNA
|
||||||
TS[Timespinner]
|
TS[Timespinner]
|
||||||
RL[Rogue Legacy]
|
|
||||||
end
|
end
|
||||||
NM <-- TsRandomizer --> TS
|
NM <-- TsRandomizer --> TS
|
||||||
NM <-- RogueLegacyRandomizer --> RL
|
|
||||||
subgraph Unity
|
subgraph Unity
|
||||||
ROR[Risk of Rain 2]
|
ROR[Risk of Rain 2]
|
||||||
SN[Subnautica]
|
SN[Subnautica]
|
||||||
@@ -183,4 +175,4 @@ flowchart LR
|
|||||||
FMOD <--> FMAPI
|
FMOD <--> FMAPI
|
||||||
end
|
end
|
||||||
CC <-- Integrated --> FC
|
CC <-- Integrated --> FC
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -276,6 +276,7 @@ These packets are sent purely from client to server. They are not accepted by cl
|
|||||||
* [Sync](#Sync)
|
* [Sync](#Sync)
|
||||||
* [LocationChecks](#LocationChecks)
|
* [LocationChecks](#LocationChecks)
|
||||||
* [LocationScouts](#LocationScouts)
|
* [LocationScouts](#LocationScouts)
|
||||||
|
* [CreateHints](#CreateHints)
|
||||||
* [UpdateHint](#UpdateHint)
|
* [UpdateHint](#UpdateHint)
|
||||||
* [StatusUpdate](#StatusUpdate)
|
* [StatusUpdate](#StatusUpdate)
|
||||||
* [Say](#Say)
|
* [Say](#Say)
|
||||||
@@ -294,7 +295,7 @@ Sent by the client to initiate a connection to an Archipelago game session.
|
|||||||
| password | str | If the game session requires a password, it should be passed here. |
|
| password | str | If the game session requires a password, it should be passed here. |
|
||||||
| game | str | The name of the game the client is playing. Example: `A Link to the Past` |
|
| game | str | The name of the game the client is playing. Example: `A Link to the Past` |
|
||||||
| name | str | The player name for this client. |
|
| name | str | The player name for this client. |
|
||||||
| uuid | str | Unique identifier for player client. |
|
| uuid | str | Unique identifier for player. Cached in the user cache \Archipelago\Cache\common.json |
|
||||||
| version | [NetworkVersion](#NetworkVersion) | An object representing the Archipelago version this client supports. |
|
| version | [NetworkVersion](#NetworkVersion) | An object representing the Archipelago version this client supports. |
|
||||||
| items_handling | int | Flags configuring which items should be sent by the server. Read below for individual flags. |
|
| items_handling | int | Flags configuring which items should be sent by the server. Read below for individual flags. |
|
||||||
| tags | list\[str\] | Denotes special features or capabilities that the sender is capable of. [Tags](#Tags) |
|
| tags | list\[str\] | Denotes special features or capabilities that the sender is capable of. [Tags](#Tags) |
|
||||||
@@ -339,7 +340,8 @@ Sent to the server to retrieve the items that are on a specified list of locatio
|
|||||||
Fully remote clients without a patch file may use this to "place" items onto their in-game locations, most commonly to display their names or item classifications before/upon pickup.
|
Fully remote clients without a patch file may use this to "place" items onto their in-game locations, most commonly to display their names or item classifications before/upon pickup.
|
||||||
|
|
||||||
LocationScouts can also be used to inform the server of locations the client has seen, but not checked. This creates a hint as if the player had run `!hint_location` on a location, but without deducting hint points.
|
LocationScouts can also be used to inform the server of locations the client has seen, but not checked. This creates a hint as if the player had run `!hint_location` on a location, but without deducting hint points.
|
||||||
This is useful in cases where an item appears in the game world, such as 'ledge items' in _A Link to the Past_. To do this, set the `create_as_hint` parameter to a non-zero value.
|
This is useful in cases where an item appears in the game world, such as 'ledge items' in _A Link to the Past_. To do this, set the `create_as_hint` parameter to a non-zero value.
|
||||||
|
Note that LocationScouts with a non-zero `create_as_hint` value will _always_ create a **persistent** hint (listed in the Hints tab of concerning players' TextClients), even if the location was already found. If this is not desired behavior, you need to prevent sending LocationScouts with `create_as_hint` for already found locations in your client-side code.
|
||||||
|
|
||||||
#### Arguments
|
#### Arguments
|
||||||
| Name | Type | Notes |
|
| Name | Type | Notes |
|
||||||
@@ -347,6 +349,21 @@ This is useful in cases where an item appears in the game world, such as 'ledge
|
|||||||
| locations | list\[int\] | The ids of the locations seen by the client. May contain any number of locations, even ones sent before; duplicates do not cause issues with the Archipelago server. |
|
| locations | list\[int\] | The ids of the locations seen by the client. May contain any number of locations, even ones sent before; duplicates do not cause issues with the Archipelago server. |
|
||||||
| create_as_hint | int | If non-zero, the scouted locations get created and broadcasted as a player-visible hint. <br/>If 2 only new hints are broadcast, however this does not remove them from the LocationInfo reply. |
|
| create_as_hint | int | If non-zero, the scouted locations get created and broadcasted as a player-visible hint. <br/>If 2 only new hints are broadcast, however this does not remove them from the LocationInfo reply. |
|
||||||
|
|
||||||
|
### CreateHints
|
||||||
|
|
||||||
|
Sent to the server to create hints for a specified list of locations.
|
||||||
|
Hints that already exist will be silently skipped and their status will not be updated.
|
||||||
|
|
||||||
|
When creating hints for another slot's locations, the packet will fail if any of those locations don't contain items for the requesting slot.
|
||||||
|
When creating hints for your own slot's locations, non-existing locations will silently be skipped.
|
||||||
|
|
||||||
|
#### Arguments
|
||||||
|
| Name | Type | Notes |
|
||||||
|
| ---- | ---- | ----- |
|
||||||
|
| locations | list\[int\] | The ids of the locations to create hints for. |
|
||||||
|
| player | int | The ID of the player whose locations are being hinted for. Defaults to the requesting slot. |
|
||||||
|
| status | [HintStatus](#HintStatus) | If included, sets the status of the hint to this status. Defaults to `HINT_UNSPECIFIED`. Cannot set `HINT_FOUND`. |
|
||||||
|
|
||||||
### UpdateHint
|
### UpdateHint
|
||||||
Sent to the server to update the status of a Hint. The client must be the 'receiving_player' of the Hint, or the update fails.
|
Sent to the server to update the status of a Hint. The client must be the 'receiving_player' of the Hint, or the update fails.
|
||||||
|
|
||||||
|
|||||||
@@ -344,7 +344,7 @@ names, and `def can_place_boss`, which passes a boss and location, allowing you
|
|||||||
your game. When this function is called, `bosses`, `locations`, and the passed strings will all be lowercase. There is
|
your game. When this function is called, `bosses`, `locations`, and the passed strings will all be lowercase. There is
|
||||||
also a `duplicate_bosses` attribute allowing you to define if a boss can be placed multiple times in your world. False
|
also a `duplicate_bosses` attribute allowing you to define if a boss can be placed multiple times in your world. False
|
||||||
by default, and will reject duplicate boss names from the user. For an example of using this class, refer to
|
by default, and will reject duplicate boss names from the user. For an example of using this class, refer to
|
||||||
`worlds.alttp.options.py`
|
`worlds/alttp/Options.py`
|
||||||
|
|
||||||
### OptionDict
|
### OptionDict
|
||||||
This option returns a dictionary. Setting a default here is recommended as it will output the dictionary to the
|
This option returns a dictionary. Setting a default here is recommended as it will output the dictionary to the
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ use that version. These steps are for developers or platforms without compiled r
|
|||||||
## General
|
## General
|
||||||
|
|
||||||
What you'll need:
|
What you'll need:
|
||||||
* [Python 3.10.11 or newer](https://www.python.org/downloads/), not the Windows Store version
|
* [Python 3.11.9 or newer](https://www.python.org/downloads/), not the Windows Store version
|
||||||
* On Windows, please consider only using the latest supported version in production environments since security
|
* On Windows, please consider only using the latest supported version in production environments since security
|
||||||
updates for older versions are not easily available.
|
updates for older versions are not easily available.
|
||||||
* Python 3.12.x is currently the newest supported version
|
* Python 3.12.x is currently the newest supported version
|
||||||
|
|||||||
@@ -181,10 +181,3 @@ circular / partial imports. Instead, the code should fetch from settings on dema
|
|||||||
|
|
||||||
"Global" settings are populated immediately, while worlds settings are lazy loaded, so if really necessary,
|
"Global" settings are populated immediately, while worlds settings are lazy loaded, so if really necessary,
|
||||||
"global" settings could be used in global scope of worlds.
|
"global" settings could be used in global scope of worlds.
|
||||||
|
|
||||||
|
|
||||||
### APWorld Backwards Compatibility
|
|
||||||
|
|
||||||
APWorlds that want to be compatible with both stable and dev versions, have two options:
|
|
||||||
1. use the old Utils.get_options() API until Archipelago 0.4.2 is out
|
|
||||||
2. add some sort of compatibility code to your world that mimics the new API
|
|
||||||
|
|||||||
@@ -29,6 +29,10 @@
|
|||||||
* New classes, attributes, and methods in core code should have docstrings that follow
|
* New classes, attributes, and methods in core code should have docstrings that follow
|
||||||
[reST style](https://peps.python.org/pep-0287/).
|
[reST style](https://peps.python.org/pep-0287/).
|
||||||
* Worlds that do not follow PEP8 should still have a consistent style across its files to make reading easier.
|
* Worlds that do not follow PEP8 should still have a consistent style across its files to make reading easier.
|
||||||
|
* [Match statements](https://docs.python.org/3/tutorial/controlflow.html#tut-match)
|
||||||
|
may be used instead of `if`-`elif` if they result in nicer code, or they actually use pattern matching.
|
||||||
|
Beware of the performance: they are not `goto`s, but `if`-`elif` under the hood, and you may have less control. When
|
||||||
|
in doubt, just don't use it.
|
||||||
|
|
||||||
## Markdown
|
## Markdown
|
||||||
|
|
||||||
|
|||||||
557
docs/webhost api.md
Normal file
557
docs/webhost api.md
Normal file
@@ -0,0 +1,557 @@
|
|||||||
|
# API Guide
|
||||||
|
|
||||||
|
Archipelago has a rudimentary API that can be queried by endpoints. The API is a work-in-progress and should be improved over time.
|
||||||
|
|
||||||
|
The following API requests are formatted as: `https://<Archipelago URL>/api/<endpoint>`
|
||||||
|
|
||||||
|
The returned data will be formatted in a combination of JSON lists or dicts, with their keys or values being notated in `blocks` (if applicable)
|
||||||
|
|
||||||
|
Current endpoints:
|
||||||
|
- Datapackage API
|
||||||
|
- [`/datapackage`](#datapackage)
|
||||||
|
- [`/datapackage/<string:checksum>`](#datapackagestringchecksum)
|
||||||
|
- [`/datapackage_checksum`](#datapackagechecksum)
|
||||||
|
- Generation API
|
||||||
|
- [`/generate`](#generate)
|
||||||
|
- [`/status/<suuid:seed>`](#status)
|
||||||
|
- Room API
|
||||||
|
- [`/room_status/<suuid:room_id>`](#roomstatus)
|
||||||
|
- Tracker API
|
||||||
|
- [`/tracker/<suuid:tracker>`](#tracker)
|
||||||
|
- User API
|
||||||
|
- [`/get_rooms`](#getrooms)
|
||||||
|
- [`/get_seeds`](#getseeds)
|
||||||
|
|
||||||
|
|
||||||
|
## Datapackage Endpoints
|
||||||
|
These endpoints are used by applications to acquire a room's datapackage, and validate that they have the correct datapackage for use. Datapackages normally include item IDs, location IDs, and name groupings for a given room, and are essential for mapping IDs received from Archipelago to their correct items or locations.
|
||||||
|
|
||||||
|
### `/datapackage`
|
||||||
|
<a name="datapackage"></a>
|
||||||
|
Fetches the current datapackage from the WebHost.
|
||||||
|
You'll receive a dict named `games` that contains a named dict of every game and its data currently supported by Archipelago.
|
||||||
|
Each game will have:
|
||||||
|
- A checksum `checksum`
|
||||||
|
- A dict of item groups `item_name_groups`
|
||||||
|
- Item name to AP ID dict `item_name_to_id`
|
||||||
|
- A dict of location groups `location_name_groups`
|
||||||
|
- Location name to AP ID dict `location_name_to_id`
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"games": {
|
||||||
|
...
|
||||||
|
"Clique": {
|
||||||
|
"checksum": "0271f7a80b44ba72187f92815c2bc8669cb464c7",
|
||||||
|
"item_name_groups": {
|
||||||
|
"Everything": [
|
||||||
|
"A Cool Filler Item (No Satisfaction Guaranteed)",
|
||||||
|
"Button Activation",
|
||||||
|
"Feeling of Satisfaction"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"item_name_to_id": {
|
||||||
|
"A Cool Filler Item (No Satisfaction Guaranteed)": 69696967,
|
||||||
|
"Button Activation": 69696968,
|
||||||
|
"Feeling of Satisfaction": 69696969
|
||||||
|
},
|
||||||
|
"location_name_groups": {
|
||||||
|
"Everywhere": [
|
||||||
|
"The Big Red Button",
|
||||||
|
"The Item on the Desk"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"location_name_to_id": {
|
||||||
|
"The Big Red Button": 69696969,
|
||||||
|
"The Item on the Desk": 69696968
|
||||||
|
}
|
||||||
|
},
|
||||||
|
...
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `/datapackage/<string:checksum>`
|
||||||
|
<a name="datapackagestringchecksum"></a>
|
||||||
|
Fetches a single datapackage by checksum.
|
||||||
|
Returns a dict of the game's data with:
|
||||||
|
- A checksum `checksum`
|
||||||
|
- A dict of item groups `item_name_groups`
|
||||||
|
- Item name to AP ID dict `item_name_to_id`
|
||||||
|
- A dict of location groups `location_name_groups`
|
||||||
|
- Location name to AP ID dict `location_name_to_id`
|
||||||
|
|
||||||
|
Its format will be identical to the whole-datapackage endpoint (`/datapackage`), except you'll only be returned the single game's data in a dict.
|
||||||
|
|
||||||
|
### `/datapackage_checksum`
|
||||||
|
<a name="datapackagechecksum"></a>
|
||||||
|
Fetches the checksums of the current static datapackages on the WebHost.
|
||||||
|
You'll receive a dict with `game:checksum` key-value pairs for all the current officially supported games.
|
||||||
|
Example:
|
||||||
|
```
|
||||||
|
{
|
||||||
|
...
|
||||||
|
"Donkey Kong Country 3":"f90acedcd958213f483a6a4c238e2a3faf92165e",
|
||||||
|
"Factorio":"a699194a9589db3ebc0d821915864b422c782f44",
|
||||||
|
...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Generation Endpoint
|
||||||
|
These endpoints are used internally for the WebHost to generate games and validate their generation. They are also used by external applications to generate games automatically.
|
||||||
|
|
||||||
|
### `/generate`
|
||||||
|
<a name="generate"></a>
|
||||||
|
Submits a game to the WebHost for generation.
|
||||||
|
**This endpoint only accepts a POST HTTP request.**
|
||||||
|
|
||||||
|
There are two ways to submit data for generation: With a file and with JSON.
|
||||||
|
|
||||||
|
#### With a file:
|
||||||
|
Have your ZIP of yaml(s) or a single yaml, and submit a POST request to the `/generate` endpoint.
|
||||||
|
If the options are valid, you'll be returned a successful generation response. (see [Generation Response](#generation-response))
|
||||||
|
|
||||||
|
Example using the python requests library:
|
||||||
|
```
|
||||||
|
file = {'file': open('Games.zip', 'rb')}
|
||||||
|
req = requests.post("https://archipelago.gg/api/generate", files=file)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### With JSON:
|
||||||
|
Compile your weights/yaml data into a dict. Then insert that into a dict with the key `"weights"`.
|
||||||
|
Finally, submit a POST request to the `/generate` endpoint.
|
||||||
|
If the weighted options are valid, you'll be returned a successful generation response (see [Generation Response](#generation-response))
|
||||||
|
|
||||||
|
Example using the python requests library:
|
||||||
|
```
|
||||||
|
data = {"Test":{"game": "Factorio","name": "Test","Factorio": {}},}
|
||||||
|
weights={"weights": data}
|
||||||
|
req = requests.post("https://archipelago.gg/api/generate", json=weights)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Generation Response:
|
||||||
|
##### Successful Generation:
|
||||||
|
Upon successful generation, you'll be sent a JSON dict response detailing the generation:
|
||||||
|
- The UUID of the generation `detail`
|
||||||
|
- The SUUID of the generation `encoded`
|
||||||
|
- The response text `text`
|
||||||
|
- The page that will resolve to the seed/room generation page once generation has completed `url`
|
||||||
|
- The API status page of the generation `wait_api_url` (see [Status Endpoint](#status))
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"detail": "19878f16-5a58-4b76-aab7-d6bf38be9463",
|
||||||
|
"encoded": "GYePFlpYS3aqt9a_OL6UYw",
|
||||||
|
"text": "Generation of seed 19878f16-5a58-4b76-aab7-d6bf38be9463 started successfully.",
|
||||||
|
"url": "http://archipelago.gg/wait/GYePFlpYS3aqt9a_OL6UYw",
|
||||||
|
"wait_api_url": "http://archipelago.gg/api/status/GYePFlpYS3aqt9a_OL6UYw"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Failed Generation:
|
||||||
|
|
||||||
|
Upon failed generation, you'll be returned a single key-value pair. The key will always be `text`
|
||||||
|
The value will give you a hint as to what may have gone wrong.
|
||||||
|
- Options without tags, and a 400 status code
|
||||||
|
- Options in a string, and a 400 status code
|
||||||
|
- Invalid file/weight string, `No options found. Expected file attachment or json weights.` with a 400 status code
|
||||||
|
- Too many slots for the server to process, `Max size of multiworld exceeded` with a 409 status code
|
||||||
|
|
||||||
|
If the generation detects an issue during generation, you'll be sent a dict with two key-value pairs (`text` and `detail`) and a 400 status code. The values will be:
|
||||||
|
- Summary of issue in `text`
|
||||||
|
- Detailed issue in `detail`
|
||||||
|
|
||||||
|
In the event of an unhandled server exception, you'll be provided a dict with a single key `text`:
|
||||||
|
- Exception, `Uncaught Exception: <error>` with a 500 status code
|
||||||
|
|
||||||
|
### `/status/<suuid:seed>`
|
||||||
|
<a name="status"></a>
|
||||||
|
Retrieves the status of the seed's generation.
|
||||||
|
This endpoint will return a dict with a single key-value pair. The key will always be `text`
|
||||||
|
The value will tell you the status of the generation:
|
||||||
|
- Generation was completed: `Generation done` with a 201 status code
|
||||||
|
- Generation request was not found: `Generation not found` with a 404 status code
|
||||||
|
- Generation of the seed failed: `Generation failed` with a 500 status code
|
||||||
|
- Generation is in progress still: `Generation running` with a 202 status code
|
||||||
|
|
||||||
|
## Room Endpoints
|
||||||
|
Endpoints to fetch information of the active WebHost room with the supplied room_ID.
|
||||||
|
|
||||||
|
### `/room_status/<suuid:room_id>`
|
||||||
|
<a name="roomstatus"></a>
|
||||||
|
Will provide a dict of room data with the following keys:
|
||||||
|
- Tracker SUUID (`tracker`)
|
||||||
|
- A list of players (`players`)
|
||||||
|
- Each item containing a list with the Slot name and Game
|
||||||
|
- Last known hosted port (`last_port`)
|
||||||
|
- Last activity timestamp (`last_activity`)
|
||||||
|
- The room timeout counter (`timeout`)
|
||||||
|
- A list of downloads for files required for gameplay (`downloads`)
|
||||||
|
- Each item is a dict containing the download URL and slot (`slot`, `download`)
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```
|
||||||
|
{
|
||||||
|
"downloads": [
|
||||||
|
{
|
||||||
|
"download": "/slot_file/kK5fmxd8TfisU5Yp_eg/1",
|
||||||
|
"slot": 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"download": "/slot_file/kK5fmxd8TfisU5Yp_eg/2",
|
||||||
|
"slot": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"download": "/slot_file/kK5fmxd8TfisU5Yp_eg/3",
|
||||||
|
"slot": 3
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"download": "/slot_file/kK5fmxd8TfisU5Yp_eg/4",
|
||||||
|
"slot": 4
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"download": "/slot_file/kK5fmxd8TfisU5Yp_eg/5",
|
||||||
|
"slot": 5
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"last_activity": "Fri, 18 Apr 2025 20:35:45 GMT",
|
||||||
|
"last_port": 52122,
|
||||||
|
"players": [
|
||||||
|
[
|
||||||
|
"Slot_Name_1",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_2",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_3",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_4",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_5",
|
||||||
|
"Ocarina of Time"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"timeout": 7200,
|
||||||
|
"tracker": "cf6989c0-4703-45d7-a317-2e5158431171"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tracker Endpoints
|
||||||
|
Endpoints to fetch information regarding players of an active WebHost room with the supplied tracker_ID. The tracker ID
|
||||||
|
can either be viewed while on a room tracker page, or from the [room's endpoint](#room-endpoints).
|
||||||
|
|
||||||
|
### `/tracker/<suuid:tracker>`
|
||||||
|
<a name="tracker"></a>
|
||||||
|
Will provide a dict of tracker data with the following keys:
|
||||||
|
|
||||||
|
- item_link groups and their players (`groups`)
|
||||||
|
- Each player's slot_data (`slot_data`)
|
||||||
|
- Each player's current alias (`aliases`)
|
||||||
|
- Will return the name if there is none
|
||||||
|
- A list of items each player has received as a NetworkItem (`player_items_received`)
|
||||||
|
- A list of checks done by each player as a list of the location id's (`player_checks_done`)
|
||||||
|
- The total number of checks done by all players (`total_checks_done`)
|
||||||
|
- Hints that players have used or received (`hints`)
|
||||||
|
- The time of last activity of each player in RFC 1123 format (`activity_timers`)
|
||||||
|
- The time of last active connection of each player in RFC 1123 format (`connection_timers`)
|
||||||
|
- The current client status of each player (`player_status`)
|
||||||
|
- The datapackage hash for each player (`datapackage`)
|
||||||
|
- This hash can then be sent to the datapackage API to receive the appropriate datapackage as necessary
|
||||||
|
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"slot": 5,
|
||||||
|
"name": "testGroup",
|
||||||
|
"members": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"slot": 6,
|
||||||
|
"name": "myCoolLink",
|
||||||
|
"members": [
|
||||||
|
3,
|
||||||
|
4
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"slot_data": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"slot_data": {
|
||||||
|
"example_option": 1,
|
||||||
|
"other_option": 3
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"slot_data": {
|
||||||
|
"example_option": 1,
|
||||||
|
"other_option": 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"aliases": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"alias": "Incompetence"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"alias": "Slot_Name_2"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"player_items_received": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"items": [
|
||||||
|
[1, 1, 1, 0],
|
||||||
|
[2, 2, 2, 1]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"items": [
|
||||||
|
[1, 1, 1, 2],
|
||||||
|
[2, 2, 2, 0]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"player_checks_done": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"locations": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"locations": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"total_checks_done": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"checks_done": 4
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"hints": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"hints": [
|
||||||
|
[1, 2, 4, 6, 0, "", 4, 0]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"hints": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"activity_timers": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"time": "Fri, 18 Apr 2025 20:35:45 GMT"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"time": "Fri, 18 Apr 2025 20:42:46 GMT"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"connection_timers": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"time": "Fri, 18 Apr 2025 20:38:25 GMT"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"time": "Fri, 18 Apr 2025 21:03:00 GMT"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"player_status": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"players": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"status": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"status": 0
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"datapackage": {
|
||||||
|
"Archipelago": {
|
||||||
|
"checksum": "ac9141e9ad0318df2fa27da5f20c50a842afeecb",
|
||||||
|
"version": 0
|
||||||
|
},
|
||||||
|
"The Messenger": {
|
||||||
|
"checksum": "6991cbcda7316b65bcb072667f3ee4c4cae71c0b",
|
||||||
|
"version": 0
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## User Endpoints
|
||||||
|
User endpoints can get room and seed details from the current session tokens (cookies)
|
||||||
|
|
||||||
|
### `/get_rooms`
|
||||||
|
<a name="getrooms"></a>
|
||||||
|
Retrieves a list of all rooms currently owned by the session token.
|
||||||
|
Each list item will contain a dict with the room's details:
|
||||||
|
- Room SUUID (`room_id`)
|
||||||
|
- Seed SUUID (`seed_id`)
|
||||||
|
- Creation timestamp (`creation_time`)
|
||||||
|
- Last activity timestamp (`last_activity`)
|
||||||
|
- Last known AP port (`last_port`)
|
||||||
|
- Room timeout counter in seconds (`timeout`)
|
||||||
|
- Room tracker SUUID (`tracker`)
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"creation_time": "Fri, 18 Apr 2025 19:46:53 GMT",
|
||||||
|
"last_activity": "Fri, 18 Apr 2025 21:16:02 GMT",
|
||||||
|
"last_port": 52122,
|
||||||
|
"room_id": "90ae5f9b-177c-4df8-ac53-9629fc3bff7a",
|
||||||
|
"seed_id": "efbd62c2-aaeb-4dda-88c3-f461c029cef6",
|
||||||
|
"timeout": 7200,
|
||||||
|
"tracker": "cf6989c0-4703-45d7-a317-2e5158431171"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"creation_time": "Fri, 18 Apr 2025 20:36:42 GMT",
|
||||||
|
"last_activity": "Fri, 18 Apr 2025 20:36:46 GMT",
|
||||||
|
"last_port": 56884,
|
||||||
|
"room_id": "14465c05-d08e-4d28-96bd-916f994609d8",
|
||||||
|
"seed_id": "a528e34c-3b4f-42a9-9f8f-00a4fd40bacb",
|
||||||
|
"timeout": 7200,
|
||||||
|
"tracker": "4e624bd8-32b6-42e4-9178-aa407f72751c"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
### `/get_seeds`
|
||||||
|
<a name="getseeds"></a>
|
||||||
|
Retrieves a list of all seeds currently owned by the session token.
|
||||||
|
Each item in the list will contain a dict with the seed's details:
|
||||||
|
- Seed SUUID (`seed_id`)
|
||||||
|
- Creation timestamp (`creation_time`)
|
||||||
|
- A list of player slots (`players`)
|
||||||
|
- Each item in the list will contain a list of the slot name and game
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"creation_time": "Fri, 18 Apr 2025 19:46:52 GMT",
|
||||||
|
"players": [
|
||||||
|
[
|
||||||
|
"Slot_Name_1",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_2",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_3",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_4",
|
||||||
|
"Ocarina of Time"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_5",
|
||||||
|
"Ocarina of Time"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"seed_id": "efbd62c2-aaeb-4dda-88c3-f461c029cef6"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"creation_time": "Fri, 18 Apr 2025 20:36:39 GMT",
|
||||||
|
"players": [
|
||||||
|
[
|
||||||
|
"Slot_Name_1",
|
||||||
|
"Clique"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_2",
|
||||||
|
"Clique"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_3",
|
||||||
|
"Clique"
|
||||||
|
],
|
||||||
|
[
|
||||||
|
"Slot_Name_4",
|
||||||
|
"Archipelago"
|
||||||
|
]
|
||||||
|
],
|
||||||
|
"seed_id": "a528e34c-3b4f-42a9-9f8f-00a4fd40bacb"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
@@ -257,31 +257,14 @@ another flag like "progression", it means "an especially useful progression item
|
|||||||
combined with `progression`; see below)
|
combined with `progression`; see below)
|
||||||
* `progression_skip_balancing`: the combination of `progression` and `skip_balancing`, i.e., a progression item that
|
* `progression_skip_balancing`: the combination of `progression` and `skip_balancing`, i.e., a progression item that
|
||||||
will not be moved around by progression balancing; used, e.g., for currency or tokens, to not flood early spheres
|
will not be moved around by progression balancing; used, e.g., for currency or tokens, to not flood early spheres
|
||||||
|
* `deprioritized`: denotes that an item should not be placed on priority locations
|
||||||
### Events
|
(to be combined with `progression`; see below)
|
||||||
|
* `progression_deprioritized`: the combination of `progression` and `deprioritized`, i.e. a progression item that
|
||||||
An Event is a special combination of a Location and an Item, with both having an `id` of `None`. These can be used to
|
should not be placed on priority locations, despite being progression;
|
||||||
track certain logic interactions, with the Event Item being required for access in other locations or regions, but not
|
like skip_balancing, this is commonly used for currency or tokens.
|
||||||
being "real". Since the item and location have no ID, they get dropped at the end of generation and so the server is
|
* `progression_deprioritized_skip_balancing`: the combination of `progression`, `deprioritized` and `skip_balancing`.
|
||||||
never made aware of them and these locations can never be checked, nor can the items be received during play.
|
Since there is overlap between the kind of items that want `skip_balancing` and `deprioritized`,
|
||||||
They may also be used for making the spoiler log look nicer, i.e. by having a `"Victory"` Event Item, that
|
this combined classification exists for convenience
|
||||||
is required to finish the game. This makes it very clear when the player finishes, rather than only seeing their last
|
|
||||||
relevant Item. Events function just like any other Location, and can still have their own access rules, etc.
|
|
||||||
By convention, the Event "pair" of Location and Item typically have the same name, though this is not a requirement.
|
|
||||||
They must not exist in the `name_to_id` lookups, as they have no ID.
|
|
||||||
|
|
||||||
The most common way to create an Event pair is to create and place the Item on the Location as soon as it's created:
|
|
||||||
|
|
||||||
```python
|
|
||||||
from worlds.AutoWorld import World
|
|
||||||
from BaseClasses import ItemClassification
|
|
||||||
from .subclasses import MyGameLocation, MyGameItem
|
|
||||||
|
|
||||||
|
|
||||||
class MyGameWorld(World):
|
|
||||||
victory_loc = MyGameLocation(self.player, "Victory", None)
|
|
||||||
victory_loc.place_locked_item(MyGameItem("Victory", ItemClassification.progression, None, self.player))
|
|
||||||
```
|
|
||||||
|
|
||||||
### Regions
|
### Regions
|
||||||
|
|
||||||
@@ -291,7 +274,7 @@ like entrance randomization in logic.
|
|||||||
|
|
||||||
Regions have a list called `exits`, containing `Entrance` objects representing transitions to other regions.
|
Regions have a list called `exits`, containing `Entrance` objects representing transitions to other regions.
|
||||||
|
|
||||||
There must be one special region (Called "Menu" by default, but configurable using [origin_region_name](https://github.com/ArchipelagoMW/Archipelago/blob/main/worlds/AutoWorld.py#L298-L299)),
|
There must be one special region (Called "Menu" by default, but configurable using [origin_region_name](https://github.com/ArchipelagoMW/Archipelago/blob/main/worlds/AutoWorld.py#L310-L311)),
|
||||||
from which the logic unfolds. AP assumes that a player will always be able to return to this starting region by resetting the game ("Save and quit").
|
from which the logic unfolds. AP assumes that a player will always be able to return to this starting region by resetting the game ("Save and quit").
|
||||||
|
|
||||||
### Entrances
|
### Entrances
|
||||||
@@ -339,6 +322,63 @@ avoiding the need for indirect conditions at the expense of performance.
|
|||||||
An item rule is a function that returns `True` or `False` for a `Location` based on a single item. It can be used to
|
An item rule is a function that returns `True` or `False` for a `Location` based on a single item. It can be used to
|
||||||
reject the placement of an item there.
|
reject the placement of an item there.
|
||||||
|
|
||||||
|
### Events (or "generation-only items/locations")
|
||||||
|
|
||||||
|
An event item or location is one that only exists during multiworld generation; the server is never made aware of them.
|
||||||
|
Event locations can never be checked by the player, and event items cannot be received during play.
|
||||||
|
|
||||||
|
Events are used to represent in-game actions (that aren't regular Archipelago locations) when either:
|
||||||
|
|
||||||
|
* We want to show in the spoiler log when the player is expected to perform the in-game action.
|
||||||
|
* It's the cleanest way to represent how that in-game action impacts logic.
|
||||||
|
|
||||||
|
Typical examples include completing the goal, defeating a boss, or flipping a switch that affects multiple areas.
|
||||||
|
|
||||||
|
To be precise: the term "event" on its own refers to the special combination of an "event item" placed on an "event
|
||||||
|
location". Event items and locations are created the same way as normal items and locations, except that they have an
|
||||||
|
`id` of `None`, and an event item must be placed on an event location
|
||||||
|
(and vice versa). Finally, although events are often described as "fake" items and locations, it's important to
|
||||||
|
understand that they are perfectly real during generation.
|
||||||
|
|
||||||
|
The most common way to create an event is to create the event item and the event location, then immediately call
|
||||||
|
`Location.place_locked_item()`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
victory_loc = MyGameLocation(self.player, "Defeat the Final Boss", None, final_boss_arena_region)
|
||||||
|
victory_loc.place_locked_item(MyGameItem("Victory", ItemClassification.progression, None, self.player))
|
||||||
|
self.multiworld.completion_condition[self.player] = lambda state: state.has("Victory", self.player)
|
||||||
|
set_rule(victory_loc, lambda state: state.has("Boss Defeating Sword", self.player))
|
||||||
|
```
|
||||||
|
|
||||||
|
Requiring an event to finish the game will make the spoiler log display an additional
|
||||||
|
`Defeat the Final Boss: Victory` line when the player is expected to finish, rather than only showing their last
|
||||||
|
relevant item. But events aren't just about the spoiler log; a more substantial example of using events to structure
|
||||||
|
your logic might be:
|
||||||
|
|
||||||
|
```python
|
||||||
|
water_loc = MyGameLocation(self.player, "Water Level Switch", None, pump_station_region)
|
||||||
|
water_loc.place_locked_item(MyGameItem("Lowered Water Level", ItemClassification.progression, None, self.player))
|
||||||
|
pump_station_region.locations.append(water_loc)
|
||||||
|
set_rule(water_loc, lambda state: state.has("Double Jump", self.player)) # the switch is really high up
|
||||||
|
...
|
||||||
|
basement_loc = MyGameLocation(self.player, "Flooded House - Basement Chest", None, flooded_house_region)
|
||||||
|
flooded_house_region.locations += [upstairs_loc, ground_floor_loc, basement_loc]
|
||||||
|
...
|
||||||
|
set_rule(basement_loc, lambda state: state.has("Lowered Water Level", self.player))
|
||||||
|
```
|
||||||
|
|
||||||
|
This creates a "Lowered Water Level" event and a regular location whose access rule depends on that
|
||||||
|
event being reachable. If you made several more locations the same way, this would ensure all of those locations can
|
||||||
|
only become reachable when the event location is reachable (i.e. when the water level can be lowered), without
|
||||||
|
copy-pasting the event location's access rule and then repeatedly re-evaluating it. Also, the spoiler log will show
|
||||||
|
`Water Level Switch: Lowered Water Level` when the player is expected to do this.
|
||||||
|
|
||||||
|
To be clear, this example could also be modeled with a second Region (perhaps "Un-Flooded House"). Or you could modify
|
||||||
|
the game so flipping that switch checks a regular AP location in addition to lowering the water level.
|
||||||
|
Events are never required, but it may be cleaner to use an event if e.g. flipping that switch affects the logic in
|
||||||
|
dozens of half-flooded areas that would all otherwise need additional Regions, and you don't want it to be a regular
|
||||||
|
location. It depends on the game.
|
||||||
|
|
||||||
## Implementation
|
## Implementation
|
||||||
|
|
||||||
### Your World
|
### Your World
|
||||||
@@ -483,13 +523,14 @@ In addition, the following methods can be implemented and are called in this ord
|
|||||||
called per player before any items or locations are created. You can set properties on your
|
called per player before any items or locations are created. You can set properties on your
|
||||||
world here. Already has access to player options and RNG. This is the earliest step where the world should start
|
world here. Already has access to player options and RNG. This is the earliest step where the world should start
|
||||||
setting up for the current multiworld, as the multiworld itself is still setting up before this point.
|
setting up for the current multiworld, as the multiworld itself is still setting up before this point.
|
||||||
|
You cannot modify `local_items`, or `non_local_items` after this step.
|
||||||
* `create_regions(self)`
|
* `create_regions(self)`
|
||||||
called to place player's regions and their locations into the MultiWorld's regions list.
|
called to place player's regions and their locations into the MultiWorld's regions list.
|
||||||
If it's hard to separate, this can be done during `generate_early` or `create_items` as well.
|
If it's hard to separate, this can be done during `generate_early` or `create_items` as well.
|
||||||
* `create_items(self)`
|
* `create_items(self)`
|
||||||
called to place player's items into the MultiWorld's itempool. By the end of this step all regions, locations and
|
called to place player's items into the MultiWorld's itempool. By the end of this step all regions, locations and
|
||||||
items have to be in the MultiWorld's regions and itempool. You cannot add or remove items, locations, or regions
|
items have to be in the MultiWorld's regions and itempool. You cannot add or remove items, locations, or regions after
|
||||||
after this step. Locations cannot be moved to different regions after this step.
|
this step. Locations cannot be moved to different regions after this step. This includes event items and locations.
|
||||||
* `set_rules(self)`
|
* `set_rules(self)`
|
||||||
called to set access and item rules on locations and entrances.
|
called to set access and item rules on locations and entrances.
|
||||||
* `connect_entrances(self)`
|
* `connect_entrances(self)`
|
||||||
@@ -501,12 +542,12 @@ In addition, the following methods can be implemented and are called in this ord
|
|||||||
called to modify item placement before, during, and after the regular fill process; all finishing before
|
called to modify item placement before, during, and after the regular fill process; all finishing before
|
||||||
`generate_output`. Any items that need to be placed during `pre_fill` should not exist in the itempool, and if there
|
`generate_output`. Any items that need to be placed during `pre_fill` should not exist in the itempool, and if there
|
||||||
are any items that need to be filled this way, but need to be in state while you fill other items, they can be
|
are any items that need to be filled this way, but need to be in state while you fill other items, they can be
|
||||||
returned from `get_prefill_items`.
|
returned from `get_pre_fill_items`.
|
||||||
* `generate_output(self, output_directory: str)`
|
* `generate_output(self, output_directory: str)`
|
||||||
creates the output files if there is output to be generated. When this is called,
|
creates the output files if there is output to be generated. When this is called,
|
||||||
`self.multiworld.get_locations(self.player)` has all locations for the player, with attribute `item` pointing to the
|
`self.multiworld.get_locations(self.player)` has all locations for the player, with attribute `item` pointing to the
|
||||||
item. `location.item.player` can be used to see if it's a local item.
|
item. `location.item.player` can be used to see if it's a local item.
|
||||||
* `fill_slot_data(self)` and `modify_multidata(self, multidata: Dict[str, Any])` can be used to modify the data that
|
* `fill_slot_data(self)` and `modify_multidata(self, multidata: MultiData)` can be used to modify the data that
|
||||||
will be used by the server to host the MultiWorld.
|
will be used by the server to host the MultiWorld.
|
||||||
|
|
||||||
All instance methods can, optionally, have a class method defined which will be called after all instance methods are
|
All instance methods can, optionally, have a class method defined which will be called after all instance methods are
|
||||||
@@ -579,17 +620,10 @@ def create_items(self) -> None:
|
|||||||
# If there are two of the same item, the item has to be twice in the pool.
|
# If there are two of the same item, the item has to be twice in the pool.
|
||||||
# Which items are added to the pool may depend on player options, e.g. custom win condition like triforce hunt.
|
# Which items are added to the pool may depend on player options, e.g. custom win condition like triforce hunt.
|
||||||
# Having an item in the start inventory won't remove it from the pool.
|
# Having an item in the start inventory won't remove it from the pool.
|
||||||
# If an item can't have duplicates it has to be excluded manually.
|
# If you want to do that, use start_inventory_from_pool
|
||||||
|
|
||||||
# List of items to exclude, as a copy since it will be destroyed below
|
|
||||||
exclude = [item for item in self.multiworld.precollected_items[self.player]]
|
|
||||||
|
|
||||||
for item in map(self.create_item, mygame_items):
|
for item in map(self.create_item, mygame_items):
|
||||||
if item in exclude:
|
self.multiworld.itempool.append(item)
|
||||||
exclude.remove(item) # this is destructive. create unique list above
|
|
||||||
self.multiworld.itempool.append(self.create_item("nothing"))
|
|
||||||
else:
|
|
||||||
self.multiworld.itempool.append(item)
|
|
||||||
|
|
||||||
# itempool and number of locations should match up.
|
# itempool and number of locations should match up.
|
||||||
# If this is not the case we want to fill the itempool with junk.
|
# If this is not the case we want to fill the itempool with junk.
|
||||||
|
|||||||
@@ -52,13 +52,15 @@ class EntranceLookup:
|
|||||||
_coupled: bool
|
_coupled: bool
|
||||||
_usable_exits: set[Entrance]
|
_usable_exits: set[Entrance]
|
||||||
|
|
||||||
def __init__(self, rng: random.Random, coupled: bool, usable_exits: set[Entrance]):
|
def __init__(self, rng: random.Random, coupled: bool, usable_exits: set[Entrance], targets: Iterable[Entrance]):
|
||||||
self.dead_ends = EntranceLookup.GroupLookup()
|
self.dead_ends = EntranceLookup.GroupLookup()
|
||||||
self.others = EntranceLookup.GroupLookup()
|
self.others = EntranceLookup.GroupLookup()
|
||||||
self._random = rng
|
self._random = rng
|
||||||
self._expands_graph_cache = {}
|
self._expands_graph_cache = {}
|
||||||
self._coupled = coupled
|
self._coupled = coupled
|
||||||
self._usable_exits = usable_exits
|
self._usable_exits = usable_exits
|
||||||
|
for target in targets:
|
||||||
|
self.add(target)
|
||||||
|
|
||||||
def _can_expand_graph(self, entrance: Entrance) -> bool:
|
def _can_expand_graph(self, entrance: Entrance) -> bool:
|
||||||
"""
|
"""
|
||||||
@@ -121,7 +123,14 @@ class EntranceLookup:
|
|||||||
dead_end: bool,
|
dead_end: bool,
|
||||||
preserve_group_order: bool
|
preserve_group_order: bool
|
||||||
) -> Iterable[Entrance]:
|
) -> Iterable[Entrance]:
|
||||||
|
"""
|
||||||
|
Gets available targets for the requested groups
|
||||||
|
|
||||||
|
:param groups: The groups to find targets for
|
||||||
|
:param dead_end: Whether to find dead ends. If false, finds non-dead-ends
|
||||||
|
:param preserve_group_order: Whether to preserve the group order in the returned iterable. If true, a sequence
|
||||||
|
like AAABBB is guaranteed. If false, groups can be interleaved, e.g. BAABAB.
|
||||||
|
"""
|
||||||
lookup = self.dead_ends if dead_end else self.others
|
lookup = self.dead_ends if dead_end else self.others
|
||||||
if preserve_group_order:
|
if preserve_group_order:
|
||||||
for group in groups:
|
for group in groups:
|
||||||
@@ -132,6 +141,27 @@ class EntranceLookup:
|
|||||||
self._random.shuffle(ret)
|
self._random.shuffle(ret)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
def find_target(self, name: str, group: int | None = None, dead_end: bool | None = None) -> Entrance | None:
|
||||||
|
"""
|
||||||
|
Finds a specific target in the lookup, if it is present.
|
||||||
|
|
||||||
|
:param name: The name of the target
|
||||||
|
:param group: The target's group. Providing this will make the lookup faster, but can be omitted if it is not
|
||||||
|
known ahead of time for some reason.
|
||||||
|
:param dead_end: Whether the target is a dead end. Providing this will make the lookup faster, but can be
|
||||||
|
omitted if this is not known ahead of time (much more likely)
|
||||||
|
"""
|
||||||
|
if dead_end is None:
|
||||||
|
return (found
|
||||||
|
if (found := self.find_target(name, group, True))
|
||||||
|
else self.find_target(name, group, False))
|
||||||
|
lookup = self.dead_ends if dead_end else self.others
|
||||||
|
targets_to_check = lookup if group is None else lookup[group]
|
||||||
|
for target in targets_to_check:
|
||||||
|
if target.name == name:
|
||||||
|
return target
|
||||||
|
return None
|
||||||
|
|
||||||
def __len__(self):
|
def __len__(self):
|
||||||
return len(self.dead_ends) + len(self.others)
|
return len(self.dead_ends) + len(self.others)
|
||||||
|
|
||||||
@@ -146,15 +176,18 @@ class ERPlacementState:
|
|||||||
"""The world which is having its entrances randomized"""
|
"""The world which is having its entrances randomized"""
|
||||||
collection_state: CollectionState
|
collection_state: CollectionState
|
||||||
"""The CollectionState backing the entrance randomization logic"""
|
"""The CollectionState backing the entrance randomization logic"""
|
||||||
|
entrance_lookup: EntranceLookup
|
||||||
|
"""A lookup table of all unconnected ER targets"""
|
||||||
coupled: bool
|
coupled: bool
|
||||||
"""Whether entrance randomization is operating in coupled mode"""
|
"""Whether entrance randomization is operating in coupled mode"""
|
||||||
|
|
||||||
def __init__(self, world: World, coupled: bool):
|
def __init__(self, world: World, entrance_lookup: EntranceLookup, coupled: bool):
|
||||||
self.placements = []
|
self.placements = []
|
||||||
self.pairings = []
|
self.pairings = []
|
||||||
self.world = world
|
self.world = world
|
||||||
self.coupled = coupled
|
self.coupled = coupled
|
||||||
self.collection_state = world.multiworld.get_all_state(False, True)
|
self.collection_state = world.multiworld.get_all_state(False, True)
|
||||||
|
self.entrance_lookup = entrance_lookup
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def placed_regions(self) -> set[Region]:
|
def placed_regions(self) -> set[Region]:
|
||||||
@@ -182,6 +215,7 @@ class ERPlacementState:
|
|||||||
self.collection_state.stale[self.world.player] = True
|
self.collection_state.stale[self.world.player] = True
|
||||||
self.placements.append(source_exit)
|
self.placements.append(source_exit)
|
||||||
self.pairings.append((source_exit.name, target_entrance.name))
|
self.pairings.append((source_exit.name, target_entrance.name))
|
||||||
|
self.entrance_lookup.remove(target_entrance)
|
||||||
|
|
||||||
def test_speculative_connection(self, source_exit: Entrance, target_entrance: Entrance,
|
def test_speculative_connection(self, source_exit: Entrance, target_entrance: Entrance,
|
||||||
usable_exits: set[Entrance]) -> bool:
|
usable_exits: set[Entrance]) -> bool:
|
||||||
@@ -311,7 +345,7 @@ def randomize_entrances(
|
|||||||
preserve_group_order: bool = False,
|
preserve_group_order: bool = False,
|
||||||
er_targets: list[Entrance] | None = None,
|
er_targets: list[Entrance] | None = None,
|
||||||
exits: list[Entrance] | None = None,
|
exits: list[Entrance] | None = None,
|
||||||
on_connect: Callable[[ERPlacementState, list[Entrance]], None] | None = None
|
on_connect: Callable[[ERPlacementState, list[Entrance], list[Entrance]], bool | None] | None = None
|
||||||
) -> ERPlacementState:
|
) -> ERPlacementState:
|
||||||
"""
|
"""
|
||||||
Randomizes Entrances for a single world in the multiworld.
|
Randomizes Entrances for a single world in the multiworld.
|
||||||
@@ -328,14 +362,18 @@ def randomize_entrances(
|
|||||||
:param exits: The list of exits (Entrance objects with no target region) to use for randomization.
|
:param exits: The list of exits (Entrance objects with no target region) to use for randomization.
|
||||||
Remember to be deterministic! If not provided, automatically discovers all valid exits in your world.
|
Remember to be deterministic! If not provided, automatically discovers all valid exits in your world.
|
||||||
:param on_connect: A callback function which allows specifying side effects after a placement is completed
|
:param on_connect: A callback function which allows specifying side effects after a placement is completed
|
||||||
successfully and the underlying collection state has been updated.
|
successfully and the underlying collection state has been updated. The arguments are
|
||||||
|
1. The ER state
|
||||||
|
2. The exits placed in this placement pass
|
||||||
|
3. The entrances they were connected to.
|
||||||
|
If you use on_connect to make additional placements, you are expected to return True to inform
|
||||||
|
GER that an additional sweep is needed.
|
||||||
"""
|
"""
|
||||||
if not world.explicit_indirect_conditions:
|
if not world.explicit_indirect_conditions:
|
||||||
raise EntranceRandomizationError("Entrance randomization requires explicit indirect conditions in order "
|
raise EntranceRandomizationError("Entrance randomization requires explicit indirect conditions in order "
|
||||||
+ "to correctly analyze whether dead end regions can be required in logic.")
|
+ "to correctly analyze whether dead end regions can be required in logic.")
|
||||||
|
|
||||||
start_time = time.perf_counter()
|
start_time = time.perf_counter()
|
||||||
er_state = ERPlacementState(world, coupled)
|
|
||||||
# similar to fill, skip validity checks on entrances if the game is beatable on minimal accessibility
|
# similar to fill, skip validity checks on entrances if the game is beatable on minimal accessibility
|
||||||
perform_validity_check = True
|
perform_validity_check = True
|
||||||
|
|
||||||
@@ -351,23 +389,25 @@ def randomize_entrances(
|
|||||||
|
|
||||||
# used when membership checks are needed on the exit list, e.g. speculative sweep
|
# used when membership checks are needed on the exit list, e.g. speculative sweep
|
||||||
exits_set = set(exits)
|
exits_set = set(exits)
|
||||||
entrance_lookup = EntranceLookup(world.random, coupled, exits_set)
|
|
||||||
for entrance in er_targets:
|
|
||||||
entrance_lookup.add(entrance)
|
|
||||||
|
|
||||||
|
er_state = ERPlacementState(
|
||||||
|
world,
|
||||||
|
EntranceLookup(world.random, coupled, exits_set, er_targets),
|
||||||
|
coupled
|
||||||
|
)
|
||||||
# place the menu region and connected start region(s)
|
# place the menu region and connected start region(s)
|
||||||
er_state.collection_state.update_reachable_regions(world.player)
|
er_state.collection_state.update_reachable_regions(world.player)
|
||||||
|
|
||||||
def do_placement(source_exit: Entrance, target_entrance: Entrance) -> None:
|
def do_placement(source_exit: Entrance, target_entrance: Entrance) -> None:
|
||||||
placed_exits, removed_entrances = er_state.connect(source_exit, target_entrance)
|
placed_exits, paired_entrances = er_state.connect(source_exit, target_entrance)
|
||||||
# remove the placed targets from consideration
|
|
||||||
for entrance in removed_entrances:
|
|
||||||
entrance_lookup.remove(entrance)
|
|
||||||
# propagate new connections
|
# propagate new connections
|
||||||
er_state.collection_state.update_reachable_regions(world.player)
|
er_state.collection_state.update_reachable_regions(world.player)
|
||||||
er_state.collection_state.sweep_for_advancements()
|
er_state.collection_state.sweep_for_advancements()
|
||||||
if on_connect:
|
if on_connect:
|
||||||
on_connect(er_state, placed_exits)
|
change = on_connect(er_state, placed_exits, paired_entrances)
|
||||||
|
if change:
|
||||||
|
er_state.collection_state.update_reachable_regions(world.player)
|
||||||
|
er_state.collection_state.sweep_for_advancements()
|
||||||
|
|
||||||
def needs_speculative_sweep(dead_end: bool, require_new_exits: bool, placeable_exits: list[Entrance]) -> bool:
|
def needs_speculative_sweep(dead_end: bool, require_new_exits: bool, placeable_exits: list[Entrance]) -> bool:
|
||||||
# speculative sweep is expensive. We currently only do it as a last resort, if we might cap off the graph
|
# speculative sweep is expensive. We currently only do it as a last resort, if we might cap off the graph
|
||||||
@@ -388,12 +428,12 @@ def randomize_entrances(
|
|||||||
# check to see if we are proposing the last placement
|
# check to see if we are proposing the last placement
|
||||||
if not coupled:
|
if not coupled:
|
||||||
# in uncoupled, this check is easy as there will only be one target.
|
# in uncoupled, this check is easy as there will only be one target.
|
||||||
is_last_placement = len(entrance_lookup) == 1
|
is_last_placement = len(er_state.entrance_lookup) == 1
|
||||||
else:
|
else:
|
||||||
# a bit harder, there may be 1 or 2 targets depending on if the exit to place is one way or two way.
|
# a bit harder, there may be 1 or 2 targets depending on if the exit to place is one way or two way.
|
||||||
# if it is two way, we can safely assume that one of the targets is the logical pair of the exit.
|
# if it is two way, we can safely assume that one of the targets is the logical pair of the exit.
|
||||||
desired_target_count = 2 if placeable_exits[0].randomization_type == EntranceType.TWO_WAY else 1
|
desired_target_count = 2 if placeable_exits[0].randomization_type == EntranceType.TWO_WAY else 1
|
||||||
is_last_placement = len(entrance_lookup) == desired_target_count
|
is_last_placement = len(er_state.entrance_lookup) == desired_target_count
|
||||||
# if it's not the last placement, we need a sweep
|
# if it's not the last placement, we need a sweep
|
||||||
return not is_last_placement
|
return not is_last_placement
|
||||||
|
|
||||||
@@ -402,7 +442,7 @@ def randomize_entrances(
|
|||||||
placeable_exits = er_state.find_placeable_exits(perform_validity_check, exits)
|
placeable_exits = er_state.find_placeable_exits(perform_validity_check, exits)
|
||||||
for source_exit in placeable_exits:
|
for source_exit in placeable_exits:
|
||||||
target_groups = target_group_lookup[source_exit.randomization_group]
|
target_groups = target_group_lookup[source_exit.randomization_group]
|
||||||
for target_entrance in entrance_lookup.get_targets(target_groups, dead_end, preserve_group_order):
|
for target_entrance in er_state.entrance_lookup.get_targets(target_groups, dead_end, preserve_group_order):
|
||||||
# when requiring new exits, ideally we would like to make it so that every placement increases
|
# when requiring new exits, ideally we would like to make it so that every placement increases
|
||||||
# (or keeps the same number of) reachable exits. The goal is to continue to expand the search space
|
# (or keeps the same number of) reachable exits. The goal is to continue to expand the search space
|
||||||
# so that we do not crash. In the interest of performance and bias reduction, generally, just checking
|
# so that we do not crash. In the interest of performance and bias reduction, generally, just checking
|
||||||
@@ -420,7 +460,7 @@ def randomize_entrances(
|
|||||||
else:
|
else:
|
||||||
# no source exits had any valid target so this stage is deadlocked. retries may be implemented if early
|
# no source exits had any valid target so this stage is deadlocked. retries may be implemented if early
|
||||||
# deadlocking is a frequent issue.
|
# deadlocking is a frequent issue.
|
||||||
lookup = entrance_lookup.dead_ends if dead_end else entrance_lookup.others
|
lookup = er_state.entrance_lookup.dead_ends if dead_end else er_state.entrance_lookup.others
|
||||||
|
|
||||||
# if we're in a stage where we're trying to get to new regions, we could also enter this
|
# if we're in a stage where we're trying to get to new regions, we could also enter this
|
||||||
# branch in a success state (when all regions of the preferred type have been placed, but there are still
|
# branch in a success state (when all regions of the preferred type have been placed, but there are still
|
||||||
@@ -466,21 +506,21 @@ def randomize_entrances(
|
|||||||
f"All unplaced exits: {unplaced_exits}")
|
f"All unplaced exits: {unplaced_exits}")
|
||||||
|
|
||||||
# stage 1 - try to place all the non-dead-end entrances
|
# stage 1 - try to place all the non-dead-end entrances
|
||||||
while entrance_lookup.others:
|
while er_state.entrance_lookup.others:
|
||||||
if not find_pairing(dead_end=False, require_new_exits=True):
|
if not find_pairing(dead_end=False, require_new_exits=True):
|
||||||
break
|
break
|
||||||
# stage 2 - try to place all the dead-end entrances
|
# stage 2 - try to place all the dead-end entrances
|
||||||
while entrance_lookup.dead_ends:
|
while er_state.entrance_lookup.dead_ends:
|
||||||
if not find_pairing(dead_end=True, require_new_exits=True):
|
if not find_pairing(dead_end=True, require_new_exits=True):
|
||||||
break
|
break
|
||||||
# stage 3 - all the regions should be placed at this point. We now need to connect dangling edges
|
# stage 3 - all the regions should be placed at this point. We now need to connect dangling edges
|
||||||
# stage 3a - get the rest of the dead ends (e.g. second entrances into already-visited regions)
|
# stage 3a - get the rest of the dead ends (e.g. second entrances into already-visited regions)
|
||||||
# doing this before the non-dead-ends is important to ensure there are enough connections to
|
# doing this before the non-dead-ends is important to ensure there are enough connections to
|
||||||
# go around
|
# go around
|
||||||
while entrance_lookup.dead_ends:
|
while er_state.entrance_lookup.dead_ends:
|
||||||
find_pairing(dead_end=True, require_new_exits=False)
|
find_pairing(dead_end=True, require_new_exits=False)
|
||||||
# stage 3b - tie all the other loose ends connecting visited regions to each other
|
# stage 3b - tie all the other loose ends connecting visited regions to each other
|
||||||
while entrance_lookup.others:
|
while er_state.entrance_lookup.others:
|
||||||
find_pairing(dead_end=False, require_new_exits=False)
|
find_pairing(dead_end=False, require_new_exits=False)
|
||||||
|
|
||||||
running_time = time.perf_counter() - start_time
|
running_time = time.perf_counter() - start_time
|
||||||
|
|||||||
@@ -53,10 +53,6 @@ Name: "full"; Description: "Full installation"
|
|||||||
Name: "minimal"; Description: "Minimal installation"
|
Name: "minimal"; Description: "Minimal installation"
|
||||||
Name: "custom"; Description: "Custom installation"; Flags: iscustom
|
Name: "custom"; Description: "Custom installation"; Flags: iscustom
|
||||||
|
|
||||||
[Components]
|
|
||||||
Name: "core"; Description: "Archipelago"; Types: full minimal custom; Flags: fixed
|
|
||||||
Name: "lttp_sprites"; Description: "Download ""A Link to the Past"" player sprites"; Types: full;
|
|
||||||
|
|
||||||
[Dirs]
|
[Dirs]
|
||||||
NAME: "{app}"; Flags: setntfscompression; Permissions: everyone-modify users-modify authusers-modify;
|
NAME: "{app}"; Flags: setntfscompression; Permissions: everyone-modify users-modify authusers-modify;
|
||||||
|
|
||||||
@@ -76,7 +72,6 @@ Name: "{commondesktop}\{#MyAppName} Launcher"; Filename: "{app}\ArchipelagoLaunc
|
|||||||
[Run]
|
[Run]
|
||||||
|
|
||||||
Filename: "{tmp}\vc_redist.x64.exe"; Parameters: "/passive /norestart"; Check: IsVCRedist64BitNeeded; StatusMsg: "Installing VC++ redistributable..."
|
Filename: "{tmp}\vc_redist.x64.exe"; Parameters: "/passive /norestart"; Check: IsVCRedist64BitNeeded; StatusMsg: "Installing VC++ redistributable..."
|
||||||
Filename: "{app}\ArchipelagoLttPAdjuster"; Parameters: "--update_sprites"; StatusMsg: "Updating Sprite Library..."; Components: lttp_sprites
|
|
||||||
Filename: "{app}\ArchipelagoLauncher"; Parameters: "--update_settings"; StatusMsg: "Updating host.yaml..."; Flags: runasoriginaluser runhidden
|
Filename: "{app}\ArchipelagoLauncher"; Parameters: "--update_settings"; StatusMsg: "Updating host.yaml..."; Flags: runasoriginaluser runhidden
|
||||||
Filename: "{app}\ArchipelagoLauncher"; Description: "{cm:LaunchProgram,{#StringChange('Launcher', '&', '&&')}}"; Flags: nowait postinstall skipifsilent
|
Filename: "{app}\ArchipelagoLauncher"; Description: "{cm:LaunchProgram,{#StringChange('Launcher', '&', '&&')}}"; Flags: nowait postinstall skipifsilent
|
||||||
|
|
||||||
@@ -138,11 +133,6 @@ Root: HKCR; Subkey: "{#MyAppName}kdl3patch"; ValueData: "Arc
|
|||||||
Root: HKCR; Subkey: "{#MyAppName}kdl3patch\DefaultIcon"; ValueData: "{app}\ArchipelagoSNIClient.exe,0"; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}kdl3patch\DefaultIcon"; ValueData: "{app}\ArchipelagoSNIClient.exe,0"; ValueType: string; ValueName: "";
|
||||||
Root: HKCR; Subkey: "{#MyAppName}kdl3patch\shell\open\command"; ValueData: """{app}\ArchipelagoSNIClient.exe"" ""%1"""; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}kdl3patch\shell\open\command"; ValueData: """{app}\ArchipelagoSNIClient.exe"" ""%1"""; ValueType: string; ValueName: "";
|
||||||
|
|
||||||
Root: HKCR; Subkey: ".apmc"; ValueData: "{#MyAppName}mcdata"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
|
||||||
Root: HKCR; Subkey: "{#MyAppName}mcdata"; ValueData: "Archipelago Minecraft Data"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
|
||||||
Root: HKCR; Subkey: "{#MyAppName}mcdata\DefaultIcon"; ValueData: "{app}\ArchipelagoMinecraftClient.exe,0"; ValueType: string; ValueName: "";
|
|
||||||
Root: HKCR; Subkey: "{#MyAppName}mcdata\shell\open\command"; ValueData: """{app}\ArchipelagoMinecraftClient.exe"" ""%1"""; ValueType: string; ValueName: "";
|
|
||||||
|
|
||||||
Root: HKCR; Subkey: ".apz5"; ValueData: "{#MyAppName}n64zpf"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: ".apz5"; ValueData: "{#MyAppName}n64zpf"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
||||||
Root: HKCR; Subkey: "{#MyAppName}n64zpf"; ValueData: "Archipelago Ocarina of Time Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}n64zpf"; ValueData: "Archipelago Ocarina of Time Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
||||||
Root: HKCR; Subkey: "{#MyAppName}n64zpf\DefaultIcon"; ValueData: "{app}\ArchipelagoOoTClient.exe,0"; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}n64zpf\DefaultIcon"; ValueData: "{app}\ArchipelagoOoTClient.exe,0"; ValueType: string; ValueName: "";
|
||||||
|
|||||||
178
kvui.py
178
kvui.py
@@ -60,7 +60,10 @@ from kivymd.uix.dialog import MDDialog, MDDialogHeadlineText, MDDialogSupporting
|
|||||||
from kivymd.uix.gridlayout import MDGridLayout
|
from kivymd.uix.gridlayout import MDGridLayout
|
||||||
from kivymd.uix.floatlayout import MDFloatLayout
|
from kivymd.uix.floatlayout import MDFloatLayout
|
||||||
from kivymd.uix.boxlayout import MDBoxLayout
|
from kivymd.uix.boxlayout import MDBoxLayout
|
||||||
from kivymd.uix.tab.tab import MDTabsSecondary, MDTabsItem, MDTabsItemText, MDTabsCarousel
|
from kivymd.uix.navigationbar import MDNavigationBar, MDNavigationItem
|
||||||
|
from kivymd.uix.screen import MDScreen
|
||||||
|
from kivymd.uix.screenmanager import MDScreenManager
|
||||||
|
|
||||||
from kivymd.uix.menu import MDDropdownMenu
|
from kivymd.uix.menu import MDDropdownMenu
|
||||||
from kivymd.uix.menu.menu import MDDropdownTextItem
|
from kivymd.uix.menu.menu import MDDropdownTextItem
|
||||||
from kivymd.uix.dropdownitem import MDDropDownItem, MDDropDownItemText
|
from kivymd.uix.dropdownitem import MDDropDownItem, MDDropDownItemText
|
||||||
@@ -726,6 +729,10 @@ class MessageBox(Popup):
|
|||||||
self.height += max(0, label.height - 18)
|
self.height += max(0, label.height - 18)
|
||||||
|
|
||||||
|
|
||||||
|
class MDNavigationItemBase(MDNavigationItem):
|
||||||
|
text = StringProperty(None)
|
||||||
|
|
||||||
|
|
||||||
class ButtonsPrompt(MDDialog):
|
class ButtonsPrompt(MDDialog):
|
||||||
def __init__(self, title: str, text: str, response: typing.Callable[[str], None],
|
def __init__(self, title: str, text: str, response: typing.Callable[[str], None],
|
||||||
*prompts: str, **kwargs) -> None:
|
*prompts: str, **kwargs) -> None:
|
||||||
@@ -766,58 +773,34 @@ class ButtonsPrompt(MDDialog):
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ClientTabs(MDTabsSecondary):
|
class MDScreenManagerBase(MDScreenManager):
|
||||||
carousel: MDTabsCarousel
|
current_tab: MDNavigationItemBase
|
||||||
lock_swiping = True
|
local_screen_names: list[str]
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, **kwargs):
|
||||||
self.carousel = MDTabsCarousel(lock_swiping=True, anim_move_duration=0.2)
|
super().__init__(**kwargs)
|
||||||
super().__init__(*args, MDDivider(size_hint_y=None, height=dp(1)), self.carousel, **kwargs)
|
self.local_screen_names = []
|
||||||
self.size_hint_y = 1
|
|
||||||
|
|
||||||
def _check_panel_height(self, *args):
|
def add_widget(self, widget: Widget, *args, **kwargs) -> None:
|
||||||
self.ids.tab_scroll.height = dp(38)
|
super().add_widget(widget, *args, **kwargs)
|
||||||
|
if "index" in kwargs:
|
||||||
def update_indicator(
|
self.local_screen_names.insert(kwargs["index"], widget.name)
|
||||||
self, x: float = 0.0, w: float = 0.0, instance: MDTabsItem = None
|
|
||||||
) -> None:
|
|
||||||
def update_indicator(*args):
|
|
||||||
indicator_pos = (0, 0)
|
|
||||||
indicator_size = (0, 0)
|
|
||||||
|
|
||||||
item_text_object = self._get_tab_item_text_icon_object()
|
|
||||||
|
|
||||||
if item_text_object:
|
|
||||||
indicator_pos = (
|
|
||||||
instance.x + dp(12),
|
|
||||||
self.indicator.pos[1]
|
|
||||||
if not self._tabs_carousel
|
|
||||||
else self._tabs_carousel.height,
|
|
||||||
)
|
|
||||||
indicator_size = (
|
|
||||||
instance.width - dp(24),
|
|
||||||
self.indicator_height,
|
|
||||||
)
|
|
||||||
|
|
||||||
Animation(
|
|
||||||
pos=indicator_pos,
|
|
||||||
size=indicator_size,
|
|
||||||
d=0 if not self.indicator_anim else self.indicator_duration,
|
|
||||||
t=self.indicator_transition,
|
|
||||||
).start(self.indicator)
|
|
||||||
|
|
||||||
if not instance:
|
|
||||||
self.indicator.pos = (x, self.indicator.pos[1])
|
|
||||||
self.indicator.size = (w, self.indicator_height)
|
|
||||||
else:
|
else:
|
||||||
Clock.schedule_once(update_indicator)
|
self.local_screen_names.append(widget.name)
|
||||||
|
|
||||||
def remove_tab(self, tab, content=None):
|
def switch_screens(self, new_tab: MDNavigationItemBase) -> None:
|
||||||
if content is None:
|
"""
|
||||||
content = tab.content
|
Called whenever the user clicks a tab to switch to a different screen.
|
||||||
self.ids.container.remove_widget(tab)
|
|
||||||
self.carousel.remove_widget(content)
|
:param new_tab: The new screen to switch to's tab.
|
||||||
self.on_size(self, self.size)
|
"""
|
||||||
|
name = new_tab.text
|
||||||
|
if self.local_screen_names.index(name) > self.local_screen_names.index(self.current_screen.name):
|
||||||
|
self.transition.direction = "left"
|
||||||
|
else:
|
||||||
|
self.transition.direction = "right"
|
||||||
|
self.current = name
|
||||||
|
self.current_tab = new_tab
|
||||||
|
|
||||||
|
|
||||||
class CommandButton(MDButton, MDTooltip):
|
class CommandButton(MDButton, MDTooltip):
|
||||||
@@ -845,6 +828,9 @@ class GameManager(ThemedApp):
|
|||||||
main_area_container: MDGridLayout
|
main_area_container: MDGridLayout
|
||||||
""" subclasses can add more columns beside the tabs """
|
""" subclasses can add more columns beside the tabs """
|
||||||
|
|
||||||
|
tabs: MDNavigationBar
|
||||||
|
screens: MDScreenManagerBase
|
||||||
|
|
||||||
def __init__(self, ctx: context_type):
|
def __init__(self, ctx: context_type):
|
||||||
self.title = self.base_title
|
self.title = self.base_title
|
||||||
self.ctx = ctx
|
self.ctx = ctx
|
||||||
@@ -874,7 +860,7 @@ class GameManager(ThemedApp):
|
|||||||
@property
|
@property
|
||||||
def tab_count(self):
|
def tab_count(self):
|
||||||
if hasattr(self, "tabs"):
|
if hasattr(self, "tabs"):
|
||||||
return max(1, len(self.tabs.tab_list))
|
return max(1, len(self.tabs.children))
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def on_start(self):
|
def on_start(self):
|
||||||
@@ -914,30 +900,32 @@ class GameManager(ThemedApp):
|
|||||||
self.grid.add_widget(self.progressbar)
|
self.grid.add_widget(self.progressbar)
|
||||||
|
|
||||||
# middle part
|
# middle part
|
||||||
self.tabs = ClientTabs(pos_hint={"center_x": 0.5, "center_y": 0.5})
|
self.screens = MDScreenManagerBase(pos_hint={"center_x": 0.5})
|
||||||
self.tabs.add_widget(MDTabsItem(MDTabsItemText(text="All" if len(self.logging_pairs) > 1 else "Archipelago")))
|
self.tabs = MDNavigationBar(orientation="horizontal", size_hint_y=None, height=dp(40), set_bars_color=True)
|
||||||
self.log_panels["All"] = self.tabs.default_tab_content = UILog(*(logging.getLogger(logger_name)
|
# bind the method to the bar for back compatibility
|
||||||
for logger_name, name in
|
self.tabs.remove_tab = self.remove_client_tab
|
||||||
self.logging_pairs))
|
self.screens.current_tab = self.add_client_tab(
|
||||||
self.tabs.carousel.add_widget(self.tabs.default_tab_content)
|
"All" if len(self.logging_pairs) > 1 else "Archipelago",
|
||||||
|
UILog(*(logging.getLogger(logger_name) for logger_name, name in self.logging_pairs)),
|
||||||
|
)
|
||||||
|
self.log_panels["All"] = self.screens.current_tab.content
|
||||||
|
self.screens.current_tab.active = True
|
||||||
|
|
||||||
for logger_name, display_name in self.logging_pairs:
|
for logger_name, display_name in self.logging_pairs:
|
||||||
bridge_logger = logging.getLogger(logger_name)
|
bridge_logger = logging.getLogger(logger_name)
|
||||||
self.log_panels[display_name] = UILog(bridge_logger)
|
self.log_panels[display_name] = UILog(bridge_logger)
|
||||||
if len(self.logging_pairs) > 1:
|
if len(self.logging_pairs) > 1:
|
||||||
panel = MDTabsItem(MDTabsItemText(text=display_name))
|
self.add_client_tab(display_name, self.log_panels[display_name])
|
||||||
panel.content = self.log_panels[display_name]
|
|
||||||
# show Archipelago tab if other logging is present
|
|
||||||
self.tabs.carousel.add_widget(panel.content)
|
|
||||||
self.tabs.add_widget(panel)
|
|
||||||
|
|
||||||
hint_panel = self.add_client_tab("Hints", HintLayout())
|
|
||||||
self.hint_log = HintLog(self.json_to_kivy_parser)
|
self.hint_log = HintLog(self.json_to_kivy_parser)
|
||||||
|
hint_panel = self.add_client_tab("Hints", HintLayout(self.hint_log))
|
||||||
self.log_panels["Hints"] = hint_panel.content
|
self.log_panels["Hints"] = hint_panel.content
|
||||||
hint_panel.content.add_widget(self.hint_log)
|
|
||||||
|
|
||||||
self.main_area_container = MDGridLayout(size_hint_y=1, rows=1)
|
self.main_area_container = MDGridLayout(size_hint_y=1, rows=1)
|
||||||
self.main_area_container.add_widget(self.tabs)
|
tab_container = MDGridLayout(size_hint_y=1, cols=1)
|
||||||
|
tab_container.add_widget(self.tabs)
|
||||||
|
tab_container.add_widget(self.screens)
|
||||||
|
self.main_area_container.add_widget(tab_container)
|
||||||
|
|
||||||
self.grid.add_widget(self.main_area_container)
|
self.grid.add_widget(self.main_area_container)
|
||||||
|
|
||||||
@@ -974,25 +962,61 @@ class GameManager(ThemedApp):
|
|||||||
|
|
||||||
return self.container
|
return self.container
|
||||||
|
|
||||||
def add_client_tab(self, title: str, content: Widget, index: int = -1) -> Widget:
|
def add_client_tab(self, title: str, content: Widget, index: int = -1) -> MDNavigationItemBase:
|
||||||
"""Adds a new tab to the client window with a given title, and provides a given Widget as its content.
|
"""
|
||||||
Returns the new tab widget, with the provided content being placed on the tab as content."""
|
Adds a new tab to the client window with a given title, and provides a given Widget as its content.
|
||||||
new_tab = MDTabsItem(MDTabsItemText(text=title))
|
Returns the new tab widget, with the provided content being placed on the tab as content.
|
||||||
|
|
||||||
|
:param title: The title of the tab.
|
||||||
|
:param content: The Widget to be added as content for this tab's new MDScreen. Will also be added to the
|
||||||
|
returned tab as tab.content.
|
||||||
|
:param index: The index to insert the tab at. Defaults to -1, meaning the tab will be appended to the end.
|
||||||
|
|
||||||
|
:return: The new tab.
|
||||||
|
"""
|
||||||
|
if self.tabs.children:
|
||||||
|
self.tabs.add_widget(MDDivider(orientation="vertical"))
|
||||||
|
new_tab = MDNavigationItemBase(text=title)
|
||||||
new_tab.content = content
|
new_tab.content = content
|
||||||
if -1 < index <= len(self.tabs.carousel.slides):
|
new_screen = MDScreen(name=title)
|
||||||
new_tab.bind(on_release=self.tabs.set_active_item)
|
new_screen.add_widget(content)
|
||||||
new_tab._tabs = self.tabs
|
if -1 < index <= len(self.tabs.children):
|
||||||
self.tabs.ids.container.add_widget(new_tab, index=index)
|
remapped_index = len(self.tabs.children) - index
|
||||||
self.tabs.carousel.add_widget(new_tab.content, index=len(self.tabs.carousel.slides) - index)
|
self.tabs.add_widget(new_tab, index=remapped_index)
|
||||||
|
self.screens.add_widget(new_screen, index=index)
|
||||||
else:
|
else:
|
||||||
self.tabs.add_widget(new_tab)
|
self.tabs.add_widget(new_tab)
|
||||||
self.tabs.carousel.add_widget(new_tab.content)
|
self.screens.add_widget(new_screen)
|
||||||
return new_tab
|
return new_tab
|
||||||
|
|
||||||
|
def remove_client_tab(self, tab: MDNavigationItemBase) -> None:
|
||||||
|
"""
|
||||||
|
Called to remove a tab and its screen.
|
||||||
|
|
||||||
|
:param tab: The tab to remove.
|
||||||
|
"""
|
||||||
|
tab_index = self.tabs.children.index(tab)
|
||||||
|
# if the tab is currently active we need to swap before removing it
|
||||||
|
if tab == self.screens.current_tab:
|
||||||
|
if not tab_index:
|
||||||
|
# account for the divider
|
||||||
|
swap_index = tab_index + 2
|
||||||
|
else:
|
||||||
|
swap_index = tab_index - 2
|
||||||
|
self.tabs.children[swap_index].on_release()
|
||||||
|
# self.screens.switch_screens(self.tabs.children[swap_index])
|
||||||
|
# get the divider to the left if we can
|
||||||
|
if not tab_index:
|
||||||
|
divider_index = tab_index + 1
|
||||||
|
else:
|
||||||
|
divider_index = tab_index - 1
|
||||||
|
self.tabs.remove_widget(self.tabs.children[divider_index])
|
||||||
|
self.tabs.remove_widget(tab)
|
||||||
|
self.screens.remove_widget(self.screens.get_screen(tab.text))
|
||||||
|
|
||||||
def update_texts(self, dt):
|
def update_texts(self, dt):
|
||||||
for slide in self.tabs.carousel.slides:
|
if hasattr(self.screens.current_tab.content, "fix_heights"):
|
||||||
if hasattr(slide, "fix_heights"):
|
getattr(self.screens.current_tab.content, "fix_heights")()
|
||||||
slide.fix_heights() # TODO: remove this when Kivy fixes this upstream
|
|
||||||
if self.ctx.server:
|
if self.ctx.server:
|
||||||
self.title = self.base_title + " " + Utils.__version__ + \
|
self.title = self.base_title + " " + Utils.__version__ + \
|
||||||
f" | Connected to: {self.ctx.server_address} " \
|
f" | Connected to: {self.ctx.server_address} " \
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
[pytest]
|
[pytest]
|
||||||
python_files = test_*.py Test*.py __init__.py # TODO: remove Test* once all worlds have been ported
|
python_files = test_*.py Test*.py **/test*/**/__init__.py # TODO: remove Test* once all worlds have been ported
|
||||||
python_classes = Test
|
python_classes = Test
|
||||||
python_functions = test
|
python_functions = test
|
||||||
testpaths =
|
testpaths =
|
||||||
|
|||||||
@@ -754,7 +754,12 @@ class Settings(Group):
|
|||||||
return super().__getattribute__(key)
|
return super().__getattribute__(key)
|
||||||
# directly import world and grab settings class
|
# directly import world and grab settings class
|
||||||
world_mod, world_cls_name = _world_settings_name_cache[key].rsplit(".", 1)
|
world_mod, world_cls_name = _world_settings_name_cache[key].rsplit(".", 1)
|
||||||
world = cast(type, getattr(__import__(world_mod, fromlist=[world_cls_name]), world_cls_name))
|
try:
|
||||||
|
world = cast(type, getattr(__import__(world_mod, fromlist=[world_cls_name]), world_cls_name))
|
||||||
|
except AttributeError:
|
||||||
|
import warnings
|
||||||
|
warnings.warn(f"World {world_cls_name} failed to initialize properly.")
|
||||||
|
return super().__getattribute__(key)
|
||||||
assert getattr(world, "settings_key") == key
|
assert getattr(world, "settings_key") == key
|
||||||
try:
|
try:
|
||||||
cls_or_name = world.__annotations__["settings"]
|
cls_or_name = world.__annotations__["settings"]
|
||||||
|
|||||||
48
setup.py
48
setup.py
@@ -9,6 +9,7 @@ import subprocess
|
|||||||
import sys
|
import sys
|
||||||
import sysconfig
|
import sysconfig
|
||||||
import threading
|
import threading
|
||||||
|
import urllib.error
|
||||||
import urllib.request
|
import urllib.request
|
||||||
import warnings
|
import warnings
|
||||||
import zipfile
|
import zipfile
|
||||||
@@ -16,6 +17,10 @@ from collections.abc import Iterable, Sequence
|
|||||||
from hashlib import sha3_512
|
from hashlib import sha3_512
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
|
||||||
|
SNI_VERSION = "v0.0.100" # change back to "latest" once tray icon issues are fixed
|
||||||
|
|
||||||
|
|
||||||
# This is a bit jank. We need cx-Freeze to be able to run anything from this script, so install it
|
# This is a bit jank. We need cx-Freeze to be able to run anything from this script, so install it
|
||||||
requirement = 'cx-Freeze==8.0.0'
|
requirement = 'cx-Freeze==8.0.0'
|
||||||
try:
|
try:
|
||||||
@@ -25,7 +30,7 @@ try:
|
|||||||
install_cx_freeze = False
|
install_cx_freeze = False
|
||||||
except pkg_resources.ResolutionError:
|
except pkg_resources.ResolutionError:
|
||||||
install_cx_freeze = True
|
install_cx_freeze = True
|
||||||
except ImportError:
|
except (AttributeError, ImportError):
|
||||||
install_cx_freeze = True
|
install_cx_freeze = True
|
||||||
pkg_resources = None # type: ignore[assignment]
|
pkg_resources = None # type: ignore[assignment]
|
||||||
|
|
||||||
@@ -57,13 +62,11 @@ from Utils import version_tuple, is_windows, is_linux
|
|||||||
from Cython.Build import cythonize
|
from Cython.Build import cythonize
|
||||||
|
|
||||||
|
|
||||||
# On Python < 3.10 LogicMixin is not currently supported.
|
|
||||||
non_apworlds: set[str] = {
|
non_apworlds: set[str] = {
|
||||||
"A Link to the Past",
|
"A Link to the Past",
|
||||||
"Adventure",
|
"Adventure",
|
||||||
"ArchipIDLE",
|
"ArchipIDLE",
|
||||||
"Archipelago",
|
"Archipelago",
|
||||||
"Clique",
|
|
||||||
"Lufia II Ancient Cave",
|
"Lufia II Ancient Cave",
|
||||||
"Meritous",
|
"Meritous",
|
||||||
"Ocarina of Time",
|
"Ocarina of Time",
|
||||||
@@ -75,9 +78,6 @@ non_apworlds: set[str] = {
|
|||||||
"Wargroove",
|
"Wargroove",
|
||||||
}
|
}
|
||||||
|
|
||||||
# LogicMixin is broken before 3.10 import revamp
|
|
||||||
if sys.version_info < (3,10):
|
|
||||||
non_apworlds.add("Hollow Knight")
|
|
||||||
|
|
||||||
def download_SNI() -> None:
|
def download_SNI() -> None:
|
||||||
print("Updating SNI")
|
print("Updating SNI")
|
||||||
@@ -90,7 +90,8 @@ def download_SNI() -> None:
|
|||||||
machine_name = platform.machine().lower()
|
machine_name = platform.machine().lower()
|
||||||
# force amd64 on macos until we have universal2 sni, otherwise resolve to GOARCH
|
# force amd64 on macos until we have universal2 sni, otherwise resolve to GOARCH
|
||||||
machine_name = "universal" if platform_name == "darwin" else machine_to_go.get(machine_name, machine_name)
|
machine_name = "universal" if platform_name == "darwin" else machine_to_go.get(machine_name, machine_name)
|
||||||
with urllib.request.urlopen("https://api.github.com/repos/alttpo/sni/releases/latest") as request:
|
sni_version_ref = "latest" if SNI_VERSION == "latest" else f"tags/{SNI_VERSION}"
|
||||||
|
with urllib.request.urlopen(f"https://api.github.com/repos/alttpo/SNI/releases/{sni_version_ref}") as request:
|
||||||
data = json.load(request)
|
data = json.load(request)
|
||||||
files = data["assets"]
|
files = data["assets"]
|
||||||
|
|
||||||
@@ -104,8 +105,8 @@ def download_SNI() -> None:
|
|||||||
# prefer "many" builds
|
# prefer "many" builds
|
||||||
if "many" in download_url:
|
if "many" in download_url:
|
||||||
break
|
break
|
||||||
# prefer the correct windows or windows7 build
|
# prefer non-windows7 builds to get up-to-date dependencies
|
||||||
if platform_name == "windows" and ("windows7" in download_url) == (sys.version_info < (3, 9)):
|
if platform_name == "windows" and "windows7" not in download_url:
|
||||||
break
|
break
|
||||||
|
|
||||||
if source_url and source_url.endswith(".zip"):
|
if source_url and source_url.endswith(".zip"):
|
||||||
@@ -144,15 +145,16 @@ def download_SNI() -> None:
|
|||||||
print(f"No SNI found for system spec {platform_name} {machine_name}")
|
print(f"No SNI found for system spec {platform_name} {machine_name}")
|
||||||
|
|
||||||
|
|
||||||
signtool: str | None
|
signtool: str | None = None
|
||||||
if os.path.exists("X:/pw.txt"):
|
try:
|
||||||
print("Using signtool")
|
with urllib.request.urlopen('http://192.168.206.4:12345/connector/status') as response:
|
||||||
with open("X:/pw.txt", encoding="utf-8-sig") as f:
|
html = response.read()
|
||||||
pw = f.read()
|
if b"status=OK\n" in html:
|
||||||
signtool = r'signtool sign /f X:/_SITS_Zertifikat_.pfx /p "' + pw + \
|
signtool = (r'signtool sign /sha1 6df76fe776b82869a5693ddcb1b04589cffa6faf /fd sha256 /td sha256 '
|
||||||
r'" /fd sha256 /td sha256 /tr http://timestamp.digicert.com/ '
|
r'/tr http://timestamp.digicert.com/ ')
|
||||||
else:
|
print("Using signtool")
|
||||||
signtool = None
|
except (ConnectionError, TimeoutError, urllib.error.URLError) as e:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
build_platform = sysconfig.get_platform()
|
build_platform = sysconfig.get_platform()
|
||||||
@@ -197,9 +199,10 @@ extra_libs = ["libssl.so", "libcrypto.so"] if is_linux else []
|
|||||||
|
|
||||||
|
|
||||||
def remove_sprites_from_folder(folder: Path) -> None:
|
def remove_sprites_from_folder(folder: Path) -> None:
|
||||||
for file in os.listdir(folder):
|
if os.path.isdir(folder):
|
||||||
if file != ".gitignore":
|
for file in os.listdir(folder):
|
||||||
os.remove(folder / file)
|
if file != ".gitignore":
|
||||||
|
os.remove(folder / file)
|
||||||
|
|
||||||
|
|
||||||
def _threaded_hash(filepath: str | Path) -> str:
|
def _threaded_hash(filepath: str | Path) -> str:
|
||||||
@@ -408,13 +411,14 @@ class BuildExeCommand(cx_Freeze.command.build_exe.build_exe):
|
|||||||
os.system(signtool + os.path.join(self.buildfolder, "lib", "worlds", "oot", "data", *exe_path))
|
os.system(signtool + os.path.join(self.buildfolder, "lib", "worlds", "oot", "data", *exe_path))
|
||||||
|
|
||||||
remove_sprites_from_folder(self.buildfolder / "data" / "sprites" / "alttpr")
|
remove_sprites_from_folder(self.buildfolder / "data" / "sprites" / "alttpr")
|
||||||
|
remove_sprites_from_folder(self.buildfolder / "data" / "sprites" / "alttp" / "remote")
|
||||||
|
|
||||||
self.create_manifest()
|
self.create_manifest()
|
||||||
|
|
||||||
if is_windows:
|
if is_windows:
|
||||||
# Inno setup stuff
|
# Inno setup stuff
|
||||||
with open("setup.ini", "w") as f:
|
with open("setup.ini", "w") as f:
|
||||||
min_supported_windows = "6.2.9200" if sys.version_info > (3, 9) else "6.0.6000"
|
min_supported_windows = "6.2.9200"
|
||||||
f.write(f"[Data]\nsource_path={self.buildfolder}\nmin_windows={min_supported_windows}\n")
|
f.write(f"[Data]\nsource_path={self.buildfolder}\nmin_windows={min_supported_windows}\n")
|
||||||
with open("installdelete.iss", "w") as f:
|
with open("installdelete.iss", "w") as f:
|
||||||
f.writelines("Type: filesandordirs; Name: \"{app}\\lib\\worlds\\"+world_directory+"\"\n"
|
f.writelines("Type: filesandordirs; Name: \"{app}\\lib\\worlds\\"+world_directory+"\"\n"
|
||||||
|
|||||||
@@ -29,14 +29,9 @@ def run_locations_benchmark():
|
|||||||
|
|
||||||
rule_iterations: int = 100_000
|
rule_iterations: int = 100_000
|
||||||
|
|
||||||
if sys.version_info >= (3, 9):
|
@staticmethod
|
||||||
@staticmethod
|
def format_times_from_counter(counter: collections.Counter[str], top: int = 5) -> str:
|
||||||
def format_times_from_counter(counter: collections.Counter[str], top: int = 5) -> str:
|
return "\n".join(f" {time:.4f} in {name}" for name, time in counter.most_common(top))
|
||||||
return "\n".join(f" {time:.4f} in {name}" for name, time in counter.most_common(top))
|
|
||||||
else:
|
|
||||||
@staticmethod
|
|
||||||
def format_times_from_counter(counter: collections.Counter, top: int = 5) -> str:
|
|
||||||
return "\n".join(f" {time:.4f} in {name}" for name, time in counter.most_common(top))
|
|
||||||
|
|
||||||
def location_test(self, test_location: Location, state: CollectionState, state_name: str) -> float:
|
def location_test(self, test_location: Location, state: CollectionState, state_name: str) -> float:
|
||||||
with TimeIt(f"{test_location.game} {self.rule_iterations} "
|
with TimeIt(f"{test_location.game} {self.rule_iterations} "
|
||||||
|
|||||||
66
test/benchmark/match.py
Normal file
66
test/benchmark/match.py
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
"""Micro benchmark comparing match as "switch" with if-elif and dict access"""
|
||||||
|
|
||||||
|
from timeit import timeit
|
||||||
|
|
||||||
|
|
||||||
|
def make_match(count: int) -> str:
|
||||||
|
code = f"for val in range({count}):\n match val:\n"
|
||||||
|
for n in range(count):
|
||||||
|
m = n + 1
|
||||||
|
code += f" case {n}:\n"
|
||||||
|
code += f" res = {m}\n"
|
||||||
|
return code
|
||||||
|
|
||||||
|
|
||||||
|
def make_elif(count: int) -> str:
|
||||||
|
code = f"for val in range({count}):\n"
|
||||||
|
for n in range(count):
|
||||||
|
m = n + 1
|
||||||
|
code += f" {'' if n == 0 else 'el'}if val == {n}:\n"
|
||||||
|
code += f" res = {m}\n"
|
||||||
|
return code
|
||||||
|
|
||||||
|
|
||||||
|
def make_dict(count: int, mode: str) -> str:
|
||||||
|
if mode == "value":
|
||||||
|
code = "dct = {\n"
|
||||||
|
for n in range(count):
|
||||||
|
m = n + 1
|
||||||
|
code += f" {n}: {m},\n"
|
||||||
|
code += "}\n"
|
||||||
|
code += f"for val in range({count}):\n res = dct[val]"
|
||||||
|
return code
|
||||||
|
elif mode == "call":
|
||||||
|
code = ""
|
||||||
|
for n in range(count):
|
||||||
|
m = n + 1
|
||||||
|
code += f"def func{n}():\n val = {m}\n\n"
|
||||||
|
code += "dct = {\n"
|
||||||
|
for n in range(count):
|
||||||
|
code += f" {n}: func{n},\n"
|
||||||
|
code += "}\n"
|
||||||
|
code += f"for val in range({count}):\n dct[val]()"
|
||||||
|
return code
|
||||||
|
return ""
|
||||||
|
|
||||||
|
|
||||||
|
def timeit_best_of_5(stmt: str, setup: str = "pass") -> float:
|
||||||
|
"""
|
||||||
|
Benchmark some code, returning the best of 5 runs.
|
||||||
|
:param stmt: Code to benchmark
|
||||||
|
:param setup: Optional code to set up environment
|
||||||
|
:return: Time taken in microseconds
|
||||||
|
"""
|
||||||
|
return min(timeit(stmt, setup, number=10000, globals={}) for _ in range(5)) * 100
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
for count in (3, 5, 8, 10, 20, 30):
|
||||||
|
print(f"value of {count:-2} with match: {timeit_best_of_5(make_match(count)) / count:.3f} us")
|
||||||
|
print(f"value of {count:-2} with elif: {timeit_best_of_5(make_elif(count)) / count:.3f} us")
|
||||||
|
print(f"value of {count:-2} with dict: {timeit_best_of_5(make_dict(count, 'value')) / count:.3f} us")
|
||||||
|
print(f"call of {count:-2} with dict: {timeit_best_of_5(make_dict(count, 'call')) / count:.3f} us")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
@@ -3,7 +3,7 @@ from typing import List, Optional, Tuple, Type, Union
|
|||||||
|
|
||||||
from BaseClasses import CollectionState, Item, ItemClassification, Location, MultiWorld, Region
|
from BaseClasses import CollectionState, Item, ItemClassification, Location, MultiWorld, Region
|
||||||
from worlds import network_data_package
|
from worlds import network_data_package
|
||||||
from worlds.AutoWorld import World, call_all
|
from worlds.AutoWorld import World, WebWorld, call_all
|
||||||
|
|
||||||
gen_steps = (
|
gen_steps = (
|
||||||
"generate_early",
|
"generate_early",
|
||||||
@@ -17,7 +17,7 @@ gen_steps = (
|
|||||||
|
|
||||||
|
|
||||||
def setup_solo_multiworld(
|
def setup_solo_multiworld(
|
||||||
world_type: Type[World], steps: Tuple[str, ...] = gen_steps, seed: Optional[int] = None
|
world_type: Type[World], steps: Tuple[str, ...] = gen_steps, seed: Optional[int] = None
|
||||||
) -> MultiWorld:
|
) -> MultiWorld:
|
||||||
"""
|
"""
|
||||||
Creates a multiworld with a single player of `world_type`, sets default options, and calls provided gen steps.
|
Creates a multiworld with a single player of `world_type`, sets default options, and calls provided gen steps.
|
||||||
@@ -62,11 +62,16 @@ def setup_multiworld(worlds: Union[List[Type[World]], Type[World]], steps: Tuple
|
|||||||
return multiworld
|
return multiworld
|
||||||
|
|
||||||
|
|
||||||
|
class TestWebWorld(WebWorld):
|
||||||
|
tutorials = []
|
||||||
|
|
||||||
|
|
||||||
class TestWorld(World):
|
class TestWorld(World):
|
||||||
game = f"Test Game"
|
game = f"Test Game"
|
||||||
item_name_to_id = {}
|
item_name_to_id = {}
|
||||||
location_name_to_id = {}
|
location_name_to_id = {}
|
||||||
hidden = True
|
hidden = True
|
||||||
|
web = TestWebWorld()
|
||||||
|
|
||||||
|
|
||||||
# add our test world to the data package, so we can test it later
|
# add our test world to the data package, so we can test it later
|
||||||
|
|||||||
@@ -69,11 +69,9 @@ class TestEntranceLookup(unittest.TestCase):
|
|||||||
exits_set = set([ex for region in multiworld.get_regions(1)
|
exits_set = set([ex for region in multiworld.get_regions(1)
|
||||||
for ex in region.exits if not ex.connected_region])
|
for ex in region.exits if not ex.connected_region])
|
||||||
|
|
||||||
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set)
|
|
||||||
er_targets = [entrance for region in multiworld.get_regions(1)
|
er_targets = [entrance for region in multiworld.get_regions(1)
|
||||||
for entrance in region.entrances if not entrance.parent_region]
|
for entrance in region.entrances if not entrance.parent_region]
|
||||||
for entrance in er_targets:
|
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set, targets=er_targets)
|
||||||
lookup.add(entrance)
|
|
||||||
|
|
||||||
retrieved_targets = lookup.get_targets([ERTestGroups.TOP, ERTestGroups.BOTTOM],
|
retrieved_targets = lookup.get_targets([ERTestGroups.TOP, ERTestGroups.BOTTOM],
|
||||||
False, False)
|
False, False)
|
||||||
@@ -92,11 +90,9 @@ class TestEntranceLookup(unittest.TestCase):
|
|||||||
exits_set = set([ex for region in multiworld.get_regions(1)
|
exits_set = set([ex for region in multiworld.get_regions(1)
|
||||||
for ex in region.exits if not ex.connected_region])
|
for ex in region.exits if not ex.connected_region])
|
||||||
|
|
||||||
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set)
|
|
||||||
er_targets = [entrance for region in multiworld.get_regions(1)
|
er_targets = [entrance for region in multiworld.get_regions(1)
|
||||||
for entrance in region.entrances if not entrance.parent_region]
|
for entrance in region.entrances if not entrance.parent_region]
|
||||||
for entrance in er_targets:
|
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set, targets=er_targets)
|
||||||
lookup.add(entrance)
|
|
||||||
|
|
||||||
retrieved_targets = lookup.get_targets([ERTestGroups.TOP, ERTestGroups.BOTTOM],
|
retrieved_targets = lookup.get_targets([ERTestGroups.TOP, ERTestGroups.BOTTOM],
|
||||||
False, True)
|
False, True)
|
||||||
@@ -112,12 +108,10 @@ class TestEntranceLookup(unittest.TestCase):
|
|||||||
for ex in region.exits if not ex.connected_region
|
for ex in region.exits if not ex.connected_region
|
||||||
and ex.name != "region20_right" and ex.name != "region21_left"])
|
and ex.name != "region20_right" and ex.name != "region21_left"])
|
||||||
|
|
||||||
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set)
|
|
||||||
er_targets = [entrance for region in multiworld.get_regions(1)
|
er_targets = [entrance for region in multiworld.get_regions(1)
|
||||||
for entrance in region.entrances if not entrance.parent_region and
|
for entrance in region.entrances if not entrance.parent_region and
|
||||||
entrance.name != "region20_right" and entrance.name != "region21_left"]
|
entrance.name != "region20_right" and entrance.name != "region21_left"]
|
||||||
for entrance in er_targets:
|
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set, targets=er_targets)
|
||||||
lookup.add(entrance)
|
|
||||||
# region 20 is the bottom left corner of the grid, and therefore only has a right entrance from region 21
|
# region 20 is the bottom left corner of the grid, and therefore only has a right entrance from region 21
|
||||||
# and a top entrance from region 15; since we've told lookup to ignore the right entrance from region 21,
|
# and a top entrance from region 15; since we've told lookup to ignore the right entrance from region 21,
|
||||||
# the top entrance from region 15 should be considered a dead-end
|
# the top entrance from region 15 should be considered a dead-end
|
||||||
@@ -129,6 +123,56 @@ class TestEntranceLookup(unittest.TestCase):
|
|||||||
self.assertTrue(dead_end in lookup.dead_ends)
|
self.assertTrue(dead_end in lookup.dead_ends)
|
||||||
self.assertEqual(len(lookup.dead_ends), 1)
|
self.assertEqual(len(lookup.dead_ends), 1)
|
||||||
|
|
||||||
|
def test_find_target_by_name(self):
|
||||||
|
"""Tests that find_target can find the correct target by name only"""
|
||||||
|
multiworld = generate_test_multiworld()
|
||||||
|
generate_disconnected_region_grid(multiworld, 5)
|
||||||
|
exits_set = set([ex for region in multiworld.get_regions(1)
|
||||||
|
for ex in region.exits if not ex.connected_region])
|
||||||
|
|
||||||
|
er_targets = [entrance for region in multiworld.get_regions(1)
|
||||||
|
for entrance in region.entrances if not entrance.parent_region]
|
||||||
|
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set, targets=er_targets)
|
||||||
|
|
||||||
|
target = lookup.find_target("region0_right")
|
||||||
|
self.assertEqual(target.name, "region0_right")
|
||||||
|
self.assertEqual(target.randomization_group, ERTestGroups.RIGHT)
|
||||||
|
self.assertIsNone(lookup.find_target("nonexistant"))
|
||||||
|
|
||||||
|
def test_find_target_by_name_and_group(self):
|
||||||
|
"""Tests that find_target can find the correct target by name and group"""
|
||||||
|
multiworld = generate_test_multiworld()
|
||||||
|
generate_disconnected_region_grid(multiworld, 5)
|
||||||
|
exits_set = set([ex for region in multiworld.get_regions(1)
|
||||||
|
for ex in region.exits if not ex.connected_region])
|
||||||
|
|
||||||
|
er_targets = [entrance for region in multiworld.get_regions(1)
|
||||||
|
for entrance in region.entrances if not entrance.parent_region]
|
||||||
|
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set, targets=er_targets)
|
||||||
|
|
||||||
|
target = lookup.find_target("region0_right", ERTestGroups.RIGHT)
|
||||||
|
self.assertEqual(target.name, "region0_right")
|
||||||
|
self.assertEqual(target.randomization_group, ERTestGroups.RIGHT)
|
||||||
|
# wrong group
|
||||||
|
self.assertIsNone(lookup.find_target("region0_right", ERTestGroups.LEFT))
|
||||||
|
|
||||||
|
def test_find_target_by_name_and_group_and_category(self):
|
||||||
|
"""Tests that find_target can find the correct target by name, group, and dead-endedness"""
|
||||||
|
multiworld = generate_test_multiworld()
|
||||||
|
generate_disconnected_region_grid(multiworld, 5)
|
||||||
|
exits_set = set([ex for region in multiworld.get_regions(1)
|
||||||
|
for ex in region.exits if not ex.connected_region])
|
||||||
|
|
||||||
|
er_targets = [entrance for region in multiworld.get_regions(1)
|
||||||
|
for entrance in region.entrances if not entrance.parent_region]
|
||||||
|
lookup = EntranceLookup(multiworld.worlds[1].random, coupled=True, usable_exits=exits_set, targets=er_targets)
|
||||||
|
|
||||||
|
target = lookup.find_target("region0_right", ERTestGroups.RIGHT, False)
|
||||||
|
self.assertEqual(target.name, "region0_right")
|
||||||
|
self.assertEqual(target.randomization_group, ERTestGroups.RIGHT)
|
||||||
|
# wrong deadendedness
|
||||||
|
self.assertIsNone(lookup.find_target("region0_right", ERTestGroups.RIGHT, True))
|
||||||
|
|
||||||
class TestBakeTargetGroupLookup(unittest.TestCase):
|
class TestBakeTargetGroupLookup(unittest.TestCase):
|
||||||
def test_lookup_generation(self):
|
def test_lookup_generation(self):
|
||||||
multiworld = generate_test_multiworld()
|
multiworld = generate_test_multiworld()
|
||||||
@@ -265,12 +309,12 @@ class TestRandomizeEntrances(unittest.TestCase):
|
|||||||
generate_disconnected_region_grid(multiworld, 5)
|
generate_disconnected_region_grid(multiworld, 5)
|
||||||
seen_placement_count = 0
|
seen_placement_count = 0
|
||||||
|
|
||||||
def verify_coupled(_: ERPlacementState, placed_entrances: list[Entrance]):
|
def verify_coupled(_: ERPlacementState, placed_exits: list[Entrance], placed_targets: list[Entrance]):
|
||||||
nonlocal seen_placement_count
|
nonlocal seen_placement_count
|
||||||
seen_placement_count += len(placed_entrances)
|
seen_placement_count += len(placed_exits)
|
||||||
self.assertEqual(2, len(placed_entrances))
|
self.assertEqual(2, len(placed_exits))
|
||||||
self.assertEqual(placed_entrances[0].parent_region, placed_entrances[1].connected_region)
|
self.assertEqual(placed_exits[0].parent_region, placed_exits[1].connected_region)
|
||||||
self.assertEqual(placed_entrances[1].parent_region, placed_entrances[0].connected_region)
|
self.assertEqual(placed_exits[1].parent_region, placed_exits[0].connected_region)
|
||||||
|
|
||||||
result = randomize_entrances(multiworld.worlds[1], True, directionally_matched_group_lookup,
|
result = randomize_entrances(multiworld.worlds[1], True, directionally_matched_group_lookup,
|
||||||
on_connect=verify_coupled)
|
on_connect=verify_coupled)
|
||||||
@@ -313,10 +357,10 @@ class TestRandomizeEntrances(unittest.TestCase):
|
|||||||
generate_disconnected_region_grid(multiworld, 5)
|
generate_disconnected_region_grid(multiworld, 5)
|
||||||
seen_placement_count = 0
|
seen_placement_count = 0
|
||||||
|
|
||||||
def verify_uncoupled(state: ERPlacementState, placed_entrances: list[Entrance]):
|
def verify_uncoupled(state: ERPlacementState, placed_exits: list[Entrance], placed_targets: list[Entrance]):
|
||||||
nonlocal seen_placement_count
|
nonlocal seen_placement_count
|
||||||
seen_placement_count += len(placed_entrances)
|
seen_placement_count += len(placed_exits)
|
||||||
self.assertEqual(1, len(placed_entrances))
|
self.assertEqual(1, len(placed_exits))
|
||||||
|
|
||||||
result = randomize_entrances(multiworld.worlds[1], False, directionally_matched_group_lookup,
|
result = randomize_entrances(multiworld.worlds[1], False, directionally_matched_group_lookup,
|
||||||
on_connect=verify_uncoupled)
|
on_connect=verify_uncoupled)
|
||||||
|
|||||||
@@ -48,13 +48,14 @@ class TestBase(unittest.TestCase):
|
|||||||
|
|
||||||
original_get_all_state = multiworld.get_all_state
|
original_get_all_state = multiworld.get_all_state
|
||||||
|
|
||||||
def patched_get_all_state(use_cache: bool, allow_partial_entrances: bool = False):
|
def patched_get_all_state(use_cache: bool | None = None, allow_partial_entrances: bool = False,
|
||||||
|
**kwargs):
|
||||||
self.assertTrue(allow_partial_entrances, (
|
self.assertTrue(allow_partial_entrances, (
|
||||||
"Before the connect_entrances step finishes, other worlds might still have partial entrances. "
|
"Before the connect_entrances step finishes, other worlds might still have partial entrances. "
|
||||||
"As such, any call to get_all_state must use allow_partial_entrances = True."
|
"As such, any call to get_all_state must use allow_partial_entrances = True."
|
||||||
))
|
))
|
||||||
|
|
||||||
return original_get_all_state(use_cache, allow_partial_entrances)
|
return original_get_all_state(use_cache, allow_partial_entrances, **kwargs)
|
||||||
|
|
||||||
multiworld.get_all_state = patched_get_all_state
|
multiworld.get_all_state = patched_get_all_state
|
||||||
|
|
||||||
|
|||||||
@@ -603,6 +603,28 @@ class TestDistributeItemsRestrictive(unittest.TestCase):
|
|||||||
self.assertTrue(player3.locations[2].item.advancement)
|
self.assertTrue(player3.locations[2].item.advancement)
|
||||||
self.assertTrue(player3.locations[3].item.advancement)
|
self.assertTrue(player3.locations[3].item.advancement)
|
||||||
|
|
||||||
|
def test_deprioritized_does_not_land_on_priority(self):
|
||||||
|
multiworld = generate_test_multiworld(1)
|
||||||
|
player1 = generate_player_data(multiworld, 1, 2, prog_item_count=2)
|
||||||
|
|
||||||
|
player1.prog_items[0].classification |= ItemClassification.deprioritized
|
||||||
|
player1.locations[0].progress_type = LocationProgressType.PRIORITY
|
||||||
|
|
||||||
|
distribute_items_restrictive(multiworld)
|
||||||
|
|
||||||
|
self.assertFalse(player1.locations[0].item.deprioritized)
|
||||||
|
|
||||||
|
def test_deprioritized_still_goes_on_priority_ahead_of_filler(self):
|
||||||
|
multiworld = generate_test_multiworld(1)
|
||||||
|
player1 = generate_player_data(multiworld, 1, 2, prog_item_count=1, basic_item_count=1)
|
||||||
|
|
||||||
|
player1.prog_items[0].classification |= ItemClassification.deprioritized
|
||||||
|
player1.locations[0].progress_type = LocationProgressType.PRIORITY
|
||||||
|
|
||||||
|
distribute_items_restrictive(multiworld)
|
||||||
|
|
||||||
|
self.assertTrue(player1.locations[0].item.advancement)
|
||||||
|
|
||||||
def test_can_remove_locations_in_fill_hook(self):
|
def test_can_remove_locations_in_fill_hook(self):
|
||||||
"""Test that distribute_items_restrictive calls the fill hook and allows for item and location removal"""
|
"""Test that distribute_items_restrictive calls the fill hook and allows for item and location removal"""
|
||||||
multiworld = generate_test_multiworld()
|
multiworld = generate_test_multiworld()
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
|
|||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from Fill import distribute_items_restrictive
|
from Fill import distribute_items_restrictive
|
||||||
from NetUtils import encode
|
from NetUtils import convert_to_base_types
|
||||||
from worlds.AutoWorld import AutoWorldRegister, call_all
|
from worlds.AutoWorld import AutoWorldRegister, call_all
|
||||||
from worlds import failed_world_loads
|
from worlds import failed_world_loads
|
||||||
from . import setup_solo_multiworld
|
from . import setup_solo_multiworld
|
||||||
@@ -47,7 +47,7 @@ class TestImplemented(unittest.TestCase):
|
|||||||
call_all(multiworld, "post_fill")
|
call_all(multiworld, "post_fill")
|
||||||
for key, data in multiworld.worlds[1].fill_slot_data().items():
|
for key, data in multiworld.worlds[1].fill_slot_data().items():
|
||||||
self.assertIsInstance(key, str, "keys in slot data must be a string")
|
self.assertIsInstance(key, str, "keys in slot data must be a string")
|
||||||
self.assertIsInstance(encode(data), str, f"object {type(data).__name__} not serializable.")
|
convert_to_base_types(data) # only put base data types into slot data
|
||||||
|
|
||||||
def test_no_failed_world_loads(self):
|
def test_no_failed_world_loads(self):
|
||||||
if failed_world_loads:
|
if failed_world_loads:
|
||||||
|
|||||||
@@ -148,8 +148,8 @@ class TestBase(unittest.TestCase):
|
|||||||
|
|
||||||
def test_locality_not_modified(self):
|
def test_locality_not_modified(self):
|
||||||
"""Test that worlds don't modify the locality of items after duplicates are resolved"""
|
"""Test that worlds don't modify the locality of items after duplicates are resolved"""
|
||||||
gen_steps = ("generate_early", "create_regions", "create_items")
|
gen_steps = ("generate_early",)
|
||||||
additional_steps = ("set_rules", "connect_entrances", "generate_basic", "pre_fill")
|
additional_steps = ("create_regions", "create_items", "set_rules", "connect_entrances", "generate_basic", "pre_fill")
|
||||||
worlds_to_test = {game: world for game, world in AutoWorldRegister.world_types.items()}
|
worlds_to_test = {game: world for game, world in AutoWorldRegister.world_types.items()}
|
||||||
for game_name, world_type in worlds_to_test.items():
|
for game_name, world_type in worlds_to_test.items():
|
||||||
with self.subTest("Game", game=game_name):
|
with self.subTest("Game", game=game_name):
|
||||||
|
|||||||
@@ -1,7 +1,8 @@
|
|||||||
import unittest
|
import unittest
|
||||||
|
|
||||||
from BaseClasses import MultiWorld, PlandoOptions
|
from BaseClasses import PlandoOptions
|
||||||
from Options import ItemLinks
|
from Options import ItemLinks, Choice
|
||||||
|
from Utils import restricted_dumps
|
||||||
from worlds.AutoWorld import AutoWorldRegister
|
from worlds.AutoWorld import AutoWorldRegister
|
||||||
|
|
||||||
|
|
||||||
@@ -73,9 +74,10 @@ class TestOptions(unittest.TestCase):
|
|||||||
|
|
||||||
def test_pickle_dumps(self):
|
def test_pickle_dumps(self):
|
||||||
"""Test options can be pickled into database for WebHost generation"""
|
"""Test options can be pickled into database for WebHost generation"""
|
||||||
import pickle
|
|
||||||
for gamename, world_type in AutoWorldRegister.world_types.items():
|
for gamename, world_type in AutoWorldRegister.world_types.items():
|
||||||
if not world_type.hidden:
|
if not world_type.hidden:
|
||||||
for option_key, option in world_type.options_dataclass.type_hints.items():
|
for option_key, option in world_type.options_dataclass.type_hints.items():
|
||||||
with self.subTest(game=gamename, option=option_key):
|
with self.subTest(game=gamename, option=option_key):
|
||||||
pickle.dumps(option.from_any(option.default))
|
restricted_dumps(option.from_any(option.default))
|
||||||
|
if issubclass(option, Choice) and option.default in option.name_lookup:
|
||||||
|
restricted_dumps(option.from_text(option.name_lookup[option.default]))
|
||||||
|
|||||||
@@ -8,7 +8,12 @@ class TestPackages(unittest.TestCase):
|
|||||||
to indicate full package rather than namespace package."""
|
to indicate full package rather than namespace package."""
|
||||||
import Utils
|
import Utils
|
||||||
|
|
||||||
|
# Ignore directories with these names.
|
||||||
|
ignore_dirs = {".github"}
|
||||||
|
|
||||||
worlds_path = Utils.local_path("worlds")
|
worlds_path = Utils.local_path("worlds")
|
||||||
for dirpath, dirnames, filenames in os.walk(worlds_path):
|
for dirpath, dirnames, filenames in os.walk(worlds_path):
|
||||||
|
# Drop ignored directories from dirnames, excluding them from walking.
|
||||||
|
dirnames[:] = [d for d in dirnames if d not in ignore_dirs]
|
||||||
with self.subTest(directory=dirpath):
|
with self.subTest(directory=dirpath):
|
||||||
self.assertEqual("__init__.py" in filenames, any(file.endswith(".py") for file in filenames))
|
self.assertEqual("__init__.py" in filenames, any(file.endswith(".py") for file in filenames))
|
||||||
|
|||||||
@@ -63,12 +63,12 @@ if __name__ == "__main__":
|
|||||||
spacer = '=' * 80
|
spacer = '=' * 80
|
||||||
|
|
||||||
with TemporaryDirectory() as tempdir:
|
with TemporaryDirectory() as tempdir:
|
||||||
multis = [["Clique"], ["Temp World"], ["Clique", "Temp World"]]
|
multis = [["VVVVVV"], ["Temp World"], ["VVVVVV", "Temp World"]]
|
||||||
p1_games = []
|
p1_games = []
|
||||||
data_paths = []
|
data_paths = []
|
||||||
rooms = []
|
rooms = []
|
||||||
|
|
||||||
copy_world("Clique", "Temp World")
|
copy_world("VVVVVV", "Temp World")
|
||||||
try:
|
try:
|
||||||
for n, games in enumerate(multis, 1):
|
for n, games in enumerate(multis, 1):
|
||||||
print(f"Generating [{n}] {', '.join(games)}")
|
print(f"Generating [{n}] {', '.join(games)}")
|
||||||
@@ -101,7 +101,7 @@ if __name__ == "__main__":
|
|||||||
with Client(host.address, game, "Player1") as client:
|
with Client(host.address, game, "Player1") as client:
|
||||||
local_data_packages = client.games_packages
|
local_data_packages = client.games_packages
|
||||||
local_collected_items = len(client.checked_locations)
|
local_collected_items = len(client.checked_locations)
|
||||||
if collected_items < 2: # Clique only has 2 Locations
|
if collected_items < 2: # Don't collect anything on the last iteration
|
||||||
client.collect_any()
|
client.collect_any()
|
||||||
# TODO: Ctrl+C test here as well
|
# TODO: Ctrl+C test here as well
|
||||||
|
|
||||||
@@ -125,7 +125,7 @@ if __name__ == "__main__":
|
|||||||
with Client(host.address, game, "Player1") as client:
|
with Client(host.address, game, "Player1") as client:
|
||||||
web_data_packages = client.games_packages
|
web_data_packages = client.games_packages
|
||||||
web_collected_items = len(client.checked_locations)
|
web_collected_items = len(client.checked_locations)
|
||||||
if collected_items < 2: # Clique only has 2 Locations
|
if collected_items < 2: # Don't collect anything on the last iteration
|
||||||
client.collect_any()
|
client.collect_any()
|
||||||
if collected_items == 1:
|
if collected_items == 1:
|
||||||
sleep(1) # wait for the server to collect the item
|
sleep(1) # wait for the server to collect the item
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ def _generate_local_inner(games: Iterable[str],
|
|||||||
f.write(json.dumps({
|
f.write(json.dumps({
|
||||||
"name": f"Player{n}",
|
"name": f"Player{n}",
|
||||||
"game": game,
|
"game": game,
|
||||||
game: {"hard_mode": "true"},
|
game: {},
|
||||||
"description": f"generate_local slot {n} ('Player{n}'): {game}",
|
"description": f"generate_local slot {n} ('Player{n}'): {game}",
|
||||||
}))
|
}))
|
||||||
|
|
||||||
|
|||||||
@@ -2,6 +2,8 @@ import re
|
|||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import TYPE_CHECKING, Optional, cast
|
from typing import TYPE_CHECKING, Optional, cast
|
||||||
|
|
||||||
|
from WebHostLib import to_python
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from flask import Flask
|
from flask import Flask
|
||||||
from werkzeug.test import Client as FlaskClient
|
from werkzeug.test import Client as FlaskClient
|
||||||
@@ -103,7 +105,7 @@ def stop_room(app_client: "FlaskClient",
|
|||||||
poll_interval = 2
|
poll_interval = 2
|
||||||
|
|
||||||
print(f"Stopping room {room_id}")
|
print(f"Stopping room {room_id}")
|
||||||
room_uuid = app.url_map.converters["suuid"].to_python(None, room_id) # type: ignore[arg-type]
|
room_uuid = to_python(room_id)
|
||||||
|
|
||||||
if timeout is not None:
|
if timeout is not None:
|
||||||
sleep(.1) # should not be required, but other things might use threading
|
sleep(.1) # should not be required, but other things might use threading
|
||||||
@@ -156,7 +158,7 @@ def set_room_timeout(room_id: str, timeout: float) -> None:
|
|||||||
from WebHostLib.models import Room
|
from WebHostLib.models import Room
|
||||||
from WebHostLib import app
|
from WebHostLib import app
|
||||||
|
|
||||||
room_uuid = app.url_map.converters["suuid"].to_python(None, room_id) # type: ignore[arg-type]
|
room_uuid = to_python(room_id)
|
||||||
with db_session:
|
with db_session:
|
||||||
room: Room = Room.get(id=room_uuid)
|
room: Room = Room.get(id=room_uuid)
|
||||||
room.timeout = timeout
|
room.timeout = timeout
|
||||||
@@ -168,7 +170,7 @@ def get_multidata_for_room(webhost_client: "FlaskClient", room_id: str) -> bytes
|
|||||||
from WebHostLib.models import Room
|
from WebHostLib.models import Room
|
||||||
from WebHostLib import app
|
from WebHostLib import app
|
||||||
|
|
||||||
room_uuid = app.url_map.converters["suuid"].to_python(None, room_id) # type: ignore[arg-type]
|
room_uuid = to_python(room_id)
|
||||||
with db_session:
|
with db_session:
|
||||||
room: Room = Room.get(id=room_uuid)
|
room: Room = Room.get(id=room_uuid)
|
||||||
return cast(bytes, room.seed.multidata)
|
return cast(bytes, room.seed.multidata)
|
||||||
@@ -180,7 +182,7 @@ def set_multidata_for_room(webhost_client: "FlaskClient", room_id: str, data: by
|
|||||||
from WebHostLib.models import Room
|
from WebHostLib.models import Room
|
||||||
from WebHostLib import app
|
from WebHostLib import app
|
||||||
|
|
||||||
room_uuid = app.url_map.converters["suuid"].to_python(None, room_id) # type: ignore[arg-type]
|
room_uuid = to_python(room_id)
|
||||||
with db_session:
|
with db_session:
|
||||||
room: Room = Room.get(id=room_uuid)
|
room: Room = Room.get(id=room_uuid)
|
||||||
room.seed.multidata = data
|
room.seed.multidata = data
|
||||||
|
|||||||
@@ -30,7 +30,7 @@ def copy(src: str, dst: str) -> None:
|
|||||||
_new_worlds[dst] = str(dst_folder)
|
_new_worlds[dst] = str(dst_folder)
|
||||||
with open(dst_folder / "__init__.py", "r", encoding="utf-8-sig") as f:
|
with open(dst_folder / "__init__.py", "r", encoding="utf-8-sig") as f:
|
||||||
contents = f.read()
|
contents = f.read()
|
||||||
contents = re.sub(r'game\s*=\s*[\'"]' + re.escape(src) + r'[\'"]', f'game = "{dst}"', contents)
|
contents = re.sub(r'game\s*(:\s*[a-zA-Z\[\]]+)?\s*=\s*[\'"]' + re.escape(src) + r'[\'"]', f'game = "{dst}"', contents)
|
||||||
with open(dst_folder / "__init__.py", "w", encoding="utf-8") as f:
|
with open(dst_folder / "__init__.py", "w", encoding="utf-8") as f:
|
||||||
f.write(contents)
|
f.write(contents)
|
||||||
|
|
||||||
|
|||||||
@@ -33,6 +33,15 @@ class TestNumericOptions(unittest.TestCase):
|
|||||||
self.assertEqual(choice_option_alias, TestChoice.alias_three)
|
self.assertEqual(choice_option_alias, TestChoice.alias_three)
|
||||||
self.assertEqual(choice_option_attr, TestChoice.non_option_attr)
|
self.assertEqual(choice_option_attr, TestChoice.non_option_attr)
|
||||||
|
|
||||||
|
self.assertLess(choice_option_string, "two")
|
||||||
|
self.assertGreater(choice_option_string, "zero")
|
||||||
|
self.assertLessEqual(choice_option_string, "one")
|
||||||
|
self.assertLessEqual(choice_option_string, "two")
|
||||||
|
self.assertGreaterEqual(choice_option_string, "one")
|
||||||
|
self.assertGreaterEqual(choice_option_string, "zero")
|
||||||
|
|
||||||
|
self.assertGreaterEqual(choice_option_alias, "three")
|
||||||
|
|
||||||
self.assertRaises(KeyError, TestChoice.from_any, "four")
|
self.assertRaises(KeyError, TestChoice.from_any, "four")
|
||||||
|
|
||||||
self.assertIn(choice_option_int, [1, 2, 3])
|
self.assertIn(choice_option_int, [1, 2, 3])
|
||||||
|
|||||||
@@ -2,6 +2,8 @@ import unittest
|
|||||||
import Utils
|
import Utils
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
from werkzeug.utils import secure_filename
|
||||||
|
|
||||||
import WebHost
|
import WebHost
|
||||||
from worlds.AutoWorld import AutoWorldRegister
|
from worlds.AutoWorld import AutoWorldRegister
|
||||||
|
|
||||||
@@ -9,36 +11,30 @@ from worlds.AutoWorld import AutoWorldRegister
|
|||||||
class TestDocs(unittest.TestCase):
|
class TestDocs(unittest.TestCase):
|
||||||
@classmethod
|
@classmethod
|
||||||
def setUpClass(cls) -> None:
|
def setUpClass(cls) -> None:
|
||||||
cls.tutorials_data = WebHost.create_ordered_tutorials_file()
|
WebHost.copy_tutorials_files_to_static()
|
||||||
|
|
||||||
def test_has_tutorial(self):
|
def test_has_tutorial(self):
|
||||||
games_with_tutorial = set(entry["gameTitle"] for entry in self.tutorials_data)
|
|
||||||
for game_name, world_type in AutoWorldRegister.world_types.items():
|
for game_name, world_type in AutoWorldRegister.world_types.items():
|
||||||
if not world_type.hidden:
|
if not world_type.hidden:
|
||||||
with self.subTest(game_name):
|
with self.subTest(game_name):
|
||||||
try:
|
tutorials = world_type.web.tutorials
|
||||||
self.assertIn(game_name, games_with_tutorial)
|
self.assertGreater(len(tutorials), 0, msg=f"{game_name} has no setup tutorial.")
|
||||||
except AssertionError:
|
|
||||||
# look for partial name in the tutorial name
|
safe_name = secure_filename(game_name)
|
||||||
for game in games_with_tutorial:
|
target_path = Utils.local_path("WebHostLib", "static", "generated", "docs", safe_name)
|
||||||
if game_name in game:
|
for tutorial in tutorials:
|
||||||
break
|
self.assertTrue(
|
||||||
else:
|
os.path.isfile(Utils.local_path(target_path, secure_filename(tutorial.file_name))),
|
||||||
self.fail(f"{game_name} has no setup tutorial. "
|
f'{game_name} missing tutorial file {tutorial.file_name}.'
|
||||||
f"Games with Tutorial: {games_with_tutorial}")
|
)
|
||||||
|
|
||||||
def test_has_game_info(self):
|
def test_has_game_info(self):
|
||||||
for game_name, world_type in AutoWorldRegister.world_types.items():
|
for game_name, world_type in AutoWorldRegister.world_types.items():
|
||||||
if not world_type.hidden:
|
if not world_type.hidden:
|
||||||
safe_name = Utils.get_file_safe_name(game_name)
|
safe_name = secure_filename(game_name)
|
||||||
target_path = Utils.local_path("WebHostLib", "static", "generated", "docs", safe_name)
|
target_path = Utils.local_path("WebHostLib", "static", "generated", "docs", safe_name)
|
||||||
for game_info_lang in world_type.web.game_info_languages:
|
for game_info_lang in world_type.web.game_info_languages:
|
||||||
with self.subTest(game_name):
|
with self.subTest(game_name):
|
||||||
self.assertTrue(
|
|
||||||
safe_name == game_name or
|
|
||||||
not os.path.isfile(Utils.local_path(target_path, f'{game_info_lang}_{game_name}.md')),
|
|
||||||
f'Info docs have be named <lang>_{safe_name}.md for {game_name}.'
|
|
||||||
)
|
|
||||||
self.assertTrue(
|
self.assertTrue(
|
||||||
os.path.isfile(Utils.local_path(target_path, f'{game_info_lang}_{safe_name}.md')),
|
os.path.isfile(Utils.local_path(target_path, f'{game_info_lang}_{safe_name}.md')),
|
||||||
f'{game_name} missing game info file for "{game_info_lang}" language.'
|
f'{game_name} missing game info file for "{game_info_lang}" language.'
|
||||||
|
|||||||
@@ -29,8 +29,3 @@ class TestFileGeneration(unittest.TestCase):
|
|||||||
with open(file, encoding="utf-8-sig") as f:
|
with open(file, encoding="utf-8-sig") as f:
|
||||||
for value in roll_options({file.name: f.read()})[0].values():
|
for value in roll_options({file.name: f.read()})[0].values():
|
||||||
self.assertTrue(value is True, f"Default Options for template {file.name} cannot be run.")
|
self.assertTrue(value is True, f"Default Options for template {file.name} cannot be run.")
|
||||||
|
|
||||||
def test_tutorial(self):
|
|
||||||
WebHost.create_ordered_tutorials_file()
|
|
||||||
self.assertTrue(os.path.exists(os.path.join(self.correct_path, "static", "generated", "tutorials.json")))
|
|
||||||
self.assertFalse(os.path.exists(os.path.join(self.incorrect_path, "static", "generated", "tutorials.json")))
|
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user