mirror of
https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
synced 2026-03-05 05:36:54 +08:00
Compare commits
396 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| c4bde71546 | |||
|
|
f56f993c69 | ||
|
|
cfcda6f597 | ||
|
|
96f5d4559d | ||
|
|
23f412b4f4 | ||
|
|
25b0d2bcdc | ||
|
|
a116f9901c | ||
|
|
82befd631e | ||
| 232389dd07 | |||
|
|
ce81015406 | ||
|
|
3d7b903f59 | ||
|
|
c5d499434e | ||
|
|
194fed24c9 | ||
|
|
1173c39e7a | ||
|
|
dfb19f150a | ||
|
|
7555297e1e | ||
|
|
587aa4a0de | ||
| 08a1a427b4 | |||
|
|
d4e91c8521 | ||
| dbde1181ce | |||
|
|
86fe4f0766 | ||
|
|
381f2505d6 | ||
| b3a77f5296 | |||
| 274f30f82a | |||
| efb1ddb260 | |||
| 7e3f49bc9e | |||
| 665772ed66 | |||
|
|
44fda8a19e | ||
|
|
6921bf4e37 | ||
|
|
c3c97c1c8b | ||
|
|
1d33872c9b | ||
|
|
b2d5a1e729 | ||
|
|
a0fd9eaed3 | ||
|
|
593723aa76 | ||
|
|
73d97d8458 | ||
|
|
d6f11655c1 | ||
|
|
376e85e36e | ||
|
|
45116a1418 | ||
|
|
a42d3e3837 | ||
|
|
b333c54c7d | ||
|
|
8840402d2f | ||
|
|
e8b64b23f5 | ||
|
|
40762a3180 | ||
|
|
a2c6ad8328 | ||
|
|
c7d93069ef | ||
| 4b514df2db | |||
| 47c83be1b5 | |||
| 6c0e092f51 | |||
| 04b9cd9eae | |||
|
|
61b5fcb137 | ||
| c0540769c8 | |||
|
|
0e19943046 | ||
|
|
7e1d2e8cb0 | ||
|
|
8931cfb5a7 | ||
|
|
ea8a18c1b1 | ||
| ef1acb0f16 | |||
|
|
f7bb667254 | ||
|
|
fa94c1beeb | ||
|
|
4e1e91a977 | ||
|
|
0f6a00819b | ||
|
|
b56385b412 | ||
|
|
7eea235f52 | ||
|
|
8a06b572ed | ||
| 6867245be3 | |||
|
|
eebff0a8ad | ||
| 74eef41506 | |||
| 5eb4771259 | |||
| 7a3a4d936d | |||
|
|
03ca7c4486 | ||
|
|
b043d1da59 | ||
|
|
c9659201b1 | ||
|
|
617d3ec658 | ||
|
|
57a1992675 | ||
|
|
8d1d2f329e | ||
|
|
fa6cbd5c6d | ||
|
|
9f0f0b87f4 | ||
|
|
96c298b1b8 | ||
|
|
df5ced235d | ||
|
|
af83c7a2d9 | ||
|
|
bc41a91034 | ||
|
|
d97291d1bc | ||
| 5b56de9de1 | |||
| 0898a81331 | |||
|
|
d464059c0a | ||
|
|
6ea8b9328c | ||
|
|
773ff5545c | ||
|
|
94710b938b | ||
| ec09bb734d | |||
|
|
9e9a642847 | ||
|
|
04e0b14e72 | ||
|
|
20ce9c64be | ||
|
|
8af07bf031 | ||
|
|
3a904f67ad | ||
|
|
fc9b751ac4 | ||
| cb4c6b96f0 | |||
|
|
25c3777c0f | ||
|
|
193fd1da2a | ||
|
|
2cd609dd40 | ||
| a206098805 | |||
|
|
d493ba5f0d | ||
|
|
581d1f9674 | ||
|
|
01c99e8a8c | ||
|
|
eb3f4bea04 | ||
|
|
ebbbd68b05 | ||
|
|
10e0eb815e | ||
|
|
a57b04e181 | ||
|
|
cc2e71f1a5 | ||
|
|
3384263bb2 | ||
|
|
68f210dc4f | ||
|
|
00a85fe3e9 | ||
|
|
a10a7584ae | ||
|
|
95aac5e321 | ||
| 89d8c938e2 | |||
| 84db42f1ce | |||
|
|
0a660922bb | ||
|
|
56bc98cc79 | ||
|
|
be61683b51 | ||
| ccd5706a95 | |||
| b69240caa5 | |||
| 49d00f4d0e | |||
| 389a850025 | |||
| 20dcc2bc3d | |||
| 606dddbca2 | |||
| f509b03cd0 | |||
| 6293d088db | |||
| 97e2abed78 | |||
|
|
5ea3fcb234 | ||
|
|
ca33ba1310 | ||
| 3629a2ff4a | |||
| a2108c9776 | |||
| 7133cd9384 | |||
|
|
406bc7674e | ||
|
|
259b38fda5 | ||
|
|
414345ae5c | ||
|
|
341cbd86cd | ||
|
|
bf7804738e | ||
|
|
553f373671 | ||
| e53e164a52 | |||
| 2cd7d89c3e | |||
| b8b6d5f6c8 | |||
| 7a44c0dca5 | |||
| 4155d8eb42 | |||
| 4cc942d226 | |||
| 996dd565d8 | |||
| 5b0660e45b | |||
| 8d1ebc06d1 | |||
| c57aa48048 | |||
| ad90562fdf | |||
| cbc96fc09e | |||
| 8e10cfe0d0 | |||
| d192f0506d | |||
| 44aed656b8 | |||
| feb662b980 | |||
| ed6eb9a5cf | |||
| 25e281a4c5 | |||
| a2d69b9113 | |||
| c8907a47a4 | |||
| 9fb176b4bc | |||
|
|
53740265b6 | ||
|
|
e6119074ce | ||
| f7a2e89274 | |||
| 3fe5a19c4a | |||
| d35469cdef | |||
| 0cbae117aa | |||
| 25dc57d911 | |||
| 6042417b65 | |||
| 63cd94a0d7 | |||
| eb810d4bd2 | |||
| 52df4cf170 | |||
|
|
7a6615f6c9 | ||
|
|
c363908434 | ||
|
|
e26fb44106 | ||
|
|
7e2c04426a | ||
|
|
5910f05dfe | ||
| eebbd08551 | |||
| f035b844ab | |||
| 197b81f9cf | |||
| 5c2ffe13b0 | |||
| b0cff16dc6 | |||
|
|
3c952530d1 | ||
|
|
57dfc8b94a | ||
| 1065e62d11 | |||
| 02e703ea91 | |||
| 429f99f77e | |||
| 9a16e2fa21 | |||
| 0719d549b5 | |||
| 9cb2a90197 | |||
|
|
bb0606a144 | ||
|
|
41068f7152 | ||
|
|
6f98136c0f | ||
|
|
62335abaa6 | ||
| 12a934566d | |||
| ff71dba516 | |||
| e029d51494 | |||
|
|
b1f48da6fe | ||
| 9a2927542a | |||
| 5117e7dbd9 | |||
| 4bb00cdeb7 | |||
| b7cbe2b2a0 | |||
| 8bb460fce0 | |||
| 41bbcdb66c | |||
| 160d81476a | |||
| 1e5b00a280 | |||
| ee53b92559 | |||
| cd9d29b748 | |||
| 214ebc5073 | |||
| 485706267e | |||
| 12cb5193b3 | |||
|
|
461d3450d6 | ||
|
|
64d77dbff2 | ||
|
|
e5b4d3bc08 | ||
|
|
4208018caf | ||
|
|
5032a3eb9a | ||
|
|
bf9a9953dd | ||
|
|
85feb9cb41 | ||
| 5a7c54528c | |||
| afce74afe8 | |||
| 435850819c | |||
| 6f439ad357 | |||
| b74cc1f4a0 | |||
| 1a1c2675d1 | |||
| 1f02c107f5 | |||
| 89c319a500 | |||
| 56f9a69c4d | |||
| 50431fe7cb | |||
| 71ad53a1f9 | |||
| 820393f216 | |||
| 27994cea6b | |||
|
|
eb753cb059 | ||
|
|
256d13d1df | ||
|
|
d8d56b44db | ||
|
|
57a57f0259 | ||
|
|
4f7f4a3e33 | ||
|
|
367a9a8297 | ||
|
|
009dd90609 | ||
| 8e6e0dc274 | |||
|
|
df7efc6707 | ||
|
|
d783ecd3eb | ||
|
|
13f3e34f79 | ||
| 566509dd46 | |||
| 737671d7a8 | |||
| e4f19d1d81 | |||
|
|
29c12e9249 | ||
|
|
1e22dae6f9 | ||
|
|
7b308d30bc | ||
|
|
ec13ebc43d | ||
|
|
640ecaea85 | ||
|
|
ef772a97ba | ||
|
|
e6cc6a8451 | ||
| 0989090456 | |||
| f8fc9ebdf8 | |||
|
|
ac2b115bd6 | ||
|
|
46eb9b4517 | ||
|
|
eaa81f8157 | ||
| e2eb288b90 | |||
| ed95c8c9fa | |||
|
|
7138e91b2e | ||
|
|
50642866b2 | ||
|
|
92c91f2388 | ||
|
|
42d1bc9a5f | ||
| c8013a080c | |||
| 8bdde936f8 | |||
| aacf518004 | |||
| 34c857387e | |||
| abc2ac07ef | |||
|
|
43d7972cc1 | ||
| 84a7a70183 | |||
| f61bbd00b7 | |||
| 84b74278a6 | |||
|
|
1438ad5efb | ||
| 01e85960fa | |||
|
|
c705610c1d | ||
| 5f0799d505 | |||
|
|
3454e0afbe | ||
| 95d9b74cd7 | |||
| 4b5f0263e4 | |||
| 7500640330 | |||
| 967a028235 | |||
|
|
abe5e30ede | ||
|
|
d9d3f63118 | ||
|
|
4f864c54bc | ||
|
|
2474f77291 | ||
|
|
6291a2ba70 | ||
| 77b10a858e | |||
| e908b3b67f | |||
|
|
bc98c0a3e6 | ||
|
|
f29caf4dc6 | ||
|
|
a1e88dd1c9 | ||
|
|
0dcfa53bcc | ||
| bf4ccdfd61 | |||
| ae65b5140f | |||
| 95aa5b0419 | |||
| b7b92cd785 | |||
| f97ae15969 | |||
| aae43df953 | |||
| c58f124f0c | |||
| 2f900d0538 | |||
| 3e75a4b4e2 | |||
| e285ccfa15 | |||
|
|
d2acbaa0ad | ||
|
|
c81be48585 | ||
|
|
93ec0d8808 | ||
| d5e07880fd | |||
| 8b370f152d | |||
| e8527c7ba4 | |||
| 1dd3d310c9 | |||
| b08685086a | |||
|
|
c2b6fe920f | ||
| a1ad86d0c7 | |||
| e6260ce170 | |||
| b0e53bc8c8 | |||
| 2267bc8f14 | |||
| 607a0927bc | |||
| 7b3ca9eb2a | |||
| 37c12e439c | |||
| 504579710e | |||
| ce94aee0f4 | |||
| b9c58ae125 | |||
| 92159e93b8 | |||
| f9b11895e2 | |||
| f7c3d493ea | |||
| 4954ab3d60 | |||
| bcca869e72 | |||
| a4247abdad | |||
| 2c1d43601a | |||
| c929c463ec | |||
| 314e1dede3 | |||
| d5b0ef34c5 | |||
| 3d9ef841b1 | |||
| b98871f170 | |||
| 38ab872dd8 | |||
| f44c0baa2e | |||
| 9b8d17577e | |||
| f301bee2b0 | |||
| fbe018e56a | |||
| ab046fe786 | |||
| ce95d8f977 | |||
| fa05b80e61 | |||
| 0ab0d11a98 | |||
| 7f469540b2 | |||
| 21bee29146 | |||
|
|
c2dd9c5d86 | ||
|
|
5927cb2bb5 | ||
|
|
4a4a215b61 | ||
| bfe931d3bf | |||
| b7b152d84d | |||
| b6f6eb1170 | |||
| 934800aae0 | |||
|
|
d19c37e99a | ||
|
|
43167fe9bd | ||
|
|
db8de88667 | ||
| 318b42dbd2 | |||
| af4a9f33b0 | |||
|
|
5e5bc4da2c | ||
|
|
594ea9a76f | ||
|
|
69e9ca7933 | ||
|
|
b1bc111b7a | ||
| 43970f4853 | |||
| 48b200697c | |||
| 1a791f5ef8 | |||
| 9b13a9e87c | |||
| ecad6b8070 | |||
| 1e6932b3de | |||
| 3ef7605e11 | |||
|
|
e8539c15cc | ||
|
|
9ace65f9df | ||
|
|
d727a0bc53 | ||
| 52947556a4 | |||
| 7fe9a6fd3d | |||
| 6dbfd31eab | |||
| 1788d40ed2 | |||
| 18d8e0cdcc | |||
| b37f927be6 | |||
| 314bf4c2f0 | |||
| c9f6817c6a | |||
| 4c7cd00a76 | |||
| b8cf10b45d | |||
| 4ec5c3bde1 | |||
| 270b953bc9 | |||
| 13bd0da592 | |||
| 9545f0b5d0 | |||
| 12f320cbb4 | |||
| 7ff59cfc01 | |||
| 498781f376 | |||
| a3c00dbd93 | |||
| 069d5953f9 | |||
| 3721d92f52 | |||
| 98b58866e1 | |||
|
|
189c3999f7 | ||
|
|
a2622d5102 | ||
|
|
c8832bd1c9 | ||
| e6c3a32532 | |||
| b3015aaa91 | |||
|
|
abc1038082 | ||
|
|
dd91455890 | ||
|
|
4b17b0b907 |
12
.github/dependabot.yml
vendored
12
.github/dependabot.yml
vendored
@@ -1,12 +0,0 @@
|
||||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for all configuration options:
|
||||
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "pip" # See documentation for possible values
|
||||
directory: "/" # Location of package manifests
|
||||
target-branch: "main"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
27
.github/workflows/Release.yml
vendored
27
.github/workflows/Release.yml
vendored
@@ -3,7 +3,7 @@ name: Release CI
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- "*"
|
||||
- '*'
|
||||
|
||||
jobs:
|
||||
release:
|
||||
@@ -12,24 +12,25 @@ jobs:
|
||||
id-token: write
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install poetry
|
||||
run: pipx install poetry
|
||||
shell: bash
|
||||
|
||||
- uses: actions/setup-python@v4
|
||||
- uses: astral-sh/setup-uv@v4
|
||||
name: Setup UV
|
||||
with:
|
||||
python-version: '3.11'
|
||||
cache: "poetry"
|
||||
enable-cache: true
|
||||
|
||||
- run: poetry install
|
||||
- name: 'Set up Python'
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version-file: '.python-version'
|
||||
|
||||
- run: uv sync
|
||||
shell: bash
|
||||
|
||||
- name: Get Version
|
||||
id: version
|
||||
run: |
|
||||
echo "VERSION=$(poetry version -s)" >> $GITHUB_OUTPUT
|
||||
echo "VERSION=$(uvx pdm show --version)" >> $GITHUB_OUTPUT
|
||||
echo "TAG_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||
|
||||
@@ -38,10 +39,10 @@ jobs:
|
||||
run: exit 1
|
||||
|
||||
- name: Build Package
|
||||
run: poetry build
|
||||
run: uv build
|
||||
|
||||
- name: Publish Package to PyPI
|
||||
uses: pypa/gh-action-pypi-publish@release/v1
|
||||
run: uv publish
|
||||
|
||||
- name: Publish Package to GitHub Release
|
||||
run: gh release create ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl -t "🔖 ${{ steps.version.outputs.TAG_NAME }}" --generate-notes
|
||||
|
||||
58
.github/workflows/Test.yml
vendored
Normal file
58
.github/workflows/Test.yml
vendored
Normal file
@@ -0,0 +1,58 @@
|
||||
name: Code Coverage
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'main'
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Test
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
matrix:
|
||||
# python-version: ['3.10', '3.11', '3.12', '3.13']
|
||||
python-version: ['3.10', '3.11', '3.12']
|
||||
os: [ubuntu-latest, windows-latest, macos-latest]
|
||||
fail-fast: false
|
||||
env:
|
||||
OS: ${{ matrix.os }}
|
||||
PYTHON_VERSION: ${{ matrix.python-version }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup uv
|
||||
uses: astral-sh/setup-uv@v4
|
||||
with:
|
||||
enable-cache: true
|
||||
cache-suffix: ${{ env.PYTHON_VERSION }}_${{ env.OS }}
|
||||
|
||||
- name: Install Dependencies
|
||||
run: |
|
||||
uv python pin ${{ env.PYTHON_VERSION }}
|
||||
uv sync --group test
|
||||
|
||||
- name: Run tests
|
||||
run: uv run pytest --cov=nonebot_plugin_tetris_stats --cov-report xml
|
||||
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
with:
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
env_vars: OS,PYTHON_VERSION
|
||||
|
||||
check:
|
||||
if: always()
|
||||
needs: test
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Decide whether the needed jobs succeeded or failed
|
||||
uses: re-actors/alls-green@223e4bb7a751b91f43eda76992bcfbf23b8b0302
|
||||
with:
|
||||
jobs: ${{ toJSON(needs) }}
|
||||
33
.github/workflows/TypeCheck.yml
vendored
Normal file
33
.github/workflows/TypeCheck.yml
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
name: TypeCheck
|
||||
|
||||
on:
|
||||
push:
|
||||
|
||||
jobs:
|
||||
TypeCheck:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: astral-sh/setup-uv@v4
|
||||
name: Setup UV
|
||||
with:
|
||||
enable-cache: true
|
||||
|
||||
- name: 'Set up Python'
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version-file: '.python-version'
|
||||
|
||||
- run: uv sync
|
||||
shell: bash
|
||||
|
||||
- name: Run Mypy
|
||||
shell: bash
|
||||
run: |
|
||||
uv run mypy ./nonebot_plugin_tetris_stats
|
||||
|
||||
- name: Run BasedPyright
|
||||
shell: bash
|
||||
run: |
|
||||
uv run basedpyright ./nonebot_plugin_tetris_stats/
|
||||
61
.github/workflows/codeql-analysis.yml
vendored
61
.github/workflows/codeql-analysis.yml
vendored
@@ -9,14 +9,14 @@
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
name: 'CodeQL'
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ main ]
|
||||
branches: [main]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ main ]
|
||||
branches: [main]
|
||||
schedule:
|
||||
- cron: '17 6 * * 5'
|
||||
|
||||
@@ -32,41 +32,40 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: [ 'python' ]
|
||||
language: ['python']
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
|
||||
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v2
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v2
|
||||
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
|
||||
# queries: security-extended,security-and-quality
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v3
|
||||
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
# If the Autobuild fails above, remove it and uncomment the following three lines.
|
||||
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v2
|
||||
# - run: |
|
||||
# echo "Run, Build Application using script"
|
||||
# ./location_of_script_within_repo/buildscript.sh
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
|
||||
545
.gitignore
vendored
545
.gitignore
vendored
@@ -1,21 +1,528 @@
|
||||
.idea
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/linux,macos,python,pycharm,windows,visualstudiocode,node
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=linux,macos,python,pycharm,windows,visualstudiocode,node
|
||||
|
||||
### Linux ###
|
||||
*~
|
||||
|
||||
# temporary files which can be created if a process still has a handle open of a deleted file
|
||||
.fuse_hidden*
|
||||
|
||||
# KDE directory preferences
|
||||
.directory
|
||||
|
||||
# Linux trash folder which might appear on any partition or disk
|
||||
.Trash-*
|
||||
|
||||
# .nfs files are created when an open file is removed but is still being accessed
|
||||
.nfs*
|
||||
|
||||
### macOS ###
|
||||
# General
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
|
||||
# Icon must end with two \r
|
||||
Icon
|
||||
|
||||
|
||||
# Thumbnails
|
||||
._*
|
||||
|
||||
# Files that might appear in the root of a volume
|
||||
.DocumentRevisions-V100
|
||||
.fseventsd
|
||||
.Spotlight-V100
|
||||
.TemporaryItems
|
||||
.Trashes
|
||||
.VolumeIcon.icns
|
||||
.com.apple.timemachine.donotpresent
|
||||
|
||||
# Directories potentially created on remote AFP share
|
||||
.AppleDB
|
||||
.AppleDesktop
|
||||
Network Trash Folder
|
||||
Temporary Items
|
||||
.apdisk
|
||||
|
||||
### macOS Patch ###
|
||||
# iCloud generated files
|
||||
*.icloud
|
||||
|
||||
### Node ###
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
lerna-debug.log*
|
||||
.pnpm-debug.log*
|
||||
|
||||
# Diagnostic reports (https://nodejs.org/api/report.html)
|
||||
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
|
||||
|
||||
# Runtime data
|
||||
pids
|
||||
*.pid
|
||||
*.seed
|
||||
*.pid.lock
|
||||
|
||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
||||
lib-cov
|
||||
|
||||
# Coverage directory used by tools like istanbul
|
||||
coverage
|
||||
*.lcov
|
||||
|
||||
# nyc test coverage
|
||||
.nyc_output
|
||||
|
||||
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
|
||||
.grunt
|
||||
|
||||
# Bower dependency directory (https://bower.io/)
|
||||
bower_components
|
||||
|
||||
# node-waf configuration
|
||||
.lock-wscript
|
||||
|
||||
# Compiled binary addons (https://nodejs.org/api/addons.html)
|
||||
build/Release
|
||||
|
||||
# Dependency directories
|
||||
node_modules/
|
||||
jspm_packages/
|
||||
|
||||
# Snowpack dependency directory (https://snowpack.dev/)
|
||||
web_modules/
|
||||
|
||||
# TypeScript cache
|
||||
*.tsbuildinfo
|
||||
|
||||
# Optional npm cache directory
|
||||
.npm
|
||||
|
||||
# Optional eslint cache
|
||||
.eslintcache
|
||||
|
||||
# Optional stylelint cache
|
||||
.stylelintcache
|
||||
|
||||
# Microbundle cache
|
||||
.rpt2_cache/
|
||||
.rts2_cache_cjs/
|
||||
.rts2_cache_es/
|
||||
.rts2_cache_umd/
|
||||
|
||||
# Optional REPL history
|
||||
.node_repl_history
|
||||
|
||||
# Output of 'npm pack'
|
||||
*.tgz
|
||||
|
||||
# Yarn Integrity file
|
||||
.yarn-integrity
|
||||
|
||||
# dotenv environment variable files
|
||||
.env
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.env.local
|
||||
|
||||
# parcel-bundler cache (https://parceljs.org/)
|
||||
.cache
|
||||
.parcel-cache
|
||||
|
||||
# Next.js build output
|
||||
.next
|
||||
out
|
||||
|
||||
# Nuxt.js build / generate output
|
||||
.nuxt
|
||||
dist
|
||||
test_*
|
||||
Untitled*
|
||||
*copy*
|
||||
.vscode
|
||||
*dev*
|
||||
*_cache*
|
||||
*backup*
|
||||
*.pyc
|
||||
node_modules
|
||||
.prettier*
|
||||
package.json
|
||||
pnpm-lock.yaml
|
||||
*.drawio.svg
|
||||
package-lock.json
|
||||
*Zone.Identifier
|
||||
.env*
|
||||
|
||||
# Gatsby files
|
||||
.cache/
|
||||
# Comment in the public line in if your project uses Gatsby and not Next.js
|
||||
# https://nextjs.org/blog/next-9-1#public-directory-support
|
||||
# public
|
||||
|
||||
# vuepress build output
|
||||
.vuepress/dist
|
||||
|
||||
# vuepress v2.x temp and cache directory
|
||||
.temp
|
||||
|
||||
# Docusaurus cache and generated files
|
||||
.docusaurus
|
||||
|
||||
# Serverless directories
|
||||
.serverless/
|
||||
|
||||
# FuseBox cache
|
||||
.fusebox/
|
||||
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
# TernJS port file
|
||||
.tern-port
|
||||
|
||||
# Stores VSCode versions used for testing VSCode extensions
|
||||
.vscode-test
|
||||
|
||||
# yarn v2
|
||||
.yarn/cache
|
||||
.yarn/unplugged
|
||||
.yarn/build-state.yml
|
||||
.yarn/install-state.gz
|
||||
.pnp.*
|
||||
|
||||
### Node Patch ###
|
||||
# Serverless Webpack directories
|
||||
.webpack/
|
||||
|
||||
# Optional stylelint cache
|
||||
|
||||
# SvelteKit build / generate output
|
||||
.svelte-kit
|
||||
|
||||
### PyCharm ###
|
||||
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
|
||||
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
|
||||
|
||||
# User-specific stuff
|
||||
.idea/**/workspace.xml
|
||||
.idea/**/tasks.xml
|
||||
.idea/**/usage.statistics.xml
|
||||
.idea/**/dictionaries
|
||||
.idea/**/shelf
|
||||
|
||||
# AWS User-specific
|
||||
.idea/**/aws.xml
|
||||
|
||||
# Generated files
|
||||
.idea/**/contentModel.xml
|
||||
|
||||
# Sensitive or high-churn files
|
||||
.idea/**/dataSources/
|
||||
.idea/**/dataSources.ids
|
||||
.idea/**/dataSources.local.xml
|
||||
.idea/**/sqlDataSources.xml
|
||||
.idea/**/dynamic.xml
|
||||
.idea/**/uiDesigner.xml
|
||||
.idea/**/dbnavigator.xml
|
||||
|
||||
# Gradle
|
||||
.idea/**/gradle.xml
|
||||
.idea/**/libraries
|
||||
|
||||
# Gradle and Maven with auto-import
|
||||
# When using Gradle or Maven with auto-import, you should exclude module files,
|
||||
# since they will be recreated, and may cause churn. Uncomment if using
|
||||
# auto-import.
|
||||
# .idea/artifacts
|
||||
# .idea/compiler.xml
|
||||
# .idea/jarRepositories.xml
|
||||
# .idea/modules.xml
|
||||
# .idea/*.iml
|
||||
# .idea/modules
|
||||
# *.iml
|
||||
# *.ipr
|
||||
|
||||
# CMake
|
||||
cmake-build-*/
|
||||
|
||||
# Mongo Explorer plugin
|
||||
.idea/**/mongoSettings.xml
|
||||
|
||||
# File-based project format
|
||||
*.iws
|
||||
|
||||
# IntelliJ
|
||||
out/
|
||||
|
||||
# mpeltonen/sbt-idea plugin
|
||||
.idea_modules/
|
||||
|
||||
# JIRA plugin
|
||||
atlassian-ide-plugin.xml
|
||||
|
||||
# Cursive Clojure plugin
|
||||
.idea/replstate.xml
|
||||
|
||||
# SonarLint plugin
|
||||
.idea/sonarlint/
|
||||
|
||||
# Crashlytics plugin (for Android Studio and IntelliJ)
|
||||
com_crashlytics_export_strings.xml
|
||||
crashlytics.properties
|
||||
crashlytics-build.properties
|
||||
fabric.properties
|
||||
|
||||
# Editor-based Rest Client
|
||||
.idea/httpRequests
|
||||
|
||||
# Android studio 3.1+ serialized cache file
|
||||
.idea/caches/build_file_checksums.ser
|
||||
|
||||
### PyCharm Patch ###
|
||||
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
|
||||
|
||||
# *.iml
|
||||
# modules.xml
|
||||
# .idea/misc.xml
|
||||
# *.ipr
|
||||
|
||||
# Sonarlint plugin
|
||||
# https://plugins.jetbrains.com/plugin/7973-sonarlint
|
||||
.idea/**/sonarlint/
|
||||
|
||||
# SonarQube Plugin
|
||||
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
|
||||
.idea/**/sonarIssues.xml
|
||||
|
||||
# Markdown Navigator plugin
|
||||
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
|
||||
.idea/**/markdown-navigator.xml
|
||||
.idea/**/markdown-navigator-enh.xml
|
||||
.idea/**/markdown-navigator/
|
||||
|
||||
# Cache file creation bug
|
||||
# See https://youtrack.jetbrains.com/issue/JBR-2257
|
||||
.idea/$CACHE_FILE$
|
||||
|
||||
# CodeStream plugin
|
||||
# https://plugins.jetbrains.com/plugin/12206-codestream
|
||||
.idea/codestream.xml
|
||||
|
||||
# Azure Toolkit for IntelliJ plugin
|
||||
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
|
||||
.idea/**/azureSettings.xml
|
||||
|
||||
### Python ###
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
### Python Patch ###
|
||||
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
|
||||
poetry.toml
|
||||
|
||||
# ruff
|
||||
.ruff_cache/
|
||||
|
||||
# LSP config files
|
||||
pyrightconfig.json
|
||||
|
||||
### VisualStudioCode ###
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
!.vscode/*.code-snippets
|
||||
|
||||
# Local History for Visual Studio Code
|
||||
.history/
|
||||
|
||||
# Built Visual Studio Code Extensions
|
||||
*.vsix
|
||||
|
||||
### VisualStudioCode Patch ###
|
||||
# Ignore all local history of files
|
||||
.history
|
||||
.ionide
|
||||
|
||||
### Windows ###
|
||||
# Windows thumbnail cache files
|
||||
Thumbs.db
|
||||
Thumbs.db:encryptable
|
||||
ehthumbs.db
|
||||
ehthumbs_vista.db
|
||||
|
||||
# Dump file
|
||||
*.stackdump
|
||||
|
||||
# Folder config file
|
||||
[Dd]esktop.ini
|
||||
|
||||
# Recycle Bin used on file shares
|
||||
$RECYCLE.BIN/
|
||||
|
||||
# Windows Installer files
|
||||
*.cab
|
||||
*.msi
|
||||
*.msix
|
||||
*.msm
|
||||
*.msp
|
||||
|
||||
# Windows shortcuts
|
||||
*.lnk
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/linux,macos,python,pycharm,windows,visualstudiocode,node
|
||||
|
||||
# NoneBot2
|
||||
bot.py
|
||||
TODO
|
||||
*.fish
|
||||
.env*
|
||||
|
||||
# Misc
|
||||
ignore_*
|
||||
*.backup
|
||||
TODO*
|
||||
|
||||
22
.pre-commit-config.yaml
Normal file
22
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,22 @@
|
||||
default_install_hook_types: [pre-commit, prepare-commit-msg]
|
||||
ci:
|
||||
autofix_commit_msg: ':rotating_light: auto fix by pre-commit hooks'
|
||||
autofix_prs: true
|
||||
autoupdate_branch: main
|
||||
autoupdate_schedule: weekly
|
||||
autoupdate_commit_msg: ':arrow_up: auto update by pre-commit hooks'
|
||||
repos:
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.8.1
|
||||
hooks:
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
stages: [pre-commit]
|
||||
- id: ruff-format
|
||||
stages: [pre-commit]
|
||||
|
||||
- repo: https://github.com/nonebot/nonemoji
|
||||
rev: v0.1.4
|
||||
hooks:
|
||||
- id: nonemoji
|
||||
stages: [prepare-commit-msg]
|
||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
||||
3.10
|
||||
58
CONTRIBUTING.en-US.md
Normal file
58
CONTRIBUTING.en-US.md
Normal file
@@ -0,0 +1,58 @@
|
||||
# How to Contribute?
|
||||
|
||||
## Setting Up the Environment
|
||||
|
||||
### For Developers with Basic Python Knowledge
|
||||
|
||||
First, you need install [uv](https://docs.astral.sh/uv/).
|
||||
Then:
|
||||
|
||||
```bash
|
||||
# Set up the basic Python environment
|
||||
uv python install 3.10
|
||||
|
||||
# Clone the repository
|
||||
git clone https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
|
||||
cd nonebot-plugin-tetris-stats
|
||||
|
||||
# Install dependencies
|
||||
uv sync
|
||||
```
|
||||
|
||||
## Development
|
||||
|
||||
### Code Development
|
||||
|
||||
1. For static code analysis, use [ruff](https://docs.astral.sh/ruff/). You can install the corresponding plugin for your IDE or use the command line with `ruff check ./nonebot_plugin_tetris_stats/` to check the code.
|
||||
2. For code formatting, use [ruff](https://docs.astral.sh/ruff/). You can install the corresponding plugin for your IDE or use the command line with `ruff format ./nonebot_plugin_tetris_stats/` to format the code.
|
||||
3. For type checking, use both [basedpyright](https://docs.basedpyright.com/latest/) and [mypy](https://www.mypy-lang.org/). You can install the corresponding plugins for your IDE or use the following commands in the terminal to check the code:
|
||||
|
||||
```bash
|
||||
# basedpyright
|
||||
basedpyright ./nonebot_plugin_tetris_stats/
|
||||
|
||||
# mypy
|
||||
mypy ./nonebot_plugin_tetris_stats/
|
||||
```
|
||||
|
||||
### Internationalization
|
||||
|
||||
This project uses [Tarina](https://github.com/ArcletProject/Tarina) for internationalization support.
|
||||
|
||||
#### Adding a New Language
|
||||
|
||||
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
|
||||
2. Run `tarina-lang create {language_code}` \* Please note that the language code should preferably follow the [IETF language tag](https://en.wikipedia.org/wiki/IETF_language_tag) standard.
|
||||
3. Edit the generated `./nonebot_plugin_tetris_stats/i18n/{language_code}.json` file.
|
||||
|
||||
#### Updating an Existing Language
|
||||
|
||||
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
|
||||
2. Edit the corresponding `./nonebot_plugin_tetris_stats/i18n/{language_code}.json` file.
|
||||
|
||||
#### Adding New Entries
|
||||
|
||||
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
|
||||
2. Edit the `.template.json` file.
|
||||
3. Run `tarina-lang schema && tarina-lang model`.
|
||||
4. Modify the language files, adding new entries at least to `en-US.json`.
|
||||
57
CONTRIBUTING.md
Normal file
57
CONTRIBUTING.md
Normal file
@@ -0,0 +1,57 @@
|
||||
# 我该如何参与开发?
|
||||
|
||||
## 配置环境
|
||||
|
||||
首先你需要安装 [uv](https://docs.astral.sh/uv/)。
|
||||
然后:
|
||||
|
||||
```bash
|
||||
# 配置基础 Python 环境
|
||||
uv python install 3.10
|
||||
|
||||
# 克隆仓库
|
||||
git clone https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
|
||||
cd nonebot-plugin-tetris-stats
|
||||
|
||||
# 安装依赖
|
||||
uv sync
|
||||
```
|
||||
|
||||
## 开发
|
||||
|
||||
### 代码开发
|
||||
|
||||
1. 代码静态检查使用 [ruff](https://docs.astral.sh/ruff/),你可以为你的ide安装对应插件来使用,也可以在命令行使用`ruff check ./nonebot_plugin_tetris_stats/`来检查代码。
|
||||
2. 代码格式化使用 [ruff](https://docs.astral.sh/ruff/),你可以为你的ide安装对应插件来使用,也可以在命令行使用`ruff format ./nonebot_plugin_tetris_stats/`来格式化代码。
|
||||
3. 类型检查同时使用 [basedpyright](https://docs.basedpyright.com/latest/) 和 [mypy](https://www.mypy-lang.org/),你可以为你的ide安装对应插件来使用。
|
||||
也可以在命令行使用下面的命令来检查代码:
|
||||
|
||||
```bash
|
||||
# basedpyright
|
||||
basedpyright ./nonebot_plugin_tetris_stats/
|
||||
|
||||
# mypy
|
||||
mypy ./nonebot_plugin_tetris_stats/
|
||||
```
|
||||
|
||||
### 国际化
|
||||
|
||||
本项目使用 [Tarina](https://github.com/ArcletProject/Tarina) 提供国际化支持。
|
||||
|
||||
#### 添加新的语言
|
||||
|
||||
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
|
||||
2. 运行 `tarina-lang create {语言代码}` \* 请注意,语言代码最好符合 [IETF语言标签](https://zh.wikipedia.org/wiki/IETF%E8%AF%AD%E8%A8%80%E6%A0%87%E7%AD%BE) 的规范。
|
||||
3. 编辑生成的 `./nonebot_plugin_tetris_stats/i18n/{语言代码}.json` 文件。
|
||||
|
||||
#### 更新已有语言
|
||||
|
||||
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
|
||||
2. 编辑对应的 `./nonebot_plugin_tetris_stats/i18n/{语言代码}.json` 文件。
|
||||
|
||||
#### 添加新的条目
|
||||
|
||||
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
|
||||
2. 编辑 `.template.json` 文件。
|
||||
3. 运行 `tarina-lang schema && tarina-lang model`。
|
||||
4. 修改语言文件,至少为`en-US.json`添加新的条目。
|
||||
10
README.md
10
README.md
@@ -87,3 +87,13 @@ pip install nonebot-plugin-tetris-stats
|
||||
## 📝 开源
|
||||
|
||||
本项目使用 [AGPL-3.0](https://github.com/shoucandanghehe/nonebot-plugin-tetris-stats/blob/main/LICENSE) 许可证开源
|
||||
|
||||
## 🤓☝ 给个 star 吧
|
||||
|
||||
<a href="https://star-history.com/#A-Minos/nonebot-plugin-tetris-stats&Date">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=A-Minos/nonebot-plugin-tetris-stats&type=Date&theme=dark" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=A-Minos/nonebot-plugin-tetris-stats&type=Date" />
|
||||
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=A-Minos/nonebot-plugin-tetris-stats&type=Date" />
|
||||
</picture>
|
||||
</a>
|
||||
|
||||
@@ -1,23 +1,40 @@
|
||||
from nonebot import require
|
||||
from nonebot.plugin import PluginMetadata
|
||||
from nonebot.plugin import PluginMetadata, inherit_supported_adapters
|
||||
|
||||
require('nonebot_plugin_localstore')
|
||||
require('nonebot_plugin_orm')
|
||||
require('nonebot_plugin_alconna')
|
||||
require('nonebot_plugin_apscheduler')
|
||||
require_plugins = {
|
||||
'nonebot_plugin_alconna',
|
||||
'nonebot_plugin_apscheduler',
|
||||
'nonebot_plugin_localstore',
|
||||
'nonebot_plugin_orm',
|
||||
'nonebot_plugin_session_orm',
|
||||
'nonebot_plugin_session',
|
||||
'nonebot_plugin_user',
|
||||
'nonebot_plugin_userinfo',
|
||||
'nonebot_plugin_waiter',
|
||||
}
|
||||
|
||||
from .config.config import migrations # noqa: E402
|
||||
for i in require_plugins:
|
||||
require(i)
|
||||
|
||||
from nonebot_plugin_alconna import namespace # noqa: E402
|
||||
|
||||
with namespace('tetris_stats') as ns:
|
||||
ns.enable_message_cache = False
|
||||
|
||||
from .config import migrations # noqa: E402
|
||||
from .config.config import Config # noqa: E402
|
||||
|
||||
__plugin_meta__ = PluginMetadata(
|
||||
name='Tetris Stats',
|
||||
description='一个用于查询 Tetris 相关游戏玩家数据的插件',
|
||||
usage='发送 {游戏名} --help 查询使用方法',
|
||||
usage='发送 tstats --help 查询使用方法',
|
||||
type='application',
|
||||
homepage='https://github.com/A-minos/nonebot-plugin-tetris-stats',
|
||||
config=Config,
|
||||
supported_adapters=inherit_supported_adapters(*require_plugins),
|
||||
extra={
|
||||
'orm_version_location': migrations,
|
||||
},
|
||||
)
|
||||
|
||||
from . import game_data_processor # noqa: F401, E402
|
||||
from .utils import host # noqa: F401, E402
|
||||
from . import games # noqa: F401, E402
|
||||
|
||||
@@ -1,14 +1,27 @@
|
||||
from pathlib import Path
|
||||
from nonebot import get_plugin_config
|
||||
from nonebot_plugin_localstore import get_plugin_cache_dir, get_plugin_data_dir
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from nonebot_plugin_localstore import get_cache_dir # type: ignore[import-untyped]
|
||||
from pydantic import BaseModel
|
||||
CACHE_PATH = get_plugin_cache_dir()
|
||||
DATA_PATH = get_plugin_data_dir()
|
||||
|
||||
from . import migrations # noqa: F401
|
||||
|
||||
CACHE_PATH: Path = get_cache_dir('nonebot_plugin_tetris_stats')
|
||||
class Proxy(BaseModel):
|
||||
main: str | None = None
|
||||
github: str | None = None
|
||||
tetrio: str | None = None
|
||||
tos: str | None = None
|
||||
top: str | None = None
|
||||
|
||||
|
||||
class ScopedConfig(BaseModel):
|
||||
request_timeout: float = 30.0
|
||||
screenshot_quality: float = 2
|
||||
proxy: Proxy = Field(default_factory=Proxy)
|
||||
|
||||
|
||||
class Config(BaseModel):
|
||||
"""配置类"""
|
||||
tetris: ScopedConfig = Field(default_factory=ScopedConfig)
|
||||
|
||||
tetris_req_timeout: float = 30.0
|
||||
|
||||
config = get_plugin_config(Config)
|
||||
|
||||
@@ -8,11 +8,14 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '09d4bb60160d'
|
||||
down_revision: str | Sequence[str] | None = 'b9d65badc713'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
|
||||
@@ -8,11 +8,14 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '0d50142b780f'
|
||||
down_revision: str | Sequence[str] | None = '09d4bb60160d'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
|
||||
@@ -0,0 +1,283 @@
|
||||
"""Refactor Historical
|
||||
|
||||
迁移 ID: 3c25a5a8c050
|
||||
父迁移: b7fbdafc339a
|
||||
创建时间: 2024-05-14 09:16:35.193001
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from nonebot.log import logger
|
||||
from rich.progress import BarColumn, MofNCompleteColumn, Progress, TaskProgressColumn, TextColumn, TimeRemainingColumn
|
||||
from sqlalchemy import desc, select
|
||||
from sqlalchemy.dialects import sqlite
|
||||
from sqlalchemy.ext.automap import automap_base
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '3c25a5a8c050'
|
||||
down_revision: str | Sequence[str] | None = 'b7fbdafc339a'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def migrate_old_data() -> None: # noqa: C901
|
||||
from json import dumps, loads
|
||||
|
||||
Base = automap_base() # noqa: N806
|
||||
Base.prepare(autoload_with=op.get_bind())
|
||||
OldHistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||
TETRIOHistoricalData = Base.classes.nonebot_plugin_tetris_stats_tetriohistoricaldata # noqa: N806
|
||||
TOSHistoricalData = Base.classes.nonebot_plugin_tetris_stats_toshistoricaldata # noqa: N806
|
||||
with (
|
||||
Session(op.get_bind()) as session,
|
||||
Progress(
|
||||
TextColumn('[progress.description]{task.description}'),
|
||||
BarColumn(),
|
||||
MofNCompleteColumn(),
|
||||
TaskProgressColumn(),
|
||||
TimeRemainingColumn(),
|
||||
) as progress,
|
||||
):
|
||||
if session.query(OldHistoricalData).count() == 0:
|
||||
logger.info('空表, 跳过')
|
||||
return
|
||||
task_id = progress.add_task('[cyan]Migrating:', total=session.query(OldHistoricalData).count())
|
||||
pointer = 0
|
||||
while pointer < session.query(OldHistoricalData).order_by(desc(OldHistoricalData.id)).limit(1).one().id:
|
||||
result = session.scalars(
|
||||
select(OldHistoricalData)
|
||||
.where(OldHistoricalData.id > pointer)
|
||||
.order_by(OldHistoricalData.id)
|
||||
.limit(100)
|
||||
).all()
|
||||
for j in result:
|
||||
processed_data: dict[str, Any] = loads(j.processed_data)
|
||||
if j.game_platform == 'IO':
|
||||
if (data := processed_data.get('user_info')) is not None:
|
||||
session.add(
|
||||
TETRIOHistoricalData(
|
||||
user_unique_identifier=j.user_unique_identifier,
|
||||
api_type='User Info',
|
||||
data=dumps(data),
|
||||
update_time=datetime.fromisoformat(data['cache']['cached_at']),
|
||||
)
|
||||
)
|
||||
if (data := processed_data.get('user_records')) is not None:
|
||||
session.add(
|
||||
TETRIOHistoricalData(
|
||||
user_unique_identifier=j.user_unique_identifier,
|
||||
api_type='User Records',
|
||||
data=dumps(data),
|
||||
update_time=datetime.fromisoformat(data['cache']['cached_at']),
|
||||
)
|
||||
)
|
||||
if j.game_platform == 'TOS' and not j.user_unique_identifier.isdigit():
|
||||
if (data := processed_data.get('user_info')) is not None:
|
||||
session.add(
|
||||
TOSHistoricalData(
|
||||
user_unique_identifier=j.user_unique_identifier,
|
||||
api_type='User Info',
|
||||
data=dumps(data),
|
||||
update_time=j.finish_time,
|
||||
)
|
||||
)
|
||||
if (data := processed_data.get('user_profile')) is not None:
|
||||
for v in data.values():
|
||||
session.add(
|
||||
TOSHistoricalData(
|
||||
user_unique_identifier=j.user_unique_identifier,
|
||||
api_type='User Profile',
|
||||
data=dumps(v),
|
||||
update_time=j.finish_time,
|
||||
)
|
||||
)
|
||||
progress.update(task_id, advance=1)
|
||||
session.commit()
|
||||
pointer = result[-1].id
|
||||
logger.success('Migrate successfully')
|
||||
|
||||
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_tetriohistoricaldata',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
|
||||
sa.Column('api_type', sa.String(length=16), nullable=False),
|
||||
sa.Column('data', sa.JSON(), nullable=False),
|
||||
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetriohistoricaldata')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'), ['api_type'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'), ['update_time'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'),
|
||||
['user_unique_identifier'],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_tophistoricaldata',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
|
||||
sa.Column('api_type', sa.String(length=16), nullable=False),
|
||||
sa.Column('data', sa.JSON(), nullable=False),
|
||||
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tophistoricaldata')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tophistoricaldata', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_api_type'), ['api_type'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_update_time'), ['update_time'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_user_unique_identifier'),
|
||||
['user_unique_identifier'],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_toshistoricaldata',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
|
||||
sa.Column('api_type', sa.String(length=16), nullable=False),
|
||||
sa.Column('data', sa.JSON(), nullable=False),
|
||||
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_toshistoricaldata')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_toshistoricaldata', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_api_type'), ['api_type'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_update_time'), ['update_time'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_user_unique_identifier'),
|
||||
['user_unique_identifier'],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_triggerhistoricaldata',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('trigger_time', sa.DateTime(), nullable=False),
|
||||
sa.Column('session_persist_id', sa.Integer(), nullable=False),
|
||||
sa.Column('game_platform', sa.String(length=32), nullable=False),
|
||||
sa.Column('command_type', sa.String(length=16), nullable=False),
|
||||
sa.Column('command_args', sa.JSON(), nullable=False),
|
||||
sa.Column('finish_time', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_triggerhistoricaldata')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldata', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_command_type'),
|
||||
['command_type'],
|
||||
unique=False,
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_game_platform'),
|
||||
['game_platform'],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
migrate_old_data()
|
||||
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_command_type')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_account')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_type')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier')
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_historicaldata')
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_historicaldata',
|
||||
sa.Column('id', sa.INTEGER(), nullable=False),
|
||||
sa.Column('trigger_time', sa.DATETIME(), nullable=False),
|
||||
sa.Column('bot_platform', sa.VARCHAR(length=32), nullable=True),
|
||||
sa.Column('bot_account', sa.VARCHAR(), nullable=True),
|
||||
sa.Column('source_type', sa.VARCHAR(length=32), nullable=True),
|
||||
sa.Column('source_account', sa.VARCHAR(), nullable=True),
|
||||
sa.Column('message', sa.BLOB(), nullable=True),
|
||||
sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False),
|
||||
sa.Column('command_type', sa.VARCHAR(length=16), nullable=False),
|
||||
sa.Column('command_args', sqlite.JSON(), nullable=False),
|
||||
sa.Column('game_user', sqlite.JSON(), nullable=False),
|
||||
sa.Column('processed_data', sqlite.JSON(), nullable=False),
|
||||
sa.Column('finish_time', sa.DATETIME(), nullable=False),
|
||||
sa.Column('user_unique_identifier', sa.VARCHAR(length=32), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'),
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
'ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier',
|
||||
['user_unique_identifier'],
|
||||
unique=False,
|
||||
)
|
||||
batch_op.create_index(
|
||||
'ix_nonebot_plugin_tetris_stats_historicaldata_source_type', ['source_type'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
'ix_nonebot_plugin_tetris_stats_historicaldata_source_account', ['source_account'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
'ix_nonebot_plugin_tetris_stats_historicaldata_game_platform', ['game_platform'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
'ix_nonebot_plugin_tetris_stats_historicaldata_command_type', ['command_type'], unique=False
|
||||
)
|
||||
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldata', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_game_platform'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_command_type'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_triggerhistoricaldata')
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_toshistoricaldata', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_user_unique_identifier'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_update_time'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_api_type'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_toshistoricaldata')
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tophistoricaldata', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_user_unique_identifier'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_update_time'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_api_type'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_tophistoricaldata')
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_tetriohistoricaldata')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,119 @@
|
||||
"""add TETRIOLeagueStats
|
||||
|
||||
迁移 ID: 5a1b93948494
|
||||
父迁移: cfeab6961dce
|
||||
创建时间: 2024-08-24 00:22:41.359500
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '5a1b93948494'
|
||||
down_revision: str | Sequence[str] | None = 'cfeab6961dce'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_tetrioleaguestats',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguestats')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestats', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestats_update_time'), ['update_time'], unique=False
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_tetrioleaguehistorical',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('request_id', sa.Uuid(), nullable=False),
|
||||
sa.Column('data', sa.JSON(), nullable=False),
|
||||
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||
sa.Column('stats_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
['stats_id'],
|
||||
['nonebot_plugin_tetris_stats_tetrioleaguestats.id'],
|
||||
name=op.f(
|
||||
'fk_nonebot_plugin_tetris_stats_tetrioleaguehistorical_stats_id_nonebot_plugin_tetris_stats_tetrioleaguestats'
|
||||
),
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguehistorical')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_request_id'), ['request_id'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_update_time'),
|
||||
['update_time'],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_tetrioleaguestatsfield',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('rank', sa.String(length=2), nullable=False),
|
||||
sa.Column('tr_line', sa.Float(), nullable=False),
|
||||
sa.Column('player_count', sa.Integer(), nullable=False),
|
||||
sa.Column('low_pps', sa.JSON(), nullable=False),
|
||||
sa.Column('low_apm', sa.JSON(), nullable=False),
|
||||
sa.Column('low_vs', sa.JSON(), nullable=False),
|
||||
sa.Column('avg_pps', sa.Float(), nullable=False),
|
||||
sa.Column('avg_apm', sa.Float(), nullable=False),
|
||||
sa.Column('avg_vs', sa.Float(), nullable=False),
|
||||
sa.Column('high_pps', sa.JSON(), nullable=False),
|
||||
sa.Column('high_apm', sa.JSON(), nullable=False),
|
||||
sa.Column('high_vs', sa.JSON(), nullable=False),
|
||||
sa.Column('stats_id', sa.Integer(), nullable=False),
|
||||
sa.ForeignKeyConstraint(
|
||||
['stats_id'],
|
||||
['nonebot_plugin_tetris_stats_tetrioleaguestats.id'],
|
||||
name=op.f(
|
||||
'fk_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_stats_id_nonebot_plugin_tetris_stats_tetrioleaguestats'
|
||||
),
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguestatsfield')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank'), ['rank'], unique=False
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield')
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_update_time'))
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_request_id'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical')
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestats', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestats_update_time'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguestats')
|
||||
# ### end Alembic commands ###
|
||||
@@ -5,14 +5,17 @@
|
||||
创建时间: 2023-11-26 20:15:56.033892
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from alembic import op
|
||||
from sqlalchemy.ext.automap import automap_base
|
||||
from sqlalchemy.orm import Session
|
||||
from ujson import dumps, loads
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '6c3206f90cc3'
|
||||
down_revision: str | Sequence[str] | None = '9f6582279ce2'
|
||||
@@ -23,6 +26,7 @@ depends_on: str | Sequence[str] | None = None
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
from json import dumps, loads
|
||||
|
||||
Base = automap_base() # noqa: N806
|
||||
connection = op.get_bind()
|
||||
@@ -46,6 +50,7 @@ def upgrade(name: str = '') -> None:
|
||||
def downgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
from json import dumps, loads
|
||||
|
||||
Base = automap_base() # noqa: N806
|
||||
connection = op.get_bind()
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from alembic import op
|
||||
from nonebot.log import logger
|
||||
@@ -16,20 +16,18 @@ from sqlalchemy import select
|
||||
from sqlalchemy.ext.automap import automap_base
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '8a91210ce14d'
|
||||
down_revision: str | Sequence[str] | None = '0d50142b780f'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade(name: str = '') -> None:
|
||||
def upgrade(name: str = '') -> None: # noqa: C901
|
||||
if name:
|
||||
return
|
||||
from nonebot_plugin_tetris_stats.version import __version__
|
||||
|
||||
if __version__ != '1.0.3':
|
||||
logger.critical('本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移')
|
||||
raise RuntimeError('本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移')
|
||||
|
||||
from nonebot.compat import PYDANTIC_V2, type_validate_json
|
||||
from pydantic import BaseModel, ValidationError
|
||||
@@ -42,8 +40,6 @@ def upgrade(name: str = '') -> None:
|
||||
TimeRemainingColumn,
|
||||
)
|
||||
|
||||
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseProcessedData
|
||||
|
||||
Base = automap_base() # noqa: N806
|
||||
Base.prepare(autoload_with=op.get_bind())
|
||||
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||
@@ -56,18 +52,33 @@ def upgrade(name: str = '') -> None:
|
||||
def model_to_json(value: BaseModel) -> str:
|
||||
return value.json(by_alias=True)
|
||||
|
||||
models = BaseProcessedData.__subclasses__()
|
||||
|
||||
def json_to_model(value: str) -> BaseModel:
|
||||
for i in models:
|
||||
try:
|
||||
return type_validate_json(i, value)
|
||||
except ValidationError: # noqa: PERF203
|
||||
...
|
||||
raise ValueError
|
||||
|
||||
with Session(op.get_bind()) as session:
|
||||
count = session.query(HistoricalData).count()
|
||||
if count == 0:
|
||||
logger.info('空表, 跳过')
|
||||
return
|
||||
|
||||
from nonebot_plugin_tetris_stats.version import __version__
|
||||
|
||||
if __version__ != '1.0.3':
|
||||
msg = '本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移'
|
||||
logger.critical(msg)
|
||||
raise RuntimeError(msg)
|
||||
|
||||
from nonebot_plugin_tetris_stats.game_data_processor.schemas import ( # type: ignore[import-untyped]
|
||||
BaseProcessedData,
|
||||
)
|
||||
|
||||
models = BaseProcessedData.__subclasses__()
|
||||
|
||||
def json_to_model(value: str) -> BaseModel:
|
||||
for i in models:
|
||||
try:
|
||||
return type_validate_json(i, value)
|
||||
except ValidationError: # noqa: PERF203
|
||||
...
|
||||
raise ValueError
|
||||
|
||||
with Progress(
|
||||
TextColumn('[progress.description]{task.description}'),
|
||||
BarColumn(),
|
||||
|
||||
@@ -5,13 +5,17 @@
|
||||
创建时间: 2023-11-11 16:24:11.826667
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '9866f53ce44f'
|
||||
down_revision: str | Sequence[str] | None = None
|
||||
branch_labels: str | Sequence[str] | None = ('nonebot_plugin_tetris_stats',)
|
||||
|
||||
@@ -5,11 +5,12 @@
|
||||
创建时间: 2023-11-11 16:51:30.718277
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from pathlib import Path
|
||||
from shutil import copyfile
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from alembic import op
|
||||
from nonebot import get_driver
|
||||
@@ -18,6 +19,9 @@ from sqlalchemy import Connection, create_engine, inspect, text
|
||||
from sqlalchemy.ext.automap import automap_base
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '9cd1647db502'
|
||||
down_revision: str | Sequence[str] | None = '9866f53ce44f'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
@@ -33,14 +37,10 @@ def migrate_old_data(connection: Connection) -> None:
|
||||
Bind = Base.classes.nonebot_plugin_tetris_stats_bind # noqa: N806
|
||||
|
||||
def non_empty(obj: str) -> bool:
|
||||
if obj != '' and not obj.isspace():
|
||||
return True
|
||||
return False
|
||||
return bool(obj != '' and not obj.isspace())
|
||||
|
||||
def is_int(obj: int | str) -> bool:
|
||||
if isinstance(obj, int) or obj.isdigit():
|
||||
return True
|
||||
return False
|
||||
return bool(isinstance(obj, int) or obj.isdigit())
|
||||
|
||||
bind_list = [
|
||||
Bind(chat_platform='OneBot V11', chat_account=int(row.QQ), game_platform='IO', game_account=row.USER)
|
||||
@@ -80,8 +80,9 @@ def upgrade(name: str = '') -> None:
|
||||
logger.success('nonebot_plugin_tetris_stats: 跳过迁移')
|
||||
return
|
||||
if 'IORANK' not in tables:
|
||||
logger.warning('nonebot_plugin_tetris_stats: 发现过早版本的数据, 请先更新到 0.4.4 版本')
|
||||
raise RuntimeError('nonebot_plugin_tetris_stats: 请先安装 0.4.4 版本完成迁移之后再升级')
|
||||
msg = 'nonebot_plugin_tetris_stats: 请先安装 0.4.4 版本完成迁移之后再升级'
|
||||
logger.warning(msg)
|
||||
raise RuntimeError(msg)
|
||||
logger.info('nonebot_plugin_tetris_stats: 发现来自老版本的数据, 正在迁移...')
|
||||
migrate_old_data(connection)
|
||||
db_path.unlink()
|
||||
|
||||
@@ -5,15 +5,17 @@
|
||||
创建时间: 2023-11-21 08:35:50.393246
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import sqlite
|
||||
|
||||
from nonebot_plugin_tetris_stats.db.models import PydanticType
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = '9f6582279ce2'
|
||||
down_revision: str | Sequence[str] | None = '9cd1647db502'
|
||||
@@ -45,8 +47,8 @@ def upgrade(name: str = '') -> None:
|
||||
sa.Column('game_platform', sa.String(length=32), nullable=False),
|
||||
sa.Column('command_type', sa.String(length=16), nullable=False),
|
||||
sa.Column('command_args', sa.JSON(), nullable=False),
|
||||
sa.Column('game_user', PydanticType(list), nullable=False),
|
||||
sa.Column('processed_data', PydanticType(list), nullable=False),
|
||||
sa.Column('game_user', sa.JSON(), nullable=False),
|
||||
sa.Column('processed_data', sa.JSON(), nullable=False),
|
||||
sa.Column('finish_time', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_historicaldata')),
|
||||
)
|
||||
|
||||
@@ -0,0 +1,44 @@
|
||||
"""Add TETRIO user configuration
|
||||
|
||||
迁移 ID: a1195e989cc6
|
||||
父迁移: b15844837693
|
||||
创建时间: 2024-06-09 04:20:07.819194
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = 'a1195e989cc6'
|
||||
down_revision: str | Sequence[str] | None = 'b15844837693'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_tetriouserconfig',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('query_template', sa.String(length=2), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetriouserconfig')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_table('nonebot_plugin_tetris_stats_tetriouserconfig')
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,71 @@
|
||||
"""Migrate to nonobot-plugin-user
|
||||
|
||||
迁移 ID: b15844837693
|
||||
父迁移: 3c25a5a8c050
|
||||
创建时间: 2024-06-08 02:27:35.227596
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = 'b15844837693'
|
||||
down_revision: str | Sequence[str] | None = '3c25a5a8c050'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_bind_chat_account')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_bind_chat_platform')
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_bind')
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_bind',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_id', sa.Integer(), nullable=False),
|
||||
sa.Column('game_platform', sa.String(length=32), nullable=False),
|
||||
sa.Column('game_account', sa.String(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_bind')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
|
||||
batch_op.create_index(batch_op.f('ix_nonebot_plugin_tetris_stats_bind_user_id'), ['user_id'], unique=False)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
|
||||
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_bind_user_id'))
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_bind')
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_bind',
|
||||
sa.Column('id', sa.INTEGER(), nullable=False),
|
||||
sa.Column('chat_platform', sa.VARCHAR(length=32), nullable=False),
|
||||
sa.Column('chat_account', sa.VARCHAR(), nullable=False),
|
||||
sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False),
|
||||
sa.Column('game_account', sa.VARCHAR(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_bind'),
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
|
||||
batch_op.create_index('ix_nonebot_plugin_tetris_stats_bind_chat_platform', ['chat_platform'], unique=False)
|
||||
batch_op.create_index('ix_nonebot_plugin_tetris_stats_bind_chat_account', ['chat_account'], unique=False)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
@@ -8,12 +8,15 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from nonebot.log import logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = 'b7fbdafc339a'
|
||||
down_revision: str | Sequence[str] | None = '8a91210ce14d'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
@@ -23,11 +26,7 @@ depends_on: str | Sequence[str] | None = None
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
from nonebot_plugin_tetris_stats.version import __version__
|
||||
|
||||
if __version__ != '1.0.4':
|
||||
logger.critical('本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移')
|
||||
raise RuntimeError('本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移')
|
||||
from nonebot.compat import type_validate_json
|
||||
from pydantic import ValidationError
|
||||
from rich.progress import (
|
||||
@@ -42,8 +41,6 @@ def upgrade(name: str = '') -> None:
|
||||
from sqlalchemy.ext.automap import automap_base
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseUser
|
||||
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||
batch_op.add_column(sa.Column('user_unique_identifier', sa.String(length=32), nullable=True))
|
||||
batch_op.create_index(
|
||||
@@ -56,37 +53,48 @@ def upgrade(name: str = '') -> None:
|
||||
Base.prepare(autoload_with=connection)
|
||||
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||
|
||||
models: list[type[BaseUser]] = BaseUser.__subclasses__()
|
||||
|
||||
def json_to_model(value: str) -> BaseUser:
|
||||
for i in models:
|
||||
try:
|
||||
return type_validate_json(i, value)
|
||||
except ValidationError: # noqa: PERF203
|
||||
...
|
||||
raise ValueError
|
||||
|
||||
with Session(op.get_bind()) as session:
|
||||
count = session.query(HistoricalData).count()
|
||||
with Progress(
|
||||
TextColumn('[progress.description]{task.description}'),
|
||||
BarColumn(),
|
||||
MofNCompleteColumn(),
|
||||
TaskProgressColumn(),
|
||||
TimeRemainingColumn(),
|
||||
) as progress:
|
||||
task_id = progress.add_task('[cyan]Updateing:', total=count)
|
||||
for i in range(0, count, 100):
|
||||
for j in session.scalars(
|
||||
select(HistoricalData).where(HistoricalData.id > i).order_by(HistoricalData.id).limit(100)
|
||||
):
|
||||
model = json_to_model(j.game_user)
|
||||
if count == 0:
|
||||
logger.info('空表, 跳过')
|
||||
else:
|
||||
from nonebot_plugin_tetris_stats.version import __version__
|
||||
|
||||
if __version__ != '1.0.4':
|
||||
msg = '本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移'
|
||||
logger.critical(msg)
|
||||
raise RuntimeError(msg)
|
||||
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseUser # type: ignore[import-untyped]
|
||||
|
||||
models: list[type[BaseUser]] = BaseUser.__subclasses__()
|
||||
|
||||
def json_to_model(value: str) -> BaseUser:
|
||||
for i in models:
|
||||
try:
|
||||
j.user_unique_identifier = model.unique_identifier
|
||||
except ValueError:
|
||||
session.delete(j)
|
||||
progress.update(task_id, advance=1)
|
||||
session.commit()
|
||||
return type_validate_json(i, value)
|
||||
except ValidationError: # noqa: PERF203
|
||||
...
|
||||
raise ValueError
|
||||
|
||||
with Progress(
|
||||
TextColumn('[progress.description]{task.description}'),
|
||||
BarColumn(),
|
||||
MofNCompleteColumn(),
|
||||
TaskProgressColumn(),
|
||||
TimeRemainingColumn(),
|
||||
) as progress:
|
||||
task_id = progress.add_task('[cyan]Updateing:', total=count)
|
||||
for i in range(0, count, 100):
|
||||
for j in session.scalars(
|
||||
select(HistoricalData).where(HistoricalData.id > i).order_by(HistoricalData.id).limit(100)
|
||||
):
|
||||
model = json_to_model(j.game_user)
|
||||
try:
|
||||
j.user_unique_identifier = model.unique_identifier
|
||||
except ValueError:
|
||||
session.delete(j)
|
||||
progress.update(task_id, advance=1)
|
||||
session.commit()
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||
batch_op.alter_column('user_unique_identifier', existing_type=sa.VARCHAR(length=32), nullable=False)
|
||||
logger.success('database upgrade success')
|
||||
|
||||
@@ -5,14 +5,18 @@
|
||||
创建时间: 2023-12-30 00:27:40.991704
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Sequence
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from alembic import op
|
||||
from sqlalchemy.ext.automap import automap_base
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = 'b9d65badc713'
|
||||
down_revision: str | Sequence[str] | None = '6c3206f90cc3'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
|
||||
@@ -0,0 +1,50 @@
|
||||
"""Extend api_type field length
|
||||
|
||||
迁移 ID: cfeab6961dce
|
||||
父迁移: f5b4a6d1325b
|
||||
创建时间: 2024-08-09 14:20:59.789030
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from nonebot.log import logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = 'cfeab6961dce'
|
||||
down_revision: str | Sequence[str] | None = 'f5b4a6d1325b'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||
batch_op.alter_column(
|
||||
'api_type', existing_type=sa.VARCHAR(length=16), type_=sa.String(length=32), existing_nullable=False
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
logger.warning('新数据可能不支持降级!')
|
||||
logger.warning('请确认数据库内数据可以迁移到旧版本!')
|
||||
input('如果确认可以迁移, 请按回车键继续!')
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||
batch_op.alter_column(
|
||||
'api_type', existing_type=sa.String(length=32), type_=sa.VARCHAR(length=16), existing_nullable=False
|
||||
)
|
||||
|
||||
# ### end Alembic commands ###
|
||||
@@ -0,0 +1,91 @@
|
||||
"""TETR.IO new season
|
||||
|
||||
迁移 ID: f5b4a6d1325b
|
||||
父迁移: a1195e989cc6
|
||||
创建时间: 2024-08-01 20:44:48.644912
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
|
||||
revision: str = 'f5b4a6d1325b'
|
||||
down_revision: str | Sequence[str] | None = 'a1195e989cc6'
|
||||
branch_labels: str | Sequence[str] | None = None
|
||||
depends_on: str | Sequence[str] | None = None
|
||||
|
||||
|
||||
def upgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_file_hash')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_rank')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_update_time')
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_iorank')
|
||||
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time')
|
||||
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier')
|
||||
|
||||
op.drop_table('nonebot_plugin_tetris_stats_tetriohistoricaldata')
|
||||
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_tetriohistoricaldata',
|
||||
sa.Column('id', sa.Integer(), nullable=False),
|
||||
sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
|
||||
sa.Column('api_type', sa.String(length=16), nullable=False),
|
||||
sa.Column('data', sa.JSON(), nullable=False),
|
||||
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetriohistoricaldata')),
|
||||
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'), ['api_type'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'), ['update_time'], unique=False
|
||||
)
|
||||
batch_op.create_index(
|
||||
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'),
|
||||
['user_unique_identifier'],
|
||||
unique=False,
|
||||
)
|
||||
|
||||
|
||||
def downgrade(name: str = '') -> None:
|
||||
if name:
|
||||
return
|
||||
op.create_table(
|
||||
'nonebot_plugin_tetris_stats_iorank',
|
||||
sa.Column('id', sa.INTEGER(), nullable=False),
|
||||
sa.Column('rank', sa.VARCHAR(length=2), nullable=False),
|
||||
sa.Column('tr_line', sa.FLOAT(), nullable=False),
|
||||
sa.Column('player_count', sa.INTEGER(), nullable=False),
|
||||
sa.Column('low_pps', sa.JSON(), nullable=False),
|
||||
sa.Column('low_apm', sa.JSON(), nullable=False),
|
||||
sa.Column('low_vs', sa.JSON(), nullable=False),
|
||||
sa.Column('avg_pps', sa.FLOAT(), nullable=False),
|
||||
sa.Column('avg_apm', sa.FLOAT(), nullable=False),
|
||||
sa.Column('avg_vs', sa.FLOAT(), nullable=False),
|
||||
sa.Column('high_pps', sa.JSON(), nullable=False),
|
||||
sa.Column('high_apm', sa.JSON(), nullable=False),
|
||||
sa.Column('high_vs', sa.JSON(), nullable=False),
|
||||
sa.Column('update_time', sa.DATETIME(), nullable=False),
|
||||
sa.Column('file_hash', sa.VARCHAR(length=128), nullable=True),
|
||||
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_iorank'),
|
||||
)
|
||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
|
||||
batch_op.create_index('ix_nonebot_plugin_tetris_stats_iorank_update_time', ['update_time'], unique=False)
|
||||
batch_op.create_index('ix_nonebot_plugin_tetris_stats_iorank_rank', ['rank'], unique=False)
|
||||
batch_op.create_index('ix_nonebot_plugin_tetris_stats_iorank_file_hash', ['file_hash'], unique=False)
|
||||
@@ -1,10 +1,25 @@
|
||||
from asyncio import Lock
|
||||
from collections.abc import AsyncGenerator
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum, auto
|
||||
from typing import TYPE_CHECKING, Literal, TypeVar, overload
|
||||
|
||||
from nonebot_plugin_orm import AsyncSession
|
||||
from nonebot.exception import FinishedException
|
||||
from nonebot.log import logger
|
||||
from nonebot_plugin_orm import AsyncSession, get_session
|
||||
from nonebot_plugin_user import User
|
||||
from sqlalchemy import select
|
||||
|
||||
from ..utils.typing import GameType
|
||||
from .models import Bind
|
||||
from ..utils.typing import AllCommandType, BaseCommandType, GameType, TETRIOCommandType
|
||||
from .models import Bind, TriggerHistoricalData
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..games.tetrio.api.models import TETRIOHistoricalData
|
||||
from ..games.top.api.models import TOPHistoricalData
|
||||
from ..games.tos.api.models import TOSHistoricalData
|
||||
|
||||
|
||||
class BindStatus(Enum):
|
||||
@@ -14,44 +29,124 @@ class BindStatus(Enum):
|
||||
|
||||
async def query_bind_info(
|
||||
session: AsyncSession,
|
||||
chat_platform: str,
|
||||
chat_account: str,
|
||||
user: User,
|
||||
game_platform: GameType,
|
||||
) -> Bind | None:
|
||||
return (
|
||||
await session.scalars(
|
||||
select(Bind)
|
||||
.where(Bind.chat_platform == chat_platform)
|
||||
.where(Bind.chat_account == chat_account)
|
||||
.where(Bind.game_platform == game_platform)
|
||||
)
|
||||
await session.scalars(select(Bind).where(Bind.user_id == user.id).where(Bind.game_platform == game_platform))
|
||||
).one_or_none()
|
||||
|
||||
|
||||
async def create_or_update_bind(
|
||||
session: AsyncSession,
|
||||
chat_platform: str,
|
||||
chat_account: str,
|
||||
user: User,
|
||||
game_platform: GameType,
|
||||
game_account: str,
|
||||
) -> BindStatus:
|
||||
bind = await query_bind_info(
|
||||
session=session,
|
||||
chat_platform=chat_platform,
|
||||
chat_account=chat_account,
|
||||
user=user,
|
||||
game_platform=game_platform,
|
||||
)
|
||||
if bind is None:
|
||||
bind = Bind(
|
||||
chat_platform=chat_platform,
|
||||
chat_account=chat_account,
|
||||
user_id=user.id,
|
||||
game_platform=game_platform,
|
||||
game_account=game_account,
|
||||
)
|
||||
session.add(bind)
|
||||
message = BindStatus.SUCCESS
|
||||
status = BindStatus.SUCCESS
|
||||
else:
|
||||
bind.game_account = game_account
|
||||
message = BindStatus.UPDATE
|
||||
status = BindStatus.UPDATE
|
||||
await session.commit()
|
||||
return message
|
||||
return status
|
||||
|
||||
|
||||
async def remove_bind(
|
||||
session: AsyncSession,
|
||||
user: User,
|
||||
game_platform: GameType,
|
||||
) -> bool:
|
||||
bind = await query_bind_info(
|
||||
session=session,
|
||||
user=user,
|
||||
game_platform=game_platform,
|
||||
)
|
||||
if bind is not None:
|
||||
await session.delete(bind)
|
||||
await session.commit()
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
T = TypeVar('T', 'TETRIOHistoricalData', 'TOPHistoricalData', 'TOSHistoricalData')
|
||||
|
||||
lock = Lock()
|
||||
|
||||
|
||||
async def anti_duplicate_add(model: T) -> None:
|
||||
async with lock, get_session() as session:
|
||||
result = (
|
||||
await session.scalars(
|
||||
select(cls := model.__class__)
|
||||
.where(cls.update_time == model.update_time)
|
||||
.where(cls.user_unique_identifier == model.user_unique_identifier)
|
||||
.where(cls.api_type == model.api_type)
|
||||
)
|
||||
).all()
|
||||
if result:
|
||||
for i in result:
|
||||
if i.data == model.data:
|
||||
logger.debug('Anti duplicate successfully')
|
||||
return
|
||||
session.add(model)
|
||||
await session.commit()
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
@overload
|
||||
async def trigger(
|
||||
session_persist_id: int,
|
||||
game_platform: Literal['IO'],
|
||||
command_type: TETRIOCommandType,
|
||||
command_args: list[str],
|
||||
) -> AsyncGenerator:
|
||||
yield
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
@overload
|
||||
async def trigger(
|
||||
session_persist_id: int,
|
||||
game_platform: GameType,
|
||||
command_type: BaseCommandType,
|
||||
command_args: list[str],
|
||||
) -> AsyncGenerator:
|
||||
yield
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def trigger(
|
||||
session_persist_id: int,
|
||||
game_platform: GameType,
|
||||
command_type: AllCommandType,
|
||||
command_args: list[str],
|
||||
) -> AsyncGenerator:
|
||||
trigger_time = datetime.now(UTC)
|
||||
try:
|
||||
yield
|
||||
except FinishedException:
|
||||
async with get_session() as session:
|
||||
session.add(
|
||||
TriggerHistoricalData(
|
||||
trigger_time=trigger_time,
|
||||
session_persist_id=session_persist_id,
|
||||
game_platform=game_platform,
|
||||
command_type=command_type,
|
||||
command_args=command_args,
|
||||
finish_time=datetime.now(UTC),
|
||||
)
|
||||
)
|
||||
await session.commit()
|
||||
raise
|
||||
|
||||
@@ -2,24 +2,29 @@ from collections.abc import Callable, Sequence
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from nonebot.adapters import Message
|
||||
from nonebot.compat import PYDANTIC_V2, type_validate_json
|
||||
from nonebot_plugin_orm import Model
|
||||
from pydantic import BaseModel, ValidationError
|
||||
from sqlalchemy import JSON, DateTime, Dialect, PickleType, String, TypeDecorator
|
||||
from sqlalchemy import JSON, DateTime, Dialect, String, TypeDecorator
|
||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||
from typing_extensions import override
|
||||
|
||||
from ..game_data_processor.schemas import BaseProcessedData, BaseUser
|
||||
from ..utils.typing import CommandType, GameType
|
||||
from ..utils.typing import AllCommandType, GameType
|
||||
|
||||
|
||||
class PydanticType(TypeDecorator):
|
||||
impl = JSON
|
||||
|
||||
@override
|
||||
def __init__(self, get_model: Callable[[], Sequence[type[BaseModel]]], *args: Any, **kwargs: Any):
|
||||
def __init__(
|
||||
self,
|
||||
get_model: Sequence[Callable[[], Sequence[type[BaseModel]]]],
|
||||
models: set[type[BaseModel]],
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
):
|
||||
self.get_model = get_model
|
||||
self._models = models
|
||||
super().__init__(*args, **kwargs)
|
||||
|
||||
if PYDANTIC_V2:
|
||||
@@ -27,7 +32,7 @@ class PydanticType(TypeDecorator):
|
||||
@override
|
||||
def process_bind_param(self, value: Any | None, dialect: Dialect) -> str:
|
||||
# 将 Pydantic 模型实例转换为 JSON
|
||||
if isinstance(value, tuple(self.get_model())):
|
||||
if isinstance(value, tuple(self.models)):
|
||||
return value.model_dump_json(by_alias=True) # type: ignore[union-attr]
|
||||
raise TypeError
|
||||
else:
|
||||
@@ -35,7 +40,7 @@ class PydanticType(TypeDecorator):
|
||||
@override
|
||||
def process_bind_param(self, value: Any | None, dialect: Dialect) -> str:
|
||||
# 将 Pydantic 模型实例转换为 JSON
|
||||
if isinstance(value, tuple(self.get_model())):
|
||||
if isinstance(value, tuple(self.models)):
|
||||
return value.json(by_alias=True) # type: ignore[union-attr]
|
||||
raise TypeError
|
||||
|
||||
@@ -43,36 +48,34 @@ class PydanticType(TypeDecorator):
|
||||
def process_result_value(self, value: Any | None, dialect: Dialect) -> BaseModel:
|
||||
# 将 JSON 转换回 Pydantic 模型实例
|
||||
if isinstance(value, str | bytes):
|
||||
for i in self.get_model():
|
||||
for i in self.models:
|
||||
try:
|
||||
return type_validate_json(i, value)
|
||||
except ValidationError: # noqa: PERF203
|
||||
...
|
||||
raise ValueError
|
||||
|
||||
@property
|
||||
def models(self) -> tuple[type[BaseModel], ...]:
|
||||
models: set[type[BaseModel]] = set()
|
||||
for i in self.get_model:
|
||||
models.update(i())
|
||||
models.update(self._models)
|
||||
return tuple(models)
|
||||
|
||||
|
||||
class Bind(MappedAsDataclass, Model):
|
||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||
chat_platform: Mapped[str] = mapped_column(String(32), index=True)
|
||||
chat_account: Mapped[str] = mapped_column(index=True)
|
||||
user_id: Mapped[int] = mapped_column(index=True)
|
||||
game_platform: Mapped[GameType] = mapped_column(String(32))
|
||||
game_account: Mapped[str]
|
||||
|
||||
|
||||
class HistoricalData(MappedAsDataclass, Model):
|
||||
class TriggerHistoricalData(MappedAsDataclass, Model):
|
||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||
trigger_time: Mapped[datetime] = mapped_column(DateTime)
|
||||
bot_platform: Mapped[str | None] = mapped_column(String(32))
|
||||
bot_account: Mapped[str | None]
|
||||
source_type: Mapped[str | None] = mapped_column(String(32), index=True)
|
||||
source_account: Mapped[str | None] = mapped_column(index=True)
|
||||
message: Mapped[Message | None] = mapped_column(PickleType)
|
||||
game_platform: Mapped[GameType] = mapped_column(String(32), index=True, init=False)
|
||||
command_type: Mapped[CommandType] = mapped_column(String(16), index=True, init=False)
|
||||
command_args: Mapped[list[str]] = mapped_column(JSON, init=False)
|
||||
user_unique_identifier: Mapped[str] = mapped_column(String(32), index=True, init=False)
|
||||
game_user: Mapped[BaseUser] = mapped_column(PydanticType(get_model=BaseUser.__subclasses__), init=False)
|
||||
processed_data: Mapped[BaseProcessedData] = mapped_column(
|
||||
PydanticType(get_model=BaseProcessedData.__subclasses__), init=False
|
||||
)
|
||||
finish_time: Mapped[datetime] = mapped_column(DateTime, init=False)
|
||||
session_persist_id: Mapped[int]
|
||||
game_platform: Mapped[GameType] = mapped_column(String(32), index=True)
|
||||
command_type: Mapped[AllCommandType] = mapped_column(String(16), index=True)
|
||||
command_args: Mapped[list[str]] = mapped_column(JSON)
|
||||
finish_time: Mapped[datetime] = mapped_column(DateTime)
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot_plugin_alconna import AlcMatches, AlconnaMatcher
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_userinfo import UserInfo # type: ignore[import-untyped]
|
||||
|
||||
from ..utils.exception import MessageFormatError
|
||||
from ..utils.recorder import Recorder
|
||||
from ..utils.typing import CommandType, GameType
|
||||
from .schemas import BaseProcessedData as ProcessedData
|
||||
from .schemas import BaseRawResponse as RawResponse
|
||||
from .schemas import BaseUser as User
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
class Processor(ABC):
    """Abstract base class for per-game command processors.

    One instance is created per handled event. When the instance is
    finalized (``__del__``), the collected data for the event is persisted
    through ``Recorder``.
    """

    # id() of the triggering event; used as the Recorder key.
    event_id: int
    command_type: CommandType
    command_args: list[str]
    user: User
    raw_response: RawResponse
    processed_data: ProcessedData

    @abstractmethod
    def __init__(
        self,
        event_id: int,
        user: User,
        command_args: list[str],
    ) -> None:
        self.event_id = event_id
        self.user = user
        self.command_args = command_args

    @property
    @abstractmethod
    def game_platform(self) -> GameType:
        """Game platform identifier."""
        raise NotImplementedError

    @abstractmethod
    async def handle_bind(
        self,
        platform: str,
        account: str,
        bot_info: UserInfo,
        *args: Any,  # noqa: ANN401
        **kwargs: Any,  # noqa: ANN401
    ) -> UniMessage:
        """Handle a bind command and return the reply message."""
        raise NotImplementedError

    @abstractmethod
    async def handle_query(self) -> UniMessage:
        """Handle a query command and return the reply message."""
        raise NotImplementedError

    def __del__(self) -> None:
        # Persist this event's historical data when the processor is
        # garbage collected; errored events are discarded instead.
        finish_time = datetime.now(tz=UTC)
        if Recorder.is_error_event(self.event_id):
            Recorder.del_error_event(self.event_id)
            return
        historical_data = Recorder.get_historical_data(self.event_id)
        historical_data.game_platform = self.game_platform
        historical_data.command_type = self.command_type
        historical_data.command_args = self.command_args
        historical_data.user_unique_identifier = self.user.unique_identifier
        historical_data.game_user = self.user
        historical_data.processed_data = self.processed_data
        historical_data.finish_time = finish_time
        Recorder.update_historical_data(self.event_id, historical_data)
|
||||
|
||||
|
||||
def add_default_handlers(matcher: type[AlconnaMatcher]) -> None:
    """Attach the shared fallback handlers to an Alconna matcher.

    Handler order matters: user-input format errors are reported first,
    then a help-hint fallback for partially matched input, and finally a
    silent catch-all that finishes the event.
    """

    @matcher.handle()
    async def _(matcher: Matcher, account: MessageFormatError):
        # Echo the format-error text back to the user.
        await matcher.finish(str(account))

    @matcher.handle()
    async def _(matcher: Matcher, matches: AlcMatches):
        # NOTE(review): `and` binds tighter than `or`, so this evaluates as
        # `(head_matched and options != {}) or main_args == {}` — confirm
        # this precedence is intended.
        if matches.head_matched and matches.options != {} or matches.main_args == {}:
            await matcher.finish(
                (f'{matches.error_info!r}\n' if matches.error_info is not None else '')
                + f'输入"{matches.header_result} --help"查看帮助'
            )

    @matcher.handle()
    async def _(matcher: Matcher, other: Any):  # noqa: ANN401
        # Swallow anything else so the event does not propagate further.
        await matcher.finish()
|
||||
|
||||
|
||||
from . import ( # noqa: F401, E402
|
||||
io_data_processor,
|
||||
top_data_processor,
|
||||
tos_data_processor,
|
||||
)
|
||||
@@ -1,2 +0,0 @@
|
||||
# Aliases accepted for the "bind" subcommand (Chinese + English).
BIND_COMMAND: list[str] = ['绑定', 'bind']
# Aliases accepted for the "query" subcommand.
QUERY_COMMAND: list[str] = ['查', '查询', 'query', 'stats']
|
||||
@@ -1,196 +0,0 @@
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from arclet.alconna import Alconna, AllParam, Arg, ArgFlag, Args, CommandMeta, Option
|
||||
from nonebot.adapters import Bot, Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot_plugin_alconna import At, on_alconna
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_userinfo import BotUserInfo, UserInfo # type: ignore[import-untyped]
|
||||
from sqlalchemy import func, select
|
||||
|
||||
from ...db import query_bind_info
|
||||
from ...utils.exception import HandleNotFinishedError, NeedCatchError
|
||||
from ...utils.metrics import get_metrics
|
||||
from ...utils.platform import get_platform
|
||||
from ...utils.typing import Me
|
||||
from .. import add_default_handlers
|
||||
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
||||
from .constant import GAME_TYPE
|
||||
from .model import IORank
|
||||
from .processor import Processor, User, identify_user_info
|
||||
from .typing import Rank
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
# TETR.IO command definition: `io` with `bind` / `query` / `rank` options.
alc = on_alconna(
    Alconna(
        'io',
        Option(
            BIND_COMMAND[0],
            Args(
                Arg(
                    'account',
                    identify_user_info,
                    notice='IO 用户名 / ID',
                    flags=[ArgFlag.HIDDEN],
                )
            ),
            alias=BIND_COMMAND[1:],
            compact=True,
            dest='bind',
            help_text='绑定 IO 账号',
        ),
        Option(
            QUERY_COMMAND[0],
            Args(
                Arg(
                    'target',
                    At | Me,
                    notice='@想要查询的人 | 自己',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
                Arg(
                    'account',
                    identify_user_info,
                    notice='IO 用户名 / ID',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
            ),
            alias=QUERY_COMMAND[1:],
            compact=True,
            dest='query',
            help_text='查询 IO 游戏信息',
        ),
        Option(
            'rank',
            Args(Arg('rank', Rank, notice='IO 段位')),
            alias={'Rank', 'RANK', '段位'},
            compact=True,
            dest='rank',
            help_text='查询 IO 段位信息',
        ),
        # Catch-all so unmatched trailing input reaches the default handlers.
        Arg('other', AllParam, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
        meta=CommandMeta(
            description='查询 TETR.IO 的信息',
            example='io绑定scdhh\nio查我\niorankx',
            compact=True,
            fuzzy_match=True,
        ),
    ),
    skip_for_unmatch=False,
    auto_send_output=True,
    aliases={'IO'},
)

# Shortcut: `fkosk` behaves like `io查 我` (query self).
alc.shortcut('fkosk', {'command': 'io查', 'args': ['我']})
|
||||
|
||||
|
||||
@alc.assign('bind')
async def _(bot: Bot, event: Event, matcher: Matcher, account: User, bot_info: UserInfo = BotUserInfo()):  # noqa: B008
    """Bind the chat account to the given TETR.IO account and reply."""
    proc = Processor(event_id=id(event), user=account, command_args=[])
    try:
        await (
            await proc.handle_bind(platform=get_platform(bot), account=event.get_user_id(), bot_info=bot_info)
        ).finish()
    except NeedCatchError as e:
        # Report the failure to the user, then signal unfinished handling.
        await matcher.send(str(e))
        raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
@alc.assign('query')
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
    """Query stats for a previously bound account (self or the @-mentioned user)."""
    async with get_session() as session:
        bind = await query_bind_info(
            session=session,
            chat_platform=get_platform(bot),
            chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
            game_platform=GAME_TYPE,
        )
    if bind is None:
        await matcher.finish('未查询到绑定信息')
    # Disclaimer prefix: the bind cannot be verified as belonging to the person.
    message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
    proc = Processor(
        event_id=id(event),
        user=User(ID=bind.game_account),
        command_args=[],
    )
    try:
        await (UniMessage(message) + await proc.handle_query()).finish()
    except NeedCatchError as e:
        await matcher.send(str(e))
        raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
@alc.assign('query')
async def _(event: Event, matcher: Matcher, account: User):
    """Query stats for an explicitly given TETR.IO username / ID."""
    proc = Processor(
        event_id=id(event),
        user=account,
        command_args=[],
    )
    try:
        await (await proc.handle_query()).finish()
    except NeedCatchError as e:
        await matcher.send(str(e))
        raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
@alc.assign('rank')
async def _(matcher: Matcher, rank: Rank):
    """Report the TR cutoff, player count, trend and aggregate metrics for a rank."""
    if rank == 'z':
        # 'z' is the unranked placeholder; there is no cutoff to report.
        await matcher.finish('暂不支持查询未知段位')
    async with get_session() as session:
        # Most recent snapshot for this rank.
        latest_data = (
            await session.scalars(select(IORank).where(IORank.rank == rank).order_by(IORank.id.desc()).limit(1))
        ).one()
        # Snapshot closest in time to 24h before the latest one, for the trend diff.
        compare_data = (
            await session.scalars(
                select(IORank)
                .where(IORank.rank == rank)
                .order_by(
                    func.abs(
                        func.julianday(IORank.update_time)
                        - func.julianday(latest_data.update_time - timedelta(hours=24))
                    )
                )
                .limit(1)
            )
        ).one()
        message = ''
        # Snapshots are scheduled every 6 hours; warn when the data looks stale.
        if (datetime.now(UTC) - latest_data.update_time.replace(tzinfo=UTC)) > timedelta(hours=7):
            message += 'Warning: 数据超过7小时未更新, 请联系Bot主人查看后台\n'
        message += f'{rank.upper()} 段 分数线 {latest_data.tr_line:.2f} TR, {latest_data.player_count} 名玩家\n'
        if compare_data.id != latest_data.id:
            # ↑ / ↓ / → trend arrow, computed inline via the walrus binding.
            message += f'对比 {(latest_data.update_time-compare_data.update_time).total_seconds()/3600:.2f} 小时前趋势: {f"↑{difference:.2f}" if (difference:=latest_data.tr_line-compare_data.tr_line) > 0 else f"↓{-difference:.2f}" if difference < 0 else "→"}'
        else:
            message += '暂无对比数据'
        # Derive LPM / APL / ADPM style metrics from the stored aggregates.
        avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
        low_pps = get_metrics(pps=latest_data.low_pps[1])
        low_vs = get_metrics(vs=latest_data.low_vs[1])
        max_pps = get_metrics(pps=latest_data.high_pps[1])
        max_vs = get_metrics(vs=latest_data.high_vs[1])
        message += (
            '\n'
            '平均数据:\n'
            f"L'PM: {avg.lpm} ( {avg.pps} pps )\n"
            f'APM: {avg.apm} ( x{avg.apl} )\n'
            f'ADPM: {avg.adpm} ( x{avg.adpl} ) ( {avg.vs}vs )\n'
            '\n'
            '最低数据:\n'
            f"L'PM: {low_pps.lpm} ( {low_pps.pps} pps ) By: {latest_data.low_pps[0]['name'].upper()}\n"
            f'APM: {latest_data.low_apm[1]} By: {latest_data.low_apm[0]["name"].upper()}\n'
            f'ADPM: {low_vs.adpm} ( {low_vs.vs}vs ) By: {latest_data.low_vs[0]["name"].upper()}\n'
            '\n'
            '最高数据:\n'
            f"L'PM: {max_pps.lpm} ( {max_pps.pps} pps ) By: {latest_data.high_pps[0]['name'].upper()}\n"
            f'APM: {latest_data.high_apm[1]} By: {latest_data.high_apm[0]["name"].upper()}\n'
            f'ADPM: {max_vs.adpm} ( {max_vs.vs}vs ) By: {latest_data.high_vs[0]["name"].upper()}\n'
            '\n'
            f'数据更新时间: {latest_data.update_time.replace(tzinfo=UTC).astimezone(ZoneInfo("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")}'
        )
    await matcher.finish(message)
|
||||
|
||||
|
||||
add_default_handlers(alc)
|
||||
@@ -1,30 +0,0 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from aiocache import Cache as ACache # type: ignore[import-untyped]
|
||||
from nonebot.compat import type_validate_json
|
||||
from nonebot.log import logger
|
||||
|
||||
from ...utils.request import Request
|
||||
from .schemas.base import FailedModel, SuccessModel
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
class Cache:
    """In-memory response cache keyed by URL, honoring API cache metadata."""

    # Process-wide memory-backed cache shared by all callers.
    cache = ACache(ACache.MEMORY)

    @classmethod
    async def get(cls, url: str) -> bytes:
        """Return the response body for *url*, fetching and caching on miss.

        Only successful responses are cached; the TTL is derived from the
        API's ``cache.cached_until`` timestamp. Failed responses are
        returned to the caller but never cached.
        """
        cached_data = await cls.cache.get(url)
        if cached_data is None:
            response_data = await Request.request(url)
            parsed_data: SuccessModel | FailedModel = type_validate_json(SuccessModel | FailedModel, response_data)  # type: ignore[arg-type]
            if isinstance(parsed_data, SuccessModel):
                await cls.cache.add(
                    url,
                    response_data,
                    (parsed_data.cache.cached_until - datetime.now(UTC)).total_seconds(),
                )
            return response_data
        logger.debug(f'{url}: Cache hit!')
        return cached_data
|
||||
@@ -1,30 +0,0 @@
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from nonebot_plugin_orm import Model
|
||||
from sqlalchemy import JSON, DateTime, String
|
||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||
|
||||
from .typing import Rank
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
class IORank(MappedAsDataclass, Model):
    """One stored snapshot of a TETR.IO rank bucket's statistics."""

    id: Mapped[int] = mapped_column(init=False, primary_key=True)
    # Rank identifier (up to 2 chars); indexed for per-rank lookups.
    rank: Mapped[Rank] = mapped_column(String(2), index=True)
    # TR cutoff for entering this rank at snapshot time.
    tr_line: Mapped[float]
    player_count: Mapped[int]
    # Extremes are stored as JSON pairs: (user dict, metric value).
    low_pps: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    low_apm: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    low_vs: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    avg_pps: Mapped[float]
    avg_apm: Mapped[float]
    avg_vs: Mapped[float]
    high_pps: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    high_apm: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    high_vs: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    # Upstream cache timestamp (stored naive, treated as UTC by readers);
    # indexed so staleness checks can order by time.
    update_time: Mapped[datetime] = mapped_column(
        DateTime,
        index=True,
    )
    # Hash of the archived raw payload file (see the scheduled fetch job).
    file_hash: Mapped[str | None] = mapped_column(String(128), index=True)
|
||||
@@ -1,467 +0,0 @@
|
||||
from asyncio import gather
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from hashlib import md5, sha512
|
||||
from math import ceil, floor
|
||||
from re import match
|
||||
from statistics import mean
|
||||
from typing import Literal
|
||||
from urllib.parse import urlunparse
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from aiofiles import open
|
||||
from nonebot import get_driver
|
||||
from nonebot.compat import type_validate_json
|
||||
from nonebot.utils import run_sync
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_apscheduler import scheduler # type: ignore[import-untyped]
|
||||
from nonebot_plugin_localstore import get_data_file # type: ignore[import-untyped]
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_userinfo import UserInfo as NBUserInfo # type: ignore[import-untyped]
|
||||
from sqlalchemy import select
|
||||
from typing_extensions import override
|
||||
from zstandard import ZstdCompressor
|
||||
|
||||
from ...db import BindStatus, create_or_update_bind
|
||||
from ...db.models import HistoricalData
|
||||
from ...utils.avatar import get_avatar
|
||||
from ...utils.exception import MessageFormatError, RequestError, WhatTheFuckError
|
||||
from ...utils.host import HostPage, get_self_netloc
|
||||
from ...utils.render import Bind, TETRIOInfo, render
|
||||
from ...utils.request import splice_url
|
||||
from ...utils.retry import retry
|
||||
from ...utils.screenshot import screenshot
|
||||
from .. import Processor as ProcessorMeta
|
||||
from .cache import Cache
|
||||
from .constant import BASE_URL, GAME_TYPE, RANK_PERCENTILE, TR_MAX, TR_MIN
|
||||
from .model import IORank
|
||||
from .schemas.base import FailedModel
|
||||
from .schemas.league_all import LeagueAll
|
||||
from .schemas.league_all import ValidUser as LeagueAllUser
|
||||
from .schemas.response import ProcessedData, RawResponse
|
||||
from .schemas.user import User
|
||||
from .schemas.user_info import NeverPlayedLeague, NeverRatedLeague, RatedLeague, UserInfo
|
||||
from .schemas.user_info import SuccessModel as InfoSuccess
|
||||
from .schemas.user_records import MultiRecord, SoloRecord, UserRecords
|
||||
from .schemas.user_records import SuccessModel as RecordsSuccess
|
||||
from .typing import Rank
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
driver = get_driver()
|
||||
|
||||
|
||||
def identify_user_info(info: str) -> User | MessageFormatError:
    """Classify raw user input as a TETR.IO user ID or username.

    A 24-character lowercase-hex string is treated as a user ID; a 3-16
    character string of letters, digits, ``_`` or ``-`` is treated as a
    username (lower-cased). Anything else yields a format error.
    """
    if match(r'^[a-f0-9]{24}$', info) is not None:
        return User(ID=info)
    if match(r'^[a-zA-Z0-9_-]{3,16}$', info) is not None:
        return User(name=info.lower())
    return MessageFormatError('用户名/ID不合法')
|
||||
|
||||
|
||||
def get_value_bounds(values: list[int | float]) -> tuple[int, int]:
|
||||
value_max = 10 * ceil(max(values) / 10)
|
||||
value_min = 10 * floor(min(values) / 10)
|
||||
return value_max, value_min
|
||||
|
||||
|
||||
def get_split(value_max: int, value_min: int) -> tuple[int, int]:
    """Compute the chart's split interval and offset for the TR axis.

    Symmetrically widens the [value_min, value_max] window (``offset``),
    shifting it (``overflow``) whenever a bound would leave the global
    [TR_MIN, TR_MAX] range, until the padded span divides evenly into 40
    units. The split interval is a quarter of the padded span.

    Returns:
        (split_value, total_offset) where total_offset = offset + overflow.
    """
    offset = 0
    overflow = 0

    while True:
        if (new_max_value := value_max + offset + overflow) > TR_MAX:
            # Padded top exceeds the TR ceiling: shift the window down.
            overflow -= 1
            continue
        if (new_min_value := value_min - offset + overflow) < TR_MIN:
            # Padded bottom is below the TR floor: shift the window up.
            overflow += 1
            continue
        if ((new_max_value - new_min_value) / 40).is_integer():
            split_value = int((value_max + offset - (value_min - offset)) / 4)
            break
        offset += 1
    return split_value, offset + overflow
|
||||
|
||||
|
||||
def get_specified_point(
    previous_point: TETRIOInfo.TetraLeagueHistory.Data,
    behind_point: TETRIOInfo.TetraLeagueHistory.Data,
    point_time: datetime,
) -> TETRIOInfo.TetraLeagueHistory.Data:
    """Linearly interpolate the TR value at *point_time* between two data points.

    Args:
        previous_point: The earlier data point.
        behind_point: The later data point.
        point_time: The timestamp at which to estimate the TR.

    Returns:
        The interpolated (or extrapolated, if outside the pair) data point.
    """
    # Slope of TR over time between the two known points.
    slope = (behind_point.tr - previous_point.tr) / (
        datetime.timestamp(behind_point.record_at) - datetime.timestamp(previous_point.record_at)
    )
    return TETRIOInfo.TetraLeagueHistory.Data(
        record_at=point_time,
        tr=previous_point.tr + slope * (datetime.timestamp(point_time) - datetime.timestamp(previous_point.record_at)),
    )
|
||||
|
||||
|
||||
class Processor(ProcessorMeta):
    """TETR.IO implementation of the command processor (bind / query / render)."""

    user: User
    raw_response: RawResponse
    processed_data: ProcessedData

    @override
    def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
        super().__init__(event_id, user, command_args)
        self.raw_response = RawResponse()
        self.processed_data = ProcessedData()

    @property
    @override
    def game_platform(self) -> Literal['IO']:
        return GAME_TYPE

    @override
    async def handle_bind(self, platform: str, account: str, bot_info: NBUserInfo) -> UniMessage:
        """Handle a bind command: persist the bind, render a confirmation image."""
        self.command_type = 'bind'
        await self.get_user()
        if self.user.ID is None:
            raise  # FIXME: unclear how to get this type narrowed here
        async with get_session() as session:
            bind_status = await create_or_update_bind(
                session=session,
                chat_platform=platform,
                chat_account=account,
                game_platform=GAME_TYPE,
                game_account=self.user.ID,
            )
        bot_avatar = await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg')
        user_info = await self.get_user_info()
        if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
            async with HostPage(
                await render(
                    'binding',
                    Bind(
                        platform='TETR.IO',
                        status='unknown',
                        user=Bind.People(
                            # Use the uploaded avatar when present, otherwise an identicon spec.
                            avatar=f'https://tetr.io/user-content/avatars/{user_info.data.user.id}.jpg?rv={user_info.data.user.avatar_revision}'
                            if user_info.data.user.avatar_revision is not None
                            else f'{{"type":"identicon","hash":"{md5(user_info.data.user.id.encode()).hexdigest()}"}}',  # noqa: S324
                            name=user_info.data.user.username.upper(),
                        ),
                        bot=Bind.People(
                            avatar=bot_avatar,
                            name=bot_info.user_name,
                        ),
                        command='io查我',
                    ),
                )
            ) as page_hash:
                # Screenshot the locally hosted page into the reply image.
                message = UniMessage.image(
                    raw=await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))
                )
        return message

    @override
    async def handle_query(self) -> UniMessage:
        """Handle a query command: try the rendered image, fall back to text."""
        self.command_type = 'query'
        await self.get_user()
        user_info, user_records = await gather(self.get_user_info(), self.get_user_records())
        sprint = user_records.data.records.sprint
        blitz = user_records.data.records.blitz
        if isinstance(sprint.record, MultiRecord) or isinstance(blitz.record, MultiRecord):
            raise WhatTheFuckError('单人游戏记录是多人游戏记录')
        try:
            return UniMessage.image(raw=await self.make_query_image(self.user, user_info, sprint.record, blitz.record))
        except TypeError:
            # make_query_image raises TypeError for unrated/incomplete league
            # data; fall through to the plain-text summary below.
            ...
        # fallback
        league = user_info.data.user.league
        user_name = user_info.data.user.username.upper()
        ret_message = ''
        if isinstance(league, NeverPlayedLeague):
            ret_message += f'用户 {user_name} 没有排位统计数据'
        else:
            if isinstance(league, NeverRatedLeague):
                ret_message += f'用户 {user_name} 暂未完成定级赛, 最近十场的数据:'
            else:
                if league.rank == 'z':
                    ret_message += f'用户 {user_name} 暂无段位, {round(league.rating,2)} TR'
                else:
                    ret_message += (
                        f'{league.rank.upper()} 段用户 {user_name} {round(league.rating,2)} TR (#{league.standing})'
                    )
                ret_message += f', 段位分 {round(league.glicko,2)}±{round(league.rd,2)}, 最近十场的数据:'
            # LPM = pieces-per-second * 24 lines/min; ADPM = VS * 0.6.
            lpm = league.pps * 24
            ret_message += f"\nL'PM: {round(lpm, 2)} ( {league.pps} pps )"
            ret_message += f'\nAPM: {league.apm} ( x{round(league.apm/lpm,2)} )'
            if league.vs is not None:
                adpm = league.vs * 0.6
                ret_message += f'\nADPM: {round(adpm,2)} ( x{round(adpm/lpm,2)} ) ( {league.vs}vs )'
        if sprint.record is not None:
            ret_message += f'\n40L: {round(sprint.record.endcontext.final_time/1000,2)}s'
            ret_message += f' ( #{sprint.rank} )' if sprint.rank is not None else ''
        if blitz.record is not None:
            ret_message += f'\nBlitz: {blitz.record.endcontext.score}'
            ret_message += f' ( #{blitz.rank} )' if blitz.rank is not None else ''
        return UniMessage(ret_message)

    @staticmethod
    async def query_historical_data(user: User, user_info: InfoSuccess) -> list[TETRIOInfo.TetraLeagueHistory.Data]:
        """Build the last ~9 days of TR history points for the chart.

        Pulls recorded query data from the DB, pads with one earlier record
        when available, and interpolates endpoints so the series always spans
        [today - 9 days, today].
        """
        today = datetime.now(ZoneInfo('Asia/Shanghai')).replace(hour=0, minute=0, second=0, microsecond=0)
        forward = timedelta(days=9)
        start_time = (today - forward).astimezone(UTC)
        async with get_session() as session:
            historical_data = (
                await session.scalars(
                    select(HistoricalData)
                    .where(HistoricalData.trigger_time >= start_time)
                    .where(HistoricalData.game_platform == GAME_TYPE)
                    .where(HistoricalData.user_unique_identifier == user.unique_identifier)
                )
            ).all()
            if historical_data:
                # Also fetch the latest record *before* the window so the
                # left edge can be interpolated instead of flat.
                extra = (
                    await session.scalars(
                        select(HistoricalData)
                        .where(HistoricalData.game_platform == GAME_TYPE)
                        .where(HistoricalData.user_unique_identifier == user.unique_identifier)
                        .order_by(HistoricalData.id.desc())
                        .where(HistoricalData.id < min([i.id for i in historical_data]))
                        .limit(1)
                    )
                ).one_or_none()
                if extra is not None:
                    historical_data = list(historical_data)
                    historical_data.append(extra)
        if not historical_data:
            # No history at all: draw a flat line at the current rating.
            return [
                TETRIOInfo.TetraLeagueHistory.Data(record_at=today - forward, tr=user_info.data.user.league.rating),
                TETRIOInfo.TetraLeagueHistory.Data(
                    record_at=today.replace(microsecond=1000), tr=user_info.data.user.league.rating
                ),
            ]
        # Keep only records that actually carry a rated-league TR value.
        histories = [
            TETRIOInfo.TetraLeagueHistory.Data(
                record_at=i.processed_data.user_info.cache.cached_at.astimezone(ZoneInfo('Asia/Shanghai')),
                tr=i.processed_data.user_info.data.user.league.rating,
            )
            for i in historical_data
            if isinstance(i.processed_data, ProcessedData)
            and i.processed_data.user_info is not None
            and isinstance(i.processed_data.user_info.data.user.league, RatedLeague)
        ]

        # Sort by time.
        histories = sorted(histories, key=lambda x: x.record_at)
        for index, value in enumerate(histories):
            # Look for a record after today's midnight.
            if value.record_at > today:
                histories = histories[:index] + [
                    get_specified_point(histories[index - 1], histories[index], today.replace(microsecond=1000))
                ]
                break
        else:
            # None found: extrapolate from the last record to the fresh API value.
            histories.append(
                get_specified_point(
                    histories[-1],
                    TETRIOInfo.TetraLeagueHistory.Data(
                        record_at=user_info.cache.cached_at, tr=user_info.data.user.league.rating
                    ),
                    today.replace(microsecond=1000),
                )
            )
        if histories[0].record_at < (today - forward):
            # Clamp the left edge by interpolating at the window start.
            histories[0] = get_specified_point(
                histories[0],
                histories[1],
                today - forward,
            )
        else:
            # Pad the left edge flat to the window start.
            histories.insert(0, TETRIOInfo.TetraLeagueHistory.Data(record_at=today - forward, tr=histories[0].tr))
        return histories

    @staticmethod
    async def make_query_image(
        user: User, user_info: InfoSuccess, sprint: SoloRecord | None, blitz: SoloRecord | None
    ) -> bytes:
        """Render the full stats card as a PNG screenshot.

        Raises:
            TypeError: when the league data is unrated/incomplete; the caller
                uses this to fall back to a text reply.
        """
        league = user_info.data.user.league
        if not isinstance(league, RatedLeague) or league.vs is None:
            raise TypeError
        user_name = user_info.data.user.username.upper()
        histories = await Processor.query_historical_data(user, user_info)
        value_max, value_min = get_value_bounds([i.tr for i in histories])
        split_value, offset = get_split(value_max, value_min)
        if sprint is not None:
            duration = timedelta(milliseconds=sprint.endcontext.final_time).total_seconds()
            sprint_value = f'{duration:.1f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.1f}s'  # noqa: PLR2004
        else:
            sprint_value = 'N/A'
        blitz_value = f'{blitz.endcontext.score:,}' if blitz is not None else 'N/A'
        async with HostPage(
            await render(
                'tetrio/info',
                TETRIOInfo(
                    user=TETRIOInfo.User(
                        avatar=f'https://tetr.io/user-content/avatars/{user_info.data.user.id}.jpg?rv={user_info.data.user.avatar_revision}'
                        if user_info.data.user.avatar_revision is not None
                        else TETRIOInfo.User.Avatar(
                            type='identicon',
                            hash=md5(user_info.data.user.id.encode()).hexdigest(),  # noqa: S324
                        ),
                        name=user_name,
                        bio=user_info.data.user.bio,
                    ),
                    ranking=TETRIOInfo.Ranking(
                        rating=round(league.glicko, 2),
                        rd=round(league.rd, 2),
                    ),
                    tetra_league=TETRIOInfo.TetraLeague(
                        rank=league.rank,
                        tr=round(league.rating, 2),
                        global_rank=league.standing,
                        pps=league.pps,
                        lpm=round(lpm := (league.pps * 24), 2),
                        apm=league.apm,
                        apl=round(league.apm / lpm, 2),
                        vs=league.vs,
                        adpm=round(adpm := (league.vs * 0.6), 2),
                        adpl=round(adpm / lpm, 2),
                    ),
                    tetra_league_history=TETRIOInfo.TetraLeagueHistory(
                        data=histories,
                        split_interval=split_value,
                        min_tr=value_min,
                        max_tr=value_max,
                        offset=offset,
                    ),
                    radar=TETRIOInfo.Radar(
                        # Derived metrics: attack-per-piece, defense rates, etc.
                        app=(app := (league.apm / (60 * league.pps))),
                        dsps=(dsps := ((league.vs / 100) - (league.apm / 60))),
                        dspp=(dspp := (dsps / league.pps)),
                        ci=150 * dspp - 125 * app + 50 * (league.vs / league.apm) - 25,
                        ge=2 * ((app * dsps) / league.pps),
                    ),
                    sprint=sprint_value,
                    blitz=blitz_value,
                ),
            )
        ) as page_hash:
            return await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))

    async def get_user(self) -> None:
        """Resolve and fill in both user name and user ID via the API."""
        if self.user.name is None:
            self.user.name = (await self.get_user_info()).data.user.username
        if self.user.ID is None:
            self.user.ID = (await self.get_user_info()).data.user.id

    async def get_user_info(self) -> InfoSuccess:
        """Fetch (or return cached) user info; raises RequestError on failure."""
        if self.processed_data.user_info is None:
            self.raw_response.user_info = await Cache.get(
                splice_url([BASE_URL, 'users/', f'{self.user.ID or self.user.name}'])
            )
            user_info: UserInfo = type_validate_json(UserInfo, self.raw_response.user_info)  # type: ignore[arg-type]
            if isinstance(user_info, FailedModel):
                raise RequestError(f'用户信息请求错误:\n{user_info.error}')
            self.processed_data.user_info = user_info
        return self.processed_data.user_info

    async def get_user_records(self) -> RecordsSuccess:
        """Fetch (or return cached) solo records; raises RequestError on failure."""
        if self.processed_data.user_records is None:
            self.raw_response.user_records = await Cache.get(
                splice_url([BASE_URL, 'users/', f'{self.user.ID or self.user.name}/', 'records'])
            )
            user_records: UserRecords = type_validate_json(UserRecords, self.raw_response.user_records)  # type: ignore[arg-type]
            if isinstance(user_records, FailedModel):
                raise RequestError(f'用户Solo数据请求错误:\n{user_records.error}')
            self.processed_data.user_records = user_records
        return self.processed_data.user_records
|
||||
|
||||
|
||||
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
@retry(exception_type=RequestError, delay=timedelta(minutes=15))
async def get_io_rank_data() -> None:
    """Fetch the full TETR.IO league leaderboard and store per-rank snapshots.

    Runs every 6 hours (retrying after 15 minutes on request failure). The
    raw payload is archived to disk zstd-compressed, named by its sha512.

    Fix: the per-rank averages previously used ``mean({...})`` over a *set*
    of metric values, which silently deduplicates equal values and skews
    the average; they now average over a list of all players' values.
    """
    league_all: LeagueAll = type_validate_json(
        LeagueAll,  # type: ignore[arg-type]
        (data := await Cache.get(splice_url([BASE_URL, 'users/lists/league/all']))),
    )
    if isinstance(league_all, FailedModel):
        raise RequestError(f'排行榜数据请求错误:\n{league_all.error}')

    # Key extractors for the min/max helpers below.
    def pps(user: LeagueAllUser) -> float:
        return user.league.pps

    def apm(user: LeagueAllUser) -> float:
        return user.league.apm

    def vs(user: LeagueAllUser) -> float:
        return user.league.vs

    def _min(users: list[LeagueAllUser], field: Callable[[LeagueAllUser], float]) -> LeagueAllUser:
        return min(users, key=field)

    def _max(users: list[LeagueAllUser], field: Callable[[LeagueAllUser], float]) -> LeagueAllUser:
        return max(users, key=field)

    def build_extremes_data(
        users: list[LeagueAllUser],
        field: Callable[[LeagueAllUser], float],
        sort: Callable[[list[LeagueAllUser], Callable[[LeagueAllUser], float]], LeagueAllUser],
    ) -> tuple[dict[str, str], float]:
        # Pick the extreme user by *field* and serialize as (user dict, value).
        user = sort(users, field)
        return User(ID=user.id, name=user.username).dict(), field(user)

    # Hash and compress off the event loop: sha512 and zstd are CPU-bound.
    data_hash: str | None = await run_sync((await run_sync(sha512)(data)).hexdigest)()
    async with open(get_data_file('nonebot_plugin_tetris_stats', f'{data_hash}.json.zst'), mode='wb') as file:
        await file.write(await run_sync(ZstdCompressor(level=12, threads=-1).compress)(data))

    users = [i for i in league_all.data.users if isinstance(i, LeagueAllUser)]
    rank_to_users: defaultdict[Rank, list[LeagueAllUser]] = defaultdict(list)
    for i in users:
        rank_to_users[i.league.rank].append(i)
    rank_info: list[IORank] = []
    for rank, percentile in RANK_PERCENTILE.items():
        # The leaderboard is TR-sorted, so the percentile index gives the cutoff.
        offset = floor((percentile / 100) * len(users)) - 1
        tr_line = users[offset].league.rating
        rank_users = rank_to_users[rank]
        rank_info.append(
            IORank(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_users),
                low_pps=(build_extremes_data(rank_users, pps, _min)),
                low_apm=(build_extremes_data(rank_users, apm, _min)),
                low_vs=(build_extremes_data(rank_users, vs, _min)),
                # Average over ALL values (a set would drop duplicates).
                avg_pps=mean([i.league.pps for i in rank_users]),
                avg_apm=mean([i.league.apm for i in rank_users]),
                avg_vs=mean([i.league.vs for i in rank_users]),
                high_pps=(build_extremes_data(rank_users, pps, _max)),
                high_apm=(build_extremes_data(rank_users, apm, _max)),
                high_vs=(build_extremes_data(rank_users, vs, _max)),
                update_time=league_all.cache.cached_at,
                file_hash=data_hash,
            )
        )
    async with get_session() as session:
        session.add_all(rank_info)
        await session.commit()
|
||||
|
||||
|
||||
@driver.on_startup
async def _() -> None:
    """On startup, fetch rank data when the stored snapshot is missing or stale (>6h)."""
    async with get_session() as session:
        latest_time = await session.scalar(select(IORank.update_time).order_by(IORank.id.desc()).limit(1))
        # update_time is stored naive; interpret it as UTC for the staleness check.
        if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
            await get_io_rank_data()
|
||||
@@ -1,20 +0,0 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Cache(BaseModel):
    """Cache metadata attached to every TETR.IO API response."""

    status: str
    cached_at: datetime
    cached_until: datetime
|
||||
|
||||
|
||||
class SuccessModel(BaseModel):
    """Base shape of a successful API response (``success`` is true)."""

    success: Literal[True]
    cache: Cache
|
||||
|
||||
|
||||
class FailedModel(BaseModel):
    """Base shape of a failed API response (``success`` is false)."""

    success: Literal[False]
    error: str
|
||||
@@ -1,59 +0,0 @@
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..typing import Rank
|
||||
from .base import FailedModel
|
||||
from .base import SuccessModel as BaseSuccessModel
|
||||
|
||||
|
||||
class _User(BaseModel):
    """Fields shared by every entry in the league/all user list."""

    # Mapped from the API's '_id' field.
    id: str = Field(..., alias='_id')
    username: str
    role: str
    xp: float
    supporter: bool
    verified: bool
    country: str | None = None
|
||||
|
||||
|
||||
class SuccessModel(BaseSuccessModel):
    """Successful league/all response payload."""

    class Data(BaseModel):
        # User whose league record carries all rating metrics.
        class ValidUser(_User):
            class League(BaseModel):
                gamesplayed: int
                gameswon: int
                rating: float
                glicko: float
                rd: float
                rank: Rank
                bestrank: Rank
                apm: float
                pps: float
                vs: float
                decaying: bool

            league: League

        # User whose league record may lack rating metrics.
        class InvalidUser(_User):
            class League(BaseModel):
                gamesplayed: int
                gameswon: int
                rating: float
                glicko: float | None = None
                rd: float | None = None
                rank: Rank
                bestrank: Rank
                apm: float | None = None
                pps: float | None = None
                vs: float | None = None
                decaying: bool

            league: League

        users: list[ValidUser | InvalidUser]

    data: Data
|
||||
|
||||
|
||||
# Union returned by the league/all endpoint.
LeagueAll = SuccessModel | FailedModel
# Convenience aliases for the nested user models.
ValidUser = SuccessModel.Data.ValidUser
InvalidUser = SuccessModel.Data.InvalidUser
|
||||
@@ -1,21 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
from ... import ProcessedData as ProcessedDataMeta
|
||||
from ... import RawResponse as RawResponseMeta
|
||||
from ..constant import GAME_TYPE
|
||||
from .user_info import SuccessModel as InfoSuccess
|
||||
from .user_records import SuccessModel as RecordsSuccess
|
||||
|
||||
|
||||
class RawResponse(RawResponseMeta):
|
||||
platform: Literal['IO'] = GAME_TYPE
|
||||
|
||||
user_info: bytes | None = None
|
||||
user_records: bytes | None = None
|
||||
|
||||
|
||||
class ProcessedData(ProcessedDataMeta):
|
||||
platform: Literal['IO'] = GAME_TYPE
|
||||
|
||||
user_info: InfoSuccess | None = None
|
||||
user_records: RecordsSuccess | None = None
|
||||
@@ -1,17 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
from ...schemas import BaseUser
|
||||
from ..constant import GAME_TYPE
|
||||
|
||||
|
||||
class User(BaseUser):
|
||||
platform: Literal['IO'] = GAME_TYPE
|
||||
|
||||
ID: str | None = None
|
||||
name: str | None = None
|
||||
|
||||
@property
|
||||
def unique_identifier(self) -> str:
|
||||
if self.ID is None:
|
||||
raise ValueError('不完整的User!')
|
||||
return self.ID
|
||||
@@ -1,126 +0,0 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..typing import Rank
|
||||
from .base import FailedModel
|
||||
from .base import SuccessModel as BaseSuccessModel
|
||||
|
||||
|
||||
class SuccessModel(BaseSuccessModel):
|
||||
class Data(BaseModel):
|
||||
class User(BaseModel):
|
||||
class Badge(BaseModel):
|
||||
id: str
|
||||
label: str
|
||||
group: str | None = None
|
||||
ts: datetime | Literal[False] | None = None
|
||||
|
||||
class NeverPlayedLeague(BaseModel):
|
||||
gamesplayed: Literal[0]
|
||||
gameswon: Literal[0]
|
||||
rating: Literal[-1]
|
||||
rank: Literal['z']
|
||||
standing: Literal[-1]
|
||||
standing_local: Literal[-1]
|
||||
next_rank: None
|
||||
prev_rank: None
|
||||
next_at: Literal[-1]
|
||||
prev_at: Literal[-1]
|
||||
percentile: Literal[-1]
|
||||
percentile_rank: Literal['z']
|
||||
apm: None = Field(None)
|
||||
pps: None = Field(None)
|
||||
vs: None = Field(None)
|
||||
decaying: bool
|
||||
|
||||
class NeverRatedLeague(BaseModel):
|
||||
gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||
gameswon: int
|
||||
rating: Literal[-1]
|
||||
rank: Literal['z']
|
||||
standing: Literal[-1]
|
||||
standing_local: Literal[-1]
|
||||
next_rank: None
|
||||
prev_rank: None
|
||||
next_at: Literal[-1]
|
||||
prev_at: Literal[-1]
|
||||
percentile: Literal[-1]
|
||||
percentile_rank: Literal['z']
|
||||
apm: float
|
||||
pps: float
|
||||
vs: float
|
||||
decaying: bool
|
||||
|
||||
class RatedLeague(BaseModel):
|
||||
gamesplayed: int
|
||||
gameswon: int
|
||||
rating: float
|
||||
rank: Rank
|
||||
bestrank: Rank
|
||||
standing: int
|
||||
standing_local: int
|
||||
next_rank: Rank | None = None
|
||||
prev_rank: Rank | None = None
|
||||
next_at: int
|
||||
prev_at: int
|
||||
percentile: float
|
||||
percentile_rank: str
|
||||
glicko: float
|
||||
rd: float
|
||||
apm: float
|
||||
pps: float
|
||||
vs: float | None = None
|
||||
decaying: bool
|
||||
|
||||
class Connections(BaseModel):
|
||||
class Discord(BaseModel):
|
||||
id: str
|
||||
username: str
|
||||
|
||||
discord: Discord | None = None
|
||||
|
||||
class Distinguishment(BaseModel):
|
||||
type: str
|
||||
|
||||
id: str = Field(..., alias='_id')
|
||||
username: str
|
||||
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'banned']
|
||||
ts: datetime | None = None
|
||||
botmaster: str | None = None
|
||||
badges: list[Badge]
|
||||
xp: float
|
||||
gamesplayed: int
|
||||
gameswon: int
|
||||
gametime: float
|
||||
country: str | None = None
|
||||
badstanding: bool | None = None
|
||||
supporter: bool | None = None # osk说是必有, 但实际上不是 fk osk
|
||||
supporter_tier: int
|
||||
verified: bool
|
||||
league: NeverPlayedLeague | NeverRatedLeague | RatedLeague
|
||||
avatar_revision: int | None = None
|
||||
"""This user's avatar ID. Get their avatar at
|
||||
|
||||
https://tetr.io/user-content/avatars/{ USERID }.jpg?rv={ AVATAR_REVISION }"""
|
||||
banner_revision: int | None = None
|
||||
"""This user's banner ID. Get their banner at
|
||||
|
||||
https://tetr.io/user-content/banners/{ USERID }.jpg?rv={ BANNER_REVISION }
|
||||
|
||||
Ignore this field if the user is not a supporter."""
|
||||
bio: str | None = None
|
||||
connections: Connections
|
||||
friend_count: int | None = None
|
||||
distinguishment: Distinguishment | None = None
|
||||
|
||||
user: User
|
||||
|
||||
data: Data
|
||||
|
||||
|
||||
NeverPlayedLeague = SuccessModel.Data.User.NeverPlayedLeague
|
||||
NeverRatedLeague = SuccessModel.Data.User.NeverRatedLeague
|
||||
RatedLeague = SuccessModel.Data.User.RatedLeague
|
||||
UserInfo = SuccessModel | FailedModel
|
||||
@@ -1,118 +0,0 @@
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from .base import FailedModel
|
||||
from .base import SuccessModel as BaseSuccessModel
|
||||
|
||||
|
||||
class EndContext(BaseModel):
|
||||
class Time(BaseModel):
|
||||
start: int
|
||||
zero: bool
|
||||
locked: bool
|
||||
prev: int
|
||||
frameoffset: int | None = None
|
||||
|
||||
class Clears(BaseModel):
|
||||
singles: int
|
||||
doubles: int
|
||||
triples: int
|
||||
quads: int
|
||||
pentas: int | None = None
|
||||
realtspins: int
|
||||
minitspins: int
|
||||
minitspinsingles: int
|
||||
tspinsingles: int
|
||||
minitspindoubles: int
|
||||
tspindoubles: int
|
||||
tspintriples: int
|
||||
tspinquads: int
|
||||
allclear: int
|
||||
|
||||
class Garbage(BaseModel):
|
||||
sent: int
|
||||
received: int
|
||||
attack: int | None = None
|
||||
cleared: int
|
||||
|
||||
class Finesse(BaseModel):
|
||||
combo: int
|
||||
faults: int
|
||||
perfectpieces: int
|
||||
|
||||
seed: int
|
||||
lines: int
|
||||
level_lines: int
|
||||
level_lines_needed: int
|
||||
inputs: int
|
||||
holds: int | None = None
|
||||
time: Time
|
||||
score: int
|
||||
zenlevel: int | None = None
|
||||
zenprogress: int | None = None
|
||||
level: int
|
||||
combo: int
|
||||
currentcombopower: int | None = None # WTF
|
||||
topcombo: int
|
||||
btb: int
|
||||
topbtb: int
|
||||
currentbtbchainpower: int | None = None # WTF * 2
|
||||
tspins: int
|
||||
piecesplaced: int
|
||||
clears: Clears
|
||||
garbage: Garbage
|
||||
kills: int
|
||||
finesse: Finesse
|
||||
final_time: float = Field(..., alias='finalTime')
|
||||
gametype: str
|
||||
|
||||
|
||||
class _User(BaseModel):
|
||||
id: str = Field(..., alias='_id')
|
||||
username: str
|
||||
|
||||
|
||||
class _Record(BaseModel):
|
||||
id: str = Field(..., alias='_id')
|
||||
stream: str
|
||||
replayid: str
|
||||
user: _User
|
||||
ts: datetime
|
||||
ismulti: bool | None = None
|
||||
|
||||
|
||||
class BaseModeRecord(BaseModel):
|
||||
class SoloRecord(_Record):
|
||||
endcontext: EndContext
|
||||
|
||||
class MultiRecord(_Record):
|
||||
endcontext: list[EndContext]
|
||||
|
||||
record: SoloRecord | MultiRecord | None = None
|
||||
rank: int | None = None
|
||||
|
||||
|
||||
class SuccessModel(BaseSuccessModel):
|
||||
class Data(BaseModel):
|
||||
class Records(BaseModel):
|
||||
class Sprint(BaseModeRecord): ...
|
||||
|
||||
class Blitz(BaseModeRecord): ...
|
||||
|
||||
sprint: Sprint = Field(..., alias='40l')
|
||||
blitz: Blitz
|
||||
|
||||
class Zen(BaseModel):
|
||||
level: int
|
||||
score: int
|
||||
|
||||
records: Records
|
||||
zen: Zen
|
||||
|
||||
data: Data
|
||||
|
||||
|
||||
SoloRecord = BaseModeRecord.SoloRecord
|
||||
MultiRecord = BaseModeRecord.MultiRecord
|
||||
UserRecords = SuccessModel | FailedModel
|
||||
@@ -1,22 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
Rank = Literal[
|
||||
'x',
|
||||
'u',
|
||||
'ss',
|
||||
's+',
|
||||
's',
|
||||
's-',
|
||||
'a+',
|
||||
'a',
|
||||
'a-',
|
||||
'b+',
|
||||
'b',
|
||||
'b-',
|
||||
'c+',
|
||||
'c',
|
||||
'c-',
|
||||
'd+',
|
||||
'd',
|
||||
'z', # 未定级
|
||||
]
|
||||
@@ -1,31 +0,0 @@
|
||||
from abc import ABC, abstractmethod
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..utils.typing import GameType
|
||||
|
||||
|
||||
class Base(BaseModel):
|
||||
platform: GameType
|
||||
|
||||
|
||||
class BaseUser(ABC, Base):
|
||||
"""游戏用户"""
|
||||
|
||||
def __eq__(self, __value: object) -> bool:
|
||||
if isinstance(__value, BaseUser):
|
||||
return self.unique_identifier == __value.unique_identifier
|
||||
return False
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def unique_identifier(self) -> str:
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class BaseRawResponse(Base):
|
||||
"""原始请求数据"""
|
||||
|
||||
|
||||
class BaseProcessedData(Base):
|
||||
"""处理/验证后的数据"""
|
||||
@@ -1,134 +0,0 @@
|
||||
from arclet.alconna import Alconna, AllParam, Arg, ArgFlag, Args, CommandMeta, Option
|
||||
from nonebot.adapters import Bot, Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot_plugin_alconna import At, on_alconna
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_userinfo import BotUserInfo, EventUserInfo, UserInfo # type: ignore[import-untyped]
|
||||
|
||||
from ...db import query_bind_info
|
||||
from ...utils.exception import HandleNotFinishedError, NeedCatchError
|
||||
from ...utils.platform import get_platform
|
||||
from ...utils.typing import Me
|
||||
from .. import add_default_handlers
|
||||
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
||||
from .constant import GAME_TYPE
|
||||
from .processor import Processor, User, identify_user_info
|
||||
|
||||
alc = on_alconna(
|
||||
Alconna(
|
||||
'top',
|
||||
Option(
|
||||
BIND_COMMAND[0],
|
||||
Args(
|
||||
Arg(
|
||||
'account',
|
||||
identify_user_info,
|
||||
notice='TOP 用户名',
|
||||
flags=[ArgFlag.HIDDEN],
|
||||
)
|
||||
),
|
||||
alias=BIND_COMMAND[1:],
|
||||
compact=True,
|
||||
dest='bind',
|
||||
help_text='绑定 TOP 账号',
|
||||
),
|
||||
Option(
|
||||
QUERY_COMMAND[0],
|
||||
Args(
|
||||
Arg(
|
||||
'target',
|
||||
At | Me,
|
||||
notice='@想要查询的人 | 自己',
|
||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||
),
|
||||
Arg(
|
||||
'account',
|
||||
identify_user_info | Me | At,
|
||||
notice='TOP 用户名',
|
||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||
),
|
||||
),
|
||||
alias=QUERY_COMMAND[1:],
|
||||
compact=True,
|
||||
dest='query',
|
||||
help_text='查询 TOP 游戏信息',
|
||||
),
|
||||
Arg('other', AllParam, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
|
||||
meta=CommandMeta(
|
||||
description='查询 TetrisOnline波兰服 的信息',
|
||||
example='top绑定scdhh\ntop查我',
|
||||
compact=True,
|
||||
fuzzy_match=True,
|
||||
),
|
||||
),
|
||||
skip_for_unmatch=False,
|
||||
auto_send_output=True,
|
||||
aliases={'TOP'},
|
||||
)
|
||||
|
||||
|
||||
@alc.assign('bind')
|
||||
async def _( # noqa: PLR0913
|
||||
bot: Bot,
|
||||
event: Event,
|
||||
matcher: Matcher,
|
||||
account: User,
|
||||
bot_info: UserInfo = BotUserInfo(), # noqa: B008
|
||||
user_info: UserInfo = EventUserInfo(), # noqa: B008
|
||||
):
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=account,
|
||||
command_args=[],
|
||||
)
|
||||
try:
|
||||
await (
|
||||
await proc.handle_bind(
|
||||
platform=get_platform(bot), account=event.get_user_id(), bot_info=bot_info, user_info=user_info
|
||||
)
|
||||
).finish()
|
||||
except NeedCatchError as e:
|
||||
await matcher.send(str(e))
|
||||
raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
@alc.assign('query')
|
||||
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
|
||||
async with get_session() as session:
|
||||
bind = await query_bind_info(
|
||||
session=session,
|
||||
chat_platform=get_platform(bot),
|
||||
chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
|
||||
game_platform=GAME_TYPE,
|
||||
)
|
||||
if bind is None:
|
||||
await matcher.finish('未查询到绑定信息')
|
||||
message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=User(name=bind.game_account),
|
||||
command_args=[],
|
||||
)
|
||||
try:
|
||||
await (UniMessage(message) + await proc.handle_query()).finish()
|
||||
except NeedCatchError as e:
|
||||
await matcher.send(str(e))
|
||||
raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
@alc.assign('query')
|
||||
async def _(event: Event, matcher: Matcher, account: User):
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=account,
|
||||
command_args=[],
|
||||
)
|
||||
try:
|
||||
await (await proc.handle_query()).finish()
|
||||
except NeedCatchError as e:
|
||||
await matcher.send(str(e))
|
||||
raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
add_default_handlers(alc)
|
||||
@@ -1,4 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
GAME_TYPE: Literal['TOP'] = 'TOP'
|
||||
BASE_URL = 'http://tetrisonline.pl/top/'
|
||||
@@ -1,165 +0,0 @@
|
||||
from contextlib import suppress
|
||||
from dataclasses import dataclass
|
||||
from io import StringIO
|
||||
from re import match
|
||||
from typing import Literal
|
||||
from urllib.parse import urlencode, urlunparse
|
||||
|
||||
from lxml import etree
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_userinfo import UserInfo # type: ignore[import-untyped]
|
||||
from pandas import read_html
|
||||
from typing_extensions import override
|
||||
|
||||
from ...db import BindStatus, create_or_update_bind
|
||||
from ...utils.avatar import get_avatar
|
||||
from ...utils.exception import MessageFormatError, RequestError
|
||||
from ...utils.host import HostPage, get_self_netloc
|
||||
from ...utils.render import Bind, render
|
||||
from ...utils.request import Request, splice_url
|
||||
from ...utils.screenshot import screenshot
|
||||
from .. import Processor as ProcessorMeta
|
||||
from ..schemas import BaseUser
|
||||
from .constant import BASE_URL, GAME_TYPE
|
||||
from .schemas.response import ProcessedData, RawResponse
|
||||
|
||||
|
||||
class User(BaseUser):
|
||||
platform: Literal['TOP'] = GAME_TYPE
|
||||
|
||||
name: str
|
||||
|
||||
@property
|
||||
@override
|
||||
def unique_identifier(self) -> str:
|
||||
return self.name
|
||||
|
||||
|
||||
@dataclass
|
||||
class Data:
|
||||
lpm: float
|
||||
apm: float
|
||||
|
||||
|
||||
@dataclass
|
||||
class GameData:
|
||||
day: Data | None
|
||||
total: Data | None
|
||||
|
||||
|
||||
def identify_user_info(info: str) -> User | MessageFormatError:
|
||||
if match(r'^[a-zA-Z0-9_]{1,16}$', info):
|
||||
return User(name=info)
|
||||
return MessageFormatError('用户名不合法')
|
||||
|
||||
|
||||
class Processor(ProcessorMeta):
|
||||
user: User
|
||||
raw_response: RawResponse
|
||||
processed_data: ProcessedData
|
||||
|
||||
@override
|
||||
def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
|
||||
super().__init__(event_id, user, command_args)
|
||||
self.raw_response = RawResponse()
|
||||
self.processed_data = ProcessedData()
|
||||
|
||||
@property
|
||||
@override
|
||||
def game_platform(self) -> Literal['TOP']:
|
||||
return GAME_TYPE
|
||||
|
||||
@override
|
||||
async def handle_bind(self, platform: str, account: str, bot_info: UserInfo, user_info: UserInfo) -> UniMessage:
|
||||
"""处理绑定消息"""
|
||||
self.command_type = 'bind'
|
||||
await self.check_user()
|
||||
async with get_session() as session:
|
||||
bind_status = await create_or_update_bind(
|
||||
session=session,
|
||||
chat_platform=platform,
|
||||
chat_account=account,
|
||||
game_platform=GAME_TYPE,
|
||||
game_account=self.user.name,
|
||||
)
|
||||
if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
|
||||
async with HostPage(
|
||||
await render(
|
||||
'binding',
|
||||
Bind(
|
||||
platform=self.game_platform,
|
||||
status='unknown',
|
||||
user=Bind.People(
|
||||
avatar=await get_avatar(user_info, 'Data URI', None),
|
||||
name=(await self.get_user_name()).upper(),
|
||||
),
|
||||
bot=Bind.People(
|
||||
avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),
|
||||
name=bot_info.user_name,
|
||||
),
|
||||
command='top查我',
|
||||
),
|
||||
)
|
||||
) as page_hash:
|
||||
message = UniMessage.image(
|
||||
raw=await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))
|
||||
)
|
||||
return message
|
||||
|
||||
@override
|
||||
async def handle_query(self) -> UniMessage:
|
||||
"""处理查询消息"""
|
||||
self.command_type = 'query'
|
||||
await self.check_user()
|
||||
game_data = await self.get_game_data()
|
||||
message = ''
|
||||
if game_data.day is not None:
|
||||
message += f'用户 {self.user.name} 24小时内统计数据为: '
|
||||
message += f"\nL'PM: {round(game_data.day.lpm,2)} ( {round(game_data.day.lpm/24,2)} pps )"
|
||||
message += f'\nAPM: {round(game_data.day.apm,2)} ( x{round(game_data.day.apm/game_data.day.lpm,2)} )'
|
||||
else:
|
||||
message += f'用户 {self.user.name} 暂无24小时内统计数据'
|
||||
if game_data.total is not None:
|
||||
message += '\n历史统计数据为: '
|
||||
message += f"\nL'PM: {round(game_data.total.lpm,2)} ( {round(game_data.total.lpm/24,2)} pps )"
|
||||
message += f'\nAPM: {round(game_data.total.apm,2)} ( x{round(game_data.total.apm/game_data.total.lpm,2)} )'
|
||||
else:
|
||||
message += '\n暂无历史统计数据'
|
||||
return UniMessage(message)
|
||||
|
||||
async def get_user_profile(self) -> str:
|
||||
"""获取用户信息"""
|
||||
if self.processed_data.user_profile is None:
|
||||
url = splice_url([BASE_URL, 'profile.php', f'?{urlencode({"user":self.user.name})}'])
|
||||
self.raw_response.user_profile = await Request.request(url, is_json=False)
|
||||
self.processed_data.user_profile = self.raw_response.user_profile.decode()
|
||||
return self.processed_data.user_profile
|
||||
|
||||
async def check_user(self) -> None:
|
||||
if 'user not found!' in await self.get_user_profile():
|
||||
raise RequestError('用户不存在!')
|
||||
|
||||
async def get_user_name(self) -> str:
|
||||
"""获取用户名"""
|
||||
data = etree.HTML(await self.get_user_profile()).xpath('//div[@class="mycontent"]/h1/text()')
|
||||
return data[0].replace("'s profile", '')
|
||||
|
||||
async def get_game_data(self) -> GameData:
|
||||
"""获取游戏统计数据"""
|
||||
html = etree.HTML(await self.get_user_profile())
|
||||
day = None
|
||||
with suppress(ValueError):
|
||||
day = Data(
|
||||
lpm=float(str(html.xpath('//div[@class="mycontent"]/text()[3]')[0]).replace('lpm:', '').strip()),
|
||||
apm=float(str(html.xpath('//div[@class="mycontent"]/text()[4]')[0]).replace('apm:', '').strip()),
|
||||
)
|
||||
table = StringIO(
|
||||
etree.tostring(
|
||||
html.xpath('//div[@class="mycontent"]/table[@class="mytable"]')[0],
|
||||
encoding='utf-8',
|
||||
).decode()
|
||||
)
|
||||
dataframe = read_html(table, encoding='utf-8', header=0)[0]
|
||||
total = Data(lpm=dataframe['lpm'].mean(), apm=dataframe['apm'].mean()) if len(dataframe) != 0 else None
|
||||
return GameData(day=day, total=total)
|
||||
@@ -1,16 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
from ...schemas import BaseProcessedData, BaseRawResponse
|
||||
from ..constant import GAME_TYPE
|
||||
|
||||
|
||||
class RawResponse(BaseRawResponse):
|
||||
platform: Literal['TOP'] = GAME_TYPE
|
||||
|
||||
user_profile: bytes | None = None
|
||||
|
||||
|
||||
class ProcessedData(BaseProcessedData):
|
||||
platform: Literal['TOP'] = GAME_TYPE
|
||||
|
||||
user_profile: str | None = None
|
||||
@@ -1,197 +0,0 @@
|
||||
from typing import NoReturn
|
||||
|
||||
from arclet.alconna import Alconna, AllParam, Arg, ArgFlag, Args, CommandMeta, Option
|
||||
from nonebot.adapters import Bot, Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot_plugin_alconna import At, on_alconna
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_userinfo import BotUserInfo, EventUserInfo, UserInfo # type: ignore[import-untyped]
|
||||
|
||||
from ...db import query_bind_info
|
||||
from ...utils.exception import HandleNotFinishedError, NeedCatchError, RequestError
|
||||
from ...utils.platform import get_platform
|
||||
from ...utils.typing import Me
|
||||
from .. import add_default_handlers
|
||||
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
||||
from .constant import GAME_TYPE
|
||||
from .processor import Processor, User, identify_user_info
|
||||
|
||||
alc = on_alconna(
|
||||
Alconna(
|
||||
'茶服',
|
||||
Option(
|
||||
BIND_COMMAND[0],
|
||||
Args(
|
||||
Arg(
|
||||
'account',
|
||||
identify_user_info,
|
||||
notice='茶服 用户名 / TeaID',
|
||||
flags=[ArgFlag.HIDDEN],
|
||||
)
|
||||
),
|
||||
alias=BIND_COMMAND[1:],
|
||||
compact=True,
|
||||
dest='bind',
|
||||
help_text='绑定 茶服 账号',
|
||||
),
|
||||
Option(
|
||||
QUERY_COMMAND[0],
|
||||
Args(
|
||||
Arg(
|
||||
'target',
|
||||
At | Me,
|
||||
notice='@想要查询的人 | 自己',
|
||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||
),
|
||||
Arg(
|
||||
'account',
|
||||
identify_user_info,
|
||||
notice='茶服 用户名 / TeaID',
|
||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||
),
|
||||
# 如果放在一个 Union Args 里, 验证顺序不能保证, 可能出错
|
||||
),
|
||||
alias=QUERY_COMMAND[1:],
|
||||
compact=True,
|
||||
dest='query',
|
||||
help_text='查询 茶服 游戏信息',
|
||||
),
|
||||
Arg('other', AllParam, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
|
||||
meta=CommandMeta(
|
||||
description='查询 TetrisOnline茶服 的信息',
|
||||
example='茶服查我',
|
||||
compact=True,
|
||||
fuzzy_match=True,
|
||||
),
|
||||
),
|
||||
skip_for_unmatch=False,
|
||||
auto_send_output=True,
|
||||
aliases={'tos', 'TOS'},
|
||||
)
|
||||
|
||||
|
||||
async def finish_special_query(matcher: Matcher, proc: Processor) -> NoReturn:
|
||||
try:
|
||||
await (await proc.handle_query()).finish()
|
||||
except NeedCatchError as e:
|
||||
if isinstance(e, RequestError) and '未找到此用户' in e.message:
|
||||
matcher.skip()
|
||||
await matcher.send(str(e))
|
||||
raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
try:
|
||||
from nonebot.adapters.onebot.v11 import GROUP as OB11GROUP
|
||||
from nonebot.adapters.onebot.v11 import Bot as OB11Bot
|
||||
from nonebot.adapters.onebot.v11 import MessageEvent as OB11MessageEvent
|
||||
|
||||
@alc.assign('query')
|
||||
async def _(bot: OB11Bot, event: OB11MessageEvent, matcher: Matcher, target: At | Me):
|
||||
if event.is_tome() and await OB11GROUP(bot, event):
|
||||
await matcher.finish('不能查询bot的信息')
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=User(teaid=f'onebot-{target.target}' if isinstance(target, At) else f'onebot-{event.get_user_id()}'),
|
||||
command_args=[],
|
||||
)
|
||||
await finish_special_query(matcher, proc)
|
||||
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
from nonebot.adapters.kaiheila.event import MessageEvent as KookMessageEvent
|
||||
|
||||
@alc.assign('query')
|
||||
async def _(event: KookMessageEvent, matcher: Matcher, target: At | Me):
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=User(teaid=f'kook-{target.target}' if isinstance(target, At) else f'kook-{event.get_user_id()}'),
|
||||
command_args=[],
|
||||
)
|
||||
await finish_special_query(matcher, proc)
|
||||
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
try:
|
||||
from nonebot.adapters.discord import MessageEvent as DiscordMessageEvent
|
||||
|
||||
@alc.assign('query')
|
||||
async def _(event: DiscordMessageEvent, matcher: Matcher, target: At | Me):
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=User(teaid=f'discord-{target.target}' if isinstance(target, At) else f'discord-{event.get_user_id()}'),
|
||||
command_args=[],
|
||||
)
|
||||
await finish_special_query(matcher, proc)
|
||||
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
@alc.assign('bind')
|
||||
async def _( # noqa: PLR0913
|
||||
bot: Bot,
|
||||
event: Event,
|
||||
matcher: Matcher,
|
||||
account: User,
|
||||
bot_info: UserInfo = BotUserInfo(), # noqa: B008
|
||||
user_info: UserInfo = EventUserInfo(), # noqa: B008
|
||||
):
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=account,
|
||||
command_args=[],
|
||||
)
|
||||
try:
|
||||
await (
|
||||
await proc.handle_bind(
|
||||
platform=get_platform(bot), account=event.get_user_id(), bot_info=bot_info, nb_user_info=user_info
|
||||
)
|
||||
).finish()
|
||||
except NeedCatchError as e:
|
||||
await matcher.send(str(e))
|
||||
raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
@alc.assign('query')
|
||||
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
|
||||
async with get_session() as session:
|
||||
bind = await query_bind_info(
|
||||
session=session,
|
||||
chat_platform=get_platform(bot),
|
||||
chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
|
||||
game_platform=GAME_TYPE,
|
||||
)
|
||||
if bind is None:
|
||||
await matcher.finish('未查询到绑定信息')
|
||||
message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=User(teaid=bind.game_account),
|
||||
command_args=[],
|
||||
)
|
||||
try:
|
||||
await (UniMessage(message) + await proc.handle_query()).finish()
|
||||
except NeedCatchError as e:
|
||||
await matcher.send(str(e))
|
||||
raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
@alc.assign('query')
|
||||
async def _(event: Event, matcher: Matcher, account: User):
|
||||
proc = Processor(
|
||||
event_id=id(event),
|
||||
user=account,
|
||||
command_args=[],
|
||||
)
|
||||
try:
|
||||
await (await proc.handle_query()).finish()
|
||||
except NeedCatchError as e:
|
||||
await matcher.send(str(e))
|
||||
raise HandleNotFinishedError from e
|
||||
|
||||
|
||||
add_default_handlers(alc)
|
||||
@@ -1,10 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
GAME_TYPE: Literal['TOS'] = 'TOS'
|
||||
BASE_URL = {
|
||||
'https://teatube.cn:8888/',
|
||||
'http://cafuuchino1.studio26f.org:19970',
|
||||
'http://cafuuchino2.studio26f.org:19970',
|
||||
'http://cafuuchino3.studio26f.org:19970',
|
||||
'http://cafuuchino4.studio26f.org:19970',
|
||||
}
|
||||
@@ -1,257 +0,0 @@
|
||||
from dataclasses import dataclass
|
||||
from re import match
|
||||
from typing import Literal
|
||||
from urllib.parse import urlencode, urlunparse
|
||||
|
||||
from httpx import TimeoutException
|
||||
from nonebot.compat import type_validate_json
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_userinfo import UserInfo as NBUserInfo # type: ignore[import-untyped]
|
||||
from typing_extensions import override
|
||||
|
||||
from ...db import BindStatus, create_or_update_bind
|
||||
from ...utils.avatar import get_avatar
|
||||
from ...utils.exception import MessageFormatError, RequestError
|
||||
from ...utils.host import HostPage, get_self_netloc
|
||||
from ...utils.render import Bind, render
|
||||
from ...utils.request import Request, splice_url
|
||||
from ...utils.screenshot import screenshot
|
||||
from .. import Processor as ProcessorMeta
|
||||
from ..schemas import BaseUser
|
||||
from .constant import BASE_URL, GAME_TYPE
|
||||
from .schemas.response import ProcessedData, RawResponse
|
||||
from .schemas.user_info import SuccessModel as InfoSuccess
|
||||
from .schemas.user_info import UserInfo
|
||||
from .schemas.user_profile import UserProfile
|
||||
|
||||
|
||||
class User(BaseUser):
|
||||
platform: Literal['TOS'] = GAME_TYPE
|
||||
|
||||
teaid: str | None = None
|
||||
name: str | None = None
|
||||
|
||||
@property
|
||||
@override
|
||||
def unique_identifier(self) -> str:
|
||||
if self.teaid is None:
|
||||
raise ValueError('不完整的User!')
|
||||
return self.teaid
|
||||
|
||||
|
||||
@dataclass
|
||||
class GameData:
|
||||
num: int
|
||||
pps: float
|
||||
lpm: float
|
||||
apm: float
|
||||
adpm: float
|
||||
apl: float
|
||||
adpl: float
|
||||
vs: float
|
||||
|
||||
|
||||
def identify_user_info(info: str) -> User | MessageFormatError:
|
||||
if (
|
||||
match(
|
||||
r'^(?!\.)(?!com[0-9]$)(?!con$)(?!lpt[0-9]$)(?!nul$)(?!prn$)[^\-][^\+][^\|\*\?\\\s\!:<>/$"]*[^\.\|\*\?\\\s\!:<>/$"]+$',
|
||||
info,
|
||||
)
|
||||
and info.isdigit() is False
|
||||
and 2 <= len(info) <= 18 # noqa: PLR2004
|
||||
):
|
||||
return User(name=info)
|
||||
if info.startswith(('onebot-', 'qqguild-', 'kook-', 'discord-')) and info.split('-', maxsplit=1)[1].isdigit():
|
||||
return User(teaid=info)
|
||||
return MessageFormatError('用户名/QQ号不合法')
|
||||
|
||||
|
||||
class Processor(ProcessorMeta):
|
||||
user: User
|
||||
raw_response: RawResponse
|
||||
processed_data: ProcessedData
|
||||
|
||||
@override
|
||||
def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
|
||||
super().__init__(event_id, user, command_args)
|
||||
self.raw_response = RawResponse(user_profile={})
|
||||
self.processed_data = ProcessedData(user_profile={})
|
||||
|
||||
@property
|
||||
@override
|
||||
def game_platform(self) -> Literal['TOS']:
|
||||
return GAME_TYPE
|
||||
|
||||
@override
|
||||
async def handle_bind(
|
||||
self, platform: str, account: str, bot_info: NBUserInfo, nb_user_info: NBUserInfo
|
||||
) -> UniMessage:
|
||||
"""处理绑定消息"""
|
||||
self.command_type = 'bind'
|
||||
await self.get_user()
|
||||
async with get_session() as session:
|
||||
bind_status = await create_or_update_bind(
|
||||
session=session,
|
||||
chat_platform=platform,
|
||||
chat_account=account,
|
||||
game_platform=GAME_TYPE,
|
||||
game_account=self.user.unique_identifier,
|
||||
)
|
||||
user_info = await self.get_user_info()
|
||||
if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
|
||||
async with HostPage(
|
||||
await render(
|
||||
'binding',
|
||||
Bind(
|
||||
platform=self.game_platform,
|
||||
status='unknown',
|
||||
user=Bind.People(
|
||||
avatar=await get_avatar(nb_user_info, 'Data URI', None), name=user_info.data.name
|
||||
),
|
||||
bot=Bind.People(
|
||||
avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),
|
||||
name=bot_info.user_name,
|
||||
),
|
||||
command='茶服查我',
|
||||
),
|
||||
)
|
||||
) as page_hash:
|
||||
message = UniMessage.image(
|
||||
raw=await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))
|
||||
)
|
||||
return message
|
||||
|
||||
@override
|
||||
async def handle_query(self) -> UniMessage:
|
||||
"""处理查询消息"""
|
||||
self.command_type = 'query'
|
||||
await self.get_user()
|
||||
user_info = (await self.get_user_info()).data
|
||||
message = f'用户 {user_info.name} ({user_info.teaid}) '
|
||||
if user_info.ranked_games == '0':
|
||||
message += '暂无段位统计数据'
|
||||
else:
|
||||
message += f', 段位分 {round(float(user_info.rating_now),2)}±{round(float(user_info.rd_now),2)} ({round(float(user_info.vol_now),2)}) '
|
||||
game_data = await self.get_game_data()
|
||||
if game_data is None:
|
||||
message += ', 暂无游戏数据'
|
||||
else:
|
||||
message += f', 最近 {game_data.num} 局数据'
|
||||
message += f"\nL'PM: {game_data.lpm} ( {game_data.pps} pps )"
|
||||
message += f'\nAPM: {game_data.apm} ( x{game_data.apl} )'
|
||||
message += f'\nADPM: {game_data.adpm} ( x{game_data.adpl} ) ( {game_data.vs}vs )'
|
||||
message += f'\n40L: {float(user_info.pb_sprint)/1000:.2f}s' if user_info.pb_sprint != '2147483647' else ''
|
||||
message += f'\nMarathon: {user_info.pb_marathon}' if user_info.pb_marathon != '0' else ''
|
||||
message += f'\nChallenge: {user_info.pb_challenge}' if user_info.pb_challenge != '0' else ''
|
||||
return UniMessage(message)
|
||||
|
||||
async def get_user(self) -> None:
|
||||
"""
|
||||
用于获取 UserName 和 UserID 的函数
|
||||
"""
|
||||
if self.user.name is None:
|
||||
self.user.name = (await self.get_user_info()).data.name
|
||||
if self.user.teaid is None:
|
||||
self.user.teaid = (await self.get_user_info()).data.teaid
|
||||
|
||||
async def get_user_info(self) -> InfoSuccess:
|
||||
"""获取用户信息"""
|
||||
if self.processed_data.user_info is None:
|
||||
if self.user.teaid is not None:
|
||||
url = [
|
||||
splice_url(
|
||||
[
|
||||
i,
|
||||
'getTeaIdInfo',
|
||||
f'?{urlencode({"teaId":self.user.teaid})}',
|
||||
]
|
||||
)
|
||||
for i in BASE_URL
|
||||
]
|
||||
else:
|
||||
url = [
|
||||
splice_url(
|
||||
[
|
||||
i,
|
||||
'getUsernameInfo',
|
||||
f'?{urlencode({"username":self.user.name})}',
|
||||
]
|
||||
)
|
||||
for i in BASE_URL
|
||||
]
|
||||
self.raw_response.user_info = await Request.failover_request(
|
||||
url, failover_code=[502], failover_exc=(TimeoutException,)
|
||||
)
|
||||
user_info: UserInfo = type_validate_json(UserInfo, self.raw_response.user_info) # type: ignore[arg-type]
|
||||
if not isinstance(user_info, InfoSuccess):
|
||||
raise RequestError(f'用户信息请求错误:\n{user_info.error}')
|
||||
self.processed_data.user_info = user_info
|
||||
return self.processed_data.user_info
|
||||
|
||||
async def get_user_profile(self, other_parameter: dict[str, str | bytes] | None = None) -> UserProfile:
|
||||
"""获取用户数据"""
|
||||
if other_parameter is None:
|
||||
other_parameter = {}
|
||||
params = urlencode(dict(sorted(other_parameter.items())))
|
||||
if self.processed_data.user_profile.get(params) is None:
|
||||
self.raw_response.user_profile[params] = await Request.failover_request(
|
||||
[
|
||||
splice_url(
|
||||
[
|
||||
i,
|
||||
'getProfile',
|
||||
f'?{urlencode({"id":self.user.teaid or self.user.name,**other_parameter})}',
|
||||
]
|
||||
)
|
||||
for i in BASE_URL
|
||||
],
|
||||
failover_code=[502],
|
||||
failover_exc=(TimeoutException,),
|
||||
)
|
||||
self.processed_data.user_profile[params] = type_validate_json(
|
||||
UserProfile, self.raw_response.user_profile[params]
|
||||
)
|
||||
return self.processed_data.user_profile[params]
|
||||
|
||||
async def get_game_data(self) -> GameData | None:
|
||||
"""获取游戏数据"""
|
||||
user_profile = await self.get_user_profile()
|
||||
if user_profile.data == []:
|
||||
return None
|
||||
weighted_total_lpm = weighted_total_apm = weighted_total_adpm = 0.0
|
||||
total_time = 0.0
|
||||
num = 0
|
||||
for i in user_profile.data:
|
||||
# 排除单人局和时间为0的游戏
|
||||
# 茶: 不计算没挖掘的局, 即使apm和lpm也如此
|
||||
if i.num_players == 1 or i.time == 0 or i.dig is None:
|
||||
continue
|
||||
# 加权计算
|
||||
time = i.time / 1000
|
||||
lpm = 24 * (i.pieces / time)
|
||||
apm = (i.attack / time) * 60
|
||||
adpm = ((i.attack + i.dig) / time) * 60
|
||||
weighted_total_lpm += lpm * time
|
||||
weighted_total_apm += apm * time
|
||||
weighted_total_adpm += adpm * time
|
||||
total_time += time
|
||||
num += 1
|
||||
if num == 50: # noqa: PLR2004 # TODO: 将查询局数作为可选命令参数
|
||||
break
|
||||
if num == 0:
|
||||
return None
|
||||
# TODO: 如果有效局数不满50, 没有无dig信息的局, 且userData['data']内有50个局, 则继续往前获取信息
|
||||
lpm = weighted_total_lpm / total_time
|
||||
apm = weighted_total_apm / total_time
|
||||
adpm = weighted_total_adpm / total_time
|
||||
return GameData(
|
||||
num=num,
|
||||
pps=round(lpm / 24, 2),
|
||||
lpm=round(lpm, 2),
|
||||
apm=round(apm, 2),
|
||||
adpm=round(adpm, 2),
|
||||
apl=round((apm / lpm), 2),
|
||||
adpl=round((adpm / lpm), 2),
|
||||
vs=round((adpm / 60 * 100), 2),
|
||||
)
|
||||
@@ -1,20 +0,0 @@
|
||||
from typing import Literal
|
||||
|
||||
from ...schemas import BaseProcessedData, BaseRawResponse
|
||||
from ..constant import GAME_TYPE
|
||||
from .user_info import SuccessModel as InfoSuccess
|
||||
from .user_profile import UserProfile
|
||||
|
||||
|
||||
class RawResponse(BaseRawResponse):
|
||||
platform: Literal['TOS'] = GAME_TYPE
|
||||
|
||||
user_profile: dict[str, bytes]
|
||||
user_info: bytes | None = None
|
||||
|
||||
|
||||
class ProcessedData(BaseProcessedData):
|
||||
platform: Literal['TOS'] = GAME_TYPE
|
||||
|
||||
user_profile: dict[str, UserProfile]
|
||||
user_info: InfoSuccess | None = None
|
||||
@@ -1,86 +0,0 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SuccessModel(BaseModel):
|
||||
class Data(BaseModel):
|
||||
class PeriodMatch(BaseModel):
|
||||
name: str
|
||||
teaid: str = Field(..., alias='teaId')
|
||||
rating: str
|
||||
rd: str
|
||||
start_time: datetime = Field(..., alias='startTime')
|
||||
end_time: datetime = Field(..., alias='endTime')
|
||||
win: str
|
||||
lose: str
|
||||
score: str
|
||||
|
||||
class UserDataTotalItem(BaseModel):
|
||||
time_map: str = Field(..., alias='timeMap')
|
||||
pieces_map: str = Field(..., alias='piecesMap')
|
||||
clear_lines_map: str = Field(..., alias='clearLinesMap')
|
||||
attacks_map: str = Field(..., alias='attacksMap')
|
||||
dig_map: str = Field(..., alias='digMap')
|
||||
send_map: str = Field(..., alias='sendMap')
|
||||
rise_map: str = Field(..., alias='riseMap')
|
||||
offset_map: str = Field(..., alias='offsetMap')
|
||||
receive_map: str = Field(..., alias='receiveMap')
|
||||
games_map: str = Field(..., alias='gamesMap')
|
||||
tetris_map: str = Field(..., alias='tetrisMap')
|
||||
combo_map: str = Field(..., alias='comboMap')
|
||||
tspin_map: str = Field(..., alias='tspinMap')
|
||||
b2b_map: str = Field(..., alias='b2bMap')
|
||||
perfect_clear_map: str = Field(..., alias='perfectClearMap')
|
||||
time_no_map: str = Field(..., alias='timeNoMap')
|
||||
pieces_no_map: str = Field(..., alias='piecesNoMap')
|
||||
clear_lines_no_map: str = Field(..., alias='clearLinesNoMap')
|
||||
attacks_no_map: str = Field(..., alias='attacksNoMap')
|
||||
dig_no_map: str = Field(..., alias='digNoMap')
|
||||
send_no_map: str = Field(..., alias='sendNoMap')
|
||||
rise_no_map: str = Field(..., alias='riseNoMap')
|
||||
offset_no_map: str = Field(..., alias='offsetNoMap')
|
||||
receive_no_map: str = Field(..., alias='receiveNoMap')
|
||||
games_no_map: str = Field(..., alias='gamesNoMap')
|
||||
tetris_no_map: str = Field(..., alias='tetrisNoMap')
|
||||
combo_no_map: str = Field(..., alias='comboNoMap')
|
||||
tspin_no_map: str = Field(..., alias='tspinNoMap')
|
||||
b2b_no_map: str = Field(..., alias='b2bNoMap')
|
||||
perfect_clear_no_map: str = Field(..., alias='perfectClearNoMap')
|
||||
|
||||
teaid: str = Field(..., alias='teaId')
|
||||
name: str
|
||||
total_exp: str = Field(..., alias='totalExp')
|
||||
ranking: str
|
||||
ranked_games: str = Field(..., alias='rankedGames')
|
||||
rating_now: str = Field(..., alias='ratingNow')
|
||||
rd_now: str = Field(..., alias='rdNow')
|
||||
vol_now: str = Field(..., alias='volNow')
|
||||
rating_last: str = Field(..., alias='ratingLast')
|
||||
rd_last: str = Field(..., alias='rdLast')
|
||||
vol_last: str = Field(..., alias='volLast')
|
||||
period_matches: list[PeriodMatch] = Field(..., alias='periodMatches')
|
||||
user_data_total: list[UserDataTotalItem] = Field(..., alias='userDataTotal')
|
||||
ranking_items: str = Field(..., alias='rankingItems')
|
||||
ranking_game_items: str = Field(..., alias='rankingGameItems')
|
||||
training_level: str = Field(..., alias='trainingLevel')
|
||||
training_wins: str = Field(..., alias='trainingWins')
|
||||
pb_sprint: str = Field(..., alias='PBSprint')
|
||||
pb_marathon: str = Field(..., alias='PBMarathon')
|
||||
pb_challenge: str = Field(..., alias='PBChallenge')
|
||||
register_date: datetime = Field(..., alias='registerDate')
|
||||
last_login_date: datetime = Field(..., alias='lastLoginDate')
|
||||
|
||||
code: int
|
||||
success: Literal[True]
|
||||
data: Data
|
||||
|
||||
|
||||
class FailedModel(BaseModel):
|
||||
code: int
|
||||
success: Literal[False]
|
||||
error: str
|
||||
|
||||
|
||||
UserInfo = SuccessModel | FailedModel
|
||||
@@ -1,33 +0,0 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class UserProfile(BaseModel):
|
||||
class Data(BaseModel):
|
||||
idmultiplayergameresult: int
|
||||
iduser: str
|
||||
teaid: str
|
||||
time: int
|
||||
clear_lines: int
|
||||
attack: int
|
||||
send: int
|
||||
offset: int
|
||||
receive: int
|
||||
rise: int
|
||||
dig: int
|
||||
pieces: int
|
||||
max_combo: int
|
||||
pc_count: int
|
||||
place: int
|
||||
num_players: int
|
||||
fumen_code: Literal['0', '1'] # wtf
|
||||
rule_set: str
|
||||
garbage: str
|
||||
idmultiplayergame: int
|
||||
datetime: datetime
|
||||
|
||||
code: int
|
||||
success: bool
|
||||
data: list[Data]
|
||||
56
nonebot_plugin_tetris_stats/games/__init__.py
Normal file
56
nonebot_plugin_tetris_stats/games/__init__.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from collections.abc import Callable
|
||||
|
||||
from nonebot.adapters import Bot
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot.message import run_postprocessor
|
||||
from nonebot.typing import T_Handler
|
||||
from nonebot_plugin_alconna import AlcMatches, Alconna, At, CommandMeta, on_alconna
|
||||
|
||||
from .. import ns
|
||||
from ..i18n.model import Lang
|
||||
from ..utils.exception import MessageFormatError, NeedCatchError
|
||||
|
||||
command: Alconna = Alconna(
|
||||
['tetris-stats', 'tstats'],
|
||||
namespace=ns,
|
||||
meta=CommandMeta(
|
||||
description='俄罗斯方块相关游戏数据查询',
|
||||
fuzzy_match=True,
|
||||
),
|
||||
)
|
||||
|
||||
alc = on_alconna(
|
||||
command=command,
|
||||
skip_for_unmatch=False,
|
||||
auto_send_output=True,
|
||||
use_origin=True,
|
||||
)
|
||||
|
||||
|
||||
def add_block_handlers(handler: Callable[[T_Handler], T_Handler]) -> None:
|
||||
@handler
|
||||
async def _(bot: Bot, matcher: Matcher, target: At):
|
||||
if isinstance(target, At) and target.target == bot.self_id:
|
||||
await matcher.finish(Lang.interaction.wrong.query_bot())
|
||||
|
||||
|
||||
from . import tetrio, top, tos # noqa: F401, E402
|
||||
|
||||
|
||||
@alc.handle()
|
||||
async def _(matcher: Matcher, account: MessageFormatError):
|
||||
await matcher.finish(str(account))
|
||||
|
||||
|
||||
@alc.handle()
|
||||
async def _(matcher: Matcher, matches: AlcMatches):
|
||||
if (matches.head_matched and matches.options != {}) or matches.main_args == {}:
|
||||
await matcher.finish(
|
||||
(f'{matches.error_info!r}\n' if matches.error_info is not None else '')
|
||||
+ f'输入"{matches.header_result} --help"查看帮助'
|
||||
)
|
||||
|
||||
|
||||
@run_postprocessor
|
||||
async def _(matcher: Matcher, exception: NeedCatchError):
|
||||
await matcher.send(str(exception))
|
||||
24
nonebot_plugin_tetris_stats/games/schemas.py
Normal file
24
nonebot_plugin_tetris_stats/games/schemas.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..utils.typing import GameType
|
||||
|
||||
T = TypeVar('T', bound=GameType)
|
||||
|
||||
|
||||
class BaseUser(BaseModel, ABC, Generic[T]):
|
||||
"""游戏用户"""
|
||||
|
||||
platform: T
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if isinstance(other, BaseUser):
|
||||
return self.unique_identifier == other.unique_identifier
|
||||
return False
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def unique_identifier(self) -> str:
|
||||
raise NotImplementedError
|
||||
39
nonebot_plugin_tetris_stats/games/tetrio/__init__.py
Normal file
39
nonebot_plugin_tetris_stats/games/tetrio/__init__.py
Normal file
@@ -0,0 +1,39 @@
|
||||
from nonebot_plugin_alconna import Subcommand
|
||||
|
||||
from ...utils.exception import MessageFormatError
|
||||
from .. import alc
|
||||
from .. import command as main_command
|
||||
from .api import Player
|
||||
from .constant import USER_ID, USER_NAME
|
||||
|
||||
|
||||
def get_player(user_id_or_name: str) -> Player | MessageFormatError:
|
||||
if USER_ID.match(user_id_or_name):
|
||||
return Player(user_id=user_id_or_name, trust=True)
|
||||
if USER_NAME.match(user_id_or_name):
|
||||
return Player(user_name=user_id_or_name, trust=True)
|
||||
return MessageFormatError('用户名/ID不合法')
|
||||
|
||||
|
||||
command = Subcommand(
|
||||
'TETR.IO',
|
||||
alias=['TETRIO', 'tetr.io', 'tetrio', 'io'],
|
||||
dest='TETRIO',
|
||||
help_text='TETR.IO 游戏相关指令',
|
||||
)
|
||||
|
||||
|
||||
from . import bind, config, list, query, rank, record, unbind # noqa: A004, E402
|
||||
|
||||
main_command.add(command)
|
||||
|
||||
__all__ = [
|
||||
'alc',
|
||||
'bind',
|
||||
'config',
|
||||
'list',
|
||||
'query',
|
||||
'rank',
|
||||
'record',
|
||||
'unbind',
|
||||
]
|
||||
5
nonebot_plugin_tetris_stats/games/tetrio/api/__init__.py
Normal file
5
nonebot_plugin_tetris_stats/games/tetrio/api/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from .player import Player
|
||||
from .schemas.user import User
|
||||
from .schemas.user_info import UserInfoSuccess
|
||||
|
||||
__all__ = ['Player', 'User', 'UserInfoSuccess']
|
||||
42
nonebot_plugin_tetris_stats/games/tetrio/api/cache.py
Normal file
42
nonebot_plugin_tetris_stats/games/tetrio/api/cache.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from asyncio import Lock
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from typing import ClassVar
|
||||
from weakref import WeakValueDictionary
|
||||
|
||||
from aiocache import Cache as ACache # type: ignore[import-untyped]
|
||||
from nonebot.compat import type_validate_json
|
||||
from nonebot.log import logger
|
||||
from yarl import URL
|
||||
|
||||
from ....config.config import config
|
||||
from ....utils.limit import limit
|
||||
from ....utils.request import Request
|
||||
from .schemas.base import FailedModel, SuccessModel
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
request = Request(config.tetris.proxy.tetrio or config.tetris.proxy.main)
|
||||
request.request = limit(timedelta(seconds=1))(request.request) # type: ignore[method-assign]
|
||||
|
||||
|
||||
class Cache:
|
||||
cache = ACache(ACache.MEMORY)
|
||||
task: ClassVar[WeakValueDictionary[URL, Lock]] = WeakValueDictionary()
|
||||
|
||||
@classmethod
|
||||
async def get(cls, url: URL, extra_headers: dict | None = None) -> bytes:
|
||||
lock = cls.task.setdefault(url, Lock())
|
||||
async with lock:
|
||||
if (cached_data := await cls.cache.get(url)) is not None:
|
||||
logger.debug(f'{url}: Cache hit!')
|
||||
return cached_data
|
||||
response_data = await request.request(url, extra_headers, enable_anti_cloudflare=True)
|
||||
parsed_data: SuccessModel | FailedModel = type_validate_json(SuccessModel | FailedModel, response_data) # type: ignore[arg-type]
|
||||
if isinstance(parsed_data, SuccessModel):
|
||||
await cls.cache.add(
|
||||
url,
|
||||
response_data,
|
||||
(parsed_data.cache.cached_until - datetime.now(UTC)).total_seconds(),
|
||||
)
|
||||
return response_data
|
||||
96
nonebot_plugin_tetris_stats/games/tetrio/api/leaderboards.py
Normal file
96
nonebot_plugin_tetris_stats/games/tetrio/api/leaderboards.py
Normal file
@@ -0,0 +1,96 @@
|
||||
from typing import Literal, overload
|
||||
from uuid import UUID
|
||||
|
||||
from nonebot import __version__ as __nonebot_version__
|
||||
from nonebot.compat import type_validate_json
|
||||
from yarl import URL
|
||||
|
||||
from ....utils.exception import RequestError
|
||||
from ....version import __version__
|
||||
from ..constant import BASE_URL
|
||||
from .cache import Cache
|
||||
from .schemas.base import FailedModel
|
||||
from .schemas.leaderboards import Parameter
|
||||
from .schemas.leaderboards.by import By, BySuccessModel
|
||||
from .schemas.leaderboards.solo import Solo, SoloSuccessModel
|
||||
from .schemas.leaderboards.zenith import Zenith, ZenithSuccessModel
|
||||
|
||||
|
||||
async def by(
|
||||
by_type: Literal['league', 'xp', 'ar'], parameter: Parameter, x_session_id: UUID | None = None
|
||||
) -> BySuccessModel:
|
||||
model: By = type_validate_json(
|
||||
By, # type: ignore[arg-type]
|
||||
await get(
|
||||
BASE_URL / f'users/by/{by_type}',
|
||||
parameter,
|
||||
{
|
||||
'X-Session-ID': str(x_session_id),
|
||||
'User-Agent': f'nonebot-plugin-tetris-stats/{__version__} (Windows NT 10.0; Win64; x64) NoneBot2/{__nonebot_version__}',
|
||||
}
|
||||
if x_session_id is not None
|
||||
else None,
|
||||
),
|
||||
)
|
||||
if isinstance(model, FailedModel):
|
||||
msg = f'排行榜信息请求错误:\n{model.error}'
|
||||
raise RequestError(msg)
|
||||
return model
|
||||
|
||||
|
||||
@overload
|
||||
async def records(
|
||||
records_type: Literal['40l', 'blitz'],
|
||||
scope: str = '_global',
|
||||
revolution_id: str | None = None,
|
||||
*,
|
||||
parameter: Parameter,
|
||||
) -> SoloSuccessModel: ...
|
||||
|
||||
|
||||
@overload
|
||||
async def records(
|
||||
records_type: Literal['zenith', 'zenithex'],
|
||||
scope: str = '_global',
|
||||
revolution_id: str | None = None,
|
||||
*,
|
||||
parameter: Parameter,
|
||||
) -> ZenithSuccessModel: ...
|
||||
|
||||
|
||||
async def records(
|
||||
records_type: Literal['40l', 'blitz', 'zenith', 'zenithex'],
|
||||
scope: str = '_global',
|
||||
revolution_id: str | None = None,
|
||||
*,
|
||||
parameter: Parameter,
|
||||
) -> SoloSuccessModel | ZenithSuccessModel:
|
||||
model: Solo | Zenith
|
||||
match records_type:
|
||||
case '40l' | 'blitz':
|
||||
model = type_validate_json(
|
||||
Solo, # type: ignore[arg-type]
|
||||
await get(
|
||||
BASE_URL / 'records' / f'{records_type}{scope}{revolution_id if revolution_id is not None else ""}',
|
||||
parameter,
|
||||
),
|
||||
)
|
||||
case 'zenith' | 'zenithex':
|
||||
model = type_validate_json(
|
||||
Zenith, # type: ignore[arg-type]
|
||||
await get(
|
||||
BASE_URL / 'records' / f'{records_type}{scope}{revolution_id if revolution_id is not None else ""}',
|
||||
parameter,
|
||||
),
|
||||
)
|
||||
case _:
|
||||
msg = f'records_type: {records_type} is not supported'
|
||||
raise ValueError(msg)
|
||||
if isinstance(model, FailedModel):
|
||||
msg = f'排行榜信息请求错误:\n{model.error}' # type: ignore[attr-defined]
|
||||
raise RequestError(msg)
|
||||
return model
|
||||
|
||||
|
||||
async def get(url: URL, parameter: Parameter, extra_headers: dict | None = None) -> bytes:
|
||||
return await Cache.get(url % parameter.to_params(), extra_headers)
|
||||
18
nonebot_plugin_tetris_stats/games/tetrio/api/models.py
Normal file
18
nonebot_plugin_tetris_stats/games/tetrio/api/models.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from nonebot_plugin_orm import Model
|
||||
from sqlalchemy import DateTime, String
|
||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||
|
||||
from ....db.models import PydanticType
|
||||
from .schemas.base import SuccessModel
|
||||
from .typing import Records, Summaries
|
||||
|
||||
|
||||
class TETRIOHistoricalData(MappedAsDataclass, Model):
|
||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
|
||||
api_type: Mapped[Literal['User Info', Records, Summaries]] = mapped_column(String(32), index=True)
|
||||
data: Mapped[SuccessModel] = mapped_column(PydanticType(get_model=[SuccessModel.__subclasses__], models=set()))
|
||||
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||
243
nonebot_plugin_tetris_stats/games/tetrio/api/player.py
Normal file
243
nonebot_plugin_tetris_stats/games/tetrio/api/player.py
Normal file
@@ -0,0 +1,243 @@
|
||||
from enum import Enum
|
||||
from types import MappingProxyType
|
||||
from typing import Literal, NamedTuple, cast, overload
|
||||
|
||||
from async_lru import alru_cache
|
||||
from nonebot.compat import type_validate_json
|
||||
|
||||
from ....db import anti_duplicate_add
|
||||
from ....utils.exception import RequestError
|
||||
from ..constant import BASE_URL, USER_ID, USER_NAME
|
||||
from .cache import Cache
|
||||
from .models import TETRIOHistoricalData
|
||||
from .schemas.base import FailedModel
|
||||
from .schemas.labs.leagueflow import LeagueFlow, LeagueFlowSuccess
|
||||
from .schemas.records.solo import Solo as SoloRecord
|
||||
from .schemas.records.solo import SoloSuccessModel as RecordsSoloSuccessModel
|
||||
from .schemas.summaries import (
|
||||
AchievementsSuccessModel,
|
||||
SummariesModel,
|
||||
ZenithSuccessModel,
|
||||
ZenSuccessModel,
|
||||
)
|
||||
from .schemas.summaries import (
|
||||
SoloSuccessModel as SummariesSoloSuccessModel,
|
||||
)
|
||||
from .schemas.summaries.base import User as SummariesUser
|
||||
from .schemas.summaries.league import LeagueSuccessModel
|
||||
from .schemas.user import User
|
||||
from .schemas.user_info import UserInfo, UserInfoSuccess
|
||||
from .typing import Records, Summaries
|
||||
|
||||
|
||||
class RecordModeType(str, Enum):
|
||||
Sprint = '40l'
|
||||
Blitz = 'blitz'
|
||||
|
||||
|
||||
class RecordType(str, Enum):
|
||||
Top = 'top'
|
||||
Recent = 'recent'
|
||||
Progression = 'progression'
|
||||
|
||||
|
||||
class RecordKey(NamedTuple):
|
||||
mode_type: RecordModeType
|
||||
record_type: RecordType
|
||||
|
||||
def to_records(self) -> Records:
|
||||
return cast(Records, f'{self.mode_type.value}_{self.record_type.value}')
|
||||
|
||||
|
||||
class Player:
|
||||
__SUMMARIES_MAPPING: MappingProxyType[Summaries, type[SummariesModel]] = MappingProxyType(
|
||||
{
|
||||
'40l': SummariesSoloSuccessModel,
|
||||
'blitz': SummariesSoloSuccessModel,
|
||||
'zenith': ZenithSuccessModel,
|
||||
'zenithex': ZenithSuccessModel,
|
||||
'league': LeagueSuccessModel,
|
||||
'zen': ZenSuccessModel,
|
||||
'achievements': AchievementsSuccessModel,
|
||||
}
|
||||
)
|
||||
|
||||
@overload
|
||||
def __init__(self, *, user_id: str, trust: bool = False): ...
|
||||
@overload
|
||||
def __init__(self, *, user_name: str, trust: bool = False): ...
|
||||
def __init__(self, *, user_id: str | None = None, user_name: str | None = None, trust: bool = False):
|
||||
self.user_id = user_id
|
||||
self.user_name = user_name
|
||||
if not trust:
|
||||
if self.user_id is not None:
|
||||
if not USER_ID.match(self.user_id):
|
||||
msg = 'Invalid user id'
|
||||
raise ValueError(msg)
|
||||
elif self.user_name is not None:
|
||||
if not USER_NAME.match(self.user_name):
|
||||
msg = 'Invalid user name'
|
||||
raise ValueError(msg)
|
||||
else:
|
||||
msg = 'Invalid user'
|
||||
raise ValueError(msg)
|
||||
self.__user: User | None = None
|
||||
self._user_info: UserInfoSuccess | None = None
|
||||
self._summaries: dict[Summaries, SummariesModel] = {}
|
||||
self._records: dict[RecordKey, RecordsSoloSuccessModel] = {}
|
||||
self._leagueflow: LeagueFlowSuccess | None = None
|
||||
|
||||
@property
|
||||
def _request_user_parameter(self) -> str:
|
||||
return self.user_id or cast(str, self.user_name).lower()
|
||||
|
||||
@property
|
||||
async def user(self) -> User:
|
||||
if self.__user is not None:
|
||||
return self.__user
|
||||
if (user := (await self._get_local_summaries_user())) is not None:
|
||||
self.__user = User(
|
||||
ID=user.id,
|
||||
name=user.username,
|
||||
)
|
||||
else:
|
||||
user_info = await self.get_info()
|
||||
self.__user = User(
|
||||
ID=user_info.data.id,
|
||||
name=user_info.data.username,
|
||||
)
|
||||
self.user_id = self.__user.ID
|
||||
self.user_name = self.__user.name
|
||||
return self.__user
|
||||
|
||||
async def get_info(self) -> UserInfoSuccess:
|
||||
"""Get User Info"""
|
||||
if self._user_info is None:
|
||||
raw_user_info = await Cache.get(BASE_URL / 'users' / self._request_user_parameter)
|
||||
user_info: UserInfo = type_validate_json(UserInfo, raw_user_info) # type: ignore[arg-type]
|
||||
if isinstance(user_info, FailedModel):
|
||||
msg = f'用户信息请求错误:\n{user_info.error}'
|
||||
raise RequestError(msg)
|
||||
self._user_info = user_info
|
||||
await anti_duplicate_add(
|
||||
TETRIOHistoricalData(
|
||||
user_unique_identifier=(await self.user).unique_identifier,
|
||||
api_type='User Info',
|
||||
data=user_info,
|
||||
update_time=user_info.cache.cached_at,
|
||||
),
|
||||
)
|
||||
return self._user_info
|
||||
|
||||
@overload
|
||||
async def get_summaries(self, summaries_type: Literal['40l', 'blitz']) -> SummariesSoloSuccessModel: ...
|
||||
@overload
|
||||
async def get_summaries(self, summaries_type: Literal['zenith', 'zenithex']) -> ZenithSuccessModel: ...
|
||||
@overload
|
||||
async def get_summaries(self, summaries_type: Literal['zen']) -> ZenSuccessModel: ...
|
||||
@overload
|
||||
async def get_summaries(self, summaries_type: Literal['league']) -> LeagueSuccessModel: ...
|
||||
@overload
|
||||
async def get_summaries(self, summaries_type: Literal['achievements']) -> AchievementsSuccessModel: ...
|
||||
|
||||
async def get_summaries(self, summaries_type: Summaries) -> SummariesModel:
|
||||
if summaries_type not in self._summaries:
|
||||
raw_summaries = await Cache.get(
|
||||
BASE_URL / 'users' / self._request_user_parameter / 'summaries' / summaries_type
|
||||
)
|
||||
summaries: SummariesModel | FailedModel = type_validate_json(
|
||||
self.__SUMMARIES_MAPPING[summaries_type] | FailedModel, # type: ignore[arg-type]
|
||||
raw_summaries,
|
||||
)
|
||||
if isinstance(summaries, FailedModel):
|
||||
msg = f'用户Summaries数据请求错误:\n{summaries.error}'
|
||||
raise RequestError(msg)
|
||||
self._summaries[summaries_type] = summaries
|
||||
await anti_duplicate_add(
|
||||
TETRIOHistoricalData(
|
||||
user_unique_identifier=(await self.user).unique_identifier,
|
||||
api_type=summaries_type,
|
||||
data=summaries,
|
||||
update_time=summaries.cache.cached_at,
|
||||
),
|
||||
)
|
||||
return self._summaries[summaries_type]
|
||||
|
||||
async def get_leagueflow(self) -> LeagueFlowSuccess:
|
||||
if self._leagueflow is None:
|
||||
leagueflow: LeagueFlow = type_validate_json(
|
||||
LeagueFlow, # type: ignore[arg-type]
|
||||
await Cache.get(BASE_URL / 'labs/leagueflow' / self._request_user_parameter),
|
||||
)
|
||||
if isinstance(leagueflow, FailedModel):
|
||||
msg = f'League 历史记录请求错误:\n{leagueflow.error}'
|
||||
raise RequestError(msg)
|
||||
self._leagueflow = leagueflow
|
||||
return self._leagueflow
|
||||
|
||||
@property
|
||||
async def sprint(self) -> SummariesSoloSuccessModel:
|
||||
return await self.get_summaries('40l')
|
||||
|
||||
@property
|
||||
async def blitz(self) -> SummariesSoloSuccessModel:
|
||||
return await self.get_summaries('blitz')
|
||||
|
||||
@property
|
||||
async def zen(self) -> ZenSuccessModel:
|
||||
return await self.get_summaries('zen')
|
||||
|
||||
@property
|
||||
async def league(self) -> LeagueSuccessModel:
|
||||
return await self.get_summaries('league')
|
||||
|
||||
async def _get_local_summaries_user(self) -> SummariesUser | None:
|
||||
allow_summaries: set[Literal['40l', 'blitz', 'zenith', 'zenithex']] = {
|
||||
'40l',
|
||||
'blitz',
|
||||
'zenith',
|
||||
'zenithex',
|
||||
}
|
||||
if has_summaries := (allow_summaries & self._summaries.keys()):
|
||||
for i in has_summaries:
|
||||
if (record := (await self.get_summaries(i)).data.record) is not None:
|
||||
return record.user
|
||||
return None
|
||||
|
||||
@property
|
||||
@alru_cache
|
||||
async def avatar_revision(self) -> int | None:
|
||||
if self._user_info is not None:
|
||||
return self._user_info.data.avatar_revision
|
||||
if (user := (await self._get_local_summaries_user())) is not None:
|
||||
return user.avatar_revision
|
||||
return (await self.get_info()).data.avatar_revision
|
||||
|
||||
@property
|
||||
@alru_cache
|
||||
async def banner_revision(self) -> int | None:
|
||||
if self._user_info is not None:
|
||||
return self._user_info.data.banner_revision
|
||||
if (user := (await self._get_local_summaries_user())) is not None:
|
||||
return user.banner_revision
|
||||
return (await self.get_info()).data.banner_revision
|
||||
|
||||
async def get_records(self, mode_type: RecordModeType, records_type: RecordType) -> RecordsSoloSuccessModel:
|
||||
if (record_key := RecordKey(mode_type, records_type)) not in self._records:
|
||||
raw_records = await Cache.get(
|
||||
BASE_URL / 'users' / self._request_user_parameter / 'records' / mode_type / records_type,
|
||||
)
|
||||
records: RecordsSoloSuccessModel | FailedModel = type_validate_json(SoloRecord, raw_records) # type: ignore[arg-type]
|
||||
if isinstance(records, FailedModel):
|
||||
msg = f'用户Summaries数据请求错误:\n{records.error}'
|
||||
raise RequestError(msg)
|
||||
self._records[record_key] = records
|
||||
await anti_duplicate_add(
|
||||
TETRIOHistoricalData(
|
||||
user_unique_identifier=(await self.user).unique_identifier,
|
||||
api_type=record_key.to_records(),
|
||||
data=records,
|
||||
update_time=records.cache.cached_at,
|
||||
),
|
||||
)
|
||||
return self._records[record_key]
|
||||
@@ -0,0 +1,83 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ...typing import Prisecter
|
||||
|
||||
|
||||
class AggregateStats(BaseModel):
|
||||
apm: float
|
||||
pps: float
|
||||
vsscore: float
|
||||
|
||||
|
||||
class Finesse(BaseModel):
|
||||
combo: int
|
||||
faults: int
|
||||
perfectpieces: int
|
||||
|
||||
|
||||
class Clears(BaseModel):
|
||||
singles: int
|
||||
doubles: int
|
||||
triples: int
|
||||
quads: int
|
||||
realtspins: int
|
||||
minitspins: int
|
||||
minitspinsingles: int
|
||||
tspinsingles: int
|
||||
minitspindoubles: int
|
||||
tspindoubles: int
|
||||
tspintriples: int
|
||||
tspinquads: int
|
||||
allclear: int
|
||||
|
||||
|
||||
class Garbage(BaseModel):
|
||||
sent: int
|
||||
received: int
|
||||
attack: int | None
|
||||
cleared: int
|
||||
|
||||
|
||||
class P(BaseModel):
|
||||
pri: float
|
||||
sec: float
|
||||
ter: float
|
||||
|
||||
def to_prisecter(self) -> Prisecter:
|
||||
return Prisecter(f'{self.pri}:{self.sec}:{self.ter}')
|
||||
|
||||
|
||||
# fmt: off
|
||||
class ArCounts(BaseModel):
|
||||
bronze: int | None = Field(default=None, alias='1') # pyright: ignore [reportGeneralTypeIssues]
|
||||
silver: int | None = Field(default=None, alias='2') # pyright: ignore [reportGeneralTypeIssues]
|
||||
gold: int | None = Field(default=None, alias='3') # pyright: ignore [reportGeneralTypeIssues]
|
||||
platinum: int | None = Field(default=None, alias='4') # pyright: ignore [reportGeneralTypeIssues]
|
||||
diamond: int | None = Field(default=None, alias='5') # pyright: ignore [reportGeneralTypeIssues]
|
||||
issued: int | None = Field(default=None, alias='100') # pyright: ignore [reportGeneralTypeIssues]
|
||||
top3: int | None = Field(default=None, alias='t3')
|
||||
top5: int | None = Field(default=None, alias='t5')
|
||||
top10: int | None = Field(default=None, alias='t10')
|
||||
top25: int | None = Field(default=None, alias='t25')
|
||||
top50: int | None = Field(default=None, alias='t50')
|
||||
top100: int | None = Field(default=None, alias='t100')
|
||||
# fmt: on
|
||||
|
||||
|
||||
class Cache(BaseModel):
|
||||
status: str
|
||||
cached_at: datetime
|
||||
cached_until: datetime
|
||||
|
||||
|
||||
class SuccessModel(BaseModel):
|
||||
success: Literal[True]
|
||||
cache: Cache
|
||||
|
||||
|
||||
class FailedModel(BaseModel):
|
||||
success: Literal[False]
|
||||
error: str
|
||||
@@ -0,0 +1,65 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..base import P
|
||||
from . import AggregateStats, Clears, Finesse, Garbage
|
||||
|
||||
|
||||
class Time(BaseModel):
|
||||
start: int
|
||||
zero: bool
|
||||
locked: bool
|
||||
prev: int
|
||||
frameoffset: int | None = None
|
||||
|
||||
|
||||
class Stats(BaseModel):
|
||||
seed: float | None = None # ?: 不知道是之后都没有了还是还会有
|
||||
lines: int
|
||||
level_lines: int
|
||||
level_lines_needed: int
|
||||
inputs: int
|
||||
holds: int = 0
|
||||
time: Time | None = None # ?: 不知道是之后都没有了还是还会有
|
||||
score: int
|
||||
zenlevel: int | None = None
|
||||
zenprogress: int | None = None
|
||||
level: int
|
||||
combo: int
|
||||
currentcombopower: int | None = None
|
||||
topcombo: int
|
||||
btb: int
|
||||
topbtb: int
|
||||
currentbtbchainpower: int | None = None
|
||||
tspins: int
|
||||
piecesplaced: int
|
||||
clears: Clears
|
||||
garbage: Garbage
|
||||
kills: int
|
||||
finesse: Finesse
|
||||
finaltime: float
|
||||
|
||||
|
||||
class Results(BaseModel):
|
||||
aggregatestats: AggregateStats
|
||||
stats: Stats
|
||||
gameoverreason: str
|
||||
|
||||
|
||||
class Record(BaseModel):
|
||||
id: str = Field(..., alias='_id')
|
||||
replayid: str
|
||||
stub: bool
|
||||
gamemode: Literal['40l', 'blitz']
|
||||
pb: bool
|
||||
oncepb: bool
|
||||
ts: datetime
|
||||
revolution: None
|
||||
otherusers: list
|
||||
leaderboards: list[str]
|
||||
results: Results
|
||||
extras: dict
|
||||
disputed: bool
|
||||
p: P
|
||||
@@ -0,0 +1,43 @@
|
||||
from datetime import datetime
|
||||
from enum import IntEnum
|
||||
from typing import Literal, NamedTuple
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..base import FailedModel
|
||||
from ..base import SuccessModel as BaseSuccessModel
|
||||
|
||||
|
||||
class Result(IntEnum):
|
||||
VICTORY = 1
|
||||
DEFEAT = 2
|
||||
VICTORY_BY_DISQUALIFICATION = 3
|
||||
DEFEAT_BY_DISQUALIFICATION = 4
|
||||
TIE = 5
|
||||
NO_CONTEST = 6
|
||||
MATCH_NULLIFIED = 7
|
||||
|
||||
|
||||
class Point(NamedTuple):
|
||||
timestamp_offset: int
|
||||
result: Result
|
||||
post_match_tr: int
|
||||
opponent_pre_match_tr: int
|
||||
"""If the opponent was unranked, same as post_match_tr."""
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
start_time: datetime = Field(..., alias='startTime')
|
||||
points: list[Point] = Field(..., min_length=1)
|
||||
|
||||
|
||||
class Empty(BaseModel):
|
||||
start_time: Literal[9007199254740991] = Field(..., alias='startTime')
|
||||
points: list = Field(..., max_length=0)
|
||||
|
||||
|
||||
class LeagueFlowSuccess(BaseSuccessModel):
|
||||
data: Data | Empty
|
||||
|
||||
|
||||
LeagueFlow = LeagueFlowSuccess | FailedModel
|
||||
@@ -0,0 +1,18 @@
|
||||
from typing import Any
|
||||
|
||||
from nonebot.compat import PYDANTIC_V2
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ...typing import Prisecter
|
||||
|
||||
|
||||
class Parameter(BaseModel):
|
||||
after: Prisecter | None = None
|
||||
before: Prisecter | None = None
|
||||
limit: int = Field(default=25, ge=1, le=100)
|
||||
country: str | None = None
|
||||
|
||||
def to_params(self) -> dict[str, Any]:
|
||||
if PYDANTIC_V2:
|
||||
return self.model_dump(exclude_defaults=True)
|
||||
return self.dict(exclude_defaults=True)
|
||||
@@ -0,0 +1,50 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ...typing import Rank, ValidRank
|
||||
from ..base import ArCounts, FailedModel, P, SuccessModel
|
||||
|
||||
|
||||
class League(BaseModel):
|
||||
gamesplayed: int
|
||||
gameswon: int
|
||||
tr: float
|
||||
gxe: float
|
||||
rank: Rank
|
||||
bestrank: ValidRank
|
||||
glicko: float
|
||||
rd: float
|
||||
apm: float
|
||||
pps: float
|
||||
vs: float
|
||||
decaying: bool
|
||||
|
||||
|
||||
class Entry(BaseModel):
|
||||
id: str = Field(..., alias='_id')
|
||||
username: str
|
||||
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop']
|
||||
ts: datetime | None = None
|
||||
xp: float
|
||||
country: str | None = None
|
||||
supporter: bool | None = None
|
||||
league: League
|
||||
gamesplayed: int
|
||||
gameswon: int
|
||||
gametime: float
|
||||
ar: int
|
||||
ar_counts: ArCounts
|
||||
p: P
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
entries: list[Entry]
|
||||
|
||||
|
||||
class BySuccessModel(SuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
By = BySuccessModel | FailedModel
|
||||
@@ -0,0 +1,15 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..base import FailedModel, SuccessModel
|
||||
from ..summaries.solo import Record
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
entries: list[Record]
|
||||
|
||||
|
||||
class SoloSuccessModel(SuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
Solo = SoloSuccessModel | FailedModel
|
||||
@@ -0,0 +1,15 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..base import FailedModel, SuccessModel
|
||||
from ..summaries.zenith import Record
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
entries: list[Record]
|
||||
|
||||
|
||||
class ZenithSuccessModel(SuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
Zenith = ZenithSuccessModel | FailedModel
|
||||
@@ -0,0 +1,17 @@
|
||||
from typing import TypeAlias
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..base import FailedModel, SuccessModel
|
||||
from ..base.solo import Record
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
entries: list[Record]
|
||||
|
||||
|
||||
class SoloSuccessModel(SuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
Solo: TypeAlias = SoloSuccessModel | FailedModel
|
||||
@@ -0,0 +1,21 @@
|
||||
from .achievements import Achievements, AchievementsSuccessModel
|
||||
from .league import LeagueSuccessModel
|
||||
from .solo import Solo, SoloSuccessModel
|
||||
from .zen import Zen, ZenSuccessModel
|
||||
from .zenith import Zenith, ZenithEx, ZenithSuccessModel
|
||||
|
||||
SummariesModel = AchievementsSuccessModel | SoloSuccessModel | ZenSuccessModel | LeagueSuccessModel | ZenithSuccessModel
|
||||
|
||||
__all__ = [
|
||||
'Achievements',
|
||||
'AchievementsSuccessModel',
|
||||
'LeagueSuccessModel',
|
||||
'Solo',
|
||||
'SoloSuccessModel',
|
||||
'SummariesModel',
|
||||
'Zen',
|
||||
'ZenSuccessModel',
|
||||
'Zenith',
|
||||
'ZenithEx',
|
||||
'ZenithSuccessModel',
|
||||
]
|
||||
@@ -0,0 +1,29 @@
|
||||
from typing import TypeAlias
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..base import FailedModel, SuccessModel
|
||||
|
||||
|
||||
class Achievement(BaseModel):
|
||||
# 这**都是些啥
|
||||
k: int
|
||||
o: int
|
||||
rt: int
|
||||
vt: int
|
||||
min: int
|
||||
deci: int
|
||||
name: str
|
||||
object: str
|
||||
category: str
|
||||
hidden: bool
|
||||
desc: str
|
||||
n: str
|
||||
stub: bool
|
||||
|
||||
|
||||
class AchievementsSuccessModel(SuccessModel):
|
||||
data: list[Achievement]
|
||||
|
||||
|
||||
Achievements: TypeAlias = AchievementsSuccessModel | FailedModel
|
||||
@@ -0,0 +1,10 @@
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class User(BaseModel):
|
||||
id: str
|
||||
username: str
|
||||
avatar_revision: int | None
|
||||
banner_revision: int | None
|
||||
country: str | None
|
||||
supporter: int
|
||||
@@ -0,0 +1,130 @@
|
||||
from typing import Literal
|
||||
|
||||
from nonebot.compat import PYDANTIC_V2
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ...typing import Rank, S1Rank, S1ValidRank
|
||||
from ..base import SuccessModel
|
||||
|
||||
if PYDANTIC_V2:
|
||||
from pydantic import field_validator
|
||||
else:
|
||||
from pydantic import validator
|
||||
|
||||
|
||||
class PastInner(BaseModel):
|
||||
season: str
|
||||
username: str
|
||||
country: str | None = None
|
||||
placement: int | None = None
|
||||
gamesplayed: int
|
||||
gameswon: int
|
||||
glicko: float
|
||||
gxe: float
|
||||
tr: float
|
||||
rd: float
|
||||
rank: S1Rank
|
||||
bestrank: S1ValidRank
|
||||
ranked: bool
|
||||
apm: float
|
||||
pps: float
|
||||
vs: float
|
||||
|
||||
|
||||
class Past(BaseModel):
|
||||
first: PastInner | None = Field(default=None, alias='1') # pyright: ignore [reportGeneralTypeIssues]
|
||||
|
||||
|
||||
class BaseData(BaseModel):
|
||||
decaying: bool
|
||||
past: Past
|
||||
|
||||
|
||||
class NeverPlayedData(BaseData):
|
||||
gamesplayed: Literal[0]
|
||||
gameswon: Literal[0]
|
||||
glicko: Literal[-1]
|
||||
rd: Literal[-1]
|
||||
gxe: Literal[-1]
|
||||
tr: Literal[-1]
|
||||
rank: Literal['z']
|
||||
apm: None = None
|
||||
pps: None = None
|
||||
vs: None = None
|
||||
standing: Literal[-1]
|
||||
standing_local: Literal[-1]
|
||||
prev_rank: None
|
||||
prev_at: Literal[-1]
|
||||
next_rank: None
|
||||
next_at: Literal[-1]
|
||||
percentile: Literal[-1]
|
||||
percentile_rank: Literal['z']
|
||||
|
||||
|
||||
class NeverRatedData(BaseData):
|
||||
gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||
gameswon: int
|
||||
glicko: Literal[-1]
|
||||
rd: Literal[-1]
|
||||
gxe: Literal[-1]
|
||||
tr: Literal[-1]
|
||||
apm: float
|
||||
pps: float
|
||||
vs: float
|
||||
rank: Literal['z']
|
||||
standing: Literal[-1]
|
||||
standing_local: Literal[-1]
|
||||
prev_rank: None
|
||||
prev_at: Literal[-1]
|
||||
next_rank: None
|
||||
next_at: Literal[-1]
|
||||
percentile: Literal[-1]
|
||||
percentile_rank: Literal['z']
|
||||
|
||||
if PYDANTIC_V2:
|
||||
|
||||
@field_validator('apm', 'pps', 'vs', mode='before')
|
||||
@classmethod
|
||||
def _(cls, value: float | None) -> float:
|
||||
if value is None:
|
||||
return 0
|
||||
return value
|
||||
|
||||
else:
|
||||
|
||||
@validator('apm', 'pps', 'vs', pre=True, always=True)
|
||||
@classmethod
|
||||
def _(cls, value: float | None) -> float:
|
||||
if value is None:
|
||||
return 0
|
||||
return value
|
||||
|
||||
|
||||
class RatedData(BaseData):
|
||||
gamesplayed: int
|
||||
gameswon: int
|
||||
glicko: float
|
||||
rd: float
|
||||
gxe: float
|
||||
tr: float
|
||||
rank: Rank
|
||||
bestrank: Rank
|
||||
standing: int
|
||||
apm: float
|
||||
pps: float
|
||||
vs: float
|
||||
standing_local: int
|
||||
prev_rank: Rank | None = None
|
||||
prev_at: int
|
||||
next_rank: Rank | None = None
|
||||
next_at: int
|
||||
percentile: float
|
||||
percentile_rank: str
|
||||
|
||||
|
||||
class InvalidData(BaseModel):
|
||||
"""I don't know what osk is doing, but the return value is an empty dictionary"""
|
||||
|
||||
|
||||
class LeagueSuccessModel(SuccessModel):
|
||||
data: NeverPlayedData | NeverRatedData | RatedData | InvalidData
|
||||
@@ -0,0 +1,24 @@
|
||||
from typing import TypeAlias
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..base import FailedModel, SuccessModel
|
||||
from ..base.solo import Record as BaseRecord
|
||||
from .base import User
|
||||
|
||||
|
||||
class Record(BaseRecord):
|
||||
user: User
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
record: Record | None
|
||||
rank: int
|
||||
rank_local: int
|
||||
|
||||
|
||||
class SoloSuccessModel(SuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
Solo: TypeAlias = SoloSuccessModel | FailedModel
|
||||
@@ -0,0 +1,17 @@
|
||||
from typing import TypeAlias
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from ..base import FailedModel, SuccessModel
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
level: int
|
||||
score: int
|
||||
|
||||
|
||||
class ZenSuccessModel(SuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
Zen: TypeAlias = ZenSuccessModel | FailedModel
|
||||
@@ -0,0 +1,116 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal, TypeAlias
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from ..base import AggregateStats, FailedModel, Finesse, P, SuccessModel
|
||||
from ..base import Clears as BaseClears
|
||||
from ..base import Garbage as BaseGarbage
|
||||
from .base import User
|
||||
|
||||
|
||||
class Clears(BaseClears):
|
||||
pentas: int
|
||||
minitspintriples: int
|
||||
minitspinquads: int
|
||||
tspinpentas: int
|
||||
|
||||
|
||||
class Garbage(BaseGarbage):
|
||||
sent_nomult: int
|
||||
maxspike: int
|
||||
maxspike_nomult: int
|
||||
|
||||
|
||||
class _Zenith(BaseModel):
|
||||
altitude: float
|
||||
rank: float
|
||||
peakrank: float
|
||||
avgrankpts: float
|
||||
floor: int
|
||||
targetingfactor: float
|
||||
targetinggrace: float
|
||||
totalbonus: float
|
||||
revives: int
|
||||
revives_total: int = Field(..., alias='revivesTotal')
|
||||
speedrun: bool
|
||||
speedrun_seen: bool
|
||||
splits: list[int]
|
||||
|
||||
|
||||
class Stats(BaseModel):
|
||||
lines: int
|
||||
level_lines: int
|
||||
level_lines_needed: int
|
||||
inputs: int
|
||||
holds: int
|
||||
score: int
|
||||
zenlevel: int
|
||||
zenprogress: int
|
||||
level: int
|
||||
combo: int
|
||||
topcombo: int
|
||||
combopower: int
|
||||
btb: int
|
||||
topbtb: int
|
||||
btbpower: int
|
||||
tspins: int
|
||||
piecesplaced: int
|
||||
clears: Clears
|
||||
garbage: Garbage
|
||||
kills: int
|
||||
finesse: Finesse
|
||||
zenith: _Zenith
|
||||
finaltime: float
|
||||
|
||||
|
||||
class Results(BaseModel):
|
||||
aggregatestats: AggregateStats
|
||||
stats: Stats
|
||||
gameoverreason: str
|
||||
|
||||
|
||||
class ExtrasZenith(BaseModel):
|
||||
mods: list[str]
|
||||
|
||||
|
||||
class Extras(BaseModel):
|
||||
zenith: ExtrasZenith
|
||||
|
||||
|
||||
class Record(BaseModel):
|
||||
id: str = Field(..., alias='_id')
|
||||
replayid: str
|
||||
stub: bool
|
||||
gamemode: Literal['zenith', 'zenithex']
|
||||
pb: bool
|
||||
oncepb: bool
|
||||
ts: datetime
|
||||
revolution: None
|
||||
user: User
|
||||
otherusers: list
|
||||
leaderboards: list[str]
|
||||
results: Results
|
||||
extras: Extras
|
||||
disputed: bool
|
||||
p: P
|
||||
|
||||
|
||||
class Best(BaseModel):
|
||||
record: None # WTF
|
||||
rank: int
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
record: Record | None
|
||||
rank: int
|
||||
rank_local: int
|
||||
best: Best
|
||||
|
||||
|
||||
class ZenithSuccessModel(SuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
Zenith: TypeAlias = ZenithSuccessModel | FailedModel
|
||||
ZenithEx: TypeAlias = ZenithSuccessModel | FailedModel
|
||||
18
nonebot_plugin_tetris_stats/games/tetrio/api/schemas/user.py
Normal file
18
nonebot_plugin_tetris_stats/games/tetrio/api/schemas/user.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from typing import Literal
|
||||
|
||||
from typing_extensions import override
|
||||
|
||||
from ....schemas import BaseUser
|
||||
from ...constant import GAME_TYPE
|
||||
|
||||
|
||||
class User(BaseUser[Literal['IO']]):
|
||||
platform: Literal['IO'] = GAME_TYPE
|
||||
|
||||
ID: str
|
||||
name: str
|
||||
|
||||
@property
|
||||
@override
|
||||
def unique_identifier(self) -> str:
|
||||
return self.ID
|
||||
@@ -0,0 +1,74 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from .base import ArCounts, FailedModel
|
||||
from .base import SuccessModel as BaseSuccessModel
|
||||
|
||||
|
||||
class Badge(BaseModel):
|
||||
id: str
|
||||
label: str
|
||||
group: str | None = None
|
||||
ts: datetime | Literal[False] | None = None
|
||||
|
||||
|
||||
class Connection(BaseModel):
|
||||
id: str
|
||||
username: str
|
||||
display_username: str
|
||||
|
||||
|
||||
class Connections(BaseModel):
|
||||
discord: Connection | None = None
|
||||
twitch: Connection | None = None
|
||||
twitter: Connection | None = None
|
||||
reddit: Connection | None = None
|
||||
youtube: Connection | None = None
|
||||
steam: Connection | None = None
|
||||
|
||||
|
||||
class Distinguishment(BaseModel):
|
||||
type: str
|
||||
|
||||
|
||||
class Data(BaseModel):
|
||||
id: str = Field(default=..., alias='_id')
|
||||
username: str
|
||||
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'hidden', 'banned']
|
||||
ts: datetime | None = None
|
||||
botmaster: str | None = None
|
||||
badges: list[Badge]
|
||||
xp: float
|
||||
gamesplayed: int
|
||||
gameswon: int
|
||||
gametime: float
|
||||
country: str | None = None
|
||||
badstanding: bool | None = None
|
||||
supporter: bool | None = None # osk说是必有, 但实际上不是 fkosk
|
||||
supporter_tier: int
|
||||
avatar_revision: int | None = None
|
||||
"""This user's avatar ID. Get their avatar at
|
||||
|
||||
https://tetr.io/user-content/avatars/{ USERID }.jpg?rv={ AVATAR_REVISION }"""
|
||||
banner_revision: int | None = None
|
||||
"""This user's banner ID. Get their banner at
|
||||
|
||||
https://tetr.io/user-content/banners/{ USERID }.jpg?rv={ BANNER_REVISION }
|
||||
|
||||
Ignore this field if the user is not a supporter."""
|
||||
bio: str | None = None
|
||||
connections: Connections
|
||||
friend_count: int | None = None
|
||||
distinguishment: Distinguishment | None = None
|
||||
achievements: list[int]
|
||||
ar: int
|
||||
ar_counts: ArCounts
|
||||
|
||||
|
||||
class UserInfoSuccess(BaseSuccessModel):
|
||||
data: Data
|
||||
|
||||
|
||||
UserInfo = UserInfoSuccess | FailedModel
|
||||
46
nonebot_plugin_tetris_stats/games/tetrio/api/typing.py
Normal file
46
nonebot_plugin_tetris_stats/games/tetrio/api/typing.py
Normal file
@@ -0,0 +1,46 @@
|
||||
from typing import Literal, NewType
|
||||
|
||||
S1ValidRank = Literal[
|
||||
'x',
|
||||
'u',
|
||||
'ss',
|
||||
's+',
|
||||
's',
|
||||
's-',
|
||||
'a+',
|
||||
'a',
|
||||
'a-',
|
||||
'b+',
|
||||
'b',
|
||||
'b-',
|
||||
'c+',
|
||||
'c',
|
||||
'c-',
|
||||
'd+',
|
||||
'd',
|
||||
]
|
||||
S1Rank = S1ValidRank | Literal['z']
|
||||
|
||||
ValidRank = Literal['x+'] | S1ValidRank
|
||||
Rank = ValidRank | Literal['z'] # 未定级
|
||||
|
||||
Summaries = Literal[
|
||||
'40l',
|
||||
'blitz',
|
||||
'zenith',
|
||||
'zenithex',
|
||||
'league',
|
||||
'zen',
|
||||
'achievements',
|
||||
]
|
||||
|
||||
Records = Literal[
|
||||
'40l_top',
|
||||
'40l_recent',
|
||||
'40l_progression',
|
||||
'blitz_top',
|
||||
'blitz_recent',
|
||||
'blitz_progression',
|
||||
]
|
||||
|
||||
Prisecter = NewType('Prisecter', str)
|
||||
86
nonebot_plugin_tetris_stats/games/tetrio/bind.py
Normal file
86
nonebot_plugin_tetris_stats/games/tetrio/bind.py
Normal file
@@ -0,0 +1,86 @@
|
||||
from hashlib import md5
|
||||
|
||||
from arclet.alconna import Arg, ArgFlag
|
||||
from nonebot_plugin_alconna import Args, Subcommand
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from nonebot_plugin_user import User
|
||||
from nonebot_plugin_userinfo import BotUserInfo, UserInfo
|
||||
from yarl import URL
|
||||
|
||||
from ...db import BindStatus, create_or_update_bind, trigger
|
||||
from ...utils.host import HostPage, get_self_netloc
|
||||
from ...utils.image import get_avatar
|
||||
from ...utils.render import Bind, render
|
||||
from ...utils.render.schemas.base import Avatar, People
|
||||
from ...utils.screenshot import screenshot
|
||||
from . import alc, command, get_player
|
||||
from .api import Player
|
||||
from .constant import GAME_TYPE
|
||||
|
||||
command.add(
|
||||
Subcommand(
|
||||
'bind',
|
||||
Args(
|
||||
Arg(
|
||||
'account',
|
||||
get_player,
|
||||
notice='TETR.IO 用户名 / ID',
|
||||
flags=[ArgFlag.HIDDEN],
|
||||
)
|
||||
),
|
||||
help_text='绑定 TETR.IO 账号',
|
||||
)
|
||||
)
|
||||
|
||||
alc.shortcut(
|
||||
'(?i:io)(?i:绑定|绑|bind)',
|
||||
command='tstats TETR.IO bind',
|
||||
humanized='io绑定',
|
||||
)
|
||||
|
||||
|
||||
@alc.assign('TETRIO.bind')
|
||||
async def _(nb_user: User, account: Player, event_session: EventSession, bot_info: UserInfo = BotUserInfo()): # noqa: B008
|
||||
async with trigger(
|
||||
session_persist_id=await get_session_persist_id(event_session),
|
||||
game_platform=GAME_TYPE,
|
||||
command_type='bind',
|
||||
command_args=[],
|
||||
):
|
||||
user = await account.user
|
||||
async with get_session() as session:
|
||||
bind_status = await create_or_update_bind(
|
||||
session=session,
|
||||
user=nb_user,
|
||||
game_platform=GAME_TYPE,
|
||||
game_account=user.unique_identifier,
|
||||
)
|
||||
if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
|
||||
netloc = get_self_netloc()
|
||||
async with HostPage(
|
||||
await render(
|
||||
'v1/binding',
|
||||
Bind(
|
||||
platform='TETR.IO',
|
||||
status='unknown',
|
||||
user=People(
|
||||
avatar=str(
|
||||
URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}')
|
||||
% {'revision': avatar_revision}
|
||||
)
|
||||
if (avatar_revision := (await account.avatar_revision)) is not None and avatar_revision != 0
|
||||
else Avatar(type='identicon', hash=md5(user.ID.encode()).hexdigest()), # noqa: S324
|
||||
name=user.name.upper(),
|
||||
),
|
||||
bot=People(
|
||||
avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),
|
||||
name=bot_info.user_name,
|
||||
),
|
||||
command='io查我',
|
||||
),
|
||||
)
|
||||
) as page_hash:
|
||||
await UniMessage.image(raw=await screenshot(f'http://{netloc}/host/{page_hash}.html')).finish()
|
||||
51
nonebot_plugin_tetris_stats/games/tetrio/config.py
Normal file
51
nonebot_plugin_tetris_stats/games/tetrio/config.py
Normal file
@@ -0,0 +1,51 @@
|
||||
from arclet.alconna import Arg
|
||||
from nonebot_plugin_alconna import Option, Subcommand
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import async_scoped_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from nonebot_plugin_user import User
|
||||
from sqlalchemy import select
|
||||
|
||||
from ...db import trigger
|
||||
from . import alc, command
|
||||
from .constant import GAME_TYPE
|
||||
from .models import TETRIOUserConfig
|
||||
from .typing import Template
|
||||
|
||||
command.add(
|
||||
Subcommand(
|
||||
'config',
|
||||
Option(
|
||||
'--default-template',
|
||||
Arg('template', Template, notice='模板版本'),
|
||||
alias=['-DT', 'DefaultTemplate'],
|
||||
help_text='设置默认查询模板',
|
||||
),
|
||||
help_text='TETR.IO 查询个性化配置',
|
||||
),
|
||||
)
|
||||
|
||||
alc.shortcut(
|
||||
'(?i:io)(?i:配置|配|config)',
|
||||
command='tstats TETR.IO config',
|
||||
humanized='io配置',
|
||||
)
|
||||
|
||||
|
||||
@alc.assign('TETRIO.config')
|
||||
async def _(user: User, session: async_scoped_session, event_session: EventSession, template: Template):
|
||||
async with trigger(
|
||||
session_persist_id=await get_session_persist_id(event_session),
|
||||
game_platform=GAME_TYPE,
|
||||
command_type='config',
|
||||
command_args=[f'--default-template {template}'],
|
||||
):
|
||||
config = (await session.scalars(select(TETRIOUserConfig).where(TETRIOUserConfig.id == user.id))).one_or_none()
|
||||
if config is None:
|
||||
config = TETRIOUserConfig(id=user.id, query_template=template)
|
||||
session.add(config)
|
||||
else:
|
||||
config.query_template = template
|
||||
await session.commit()
|
||||
await UniMessage('配置成功').finish()
|
||||
@@ -1,10 +1,16 @@
|
||||
from re import compile # noqa: A004
|
||||
from typing import Literal
|
||||
|
||||
from .typing import Rank
|
||||
from yarl import URL
|
||||
|
||||
from .api.typing import ValidRank
|
||||
|
||||
GAME_TYPE: Literal['IO'] = 'IO'
|
||||
BASE_URL = 'https://ch.tetr.io/api/'
|
||||
RANK_PERCENTILE: dict[Rank, float] = {
|
||||
|
||||
BASE_URL = URL('https://ch.tetr.io/api/')
|
||||
|
||||
RANK_PERCENTILE: dict[ValidRank, float] = {
|
||||
'x+': 0.2,
|
||||
'x': 1,
|
||||
'u': 5,
|
||||
'ss': 11,
|
||||
@@ -23,5 +29,9 @@ RANK_PERCENTILE: dict[Rank, float] = {
|
||||
'd+': 97.5,
|
||||
'd': 100,
|
||||
}
|
||||
|
||||
TR_MIN = 0
|
||||
TR_MAX = 25000
|
||||
|
||||
USER_ID = compile(r'^[a-f0-9]{24}$')
|
||||
USER_NAME = compile(r'^[a-zA-Z0-9_-]{3,16}$')
|
||||
91
nonebot_plugin_tetris_stats/games/tetrio/list.py
Normal file
91
nonebot_plugin_tetris_stats/games/tetrio/list.py
Normal file
@@ -0,0 +1,91 @@
|
||||
from nonebot_plugin_alconna import Args, Option, Subcommand
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
|
||||
from ...db import trigger
|
||||
from ...utils.host import HostPage, get_self_netloc
|
||||
from ...utils.metrics import get_metrics
|
||||
from ...utils.render import render
|
||||
from ...utils.render.schemas.tetrio.user.list_v2 import List, TetraLeague, User
|
||||
from ...utils.screenshot import screenshot
|
||||
from .. import alc
|
||||
from . import command
|
||||
from .api.leaderboards import by
|
||||
from .api.schemas.base import P
|
||||
from .api.schemas.leaderboards import Parameter
|
||||
from .constant import GAME_TYPE
|
||||
|
||||
command.add(
|
||||
Subcommand(
|
||||
'list',
|
||||
Option('--max-tr', Args['max_tr', float], help_text='TR的上限'),
|
||||
Option('--min-tr', Args['min_tr', float], help_text='TR的下限'),
|
||||
Option('--limit', Args['limit', int], help_text='查询数量'),
|
||||
Option('--country', Args['country', str], help_text='国家代码'),
|
||||
help_text='查询 TETR.IO 段位排行榜',
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@alc.assign('TETRIO.list')
|
||||
async def _(
|
||||
event_session: EventSession,
|
||||
max_tr: float | None = None,
|
||||
min_tr: float | None = None,
|
||||
limit: int | None = None,
|
||||
country: str | None = None,
|
||||
):
|
||||
country = country.upper() if country is not None else None
|
||||
async with trigger(
|
||||
session_persist_id=await get_session_persist_id(event_session),
|
||||
game_platform=GAME_TYPE,
|
||||
command_type='list',
|
||||
command_args=[
|
||||
f'{key} {value}'
|
||||
for key, value in zip(
|
||||
('--max-tr', '--min-tr', '--limit', '--country'), (max_tr, min_tr, limit, country), strict=True
|
||||
)
|
||||
if value is not None
|
||||
],
|
||||
):
|
||||
parameter = Parameter(
|
||||
# ?: 似乎是只需要 pri 至少 league 榜的返回值只有 pri
|
||||
after=P(pri=max_tr, sec=0, ter=0).to_prisecter() if max_tr is not None else None,
|
||||
before=P(pri=min_tr, sec=0, ter=0).to_prisecter() if min_tr is not None else None,
|
||||
limit=limit or 25,
|
||||
country=country,
|
||||
)
|
||||
league = await by('league', parameter)
|
||||
async with HostPage(
|
||||
await render(
|
||||
'v2/tetrio/user/list',
|
||||
List(
|
||||
show_index=True,
|
||||
users=[
|
||||
User(
|
||||
id=i.id,
|
||||
name=i.username.upper(),
|
||||
avatar=f'https://tetr.io/user-content/avatars/{i.id}.jpg',
|
||||
country=i.country,
|
||||
tetra_league=TetraLeague(
|
||||
rank=i.league.rank,
|
||||
tr=round(i.league.tr, 2),
|
||||
glicko=round(i.league.glicko, 2),
|
||||
rd=round(i.league.rd, 2),
|
||||
decaying=i.league.decaying,
|
||||
pps=(metrics := get_metrics(pps=i.league.pps, apm=i.league.apm, vs=i.league.vs)).pps,
|
||||
apm=metrics.apm,
|
||||
apl=metrics.apl,
|
||||
vs=metrics.vs,
|
||||
adpl=metrics.adpl,
|
||||
),
|
||||
xp=i.xp,
|
||||
join_at=None,
|
||||
)
|
||||
for i in league.data.entries
|
||||
],
|
||||
),
|
||||
)
|
||||
) as page_hash:
|
||||
await UniMessage.image(raw=await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')).finish()
|
||||
53
nonebot_plugin_tetris_stats/games/tetrio/models.py
Normal file
53
nonebot_plugin_tetris_stats/games/tetrio/models.py
Normal file
@@ -0,0 +1,53 @@
|
||||
from datetime import datetime
|
||||
from uuid import UUID
|
||||
|
||||
from nonebot_plugin_orm import Model
|
||||
from sqlalchemy import DateTime, ForeignKey, String
|
||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column, relationship
|
||||
|
||||
from ...db.models import PydanticType
|
||||
from .api.schemas.leaderboards.by import BySuccessModel, Entry
|
||||
from .api.typing import ValidRank
|
||||
from .typing import Template
|
||||
|
||||
|
||||
class TETRIOUserConfig(MappedAsDataclass, Model):
|
||||
id: Mapped[int] = mapped_column(primary_key=True)
|
||||
query_template: Mapped[Template] = mapped_column(String(2))
|
||||
|
||||
|
||||
class TETRIOLeagueStats(MappedAsDataclass, Model):
|
||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||
raw: Mapped[list['TETRIOLeagueHistorical']] = relationship(back_populates='stats', lazy='noload')
|
||||
fields: Mapped[list['TETRIOLeagueStatsField']] = relationship(back_populates='stats')
|
||||
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||
|
||||
|
||||
class TETRIOLeagueHistorical(MappedAsDataclass, Model):
|
||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||
request_id: Mapped[UUID] = mapped_column(index=True)
|
||||
data: Mapped[BySuccessModel] = mapped_column(PydanticType([], {BySuccessModel}))
|
||||
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||
stats_id: Mapped[int] = mapped_column(ForeignKey('nonebot_plugin_tetris_stats_tetrioleaguestats.id'), init=False)
|
||||
stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='raw')
|
||||
|
||||
|
||||
entry_type = PydanticType([], {Entry})
|
||||
|
||||
|
||||
class TETRIOLeagueStatsField(MappedAsDataclass, Model):
|
||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||
rank: Mapped[ValidRank] = mapped_column(String(2), index=True)
|
||||
tr_line: Mapped[float]
|
||||
player_count: Mapped[int]
|
||||
low_pps: Mapped[Entry] = mapped_column(entry_type)
|
||||
low_apm: Mapped[Entry] = mapped_column(entry_type)
|
||||
low_vs: Mapped[Entry] = mapped_column(entry_type)
|
||||
avg_pps: Mapped[float]
|
||||
avg_apm: Mapped[float]
|
||||
avg_vs: Mapped[float]
|
||||
high_pps: Mapped[Entry] = mapped_column(entry_type)
|
||||
high_apm: Mapped[Entry] = mapped_column(entry_type)
|
||||
high_vs: Mapped[Entry] = mapped_column(entry_type)
|
||||
stats_id: Mapped[int] = mapped_column(ForeignKey('nonebot_plugin_tetris_stats_tetrioleaguestats.id'), init=False)
|
||||
stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='fields')
|
||||
135
nonebot_plugin_tetris_stats/games/tetrio/query/__init__.py
Normal file
135
nonebot_plugin_tetris_stats/games/tetrio/query/__init__.py
Normal file
@@ -0,0 +1,135 @@
|
||||
from datetime import timezone
|
||||
|
||||
from arclet.alconna import Arg, ArgFlag
|
||||
from nonebot import get_driver
|
||||
from nonebot.adapters import Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot_plugin_alconna import Args, At, Option, Subcommand
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from nonebot_plugin_user import User as NBUser
|
||||
from nonebot_plugin_user import get_user
|
||||
from sqlalchemy import select
|
||||
|
||||
from ....db import query_bind_info, trigger
|
||||
from ....i18n import Lang
|
||||
from ....utils.exception import FallbackError
|
||||
from ....utils.typing import Me
|
||||
from ... import add_block_handlers, alc
|
||||
from .. import command, get_player
|
||||
from ..api import Player
|
||||
from ..constant import GAME_TYPE
|
||||
from ..models import TETRIOUserConfig
|
||||
from ..typing import Template
|
||||
from .v1 import make_query_image_v1
|
||||
from .v2 import make_query_image_v2
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
driver = get_driver()
|
||||
|
||||
command.add(
|
||||
Subcommand(
|
||||
'query',
|
||||
Args(
|
||||
Arg(
|
||||
'target',
|
||||
At | Me,
|
||||
notice='@想要查询的人 / 自己',
|
||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||
),
|
||||
Arg(
|
||||
'account',
|
||||
get_player,
|
||||
notice='TETR.IO 用户名 / ID',
|
||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||
),
|
||||
),
|
||||
Option(
|
||||
'--template',
|
||||
Arg('template', Template),
|
||||
alias=['-T'],
|
||||
help_text='要使用的查询模板',
|
||||
),
|
||||
help_text='查询 TETR.IO 游戏信息',
|
||||
),
|
||||
)
|
||||
|
||||
alc.shortcut(
|
||||
'(?i:io)(?i:查询|查|query|stats)',
|
||||
command='tstats TETR.IO query',
|
||||
humanized='io查',
|
||||
)
|
||||
alc.shortcut(
|
||||
'fkosk',
|
||||
command='tstats TETR.IO query',
|
||||
arguments=['我'],
|
||||
fuzzy=False,
|
||||
humanized='An Easter egg!',
|
||||
)
|
||||
|
||||
add_block_handlers(alc.assign('TETRIO.query'))
|
||||
|
||||
|
||||
async def make_query_result(player: Player, template: Template) -> UniMessage:
|
||||
if template == 'v1':
|
||||
try:
|
||||
return UniMessage.image(raw=await make_query_image_v1(player))
|
||||
except FallbackError:
|
||||
template = 'v2'
|
||||
if template == 'v2':
|
||||
return UniMessage.image(raw=await make_query_image_v2(player))
|
||||
return None
|
||||
|
||||
|
||||
@alc.assign('TETRIO.query')
async def _(  # noqa: PLR0913
    user: NBUser,
    event: Event,
    matcher: Matcher,
    target: At | Me,
    event_session: EventSession,
    template: Template | None = None,
):
    """Handle `query` for an @mention / self target via the bound TETR.IO account."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            # Resolve the platform user and look up their bound TETR.IO account.
            bind = await query_bind_info(
                session=session,
                user=await get_user(
                    event_session.platform, target.target if isinstance(target, At) else event.get_user_id()
                ),
                game_platform=GAME_TYPE,
            )
            if template is None:
                # No explicit template: fall back to the user's saved preference, if any.
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        if bind is None:
            await matcher.finish('未查询到绑定信息')
        # trust=True: the stored account id is taken as-is without re-resolution.
        player = Player(user_id=bind.game_account, trust=True)
        await (
            UniMessage.i18n(Lang.interaction.warning.unverified) + await make_query_result(player, template or 'v1')
        ).finish()
|
||||
|
||||
|
||||
@alc.assign('TETRIO.query')
async def _(user: NBUser, account: Player, event_session: EventSession, template: Template | None = None):
    """Handle `query` for an explicitly named TETR.IO username / ID."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            if template is None:
                # No explicit template: fall back to the user's saved preference, if any.
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        await (await make_query_result(account, template or 'v1')).finish()
|
||||
56
nonebot_plugin_tetris_stats/games/tetrio/query/tools.py
Normal file
56
nonebot_plugin_tetris_stats/games/tetrio/query/tools.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from collections.abc import Callable
|
||||
from datetime import timedelta
|
||||
from typing import TypeVar, overload
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from ....utils.exception import FallbackError
|
||||
from ....utils.render.schemas.tetrio.user.base import TetraLeagueHistoryData
|
||||
from ..api.schemas.labs.leagueflow import Empty, LeagueFlowSuccess
|
||||
from ..api.schemas.summaries.league import InvalidData, LeagueSuccessModel, NeverPlayedData, NeverRatedData, RatedData
|
||||
|
||||
|
||||
def flow_to_history(
    leagueflow: LeagueFlowSuccess,
    handle: Callable[[list[TetraLeagueHistoryData]], list[TetraLeagueHistoryData]] | None = None,
) -> list[TetraLeagueHistoryData]:
    """Convert a league-flow API response into a list of TR history points.

    Args:
        leagueflow: A successful leagueflow response.
        handle: Optional post-processing applied to the generated list.

    Returns:
        History points ordered as they appear in the response, optionally
        transformed by *handle*.

    Raises:
        FallbackError: If the response carries no data.
    """
    if isinstance(leagueflow.data, Empty):
        raise FallbackError
    # Point offsets are milliseconds relative to the flow's start time.
    start_time = leagueflow.data.start_time.astimezone(ZoneInfo('Asia/Shanghai'))
    # FIX: the original comprehension filtered on the truthiness of a datetime
    # (`if start_time + timedelta(...)`), which is always true — the no-op
    # condition has been removed; behavior is unchanged.
    ret = [
        TetraLeagueHistoryData(
            record_at=start_time + timedelta(milliseconds=point.timestamp_offset),
            tr=point.post_match_tr,
        )
        for point in leagueflow.data.points
    ]
    return ret if handle is None else handle(ret)
|
||||
|
||||
|
||||
N = TypeVar('N', int, float)
|
||||
|
||||
|
||||
def handling_special_value(value: N) -> N | None:
|
||||
return value if value != -1 else None
|
||||
|
||||
|
||||
L = TypeVar('L', NeverPlayedData, NeverRatedData, RatedData)


@overload
def get_league_data(user_info: LeagueSuccessModel, league_type: type[L]) -> L: ...
@overload
def get_league_data(
    user_info: LeagueSuccessModel, league_type: None = None
) -> NeverPlayedData | NeverRatedData | RatedData: ...
def get_league_data(
    user_info: LeagueSuccessModel, league_type: type[L] | None = None
) -> L | NeverPlayedData | NeverRatedData | RatedData:
    """Extract the league payload from a summaries response.

    Args:
        user_info: A successful league summary response.
        league_type: When given, the concrete payload type the caller requires.

    Raises:
        FallbackError: If the payload is invalid, or is not an instance of the
            requested *league_type*.
    """
    league = user_info.data
    if isinstance(league, InvalidData):
        raise FallbackError
    if league_type is None:
        return league
    if isinstance(league, league_type):
        return league
    raise FallbackError
|
||||
197
nonebot_plugin_tetris_stats/games/tetrio/query/v1.py
Normal file
197
nonebot_plugin_tetris_stats/games/tetrio/query/v1.py
Normal file
@@ -0,0 +1,197 @@
|
||||
from asyncio import gather
|
||||
from datetime import datetime, timedelta
|
||||
from hashlib import md5
|
||||
from math import ceil, floor
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from yarl import URL
|
||||
|
||||
from ....utils.exception import FallbackError, WhatTheFuckError
|
||||
from ....utils.host import HostPage, get_self_netloc
|
||||
from ....utils.render import render
|
||||
from ....utils.render.schemas.base import Avatar, Ranking
|
||||
from ....utils.render.schemas.tetrio.user.base import TetraLeagueHistoryData
|
||||
from ....utils.render.schemas.tetrio.user.info_v1 import Info, Radar, TetraLeague, TetraLeagueHistory, User
|
||||
from ....utils.screenshot import screenshot
|
||||
from ..api import Player
|
||||
from ..api.schemas.summaries.league import RatedData
|
||||
from ..constant import TR_MAX, TR_MIN
|
||||
from .tools import flow_to_history, get_league_data
|
||||
|
||||
|
||||
def get_value_bounds(values: list[int | float]) -> tuple[int, int]:
|
||||
value_max = 10 * ceil(max(values) / 10)
|
||||
value_min = 10 * floor(min(values) / 10)
|
||||
return value_max, value_min
|
||||
|
||||
|
||||
def get_split(value_max: int, value_min: int) -> tuple[int, int]:
    """Find an axis split interval for the TR history chart.

    Symmetrically widens the [value_min, value_max] range (``offset``),
    shifting it when a bound would leave the valid TR range (``overflow``),
    until the span divides into 4 equal segments that are multiples of 10.

    Returns:
        ``(split interval, total adjustment applied to the bounds)``
    """
    offset = 0
    overflow = 0

    while True:
        # Shift the window down/up whenever a bound escapes [TR_MIN, TR_MAX].
        if (new_max_value := value_max + offset + overflow) > TR_MAX:
            overflow -= 1
            continue
        if (new_min_value := value_min - offset + overflow) < TR_MIN:
            overflow += 1
            continue
        # span / 40 integral ⇒ each of the 4 segments is a multiple of 10.
        if ((new_max_value - new_min_value) / 40).is_integer():
            # NOTE(review): the split uses offset only (not overflow) — confirm intentional.
            split_value = int((value_max + offset - (value_min - offset)) / 4)
            break
        offset += 1
    return split_value, offset + overflow
|
||||
|
||||
|
||||
def get_specified_point(
    previous_point: TetraLeagueHistoryData,
    behind_point: TetraLeagueHistoryData,
    point_time: datetime,
) -> TetraLeagueHistoryData:
    """Linearly interpolate the data at *point_time* between two known points.

    Args:
        previous_point: The data point before *point_time*.
        behind_point: The data point after *point_time*.
        point_time: The time at which to estimate the data.

    Returns:
        The interpolated data point at *point_time*.
    """
    # Slope between the two points (TR per second of wall time).
    slope = (behind_point.tr - previous_point.tr) / (
        datetime.timestamp(behind_point.record_at) - datetime.timestamp(previous_point.record_at)
    )
    return TetraLeagueHistoryData(
        record_at=point_time,
        tr=previous_point.tr + slope * (datetime.timestamp(point_time) - datetime.timestamp(previous_point.record_at)),
    )
|
||||
|
||||
|
||||
def handle_history_data(data: list[TetraLeagueHistoryData]) -> list[TetraLeagueHistoryData]:  # noqa: C901, PLR0912
    """Clip the TR history to a fixed 9-day window, synthesizing boundary points.

    The returned list starts exactly at the left border and ends exactly at
    the right border; interior points are kept unchanged.
    """
    # Sort by record time.
    data.sort(key=lambda x: x.record_at)

    # Window borders: right = today's midnight (Asia/Shanghai), left = 9 days earlier.
    # The result's [0] and [-1] land on left_border and right_border respectively.
    zero = datetime.now(ZoneInfo('Asia/Shanghai')).replace(hour=0, minute=0, second=0, microsecond=0)
    left_border = zero - timedelta(days=9)
    right_border = zero.replace(microsecond=1000)

    lefts: list[TetraLeagueHistoryData] = []
    in_border: list[TetraLeagueHistoryData] = []
    rights: list[TetraLeagueHistoryData] = []

    # Bucket every point relative to the window.
    for i in data:
        if i.record_at < left_border:
            lefts.append(i)
        elif i.record_at < right_border:
            in_border.append(i)
        else:
            rights.append(i)

    ret: list[TetraLeagueHistoryData] = []

    # Left boundary point.
    if lefts and in_border:  # points on both sides of the border: interpolate
        ret.append(get_specified_point(lefts[-1], in_border[0], left_border))
    elif lefts and not in_border:  # only points to the left: extend the last one
        ret.append(TetraLeagueHistoryData(tr=lefts[-1].tr, record_at=left_border))
    elif not lefts and in_border:  # only points inside: extend the first one
        ret.append(TetraLeagueHistoryData(tr=in_border[0].tr, record_at=left_border))
    elif not lefts and not in_border and rights:  # only points to the right: extend the first one  # fmt: skip
        ret.append(TetraLeagueHistoryData(tr=rights[0].tr, record_at=left_border))
    else:  # no points at all: should be unreachable
        raise WhatTheFuckError

    # Interior points pass through unchanged.
    ret.extend(in_border)

    # Right boundary point.
    if in_border and rights:  # points on both sides of the border: interpolate
        ret.append(get_specified_point(in_border[-1], rights[0], right_border))
    elif not in_border and rights:  # only points to the right: extend the first one
        ret.append(TetraLeagueHistoryData(tr=rights[0].tr, record_at=right_border))
    elif in_border and not rights:  # only points inside: extend the last one
        ret.append(TetraLeagueHistoryData(tr=in_border[-1].tr, record_at=right_border))
    elif not in_border and not rights and lefts:  # only points to the left: extend the last one  # fmt: skip
        ret.append(TetraLeagueHistoryData(tr=lefts[-1].tr, record_at=right_border))
    else:  # no points at all: should be unreachable
        raise WhatTheFuckError
    return ret
|
||||
|
||||
|
||||
async def make_query_image_v1(player: Player) -> bytes:
    """Render the v1 info card for *player* and return the screenshot bytes.

    Raises:
        FallbackError: If the player lacks rated league data with a VS value,
            or has no league-flow history (raised by the helpers).
    """
    # Fetch profile, summaries and the avatar revision concurrently.
    (
        (user, user_info, league, sprint, blitz, leagueflow),
        (avatar_revision,),
    ) = await gather(
        gather(player.user, player.get_info(), player.league, player.sprint, player.blitz, player.get_leagueflow()),
        gather(player.avatar_revision),
    )
    league_data = get_league_data(league, RatedData)
    if league_data.vs is None:
        raise FallbackError
    histories = flow_to_history(leagueflow, handle_history_data)
    value_max, value_min = get_value_bounds([i.tr for i in histories])
    split_value, offset = get_split(value_max, value_min)
    # Format the sprint (40L) time as "Y.ZZZs" or "Xm Y.ZZZs".
    if sprint.data.record is not None:
        duration = timedelta(milliseconds=sprint.data.record.results.stats.finaltime).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'
    blitz_value = f'{blitz.data.record.results.stats.score:,}' if blitz.data.record is not None else 'N/A'
    netloc = get_self_netloc()
    async with HostPage(
        page=await render(
            'v1/tetrio/info',
            Info(
                user=User(
                    # Uploaded avatar when a non-zero revision exists, otherwise an identicon.
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if avatar_revision is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                    name=user.name.upper(),
                    bio=user_info.data.bio,
                ),
                ranking=Ranking(
                    rating=round(league_data.glicko, 2),
                    rd=round(league_data.rd, 2),
                ),
                tetra_league=TetraLeague(
                    rank=league_data.rank,
                    tr=round(league_data.tr, 2),
                    global_rank=league_data.standing,
                    pps=league_data.pps,
                    # lpm/adpm are derived: pieces-per-second → lines-per-minute, VS → ADPM.
                    lpm=round(lpm := (league_data.pps * 24), 2),
                    apm=league_data.apm,
                    apl=round(league_data.apm / lpm, 2),
                    vs=league_data.vs,
                    adpm=round(adpm := (league_data.vs * 0.6), 2),
                    adpl=round(adpm / lpm, 2),
                ),
                tetra_league_history=TetraLeagueHistory(
                    data=histories,
                    split_interval=split_value,
                    min_tr=value_min,
                    max_tr=value_max,
                    offset=offset,
                ),
                radar=Radar(
                    app=(app := (league_data.apm / (60 * league_data.pps))),
                    dsps=(dsps := ((league_data.vs / 100) - (league_data.apm / 60))),
                    dspp=(dspp := (dsps / league_data.pps)),
                    ci=150 * dspp - 125 * app + 50 * (league_data.vs / league_data.apm) - 25,
                    ge=2 * ((app * dsps) / league_data.pps),
                ),
                sprint=sprint_value,
                blitz=blitz_value,
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
|
||||
138
nonebot_plugin_tetris_stats/games/tetrio/query/v2.py
Normal file
138
nonebot_plugin_tetris_stats/games/tetrio/query/v2.py
Normal file
@@ -0,0 +1,138 @@
|
||||
from asyncio import gather
|
||||
from datetime import datetime, timedelta
|
||||
from hashlib import md5
|
||||
|
||||
from yarl import URL
|
||||
|
||||
from ....utils.exception import FallbackError
|
||||
from ....utils.host import HostPage, get_self_netloc
|
||||
from ....utils.metrics import get_metrics
|
||||
from ....utils.render import render
|
||||
from ....utils.render.schemas.base import Avatar
|
||||
from ....utils.render.schemas.tetrio.user.info_v2 import (
|
||||
Badge,
|
||||
Blitz,
|
||||
Info,
|
||||
Sprint,
|
||||
Statistic,
|
||||
TetraLeague,
|
||||
TetraLeagueStatistic,
|
||||
User,
|
||||
Zen,
|
||||
)
|
||||
from ....utils.screenshot import screenshot
|
||||
from ..api import Player
|
||||
from ..api.schemas.summaries.league import InvalidData, NeverPlayedData, NeverRatedData
|
||||
from .tools import flow_to_history, handling_special_value
|
||||
|
||||
|
||||
async def make_query_image_v2(player: Player) -> bytes:
    """Render the v2 info card for *player* and return the screenshot bytes."""
    # Fetch profile, summaries, resource revisions and league flow concurrently.
    (
        (user, user_info, league, sprint, blitz, zen),
        (avatar_revision, banner_revision, leagueflow),
    ) = await gather(
        gather(player.user, player.get_info(), player.league, player.sprint, player.blitz, player.zen),
        gather(player.avatar_revision, player.banner_revision, player.get_leagueflow()),
    )
    # Format the sprint (40L) time as "Y.ZZZs" or "Xm Y.ZZZs".
    if sprint.data.record is not None:
        duration = timedelta(milliseconds=sprint.data.record.results.stats.finaltime).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'

    # Human-readable total play time; a gametime of -1 means "unknown" (→ None).
    play_time: str | None
    if (game_time := handling_special_value(user_info.data.gametime)) is not None:
        if game_time // 3600 > 0:
            play_time = f'{game_time//3600:.0f}h {game_time % 3600 // 60:.0f}m {game_time % 60:.0f}s'
        elif game_time // 60 > 0:
            play_time = f'{game_time//60:.0f}m {game_time % 60:.0f}s'
        else:
            play_time = f'{game_time:.0f}s'
    else:
        play_time = game_time
    # League history is optional in v2: fall back to None when unavailable.
    try:
        history = flow_to_history(leagueflow, lambda x: x[-100:])
    except FallbackError:
        history = None
    netloc = get_self_netloc()
    async with HostPage(
        await render(
            'v2/tetrio/user/info',
            Info(
                user=User(
                    id=user.ID,
                    name=user.name.upper(),
                    bio=user_info.data.bio,
                    banner=str(
                        URL(f'http://{netloc}/host/resource/tetrio/banners/{user.ID}') % {'revision': banner_revision}
                    )
                    if banner_revision is not None and banner_revision != 0
                    else None,
                    # Uploaded avatar when a non-zero revision exists, otherwise an identicon.
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if avatar_revision is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                    badges=[
                        Badge(
                            id=i.id,
                            description=i.label,
                            group=i.group,
                            # i.ts may be a non-datetime placeholder; only real datetimes are shown.
                            receive_at=i.ts if isinstance(i.ts, datetime) else None,
                        )
                        for i in user_info.data.badges
                    ],
                    country=user_info.data.country,
                    role=user_info.data.role,
                    xp=user_info.data.xp,
                    friend_count=user_info.data.friend_count,
                    supporter_tier=user_info.data.supporter_tier,
                    bad_standing=user_info.data.badstanding or False,
                    playtime=play_time,
                    join_at=user_info.data.ts,
                ),
                # League section is omitted entirely for never-played / invalid data.
                tetra_league=TetraLeague(
                    rank=league.data.rank,
                    highest_rank='z' if isinstance(league.data, NeverRatedData) else league.data.bestrank,
                    tr=round(league.data.tr, 2),
                    glicko=round(league.data.glicko, 2),
                    rd=round(league.data.rd, 2),
                    global_rank=league.data.standing,
                    country_rank=league.data.standing_local,
                    pps=(metrics := get_metrics(pps=league.data.pps, apm=league.data.apm, vs=league.data.vs)).pps,
                    apm=metrics.apm,
                    apl=metrics.apl,
                    vs=metrics.vs,
                    adpl=metrics.adpl,
                    statistic=TetraLeagueStatistic(total=league.data.gamesplayed, wins=league.data.gameswon),
                    decaying=league.data.decaying,
                    history=history,
                )
                if not isinstance(league.data, NeverPlayedData | InvalidData)
                else None,
                statistic=Statistic(
                    total=handling_special_value(user_info.data.gamesplayed),
                    wins=handling_special_value(user_info.data.gameswon),
                ),
                sprint=Sprint(
                    time=sprint_value,
                    global_rank=sprint.data.rank,
                    play_at=sprint.data.record.ts,
                )
                if sprint.data.record is not None
                else None,
                blitz=Blitz(
                    score=blitz.data.record.results.stats.score,
                    global_rank=blitz.data.rank,
                    play_at=blitz.data.record.ts,
                )
                if blitz.data.record is not None
                else None,
                zen=Zen(level=zen.data.level, score=zen.data.score),
            ),
        ),
    ) as page_hash:
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
|
||||
153
nonebot_plugin_tetris_stats/games/tetrio/rank/__init__.py
Normal file
153
nonebot_plugin_tetris_stats/games/tetrio/rank/__init__.py
Normal file
@@ -0,0 +1,153 @@
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable, Sequence
|
||||
from datetime import datetime, timedelta, timezone
|
||||
from math import floor
|
||||
from statistics import mean
|
||||
from typing import TYPE_CHECKING
|
||||
from uuid import uuid4
|
||||
|
||||
from nonebot import get_driver
|
||||
from nonebot_plugin_alconna import Subcommand
|
||||
from nonebot_plugin_apscheduler import scheduler
|
||||
from nonebot_plugin_orm import get_session
|
||||
from sqlalchemy import select
|
||||
|
||||
from ....utils.exception import RequestError
|
||||
from ....utils.retry import retry
|
||||
from .. import alc
|
||||
from .. import command as base_command
|
||||
from ..api.leaderboards import by
|
||||
from ..api.schemas.base import P
|
||||
from ..api.schemas.leaderboards import Parameter
|
||||
from ..api.schemas.leaderboards.by import Entry
|
||||
from ..constant import RANK_PERCENTILE
|
||||
from ..models import TETRIOLeagueHistorical, TETRIOLeagueStats, TETRIOLeagueStatsField
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from ..api.schemas.leaderboards.by import BySuccessModel
|
||||
from ..api.typing import Rank
|
||||
|
||||
UTC = timezone.utc

driver = get_driver()


# `rank` subcommand; its concrete options are attached by the all/detail submodules.
command = Subcommand('rank', help_text='查询 TETR.IO 段位信息')
|
||||
|
||||
|
||||
def wrapper(slot: int | str, content: str | None) -> str | None:
|
||||
if slot == 'rank' and not content:
|
||||
return '--all'
|
||||
if content is not None:
|
||||
return f'--detail {content.lower()}'
|
||||
return content
|
||||
|
||||
|
||||
# Shortcut: "iorank" optionally followed by up to two rank characters;
# `wrapper` maps the captured group onto --all / --detail.
alc.shortcut(
    r'(?i:io)(?i:段位|段|rank)\s*(?P<rank>[a-zA-Z+-]{0,2})',
    command='tstats TETR.IO rank {rank}',
    humanized='iorank',
    fuzzy=False,
    wrapper=wrapper,
)
|
||||
|
||||
|
||||
def _pps(user: Entry) -> float:
    """Key function: a player's pieces per second."""
    return user.league.pps
|
||||
|
||||
|
||||
def _apm(user: Entry) -> float:
    """Key function: a player's attacks per minute."""
    return user.league.apm
|
||||
|
||||
|
||||
def _vs(user: Entry) -> float:
    """Key function: a player's VS score."""
    return user.league.vs
|
||||
|
||||
|
||||
def _min(users: Sequence[Entry], field: Callable[[Entry], float]) -> Entry:
    """Return the player with the smallest *field* value."""
    return min(users, key=field)
|
||||
|
||||
|
||||
def _max(users: Sequence[Entry], field: Callable[[Entry], float]) -> Entry:
    """Return the player with the largest *field* value."""
    return max(users, key=field)
|
||||
|
||||
|
||||
def find_special_player(
    users: Sequence[Entry],
    field: Callable[[Entry], float],
    sort: Callable[[Sequence[Entry], Callable[[Entry], float]], Entry],
) -> Entry:
    """Select one player from *users* by applying *sort* (``_min``/``_max``) with *field* as key."""
    return sort(users, field)
|
||||
|
||||
|
||||
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
async def get_tetra_league_data() -> None:
    """Snapshot the full Tetra League leaderboard and persist aggregate stats.

    Runs every 6 hours: pages through the leaderboard (100 entries per page),
    computes per-rank TR cut lines, player counts and min/avg/max PPS/APM/VS,
    and stores everything (including the raw pages) as one TETRIOLeagueStats row.
    """
    x_session_id = uuid4()
    # Retry transient request failures up to 10 times per page.
    retry_by = retry(max_attempts=10, exception_type=RequestError)(by)
    prisecter = P(pri=9007199254740991, sec=9007199254740991, ter=9007199254740991)  # * from ch.tetr.io
    results: list[BySuccessModel] = []
    while True:
        model = await retry_by('league', Parameter(after=prisecter.to_prisecter(), limit=100), x_session_id)
        # NOTE(review): assumes every page has at least one entry — an empty
        # page would raise IndexError here; confirm the API guarantees this.
        prisecter = model.data.entries[-1].p
        results.append(model)
        if len(model.data.entries) < 100:  # page size  # noqa: PLR2004
            break

    # Flatten all pages and sort by TR, best first.
    players: list[Entry] = []
    for result in results:
        players.extend(result.data.entries)
    players.sort(key=lambda x: x.league.tr, reverse=True)

    # Group players by their current rank.
    rank_player_mapping: defaultdict[Rank, list[Entry]] = defaultdict(list)
    for player in players:
        rank_player_mapping[player.league.rank].append(player)

    stats = TETRIOLeagueStats(raw=[], fields=[], update_time=datetime.now(UTC))
    fields: list[TETRIOLeagueStatsField] = []
    for rank, percentile in RANK_PERCENTILE.items():
        # TR cut line: TR of the player sitting at this rank's percentile.
        offset = floor((percentile / 100) * len(players)) - 1
        tr_line = players[offset].league.tr
        rank_players = rank_player_mapping[rank]
        fields.append(
            TETRIOLeagueStatsField(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_players),
                low_pps=find_special_player(rank_players, _pps, _min),
                low_apm=find_special_player(rank_players, _apm, _min),
                low_vs=find_special_player(rank_players, _vs, _min),
                avg_pps=mean(_pps(i) for i in rank_players),
                avg_apm=mean(_apm(i) for i in rank_players),
                avg_vs=mean(_vs(i) for i in rank_players),
                high_pps=find_special_player(rank_players, _pps, _max),
                high_apm=find_special_player(rank_players, _apm, _max),
                high_vs=find_special_player(rank_players, _vs, _max),
                stats=stats,
            )
        )
    # Keep the raw API pages alongside the derived fields.
    historicals = [
        TETRIOLeagueHistorical(request_id=x_session_id, data=model, update_time=model.cache.cached_at, stats=stats)
        for model in results
    ]
    stats.raw = historicals
    stats.fields = fields
    async with get_session() as session:
        session.add(stats)
        await session.commit()
|
||||
|
||||
|
||||
@driver.on_startup
async def _() -> None:
    """On startup, refresh league stats if the latest snapshot is missing or older than 6 hours."""
    async with get_session() as session:
        latest_time = await session.scalar(
            select(TETRIOLeagueStats.update_time).order_by(TETRIOLeagueStats.id.desc()).limit(1)
        )
    # Stored times are naive; interpret them as UTC for the age check.
    if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
        await get_tetra_league_data()
|
||||
|
||||
|
||||
# Imported late so the submodules can import `command` from this module first.
from . import all, detail  # noqa: A004, E402

base_command.add(command)

__all__ = ['all', 'detail']
|
||||
115
nonebot_plugin_tetris_stats/games/tetrio/rank/all.py
Normal file
115
nonebot_plugin_tetris_stats/games/tetrio/rank/all.py
Normal file
@@ -0,0 +1,115 @@
|
||||
from datetime import timedelta
|
||||
|
||||
from arclet.alconna import Arg
|
||||
from nonebot_plugin_alconna import Option, Subcommand, UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from ....db import trigger
|
||||
from ....utils.host import HostPage, get_self_netloc
|
||||
from ....utils.metrics import get_metrics
|
||||
from ....utils.render import render
|
||||
from ....utils.render.schemas.tetrio.rank.v1 import Data as DataV1
|
||||
from ....utils.render.schemas.tetrio.rank.v1 import ItemData as ItemDataV1
|
||||
from ....utils.render.schemas.tetrio.rank.v2 import AverageData as AverageDataV2
|
||||
from ....utils.render.schemas.tetrio.rank.v2 import Data as DataV2
|
||||
from ....utils.render.schemas.tetrio.rank.v2 import ItemData as ItemDataV2
|
||||
from ....utils.screenshot import screenshot
|
||||
from .. import alc
|
||||
from ..constant import GAME_TYPE
|
||||
from ..models import TETRIOLeagueStats
|
||||
from ..typing import Template
|
||||
from . import command
|
||||
|
||||
# Attach the `--all` subcommand (overview of every rank) to `rank`.
command.add(
    Subcommand(
        '--all', Option('--template', Arg('template', Template), alias=['-T'], help_text='要使用的查询模板'), dest='all'
    )
)
|
||||
|
||||
|
||||
@alc.assign('TETRIO.rank.all')
async def _(event_session: EventSession, template: Template | None = None):
    """Render the all-ranks overview, comparing the latest snapshot against a ~24h-older one."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=['--all'] + ([f'--template {template}'] if template is not None else []),
    ):
        async with get_session() as session:
            # Most recent snapshot.
            latest_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(TETRIOLeagueStats.id.desc())
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
            # Snapshot closest in time to 24 hours before the latest one.
            compare_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(
                        func.abs(
                            func.julianday(TETRIOLeagueStats.update_time)
                            - func.julianday(latest_data.update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
        match template:
            case 'v1' | None:
                await UniMessage.image(raw=await make_image_v1(latest_data, compare_data)).finish()
            case 'v2':
                await UniMessage.image(raw=await make_image_v2(latest_data, compare_data)).finish()
|
||||
|
||||
|
||||
async def make_image_v1(latest_data: TETRIOLeagueStats, compare_data: TETRIOLeagueStats) -> bytes:
    """Render the v1 rank-overview table and return the screenshot bytes.

    Pairs each rank's latest field with the comparison snapshot's field to
    compute the TR-line trend.
    """
    async with HostPage(
        await render(
            'v1/tetrio/rank',
            DataV1(
                items={
                    i[0].rank: ItemDataV1(
                        trending=round(i[0].tr_line - i[1].tr_line, 2),
                        require_tr=round(i[0].tr_line, 2),
                        players=i[0].player_count,
                    )
                    for i in zip(latest_data.fields, compare_data.fields, strict=True)
                },
                updated_at=latest_data.update_time,
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
|
||||
|
||||
|
||||
async def make_image_v2(latest_data: TETRIOLeagueStats, compare_data: TETRIOLeagueStats) -> bytes:
    """Render the v2 rank-overview table and return the screenshot bytes.

    Adds per-rank average metrics (derived via get_metrics) on top of the
    v1 TR-line/trend data.
    """
    async with HostPage(
        await render(
            'v2/tetrio/rank',
            DataV2(
                items={
                    i[0].rank: ItemDataV2(
                        require_tr=round(i[0].tr_line, 2),
                        trending=round(i[0].tr_line - i[1].tr_line, 2),
                        average_data=AverageDataV2(
                            pps=(metrics := get_metrics(pps=i[0].avg_pps, apm=i[0].avg_apm, vs=i[0].avg_vs)).pps,
                            apm=metrics.apm,
                            apl=metrics.apl,
                            vs=metrics.vs,
                            adpl=metrics.adpl,
                        ),
                        players=i[0].player_count,
                    )
                    for i in zip(latest_data.fields, compare_data.fields, strict=True)
                },
                updated_at=latest_data.update_time,
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
|
||||
128
nonebot_plugin_tetris_stats/games/tetrio/rank/detail.py
Normal file
128
nonebot_plugin_tetris_stats/games/tetrio/rank/detail.py
Normal file
@@ -0,0 +1,128 @@
|
||||
from datetime import timedelta, timezone
|
||||
from zoneinfo import ZoneInfo
|
||||
|
||||
from arclet.alconna import Arg
|
||||
from nonebot import get_driver
|
||||
from nonebot_plugin_alconna import Option, UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from sqlalchemy import func, select
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from ....db import trigger
|
||||
from ....utils.host import HostPage, get_self_netloc
|
||||
from ....utils.metrics import get_metrics
|
||||
from ....utils.render import render
|
||||
from ....utils.render.schemas.tetrio.rank.detail import Data, SpecialData
|
||||
from ....utils.screenshot import screenshot
|
||||
from .. import alc
|
||||
from ..api.typing import ValidRank
|
||||
from ..constant import GAME_TYPE
|
||||
from ..models import TETRIOLeagueStats
|
||||
from . import command
|
||||
|
||||
UTC = timezone.utc

driver = get_driver()

# Attach the `--detail <rank>` option to the `rank` subcommand.
command.add(Option('--detail', Arg('rank', ValidRank), alias=['-D']))
|
||||
|
||||
|
||||
@alc.assign('TETRIO.rank')
async def _(rank: ValidRank, event_session: EventSession):
    """Render the detail card for *rank*, comparing the latest snapshot against a ~24h-older one."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=[f'--detail {rank}'],
    ):
        async with get_session() as session:
            # Most recent snapshot.
            latest_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(TETRIOLeagueStats.id.desc())
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
            # Snapshot closest in time to 24 hours before the latest one.
            compare_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(
                        func.abs(
                            func.julianday(TETRIOLeagueStats.update_time)
                            - func.julianday(latest_data.update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
        await UniMessage.image(
            raw=await make_image(
                rank,
                latest_data,
                compare_data,
            )
        ).finish()
|
||||
|
||||
|
||||
async def make_image(rank: ValidRank, latest: TETRIOLeagueStats, compare: TETRIOLeagueStats) -> bytes:
    """Render the per-rank detail card and return the screenshot bytes.

    Shows the rank's TR cut line, its trend against *compare*, and the
    minimum / average / maximum PPS, APM and VS along with their holders.
    """
    latest_data = next(filter(lambda x: x.rank == rank, latest.fields))
    compare_data = next(filter(lambda x: x.rank == rank, compare.fields))
    avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
    # Full metric sets for the holder of each extreme value.
    low_pps = get_metrics(
        pps=latest_data.low_pps.league.pps, apm=latest_data.low_pps.league.apm, vs=latest_data.low_pps.league.vs
    )
    low_apm = get_metrics(
        pps=latest_data.low_apm.league.pps, apm=latest_data.low_apm.league.apm, vs=latest_data.low_apm.league.vs
    )
    low_vs = get_metrics(
        pps=latest_data.low_vs.league.pps, apm=latest_data.low_vs.league.apm, vs=latest_data.low_vs.league.vs
    )
    max_pps = get_metrics(
        pps=latest_data.high_pps.league.pps, apm=latest_data.high_pps.league.apm, vs=latest_data.high_pps.league.vs
    )
    max_apm = get_metrics(
        pps=latest_data.high_apm.league.pps, apm=latest_data.high_apm.league.apm, vs=latest_data.high_apm.league.vs
    )
    max_vs = get_metrics(
        pps=latest_data.high_vs.league.pps, apm=latest_data.high_vs.league.apm, vs=latest_data.high_vs.league.vs
    )
    async with HostPage(
        await render(
            'v2/tetrio/rank/detail',
            Data(
                name=latest_data.rank,
                trending=round(latest_data.tr_line - compare_data.tr_line, 2),
                require_tr=round(latest_data.tr_line, 2),
                players=latest_data.player_count,
                minimum_data=SpecialData(
                    apm=low_apm.apm,
                    pps=low_pps.pps,
                    lpm=low_pps.lpm,
                    vs=low_vs.vs,
                    adpm=low_vs.adpm,
                    apm_holder=latest_data.low_apm.username.upper(),
                    pps_holder=latest_data.low_pps.username.upper(),
                    vs_holder=latest_data.low_vs.username.upper(),
                ),
                average_data=SpecialData(
                    apm=avg.apm, pps=avg.pps, lpm=avg.lpm, vs=avg.vs, adpm=avg.adpm, apl=avg.apl, adpl=avg.adpl
                ),
                maximum_data=SpecialData(
                    apm=max_apm.apm,
                    pps=max_pps.pps,
                    lpm=max_pps.lpm,
                    vs=max_vs.vs,
                    adpm=max_vs.adpm,
                    apm_holder=latest_data.high_apm.username.upper(),
                    pps_holder=latest_data.high_pps.username.upper(),
                    vs_holder=latest_data.high_vs.username.upper(),
                ),
                # Snapshot times are stored naive (UTC); displayed in Asia/Shanghai.
                updated_at=latest.update_time.replace(tzinfo=UTC).astimezone(ZoneInfo('Asia/Shanghai')),
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
|
||||
33
nonebot_plugin_tetris_stats/games/tetrio/record/__init__.py
Normal file
33
nonebot_plugin_tetris_stats/games/tetrio/record/__init__.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from arclet.alconna import Arg, ArgFlag
from nonebot_plugin_alconna import Args, At, Subcommand

from ....utils.typing import Me
from .. import command as base_command
from .. import get_player

# 'record' subcommand shared by the per-mode record modules (blitz, sprint).
# Both arguments are hidden from help output and optional: `target` lets the
# caller @-mention another user (or query themselves via `Me`), while
# `account` accepts a raw TETR.IO username/ID parsed through `get_player`.
command = Subcommand(
    'record',
    Args(
        Arg(
            'target',
            At | Me,
            notice='@想要查询的人 / 自己',
            flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
        ),
        Arg(
            'account',
            get_player,
            notice='TETR.IO 用户名 / ID',
            flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
        ),
    ),
)

# Imported AFTER `command` is defined because importing these modules has a
# side effect: each registers its own Option ('--blitz' / '--40l') on
# `command`. They must run before the subcommand is attached below.
from . import blitz, sprint  # noqa: E402

# Attach the fully-populated 'record' subcommand to the parent TETR.IO command.
base_command.add(command)

__all__ = [
    'blitz',
    'sprint',
]
|
||||
151
nonebot_plugin_tetris_stats/games/tetrio/record/blitz.py
Normal file
151
nonebot_plugin_tetris_stats/games/tetrio/record/blitz.py
Normal file
@@ -0,0 +1,151 @@
|
||||
from asyncio import gather
|
||||
from datetime import timedelta
|
||||
from hashlib import md5
|
||||
|
||||
from nonebot.adapters import Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot_plugin_alconna import At, Option
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from nonebot_plugin_user import get_user
|
||||
from yarl import URL
|
||||
|
||||
from ....db import query_bind_info, trigger
|
||||
from ....i18n import Lang
|
||||
from ....utils.exception import RecordNotFoundError
|
||||
from ....utils.host import HostPage, get_self_netloc
|
||||
from ....utils.metrics import get_metrics
|
||||
from ....utils.render import render
|
||||
from ....utils.render.schemas.base import Avatar
|
||||
from ....utils.render.schemas.tetrio.record.base import Finesse, Max, Mini, Tspins, User
|
||||
from ....utils.render.schemas.tetrio.record.blitz import Record, Statistic
|
||||
from ....utils.screenshot import screenshot
|
||||
from ....utils.typing import Me
|
||||
from .. import alc
|
||||
from ..api.player import Player
|
||||
from ..constant import GAME_TYPE
|
||||
from . import command
|
||||
|
||||
# Register the '--blitz' flag on the shared 'record' subcommand so this
# module's handlers are selected via the 'TETRIO.record.blitz' assign path.
command.add(Option('--blitz', dest='blitz'))

# Natural-language trigger: e.g. "io记录blitz" expands to
# "tstats TETR.IO record --blitz". The (?i:...) groups make each part
# case-insensitive independently.
alc.shortcut(
    '(?i:io)(?i:记录|record)(?i:blitz)',
    command='tstats TETR.IO record --blitz',
    humanized='io记录blitz',
)
|
||||
|
||||
|
||||
@alc.assign('TETRIO.record.blitz')
async def _(
    event: Event,
    matcher: Matcher,
    target: At | Me,
    event_session: EventSession,
):
    """Handle `record --blitz` for an @-mentioned user or the sender.

    Resolves the platform user to a stored TETR.IO binding; finishes with an
    error message if no binding exists. Runs inside `trigger(...)` so the
    invocation is recorded for statistics.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='record',
        command_args=['--blitz'],
    ):
        async with get_session() as session:
            bind = await query_bind_info(
                session=session,
                # For an @-mention use the mentioned account; otherwise fall
                # back to the event sender's own platform user id.
                user=await get_user(
                    event_session.platform, target.target if isinstance(target, At) else event.get_user_id()
                ),
                game_platform=GAME_TYPE,
            )
        if bind is None:
            await matcher.finish('未查询到绑定信息')
        # trust=True: the bound account id was validated at bind time, so the
        # Player wrapper skips re-validation.
        player = Player(user_id=bind.game_account, trust=True)
        # Prefix the image with the "unverified data" warning for bound lookups.
        await (
            UniMessage.i18n(Lang.interaction.warning.unverified) + UniMessage.image(raw=await make_blitz_image(player))
        ).finish()
|
||||
|
||||
|
||||
@alc.assign('TETRIO.record.blitz')
async def _(account: Player, event_session: EventSession):
    """Handle `record --blitz` when an explicit username/ID was supplied.

    `account` is already parsed into a Player by the `get_player` Arg parser,
    so no binding lookup is needed.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='record',
        command_args=['--blitz'],
    ):
        await UniMessage.image(raw=await make_blitz_image(account)).finish()
|
||||
|
||||
|
||||
async def make_blitz_image(player: Player) -> bytes:
    """Render the player's best Blitz record into a PNG screenshot.

    Fetches the user profile and Blitz record concurrently, builds the
    `v2/tetrio/record/blitz` render payload, hosts the rendered page locally
    and screenshots it.

    Raises:
        RecordNotFoundError: if the player has no Blitz record.
    """
    user, blitz = await gather(player.user, player.blitz)
    if blitz.data.record is None:
        msg = f'未找到用户 {user.name.upper()} 的 Blitz 记录'
        raise RecordNotFoundError(msg)
    stats = blitz.data.record.results.stats
    clears = stats.clears
    # `finaltime` is fed to timedelta(milliseconds=...), i.e. the API value is
    # in milliseconds; `duration` is the run length in seconds.
    duration = timedelta(milliseconds=stats.finaltime).total_seconds()
    metrics = get_metrics(pps=stats.piecesplaced / duration)
    netloc = get_self_netloc()
    async with HostPage(
        page=await render(
            'v2/tetrio/record/blitz',
            Record(
                type='best',
                user=User(
                    id=user.ID,
                    name=user.name.upper(),
                    # Custom avatar URL when a revision exists; revision None/0
                    # presumably means "no uploaded avatar" (TODO confirm), in
                    # which case an identicon derived from the user ID is used.
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                ),
                replay_id=blitz.data.record.replayid,
                rank=blitz.data.rank,
                personal_rank=1,
                statistic=Statistic(
                    keys=stats.inputs,
                    kpp=round(stats.inputs / stats.piecesplaced, 2),
                    kps=round(stats.inputs / duration, 2),
                    max=Max(
                        # topcombo/topbtb are 1-based counters; subtract 1 for
                        # display and clamp at zero for runs with no combo/btb.
                        combo=max((0, stats.topcombo - 1)),
                        btb=max((0, stats.topbtb - 1)),
                    ),
                    pieces=stats.piecesplaced,
                    pps=metrics.pps,
                    lines=stats.lines,
                    lpm=metrics.lpm,
                    holds=stats.holds,
                    score=stats.score,
                    # Score per piece.
                    spp=round(stats.score / stats.piecesplaced, 2),
                    single=clears.singles,
                    double=clears.doubles,
                    triple=clears.triples,
                    quad=clears.quads,
                    tspins=Tspins(
                        total=clears.realtspins,
                        single=clears.tspinsingles,
                        double=clears.tspindoubles,
                        triple=clears.tspintriples,
                        mini=Mini(
                            total=clears.minitspins,
                            single=clears.minitspinsingles,
                            double=clears.minitspindoubles,
                        ),
                    ),
                    all_clear=clears.allclear,
                    finesse=Finesse(
                        faults=stats.finesse.faults,
                        # Percentage of pieces placed with perfect finesse.
                        accuracy=round(stats.finesse.perfectpieces / stats.piecesplaced * 100, 2),
                    ),
                    level=stats.level,
                ),
                play_at=blitz.data.record.ts,
            ),
        )
    ) as page_hash:
        # The page is hosted only for the lifetime of this context manager.
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
|
||||
151
nonebot_plugin_tetris_stats/games/tetrio/record/sprint.py
Normal file
151
nonebot_plugin_tetris_stats/games/tetrio/record/sprint.py
Normal file
@@ -0,0 +1,151 @@
|
||||
from asyncio import gather
|
||||
from datetime import timedelta
|
||||
from hashlib import md5
|
||||
|
||||
from nonebot.adapters import Event
|
||||
from nonebot.matcher import Matcher
|
||||
from nonebot_plugin_alconna import At, Option
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from nonebot_plugin_user import get_user
|
||||
from yarl import URL
|
||||
|
||||
from ....db import query_bind_info, trigger
|
||||
from ....i18n import Lang
|
||||
from ....utils.exception import RecordNotFoundError
|
||||
from ....utils.host import HostPage, get_self_netloc
|
||||
from ....utils.metrics import get_metrics
|
||||
from ....utils.render import render
|
||||
from ....utils.render.schemas.base import Avatar
|
||||
from ....utils.render.schemas.tetrio.record.base import Finesse, Max, Mini, Statistic, Tspins, User
|
||||
from ....utils.render.schemas.tetrio.record.sprint import Record
|
||||
from ....utils.screenshot import screenshot
|
||||
from ....utils.typing import Me
|
||||
from .. import alc
|
||||
from ..api.player import Player
|
||||
from ..constant import GAME_TYPE
|
||||
from . import command
|
||||
|
||||
# Register the '--40l' flag on the shared 'record' subcommand; dest='sprint'
# maps it onto the 'TETRIO.record.sprint' assign path used below.
command.add(Option('--40l', dest='sprint'))

# Natural-language trigger: e.g. "io记录40l" expands to
# "tstats TETR.IO record --40l".
alc.shortcut(
    '(?i:io)(?i:记录|record)(?i:40l)',
    command='tstats TETR.IO record --40l',
    humanized='io记录40l',
)
|
||||
|
||||
|
||||
@alc.assign('TETRIO.record.sprint')
async def _(
    event: Event,
    matcher: Matcher,
    target: At | Me,
    event_session: EventSession,
):
    """Handle `record --40l` for an @-mentioned user or the sender.

    Mirrors the blitz bound-account handler: resolve the platform user to a
    stored TETR.IO binding, bail out with a message if none, then render the
    sprint (40 Lines) record image.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='record',
        command_args=['--40l'],
    ):
        async with get_session() as session:
            bind = await query_bind_info(
                session=session,
                # @-mention target takes precedence; otherwise query the sender.
                user=await get_user(
                    event_session.platform, target.target if isinstance(target, At) else event.get_user_id()
                ),
                game_platform=GAME_TYPE,
            )
        if bind is None:
            await matcher.finish('未查询到绑定信息')
        # Account id came from our own binding table, so skip re-validation.
        player = Player(user_id=bind.game_account, trust=True)
        await (
            UniMessage.i18n(Lang.interaction.warning.unverified) + UniMessage.image(raw=await make_sprint_image(player))
        ).finish()
|
||||
|
||||
|
||||
@alc.assign('TETRIO.record.sprint')
async def _(account: Player, event_session: EventSession):
    """Handle `record --40l` when an explicit username/ID was supplied.

    `account` is already a Player produced by the `get_player` Arg parser.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='record',
        command_args=['--40l'],
    ):
        await UniMessage.image(raw=await make_sprint_image(account)).finish()
|
||||
|
||||
|
||||
async def make_sprint_image(player: Player) -> bytes:
    """Render the player's best 40 Lines (sprint) record into a PNG screenshot.

    Fetches the user profile and sprint record concurrently, builds the
    `v2/tetrio/record/40l` render payload, hosts the rendered page locally and
    screenshots it.

    Raises:
        RecordNotFoundError: if the player has no 40L record.
    """
    user, sprint = await gather(player.user, player.sprint)
    if sprint.data.record is None:
        msg = f'未找到用户 {user.name.upper()} 的 40L 记录'
        raise RecordNotFoundError(msg)
    stats = sprint.data.record.results.stats
    clears = stats.clears
    # `finaltime` is in milliseconds (converted via timedelta); `duration` is
    # the run time in seconds.
    duration = timedelta(milliseconds=stats.finaltime).total_seconds()
    # Human-readable finish time: "SS.sss s" under a minute, "Mm SS.sss s" above.
    sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    metrics = get_metrics(pps=stats.piecesplaced / duration)
    netloc = get_self_netloc()
    async with HostPage(
        page=await render(
            'v2/tetrio/record/40l',
            Record(
                type='best',
                user=User(
                    id=user.ID,
                    name=user.name.upper(),
                    # Custom avatar URL when a revision exists; revision None/0
                    # presumably means "no uploaded avatar" (TODO confirm) and
                    # falls back to an identicon keyed on the user ID.
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                ),
                time=sprint_value,
                replay_id=sprint.data.record.replayid,
                rank=sprint.data.rank,
                personal_rank=1,
                statistic=Statistic(
                    keys=stats.inputs,
                    kpp=round(stats.inputs / stats.piecesplaced, 2),
                    kps=round(stats.inputs / duration, 2),
                    max=Max(
                        # 1-based counters displayed as counts, clamped at 0.
                        combo=max((0, stats.topcombo - 1)),
                        btb=max((0, stats.topbtb - 1)),
                    ),
                    pieces=stats.piecesplaced,
                    pps=metrics.pps,
                    lines=stats.lines,
                    lpm=metrics.lpm,
                    holds=stats.holds,
                    score=stats.score,
                    single=clears.singles,
                    double=clears.doubles,
                    triple=clears.triples,
                    quad=clears.quads,
                    tspins=Tspins(
                        total=clears.realtspins,
                        single=clears.tspinsingles,
                        double=clears.tspindoubles,
                        triple=clears.tspintriples,
                        mini=Mini(
                            total=clears.minitspins,
                            single=clears.minitspinsingles,
                            double=clears.minitspindoubles,
                        ),
                    ),
                    all_clear=clears.allclear,
                    finesse=Finesse(
                        faults=stats.finesse.faults,
                        # Percentage of pieces placed with perfect finesse.
                        accuracy=round(stats.finesse.perfectpieces / stats.piecesplaced * 100, 2),
                    ),
                ),
                play_at=sprint.data.record.ts,
            ),
        )
    ) as page_hash:
        # Page exists only within this context; screenshot before it is torn down.
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
|
||||
3
nonebot_plugin_tetris_stats/games/tetrio/typing.py
Normal file
3
nonebot_plugin_tetris_stats/games/tetrio/typing.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from typing import Literal

# Selector for which render-template generation to use ('v1' or 'v2').
Template = Literal['v1', 'v2']
|
||||
75
nonebot_plugin_tetris_stats/games/tetrio/unbind.py
Normal file
75
nonebot_plugin_tetris_stats/games/tetrio/unbind.py
Normal file
@@ -0,0 +1,75 @@
|
||||
from hashlib import md5
|
||||
|
||||
from nonebot_plugin_alconna import Subcommand
|
||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||
from nonebot_plugin_orm import get_session
|
||||
from nonebot_plugin_session import EventSession
|
||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||
from nonebot_plugin_user import User
|
||||
from nonebot_plugin_userinfo import BotUserInfo, UserInfo
|
||||
from nonebot_plugin_waiter import suggest # type: ignore[import-untyped]
|
||||
from yarl import URL
|
||||
|
||||
from ...db import query_bind_info, remove_bind, trigger
|
||||
from ...utils.host import HostPage, get_self_netloc
|
||||
from ...utils.image import get_avatar
|
||||
from ...utils.render import Bind, render
|
||||
from ...utils.render.schemas.base import Avatar, People
|
||||
from ...utils.screenshot import screenshot
|
||||
from . import alc, command
|
||||
from .api import Player
|
||||
from .constant import GAME_TYPE
|
||||
|
||||
# Register the 'unbind' subcommand on the TETR.IO command tree.
command.add(Subcommand('unbind', help_text='解除绑定 TETR.IO 账号'))

# Natural-language trigger: e.g. "io解绑" expands to "tstats TETR.IO unbind".
alc.shortcut(
    '(?i:io)(?i:解除绑定|解绑|unbind)',
    command='tstats TETR.IO unbind',
    humanized='io解绑',
)
|
||||
|
||||
|
||||
@alc.assign('TETRIO.unbind')
async def _(nb_user: User, event_session: EventSession, bot_info: UserInfo = BotUserInfo()):  # noqa: B008
    """Unbind the caller's TETR.IO account after an interactive confirmation.

    Flow: check an existing binding, ask 是/否 via `suggest`, render and send
    an "unlink" confirmation card, then remove the binding. Note the binding
    is only removed AFTER the card is sent — a render/screenshot failure
    leaves the binding intact.
    """
    async with (
        trigger(
            session_persist_id=await get_session_persist_id(event_session),
            game_platform=GAME_TYPE,
            command_type='unbind',
            command_args=[],
        ),
        get_session() as session,
    ):
        if (bind := await query_bind_info(session=session, user=nb_user, game_platform=GAME_TYPE)) is None:
            await UniMessage('您还未绑定 TETR.IO 账号').finish()
        # Interactive confirmation; timeout (None) or an explicit '否' aborts
        # silently without any reply.
        resp = await suggest('您确定要解绑吗?', ['是', '否'])
        if resp is None or resp.extract_plain_text() == '否':
            return
        player = Player(user_id=bind.game_account, trust=True)
        user = await player.user
        netloc = get_self_netloc()
        async with HostPage(
            await render(
                'v1/binding',
                Bind(
                    platform='TETR.IO',
                    status='unlink',
                    user=People(
                        # Custom avatar when a non-zero revision exists,
                        # otherwise an identicon derived from the user ID.
                        avatar=str(
                            URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}')
                            % {'revision': avatar_revision}
                        )
                        if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
                        else Avatar(type='identicon', hash=md5(user.ID.encode()).hexdigest()),  # noqa: S324
                        name=user.name.upper(),
                    ),
                    bot=People(
                        # Bot avatar as a Data URI, falling back to the bundled logo.
                        avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),
                        name=bot_info.user_name,
                    ),
                    command='io绑定{游戏ID}',
                ),
            )
        ) as page_hash:
            await UniMessage.image(raw=await screenshot(f'http://{netloc}/host/{page_hash}.html')).send()
        # Remove the binding only after the confirmation card was sent.
        await remove_bind(session=session, user=nb_user, game_platform=GAME_TYPE)
|
||||
76
nonebot_plugin_tetris_stats/games/top/__init__.py
Normal file
76
nonebot_plugin_tetris_stats/games/top/__init__.py
Normal file
@@ -0,0 +1,76 @@
|
||||
from arclet.alconna import Arg, ArgFlag
|
||||
from nonebot_plugin_alconna import Args, At, Subcommand
|
||||
|
||||
from ...utils.exception import MessageFormatError
|
||||
from ...utils.typing import Me
|
||||
from .. import add_block_handlers, alc, command
|
||||
from .api import Player
|
||||
from .constant import USER_NAME
|
||||
|
||||
|
||||
def get_player(name: str) -> Player | MessageFormatError:
    """Parse a raw TOP user name into a trusted Player.

    Returns a MessageFormatError (rather than raising) when `name` does not
    match the accepted TOP user-name pattern, so the command framework can
    surface it as user feedback.
    """
    # Guard clause: reject anything the USER_NAME pattern does not accept.
    if USER_NAME.match(name) is None:
        return MessageFormatError('用户名/ID不合法')
    return Player(user_name=name, trust=True)
|
||||
|
||||
|
||||
# TOP command tree: bind / unbind / query subcommands. Hidden args keep the
# help output compact; `get_player` validates raw usernames at parse time.
command.add(
    Subcommand(
        'TOP',
        Subcommand(
            'bind',
            Args(
                Arg(
                    'account',
                    get_player,
                    notice='TOP 用户名 / ID',
                    flags=[ArgFlag.HIDDEN],
                )
            ),
            help_text='绑定 TOP 账号',
        ),
        Subcommand(
            'unbind',
            help_text='解除绑定 TOP 账号',
        ),
        Subcommand(
            'query',
            Args(
                # Optional @-mention target, or `Me` for the sender.
                Arg(
                    'target',
                    At | Me,
                    notice='@想要查询的人 / 自己',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
                # Optional explicit TOP username (parsed via get_player).
                Arg(
                    'account',
                    get_player,
                    notice='TOP 用户名',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
            ),
            help_text='查询 TOP 游戏信息',
        ),
        help_text='TOP 游戏相关指令',
    )
)
|
||||
|
||||
# Natural-language shortcuts, e.g. "top绑定" -> "tstats TOP bind".
alc.shortcut(
    '(?i:top)(?i:绑定|绑|bind)',
    command='tstats TOP bind',
    humanized='top绑定',
)
alc.shortcut(
    '(?i:top)(?i:解除绑定|解绑|unbind)',
    command='tstats TOP unbind',
    humanized='top解绑',
)
alc.shortcut(
    '(?i:top)(?i:查询|查|query|stats)',
    command='tstats TOP query',
    humanized='top查',
)

# Attach the shared block handlers to the TOP.query dispatch path.
add_block_handlers(alc.assign('TOP.query'))

# Imported last for their registration side effects (each module attaches
# handlers to `alc` on import); E402/F401 suppressed deliberately.
from . import bind, query, unbind  # noqa: E402, F401
|
||||
3
nonebot_plugin_tetris_stats/games/top/api/__init__.py
Normal file
3
nonebot_plugin_tetris_stats/games/top/api/__init__.py
Normal file
@@ -0,0 +1,3 @@
|
||||
from .player import Player
|
||||
|
||||
__all__ = ['Player']
|
||||
17
nonebot_plugin_tetris_stats/games/top/api/models.py
Normal file
17
nonebot_plugin_tetris_stats/games/top/api/models.py
Normal file
@@ -0,0 +1,17 @@
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from nonebot_plugin_orm import Model
|
||||
from sqlalchemy import DateTime, String
|
||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||
|
||||
from ....db.models import PydanticType
|
||||
from .schemas.user_profile import UserProfile
|
||||
|
||||
|
||||
class TOPHistoricalData(MappedAsDataclass, Model):
    """ORM model storing historical snapshots of TOP API responses."""

    # Surrogate primary key, assigned by the database.
    id: Mapped[int] = mapped_column(init=False, primary_key=True)
    # Identifier of the TOP account the snapshot belongs to (max 24 chars).
    user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
    # Which TOP endpoint produced the payload; only 'User Profile' exists here.
    api_type: Mapped[Literal['User Profile']] = mapped_column(String(16), index=True)
    # Full parsed response, serialized through the project's PydanticType column.
    data: Mapped[UserProfile] = mapped_column(PydanticType(get_model=[], models={UserProfile}))
    # Fetch timestamp; stored naive — assumed UTC, TODO confirm against writers.
    update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user