mirror of
https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
synced 2026-03-05 05:36:54 +08:00
Compare commits
149 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 08a1a427b4 | |||
|
|
d4e91c8521 | ||
| dbde1181ce | |||
|
|
86fe4f0766 | ||
|
|
381f2505d6 | ||
| b3a77f5296 | |||
| 274f30f82a | |||
| efb1ddb260 | |||
| 7e3f49bc9e | |||
| 665772ed66 | |||
|
|
44fda8a19e | ||
|
|
6921bf4e37 | ||
|
|
c3c97c1c8b | ||
|
|
1d33872c9b | ||
|
|
b2d5a1e729 | ||
|
|
a0fd9eaed3 | ||
|
|
593723aa76 | ||
|
|
73d97d8458 | ||
|
|
d6f11655c1 | ||
|
|
376e85e36e | ||
|
|
45116a1418 | ||
|
|
a42d3e3837 | ||
|
|
b333c54c7d | ||
|
|
8840402d2f | ||
|
|
e8b64b23f5 | ||
|
|
40762a3180 | ||
|
|
a2c6ad8328 | ||
|
|
c7d93069ef | ||
| 4b514df2db | |||
| 47c83be1b5 | |||
| 6c0e092f51 | |||
| 04b9cd9eae | |||
|
|
61b5fcb137 | ||
| c0540769c8 | |||
|
|
0e19943046 | ||
|
|
7e1d2e8cb0 | ||
|
|
8931cfb5a7 | ||
|
|
ea8a18c1b1 | ||
| ef1acb0f16 | |||
|
|
f7bb667254 | ||
|
|
fa94c1beeb | ||
|
|
4e1e91a977 | ||
|
|
0f6a00819b | ||
|
|
b56385b412 | ||
|
|
7eea235f52 | ||
|
|
8a06b572ed | ||
| 6867245be3 | |||
|
|
eebff0a8ad | ||
| 74eef41506 | |||
| 5eb4771259 | |||
| 7a3a4d936d | |||
|
|
03ca7c4486 | ||
|
|
b043d1da59 | ||
|
|
c9659201b1 | ||
|
|
617d3ec658 | ||
|
|
57a1992675 | ||
|
|
8d1d2f329e | ||
|
|
fa6cbd5c6d | ||
|
|
9f0f0b87f4 | ||
|
|
96c298b1b8 | ||
|
|
df5ced235d | ||
|
|
af83c7a2d9 | ||
|
|
bc41a91034 | ||
|
|
d97291d1bc | ||
| 5b56de9de1 | |||
| 0898a81331 | |||
|
|
d464059c0a | ||
|
|
6ea8b9328c | ||
|
|
773ff5545c | ||
|
|
94710b938b | ||
| ec09bb734d | |||
|
|
9e9a642847 | ||
|
|
04e0b14e72 | ||
|
|
20ce9c64be | ||
|
|
8af07bf031 | ||
|
|
3a904f67ad | ||
|
|
fc9b751ac4 | ||
| cb4c6b96f0 | |||
|
|
25c3777c0f | ||
|
|
193fd1da2a | ||
|
|
2cd609dd40 | ||
| a206098805 | |||
|
|
d493ba5f0d | ||
|
|
581d1f9674 | ||
|
|
01c99e8a8c | ||
|
|
eb3f4bea04 | ||
|
|
ebbbd68b05 | ||
|
|
10e0eb815e | ||
|
|
a57b04e181 | ||
|
|
cc2e71f1a5 | ||
|
|
3384263bb2 | ||
|
|
68f210dc4f | ||
|
|
00a85fe3e9 | ||
|
|
a10a7584ae | ||
|
|
95aac5e321 | ||
| 89d8c938e2 | |||
| 84db42f1ce | |||
|
|
0a660922bb | ||
|
|
56bc98cc79 | ||
|
|
be61683b51 | ||
| ccd5706a95 | |||
| b69240caa5 | |||
| 49d00f4d0e | |||
| 389a850025 | |||
| 20dcc2bc3d | |||
| 606dddbca2 | |||
| f509b03cd0 | |||
| 6293d088db | |||
| 97e2abed78 | |||
|
|
5ea3fcb234 | ||
|
|
ca33ba1310 | ||
| 3629a2ff4a | |||
| a2108c9776 | |||
| 7133cd9384 | |||
|
|
406bc7674e | ||
|
|
259b38fda5 | ||
|
|
414345ae5c | ||
|
|
341cbd86cd | ||
|
|
bf7804738e | ||
|
|
553f373671 | ||
| e53e164a52 | |||
| 2cd7d89c3e | |||
| b8b6d5f6c8 | |||
| 7a44c0dca5 | |||
| 4155d8eb42 | |||
| 4cc942d226 | |||
| 996dd565d8 | |||
| 5b0660e45b | |||
| 8d1ebc06d1 | |||
| c57aa48048 | |||
| ad90562fdf | |||
| cbc96fc09e | |||
| 8e10cfe0d0 | |||
| d192f0506d | |||
| 44aed656b8 | |||
| feb662b980 | |||
| ed6eb9a5cf | |||
| 25e281a4c5 | |||
| a2d69b9113 | |||
| c8907a47a4 | |||
| 9fb176b4bc | |||
|
|
53740265b6 | ||
|
|
e6119074ce | ||
| f7a2e89274 | |||
| 3fe5a19c4a | |||
| d35469cdef | |||
| 0cbae117aa | |||
| 25dc57d911 | |||
| 6042417b65 |
12
.github/dependabot.yml
vendored
12
.github/dependabot.yml
vendored
@@ -1,12 +0,0 @@
|
|||||||
# To get started with Dependabot version updates, you'll need to specify which
|
|
||||||
# package ecosystems to update and where the package manifests are located.
|
|
||||||
# Please see the documentation for all configuration options:
|
|
||||||
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
|
||||||
|
|
||||||
version: 2
|
|
||||||
updates:
|
|
||||||
- package-ecosystem: "pip" # See documentation for possible values
|
|
||||||
directory: "/" # Location of package manifests
|
|
||||||
target-branch: "main"
|
|
||||||
schedule:
|
|
||||||
interval: "daily"
|
|
||||||
25
.github/workflows/Release.yml
vendored
25
.github/workflows/Release.yml
vendored
@@ -12,24 +12,25 @@ jobs:
|
|||||||
id-token: write
|
id-token: write
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install poetry
|
- uses: astral-sh/setup-uv@v3
|
||||||
run: pipx install poetry
|
name: Setup UV
|
||||||
shell: bash
|
|
||||||
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
with:
|
with:
|
||||||
python-version: '3.11'
|
enable-cache: true
|
||||||
cache: "poetry"
|
|
||||||
|
|
||||||
- run: poetry install
|
- name: "Set up Python"
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version-file: ".python-version"
|
||||||
|
|
||||||
|
- run: uv sync
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
- name: Get Version
|
- name: Get Version
|
||||||
id: version
|
id: version
|
||||||
run: |
|
run: |
|
||||||
echo "VERSION=$(poetry version -s)" >> $GITHUB_OUTPUT
|
echo "VERSION=$(uvx pdm show --version)" >> $GITHUB_OUTPUT
|
||||||
echo "TAG_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
echo "TAG_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||||
echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
@@ -38,10 +39,10 @@ jobs:
|
|||||||
run: exit 1
|
run: exit 1
|
||||||
|
|
||||||
- name: Build Package
|
- name: Build Package
|
||||||
run: poetry build
|
run: uv build
|
||||||
|
|
||||||
- name: Publish Package to PyPI
|
- name: Publish Package to PyPI
|
||||||
uses: pypa/gh-action-pypi-publish@release/v1
|
run: uv publish
|
||||||
|
|
||||||
- name: Publish Package to GitHub Release
|
- name: Publish Package to GitHub Release
|
||||||
run: gh release create ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl -t "🔖 ${{ steps.version.outputs.TAG_NAME }}" --generate-notes
|
run: gh release create ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl -t "🔖 ${{ steps.version.outputs.TAG_NAME }}" --generate-notes
|
||||||
|
|||||||
42
.github/workflows/TypeCheck.yml
vendored
42
.github/workflows/TypeCheck.yml
vendored
@@ -1,27 +1,33 @@
|
|||||||
name: TypeCheck
|
name: TypeCheck
|
||||||
|
|
||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
Mypy:
|
TypeCheck:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install poetry
|
- uses: astral-sh/setup-uv@v3
|
||||||
run: pipx install poetry
|
name: Setup UV
|
||||||
shell: bash
|
with:
|
||||||
|
enable-cache: true
|
||||||
|
|
||||||
- uses: actions/setup-python@v5
|
- name: "Set up Python"
|
||||||
with:
|
uses: actions/setup-python@v5
|
||||||
python-version: '3.10'
|
with:
|
||||||
cache: 'poetry'
|
python-version-file: ".python-version"
|
||||||
|
|
||||||
- run: poetry install
|
- run: uv sync
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
- name: Run Mypy
|
- name: Run Mypy
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
poetry run mypy ./nonebot_plugin_tetris_stats
|
uv run mypy ./nonebot_plugin_tetris_stats
|
||||||
|
|
||||||
|
- name: Run BasedPyright
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
uv run basedpyright ./nonebot_plugin_tetris_stats/
|
||||||
|
|||||||
8
.github/workflows/codeql-analysis.yml
vendored
8
.github/workflows/codeql-analysis.yml
vendored
@@ -38,11 +38,11 @@ jobs:
|
|||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v2
|
uses: github/codeql-action/init@v3
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
@@ -56,7 +56,7 @@ jobs:
|
|||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v2
|
uses: github/codeql-action/autobuild@v3
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
|
||||||
@@ -69,4 +69,4 @@ jobs:
|
|||||||
# ./location_of_script_within_repo/buildscript.sh
|
# ./location_of_script_within_repo/buildscript.sh
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v2
|
uses: github/codeql-action/analyze@v3
|
||||||
|
|||||||
@@ -7,13 +7,13 @@ ci:
|
|||||||
autoupdate_commit_msg: ':arrow_up: auto update by pre-commit hooks'
|
autoupdate_commit_msg: ':arrow_up: auto update by pre-commit hooks'
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.5.7
|
rev: v0.7.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff
|
- id: ruff
|
||||||
args: [--fix, --exit-non-zero-on-fix]
|
args: [--fix, --exit-non-zero-on-fix]
|
||||||
stages: [commit]
|
stages: [pre-commit]
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
stages: [commit]
|
stages: [pre-commit]
|
||||||
|
|
||||||
- repo: https://github.com/nonebot/nonemoji
|
- repo: https://github.com/nonebot/nonemoji
|
||||||
rev: v0.1.4
|
rev: v0.1.4
|
||||||
|
|||||||
1
.python-version
Normal file
1
.python-version
Normal file
@@ -0,0 +1 @@
|
|||||||
|
3.10
|
||||||
58
CONTRIBUTING.en-US.md
Normal file
58
CONTRIBUTING.en-US.md
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
# How to Contribute?
|
||||||
|
|
||||||
|
## Setting Up the Environment
|
||||||
|
|
||||||
|
### For Developers with Basic Python Knowledge
|
||||||
|
|
||||||
|
First, you need install [uv](https://docs.astral.sh/uv/).
|
||||||
|
Then:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Set up the basic Python environment
|
||||||
|
uv python install 3.10
|
||||||
|
|
||||||
|
# Clone the repository
|
||||||
|
git clone https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
|
||||||
|
cd nonebot-plugin-tetris-stats
|
||||||
|
|
||||||
|
# Install dependencies
|
||||||
|
uv sync
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
### Code Development
|
||||||
|
|
||||||
|
1. For static code analysis, use [ruff](https://docs.astral.sh/ruff/). You can install the corresponding plugin for your IDE or use the command line with `ruff check ./nonebot_plugin_tetris_stats/` to check the code.
|
||||||
|
2. For code formatting, use [ruff](https://docs.astral.sh/ruff/). You can install the corresponding plugin for your IDE or use the command line with `ruff format ./nonebot_plugin_tetris_stats/` to format the code.
|
||||||
|
3. For type checking, use both [basedpyright](https://docs.basedpyright.com/latest/) and [mypy](https://www.mypy-lang.org/). You can install the corresponding plugins for your IDE or use the following commands in the terminal to check the code:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# basedpyright
|
||||||
|
basedpyright ./nonebot_plugin_tetris_stats/
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
mypy ./nonebot_plugin_tetris_stats/
|
||||||
|
```
|
||||||
|
|
||||||
|
### Internationalization
|
||||||
|
|
||||||
|
This project uses [Tarina](https://github.com/ArcletProject/Tarina) for internationalization support.
|
||||||
|
|
||||||
|
#### Adding a New Language
|
||||||
|
|
||||||
|
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
|
||||||
|
2. Run `tarina-lang create {language_code}` * Please note that the language code should preferably follow the [IETF language tag](https://en.wikipedia.org/wiki/IETF_language_tag) standard.
|
||||||
|
3. Edit the generated `./nonebot_plugin_tetris_stats/i18n/{language_code}.json` file.
|
||||||
|
|
||||||
|
#### Updating an Existing Language
|
||||||
|
|
||||||
|
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
|
||||||
|
2. Edit the corresponding `./nonebot_plugin_tetris_stats/i18n/{language_code}.json` file.
|
||||||
|
|
||||||
|
#### Adding New Entries
|
||||||
|
|
||||||
|
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
|
||||||
|
2. Edit the `.template.json` file.
|
||||||
|
3. Run `tarina-lang schema && tarina-lang model`.
|
||||||
|
4. Modify the language files, adding new entries at least to `en-US.json`.
|
||||||
57
CONTRIBUTING.md
Normal file
57
CONTRIBUTING.md
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
# 我该如何参与开发?
|
||||||
|
|
||||||
|
## 配置环境
|
||||||
|
|
||||||
|
首先你需要安装 [uv](https://docs.astral.sh/uv/)。
|
||||||
|
然后:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 配置基础 Python 环境
|
||||||
|
uv python install 3.10
|
||||||
|
|
||||||
|
# 克隆仓库
|
||||||
|
git clone https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
|
||||||
|
cd nonebot-plugin-tetris-stats
|
||||||
|
|
||||||
|
# 安装依赖
|
||||||
|
uv sync
|
||||||
|
```
|
||||||
|
|
||||||
|
## 开发
|
||||||
|
|
||||||
|
### 代码开发
|
||||||
|
|
||||||
|
1. 代码静态检查使用 [ruff](https://docs.astral.sh/ruff/),你可以为你的ide安装对应插件来使用,也可以在命令行使用`ruff check ./nonebot_plugin_tetris_stats/`来检查代码。
|
||||||
|
2. 代码格式化使用 [ruff](https://docs.astral.sh/ruff/),你可以为你的ide安装对应插件来使用,也可以在命令行使用`ruff format ./nonebot_plugin_tetris_stats/`来格式化代码。
|
||||||
|
3. 类型检查同时使用 [basedpyright](https://docs.basedpyright.com/latest/) 和 [mypy](https://www.mypy-lang.org/),你可以为你的ide安装对应插件来使用。
|
||||||
|
也可以在命令行使用下面的命令来检查代码:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# basedpyright
|
||||||
|
basedpyright ./nonebot_plugin_tetris_stats/
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
mypy ./nonebot_plugin_tetris_stats/
|
||||||
|
```
|
||||||
|
|
||||||
|
### 国际化
|
||||||
|
|
||||||
|
本项目使用 [Tarina](https://github.com/ArcletProject/Tarina) 提供国际化支持。
|
||||||
|
|
||||||
|
#### 添加新的语言
|
||||||
|
|
||||||
|
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
|
||||||
|
2. 运行 `tarina-lang create {语言代码}` * 请注意,语言代码最好符合 [IETF语言标签](https://zh.wikipedia.org/wiki/IETF%E8%AF%AD%E8%A8%80%E6%A0%87%E7%AD%BE) 的规范。
|
||||||
|
3. 编辑生成的 `./nonebot_plugin_tetris_stats/i18n/{语言代码}.json` 文件。
|
||||||
|
|
||||||
|
#### 更新已有语言
|
||||||
|
|
||||||
|
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
|
||||||
|
2. 编辑对应的 `./nonebot_plugin_tetris_stats/i18n/{语言代码}.json` 文件。
|
||||||
|
|
||||||
|
#### 添加新的条目
|
||||||
|
|
||||||
|
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
|
||||||
|
2. 编辑 `.template.json` 文件。
|
||||||
|
3. 运行 `tarina-lang schema && tarina-lang model`。
|
||||||
|
4. 修改语言文件,至少为`en-US.json`添加新的条目。
|
||||||
@@ -1,13 +1,27 @@
|
|||||||
from pathlib import Path
|
from nonebot import get_plugin_config
|
||||||
|
from nonebot_plugin_localstore import get_plugin_cache_dir, get_plugin_data_dir
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
from nonebot_plugin_localstore import get_cache_dir
|
CACHE_PATH = get_plugin_cache_dir()
|
||||||
from pydantic import BaseModel
|
DATA_PATH = get_plugin_data_dir()
|
||||||
|
|
||||||
CACHE_PATH: Path = get_cache_dir('nonebot_plugin_tetris_stats')
|
|
||||||
|
class Proxy(BaseModel):
|
||||||
|
main: str | None = None
|
||||||
|
github: str | None = None
|
||||||
|
tetrio: str | None = None
|
||||||
|
tos: str | None = None
|
||||||
|
top: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class ScopedConfig(BaseModel):
|
||||||
|
request_timeout: float = 30.0
|
||||||
|
screenshot_quality: float = 2
|
||||||
|
proxy: Proxy = Field(default_factory=Proxy)
|
||||||
|
|
||||||
|
|
||||||
class Config(BaseModel):
|
class Config(BaseModel):
|
||||||
"""配置类"""
|
tetris: ScopedConfig = Field(default_factory=ScopedConfig)
|
||||||
|
|
||||||
tetris_req_timeout: float = 30.0
|
|
||||||
tetris_screenshot_quality: float = 2
|
config = get_plugin_config(Config)
|
||||||
|
|||||||
@@ -19,7 +19,6 @@ from sqlalchemy import desc, select
|
|||||||
from sqlalchemy.dialects import sqlite
|
from sqlalchemy.dialects import sqlite
|
||||||
from sqlalchemy.ext.automap import automap_base
|
from sqlalchemy.ext.automap import automap_base
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
from ujson import dumps, loads
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from collections.abc import Sequence
|
from collections.abc import Sequence
|
||||||
@@ -30,7 +29,9 @@ branch_labels: str | Sequence[str] | None = None
|
|||||||
depends_on: str | Sequence[str] | None = None
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
def migrate_old_data() -> None:
|
def migrate_old_data() -> None: # noqa: C901
|
||||||
|
from json import dumps, loads
|
||||||
|
|
||||||
Base = automap_base() # noqa: N806
|
Base = automap_base() # noqa: N806
|
||||||
Base.prepare(autoload_with=op.get_bind())
|
Base.prepare(autoload_with=op.get_bind())
|
||||||
OldHistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
OldHistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
@@ -46,6 +47,9 @@ def migrate_old_data() -> None:
|
|||||||
TimeRemainingColumn(),
|
TimeRemainingColumn(),
|
||||||
) as progress,
|
) as progress,
|
||||||
):
|
):
|
||||||
|
if session.query(OldHistoricalData).count() == 0:
|
||||||
|
logger.info('空表, 跳过')
|
||||||
|
return
|
||||||
task_id = progress.add_task('[cyan]Migrating:', total=session.query(OldHistoricalData).count())
|
task_id = progress.add_task('[cyan]Migrating:', total=session.query(OldHistoricalData).count())
|
||||||
pointer = 0
|
pointer = 0
|
||||||
while pointer < session.query(OldHistoricalData).order_by(desc(OldHistoricalData.id)).limit(1).one().id:
|
while pointer < session.query(OldHistoricalData).order_by(desc(OldHistoricalData.id)).limit(1).one().id:
|
||||||
|
|||||||
@@ -0,0 +1,119 @@
|
|||||||
|
"""add TETRIOLeagueStats
|
||||||
|
|
||||||
|
迁移 ID: 5a1b93948494
|
||||||
|
父迁移: cfeab6961dce
|
||||||
|
创建时间: 2024-08-24 00:22:41.359500
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = '5a1b93948494'
|
||||||
|
down_revision: str | Sequence[str] | None = 'cfeab6961dce'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_tetrioleaguestats',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguestats')),
|
||||||
|
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestats', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestats_update_time'), ['update_time'], unique=False
|
||||||
|
)
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_tetrioleaguehistorical',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('request_id', sa.Uuid(), nullable=False),
|
||||||
|
sa.Column('data', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('stats_id', sa.Integer(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['stats_id'],
|
||||||
|
['nonebot_plugin_tetris_stats_tetrioleaguestats.id'],
|
||||||
|
name=op.f(
|
||||||
|
'fk_nonebot_plugin_tetris_stats_tetrioleaguehistorical_stats_id_nonebot_plugin_tetris_stats_tetrioleaguestats'
|
||||||
|
),
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguehistorical')),
|
||||||
|
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_request_id'), ['request_id'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_update_time'),
|
||||||
|
['update_time'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_tetrioleaguestatsfield',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('rank', sa.String(length=2), nullable=False),
|
||||||
|
sa.Column('tr_line', sa.Float(), nullable=False),
|
||||||
|
sa.Column('player_count', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('low_pps', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('low_apm', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('low_vs', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('avg_pps', sa.Float(), nullable=False),
|
||||||
|
sa.Column('avg_apm', sa.Float(), nullable=False),
|
||||||
|
sa.Column('avg_vs', sa.Float(), nullable=False),
|
||||||
|
sa.Column('high_pps', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('high_apm', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('high_vs', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('stats_id', sa.Integer(), nullable=False),
|
||||||
|
sa.ForeignKeyConstraint(
|
||||||
|
['stats_id'],
|
||||||
|
['nonebot_plugin_tetris_stats_tetrioleaguestats.id'],
|
||||||
|
name=op.f(
|
||||||
|
'fk_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_stats_id_nonebot_plugin_tetris_stats_tetrioleaguestats'
|
||||||
|
),
|
||||||
|
),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguestatsfield')),
|
||||||
|
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank'), ['rank'], unique=False
|
||||||
|
)
|
||||||
|
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield')
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_update_time'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_request_id'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical')
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestats', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestats_update_time'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguestats')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -13,7 +13,6 @@ from typing import TYPE_CHECKING
|
|||||||
from alembic import op
|
from alembic import op
|
||||||
from sqlalchemy.ext.automap import automap_base
|
from sqlalchemy.ext.automap import automap_base
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
from ujson import dumps, loads
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from collections.abc import Sequence
|
from collections.abc import Sequence
|
||||||
@@ -27,6 +26,7 @@ depends_on: str | Sequence[str] | None = None
|
|||||||
def upgrade(name: str = '') -> None:
|
def upgrade(name: str = '') -> None:
|
||||||
if name:
|
if name:
|
||||||
return
|
return
|
||||||
|
from json import dumps, loads
|
||||||
|
|
||||||
Base = automap_base() # noqa: N806
|
Base = automap_base() # noqa: N806
|
||||||
connection = op.get_bind()
|
connection = op.get_bind()
|
||||||
@@ -50,6 +50,7 @@ def upgrade(name: str = '') -> None:
|
|||||||
def downgrade(name: str = '') -> None:
|
def downgrade(name: str = '') -> None:
|
||||||
if name:
|
if name:
|
||||||
return
|
return
|
||||||
|
from json import dumps, loads
|
||||||
|
|
||||||
Base = automap_base() # noqa: N806
|
Base = automap_base() # noqa: N806
|
||||||
connection = op.get_bind()
|
connection = op.get_bind()
|
||||||
|
|||||||
@@ -28,12 +28,6 @@ depends_on: str | Sequence[str] | None = None
|
|||||||
def upgrade(name: str = '') -> None: # noqa: C901
|
def upgrade(name: str = '') -> None: # noqa: C901
|
||||||
if name:
|
if name:
|
||||||
return
|
return
|
||||||
from nonebot_plugin_tetris_stats.version import __version__
|
|
||||||
|
|
||||||
if __version__ != '1.0.3':
|
|
||||||
msg = '本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移'
|
|
||||||
logger.critical(msg)
|
|
||||||
raise RuntimeError(msg)
|
|
||||||
|
|
||||||
from nonebot.compat import PYDANTIC_V2, type_validate_json
|
from nonebot.compat import PYDANTIC_V2, type_validate_json
|
||||||
from pydantic import BaseModel, ValidationError
|
from pydantic import BaseModel, ValidationError
|
||||||
@@ -46,10 +40,6 @@ def upgrade(name: str = '') -> None: # noqa: C901
|
|||||||
TimeRemainingColumn,
|
TimeRemainingColumn,
|
||||||
)
|
)
|
||||||
|
|
||||||
from nonebot_plugin_tetris_stats.game_data_processor.schemas import ( # type: ignore[import-untyped]
|
|
||||||
BaseProcessedData,
|
|
||||||
)
|
|
||||||
|
|
||||||
Base = automap_base() # noqa: N806
|
Base = automap_base() # noqa: N806
|
||||||
Base.prepare(autoload_with=op.get_bind())
|
Base.prepare(autoload_with=op.get_bind())
|
||||||
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
@@ -62,18 +52,33 @@ def upgrade(name: str = '') -> None: # noqa: C901
|
|||||||
def model_to_json(value: BaseModel) -> str:
|
def model_to_json(value: BaseModel) -> str:
|
||||||
return value.json(by_alias=True)
|
return value.json(by_alias=True)
|
||||||
|
|
||||||
models = BaseProcessedData.__subclasses__()
|
|
||||||
|
|
||||||
def json_to_model(value: str) -> BaseModel:
|
|
||||||
for i in models:
|
|
||||||
try:
|
|
||||||
return type_validate_json(i, value)
|
|
||||||
except ValidationError: # noqa: PERF203
|
|
||||||
...
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
with Session(op.get_bind()) as session:
|
with Session(op.get_bind()) as session:
|
||||||
count = session.query(HistoricalData).count()
|
count = session.query(HistoricalData).count()
|
||||||
|
if count == 0:
|
||||||
|
logger.info('空表, 跳过')
|
||||||
|
return
|
||||||
|
|
||||||
|
from nonebot_plugin_tetris_stats.version import __version__
|
||||||
|
|
||||||
|
if __version__ != '1.0.3':
|
||||||
|
msg = '本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移'
|
||||||
|
logger.critical(msg)
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
|
from nonebot_plugin_tetris_stats.game_data_processor.schemas import ( # type: ignore[import-untyped]
|
||||||
|
BaseProcessedData,
|
||||||
|
)
|
||||||
|
|
||||||
|
models = BaseProcessedData.__subclasses__()
|
||||||
|
|
||||||
|
def json_to_model(value: str) -> BaseModel:
|
||||||
|
for i in models:
|
||||||
|
try:
|
||||||
|
return type_validate_json(i, value)
|
||||||
|
except ValidationError: # noqa: PERF203
|
||||||
|
...
|
||||||
|
raise ValueError
|
||||||
|
|
||||||
with Progress(
|
with Progress(
|
||||||
TextColumn('[progress.description]{task.description}'),
|
TextColumn('[progress.description]{task.description}'),
|
||||||
BarColumn(),
|
BarColumn(),
|
||||||
|
|||||||
@@ -26,12 +26,7 @@ depends_on: str | Sequence[str] | None = None
|
|||||||
def upgrade(name: str = '') -> None:
|
def upgrade(name: str = '') -> None:
|
||||||
if name:
|
if name:
|
||||||
return
|
return
|
||||||
from nonebot_plugin_tetris_stats.version import __version__
|
|
||||||
|
|
||||||
if __version__ != '1.0.4':
|
|
||||||
msg = '本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移'
|
|
||||||
logger.critical(msg)
|
|
||||||
raise RuntimeError(msg)
|
|
||||||
from nonebot.compat import type_validate_json
|
from nonebot.compat import type_validate_json
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
from rich.progress import (
|
from rich.progress import (
|
||||||
@@ -46,8 +41,6 @@ def upgrade(name: str = '') -> None:
|
|||||||
from sqlalchemy.ext.automap import automap_base
|
from sqlalchemy.ext.automap import automap_base
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseUser # type: ignore[import-untyped]
|
|
||||||
|
|
||||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
batch_op.add_column(sa.Column('user_unique_identifier', sa.String(length=32), nullable=True))
|
batch_op.add_column(sa.Column('user_unique_identifier', sa.String(length=32), nullable=True))
|
||||||
batch_op.create_index(
|
batch_op.create_index(
|
||||||
@@ -60,37 +53,48 @@ def upgrade(name: str = '') -> None:
|
|||||||
Base.prepare(autoload_with=connection)
|
Base.prepare(autoload_with=connection)
|
||||||
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
|
|
||||||
models: list[type[BaseUser]] = BaseUser.__subclasses__()
|
|
||||||
|
|
||||||
def json_to_model(value: str) -> BaseUser:
|
|
||||||
for i in models:
|
|
||||||
try:
|
|
||||||
return type_validate_json(i, value)
|
|
||||||
except ValidationError: # noqa: PERF203
|
|
||||||
...
|
|
||||||
raise ValueError
|
|
||||||
|
|
||||||
with Session(op.get_bind()) as session:
|
with Session(op.get_bind()) as session:
|
||||||
count = session.query(HistoricalData).count()
|
count = session.query(HistoricalData).count()
|
||||||
with Progress(
|
if count == 0:
|
||||||
TextColumn('[progress.description]{task.description}'),
|
logger.info('空表, 跳过')
|
||||||
BarColumn(),
|
else:
|
||||||
MofNCompleteColumn(),
|
from nonebot_plugin_tetris_stats.version import __version__
|
||||||
TaskProgressColumn(),
|
|
||||||
TimeRemainingColumn(),
|
if __version__ != '1.0.4':
|
||||||
) as progress:
|
msg = '本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移'
|
||||||
task_id = progress.add_task('[cyan]Updateing:', total=count)
|
logger.critical(msg)
|
||||||
for i in range(0, count, 100):
|
raise RuntimeError(msg)
|
||||||
for j in session.scalars(
|
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseUser # type: ignore[import-untyped]
|
||||||
select(HistoricalData).where(HistoricalData.id > i).order_by(HistoricalData.id).limit(100)
|
|
||||||
):
|
models: list[type[BaseUser]] = BaseUser.__subclasses__()
|
||||||
model = json_to_model(j.game_user)
|
|
||||||
|
def json_to_model(value: str) -> BaseUser:
|
||||||
|
for i in models:
|
||||||
try:
|
try:
|
||||||
j.user_unique_identifier = model.unique_identifier
|
return type_validate_json(i, value)
|
||||||
except ValueError:
|
except ValidationError: # noqa: PERF203
|
||||||
session.delete(j)
|
...
|
||||||
progress.update(task_id, advance=1)
|
raise ValueError
|
||||||
session.commit()
|
|
||||||
|
with Progress(
|
||||||
|
TextColumn('[progress.description]{task.description}'),
|
||||||
|
BarColumn(),
|
||||||
|
MofNCompleteColumn(),
|
||||||
|
TaskProgressColumn(),
|
||||||
|
TimeRemainingColumn(),
|
||||||
|
) as progress:
|
||||||
|
task_id = progress.add_task('[cyan]Updateing:', total=count)
|
||||||
|
for i in range(0, count, 100):
|
||||||
|
for j in session.scalars(
|
||||||
|
select(HistoricalData).where(HistoricalData.id > i).order_by(HistoricalData.id).limit(100)
|
||||||
|
):
|
||||||
|
model = json_to_model(j.game_user)
|
||||||
|
try:
|
||||||
|
j.user_unique_identifier = model.unique_identifier
|
||||||
|
except ValueError:
|
||||||
|
session.delete(j)
|
||||||
|
progress.update(task_id, advance=1)
|
||||||
|
session.commit()
|
||||||
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
batch_op.alter_column('user_unique_identifier', existing_type=sa.VARCHAR(length=32), nullable=False)
|
batch_op.alter_column('user_unique_identifier', existing_type=sa.VARCHAR(length=32), nullable=False)
|
||||||
logger.success('database upgrade success')
|
logger.success('database upgrade success')
|
||||||
|
|||||||
@@ -55,12 +55,12 @@ async def create_or_update_bind(
|
|||||||
game_account=game_account,
|
game_account=game_account,
|
||||||
)
|
)
|
||||||
session.add(bind)
|
session.add(bind)
|
||||||
message = BindStatus.SUCCESS
|
status = BindStatus.SUCCESS
|
||||||
else:
|
else:
|
||||||
bind.game_account = game_account
|
bind.game_account = game_account
|
||||||
message = BindStatus.UPDATE
|
status = BindStatus.UPDATE
|
||||||
await session.commit()
|
await session.commit()
|
||||||
return message
|
return status
|
||||||
|
|
||||||
|
|
||||||
T = TypeVar('T', 'TETRIOHistoricalData', 'TOPHistoricalData', 'TOSHistoricalData')
|
T = TypeVar('T', 'TETRIOHistoricalData', 'TOPHistoricalData', 'TOSHistoricalData')
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ from nonebot.typing import T_Handler
|
|||||||
from nonebot_plugin_alconna import AlcMatches, Alconna, At, CommandMeta, on_alconna
|
from nonebot_plugin_alconna import AlcMatches, Alconna, At, CommandMeta, on_alconna
|
||||||
|
|
||||||
from .. import ns
|
from .. import ns
|
||||||
|
from ..i18n.model import Lang
|
||||||
from ..utils.exception import MessageFormatError, NeedCatchError
|
from ..utils.exception import MessageFormatError, NeedCatchError
|
||||||
|
|
||||||
command: Alconna = Alconna(
|
command: Alconna = Alconna(
|
||||||
@@ -30,7 +31,7 @@ def add_block_handlers(handler: Callable[[T_Handler], T_Handler]) -> None:
|
|||||||
@handler
|
@handler
|
||||||
async def _(bot: Bot, matcher: Matcher, target: At):
|
async def _(bot: Bot, matcher: Matcher, target: At):
|
||||||
if isinstance(target, At) and target.target == bot.self_id:
|
if isinstance(target, At) and target.target == bot.self_id:
|
||||||
await matcher.finish('不能查询bot的信息')
|
await matcher.finish(Lang.interaction.wrong.query_bot())
|
||||||
|
|
||||||
|
|
||||||
from . import tetrio, top, tos # noqa: F401, E402
|
from . import tetrio, top, tos # noqa: F401, E402
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
CANT_VERIFY_MESSAGE = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
|
|
||||||
@@ -23,7 +23,7 @@ command = Subcommand(
|
|||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
from . import bind, config, query, record # noqa: E402
|
from . import bind, config, list, query, rank, record # noqa: E402
|
||||||
|
|
||||||
main_command.add(command)
|
main_command.add(command)
|
||||||
|
|
||||||
@@ -31,6 +31,8 @@ __all__ = [
|
|||||||
'alc',
|
'alc',
|
||||||
'bind',
|
'bind',
|
||||||
'config',
|
'config',
|
||||||
|
'list',
|
||||||
'query',
|
'query',
|
||||||
|
'rank',
|
||||||
'record',
|
'record',
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
from .player import Player
|
from .player import Player
|
||||||
from .schemas.user import User
|
from .schemas.user import User
|
||||||
from .schemas.user_info import UserInfoSuccess
|
from .schemas.user_info import UserInfoSuccess
|
||||||
from .tetra_league import full_export as tetra_league_full_export
|
|
||||||
|
|
||||||
__all__ = ['Player', 'User', 'UserInfoSuccess', 'tetra_league_full_export']
|
__all__ = ['Player', 'User', 'UserInfoSuccess']
|
||||||
|
|||||||
@@ -1,30 +1,37 @@
|
|||||||
from asyncio import Lock
|
from asyncio import Lock
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timedelta, timezone
|
||||||
from typing import ClassVar
|
from typing import ClassVar
|
||||||
from weakref import WeakValueDictionary
|
from weakref import WeakValueDictionary
|
||||||
|
|
||||||
from aiocache import Cache as ACache # type: ignore[import-untyped]
|
from aiocache import Cache as ACache # type: ignore[import-untyped]
|
||||||
from nonebot.compat import type_validate_json
|
from nonebot.compat import type_validate_json
|
||||||
from nonebot.log import logger
|
from nonebot.log import logger
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
|
from ....config.config import config
|
||||||
|
from ....utils.limit import limit
|
||||||
from ....utils.request import Request
|
from ....utils.request import Request
|
||||||
from .schemas.base import FailedModel, SuccessModel
|
from .schemas.base import FailedModel, SuccessModel
|
||||||
|
|
||||||
UTC = timezone.utc
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
|
request = Request(config.tetris.proxy.tetrio or config.tetris.proxy.main)
|
||||||
|
request.request = limit(timedelta(seconds=1))(request.request) # type: ignore[method-assign]
|
||||||
|
|
||||||
|
|
||||||
class Cache:
|
class Cache:
|
||||||
cache = ACache(ACache.MEMORY)
|
cache = ACache(ACache.MEMORY)
|
||||||
task: ClassVar[WeakValueDictionary[str, Lock]] = WeakValueDictionary()
|
task: ClassVar[WeakValueDictionary[URL, Lock]] = WeakValueDictionary()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def get(cls, url: str) -> bytes:
|
async def get(cls, url: URL, extra_headers: dict | None = None) -> bytes:
|
||||||
lock = cls.task.setdefault(url, Lock())
|
lock = cls.task.setdefault(url, Lock())
|
||||||
async with lock:
|
async with lock:
|
||||||
if (cached_data := await cls.cache.get(url)) is not None:
|
if (cached_data := await cls.cache.get(url)) is not None:
|
||||||
logger.debug(f'{url}: Cache hit!')
|
logger.debug(f'{url}: Cache hit!')
|
||||||
return cached_data
|
return cached_data
|
||||||
response_data = await Request.request(url)
|
response_data = await request.request(url, extra_headers, enable_anti_cloudflare=True)
|
||||||
parsed_data: SuccessModel | FailedModel = type_validate_json(SuccessModel | FailedModel, response_data) # type: ignore[arg-type]
|
parsed_data: SuccessModel | FailedModel = type_validate_json(SuccessModel | FailedModel, response_data) # type: ignore[arg-type]
|
||||||
if isinstance(parsed_data, SuccessModel):
|
if isinstance(parsed_data, SuccessModel):
|
||||||
await cls.cache.add(
|
await cls.cache.add(
|
||||||
|
|||||||
89
nonebot_plugin_tetris_stats/games/tetrio/api/leaderboards.py
Normal file
89
nonebot_plugin_tetris_stats/games/tetrio/api/leaderboards.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
from typing import Literal, overload
|
||||||
|
from uuid import UUID
|
||||||
|
|
||||||
|
from nonebot.compat import type_validate_json
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
|
from ....utils.exception import RequestError
|
||||||
|
from ..constant import BASE_URL
|
||||||
|
from .cache import Cache
|
||||||
|
from .schemas.base import FailedModel
|
||||||
|
from .schemas.leaderboards import Parameter
|
||||||
|
from .schemas.leaderboards.by import By, BySuccessModel
|
||||||
|
from .schemas.leaderboards.solo import Solo, SoloSuccessModel
|
||||||
|
from .schemas.leaderboards.zenith import Zenith, ZenithSuccessModel
|
||||||
|
|
||||||
|
|
||||||
|
async def by(
|
||||||
|
by_type: Literal['league', 'xp', 'ar'], parameter: Parameter, x_session_id: UUID | None = None
|
||||||
|
) -> BySuccessModel:
|
||||||
|
model: By = type_validate_json(
|
||||||
|
By, # type: ignore[arg-type]
|
||||||
|
await get(
|
||||||
|
BASE_URL / f'users/by/{by_type}',
|
||||||
|
parameter,
|
||||||
|
{'X-Session-ID': str(x_session_id)} if x_session_id is not None else None,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
if isinstance(model, FailedModel):
|
||||||
|
msg = f'排行榜信息请求错误:\n{model.error}'
|
||||||
|
raise RequestError(msg)
|
||||||
|
return model
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def records(
|
||||||
|
records_type: Literal['40l', 'blitz'],
|
||||||
|
scope: str = '_global',
|
||||||
|
revolution_id: str | None = None,
|
||||||
|
*,
|
||||||
|
parameter: Parameter,
|
||||||
|
) -> SoloSuccessModel: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def records(
|
||||||
|
records_type: Literal['zenith', 'zenithex'],
|
||||||
|
scope: str = '_global',
|
||||||
|
revolution_id: str | None = None,
|
||||||
|
*,
|
||||||
|
parameter: Parameter,
|
||||||
|
) -> ZenithSuccessModel: ...
|
||||||
|
|
||||||
|
|
||||||
|
async def records(
|
||||||
|
records_type: Literal['40l', 'blitz', 'zenith', 'zenithex'],
|
||||||
|
scope: str = '_global',
|
||||||
|
revolution_id: str | None = None,
|
||||||
|
*,
|
||||||
|
parameter: Parameter,
|
||||||
|
) -> SoloSuccessModel | ZenithSuccessModel:
|
||||||
|
model: Solo | Zenith
|
||||||
|
match records_type:
|
||||||
|
case '40l' | 'blitz':
|
||||||
|
model = type_validate_json(
|
||||||
|
Solo, # type: ignore[arg-type]
|
||||||
|
await get(
|
||||||
|
BASE_URL / 'records' / f'{records_type}{scope}{revolution_id if revolution_id is not None else ""}',
|
||||||
|
parameter,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
case 'zenith' | 'zenithex':
|
||||||
|
model = type_validate_json(
|
||||||
|
Zenith, # type: ignore[arg-type]
|
||||||
|
await get(
|
||||||
|
BASE_URL / 'records' / f'{records_type}{scope}{revolution_id if revolution_id is not None else ""}',
|
||||||
|
parameter,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
case _:
|
||||||
|
msg = f'records_type: {records_type} is not supported'
|
||||||
|
raise ValueError(msg)
|
||||||
|
if isinstance(model, FailedModel):
|
||||||
|
msg = f'排行榜信息请求错误:\n{model.error}' # type: ignore[attr-defined]
|
||||||
|
raise RequestError(msg)
|
||||||
|
return model
|
||||||
|
|
||||||
|
|
||||||
|
async def get(url: URL, parameter: Parameter, extra_headers: dict | None = None) -> bytes:
|
||||||
|
return await Cache.get(url % parameter.to_params(), extra_headers)
|
||||||
@@ -7,11 +7,11 @@ from nonebot.compat import type_validate_json
|
|||||||
|
|
||||||
from ....db import anti_duplicate_add
|
from ....db import anti_duplicate_add
|
||||||
from ....utils.exception import RequestError
|
from ....utils.exception import RequestError
|
||||||
from ....utils.request import splice_url
|
|
||||||
from ..constant import BASE_URL, USER_ID, USER_NAME
|
from ..constant import BASE_URL, USER_ID, USER_NAME
|
||||||
from .cache import Cache
|
from .cache import Cache
|
||||||
from .models import TETRIOHistoricalData
|
from .models import TETRIOHistoricalData
|
||||||
from .schemas.base import FailedModel
|
from .schemas.base import FailedModel
|
||||||
|
from .schemas.labs.leagueflow import LeagueFlow, LeagueFlowSuccess
|
||||||
from .schemas.records.solo import Solo as SoloRecord
|
from .schemas.records.solo import Solo as SoloRecord
|
||||||
from .schemas.records.solo import SoloSuccessModel as RecordsSoloSuccessModel
|
from .schemas.records.solo import SoloSuccessModel as RecordsSoloSuccessModel
|
||||||
from .schemas.summaries import (
|
from .schemas.summaries import (
|
||||||
@@ -24,6 +24,7 @@ from .schemas.summaries import (
|
|||||||
SoloSuccessModel as SummariesSoloSuccessModel,
|
SoloSuccessModel as SummariesSoloSuccessModel,
|
||||||
)
|
)
|
||||||
from .schemas.summaries.base import User as SummariesUser
|
from .schemas.summaries.base import User as SummariesUser
|
||||||
|
from .schemas.summaries.league import LeagueSuccessModel
|
||||||
from .schemas.user import User
|
from .schemas.user import User
|
||||||
from .schemas.user_info import UserInfo, UserInfoSuccess
|
from .schemas.user_info import UserInfo, UserInfoSuccess
|
||||||
from .typing import Records, Summaries
|
from .typing import Records, Summaries
|
||||||
@@ -55,6 +56,7 @@ class Player:
|
|||||||
'blitz': SummariesSoloSuccessModel,
|
'blitz': SummariesSoloSuccessModel,
|
||||||
'zenith': ZenithSuccessModel,
|
'zenith': ZenithSuccessModel,
|
||||||
'zenithex': ZenithSuccessModel,
|
'zenithex': ZenithSuccessModel,
|
||||||
|
'league': LeagueSuccessModel,
|
||||||
'zen': ZenSuccessModel,
|
'zen': ZenSuccessModel,
|
||||||
'achievements': AchievementsSuccessModel,
|
'achievements': AchievementsSuccessModel,
|
||||||
}
|
}
|
||||||
@@ -83,15 +85,11 @@ class Player:
|
|||||||
self._user_info: UserInfoSuccess | None = None
|
self._user_info: UserInfoSuccess | None = None
|
||||||
self._summaries: dict[Summaries, SummariesModel] = {}
|
self._summaries: dict[Summaries, SummariesModel] = {}
|
||||||
self._records: dict[RecordKey, RecordsSoloSuccessModel] = {}
|
self._records: dict[RecordKey, RecordsSoloSuccessModel] = {}
|
||||||
|
self._leagueflow: LeagueFlowSuccess | None = None
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def _request_user_parameter(self) -> str:
|
def _request_user_parameter(self) -> str:
|
||||||
if self.user_id is not None:
|
return self.user_id or cast(str, self.user_name).lower()
|
||||||
return self.user_id
|
|
||||||
if self.user_name is not None:
|
|
||||||
return self.user_name.lower()
|
|
||||||
msg = 'Invalid user'
|
|
||||||
raise ValueError(msg)
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
async def user(self) -> User:
|
async def user(self) -> User:
|
||||||
@@ -115,7 +113,7 @@ class Player:
|
|||||||
async def get_info(self) -> UserInfoSuccess:
|
async def get_info(self) -> UserInfoSuccess:
|
||||||
"""Get User Info"""
|
"""Get User Info"""
|
||||||
if self._user_info is None:
|
if self._user_info is None:
|
||||||
raw_user_info = await Cache.get(splice_url([BASE_URL, 'users/', f'{self._request_user_parameter}']))
|
raw_user_info = await Cache.get(BASE_URL / 'users' / self._request_user_parameter)
|
||||||
user_info: UserInfo = type_validate_json(UserInfo, raw_user_info) # type: ignore[arg-type]
|
user_info: UserInfo = type_validate_json(UserInfo, raw_user_info) # type: ignore[arg-type]
|
||||||
if isinstance(user_info, FailedModel):
|
if isinstance(user_info, FailedModel):
|
||||||
msg = f'用户信息请求错误:\n{user_info.error}'
|
msg = f'用户信息请求错误:\n{user_info.error}'
|
||||||
@@ -138,12 +136,14 @@ class Player:
|
|||||||
@overload
|
@overload
|
||||||
async def get_summaries(self, summaries_type: Literal['zen']) -> ZenSuccessModel: ...
|
async def get_summaries(self, summaries_type: Literal['zen']) -> ZenSuccessModel: ...
|
||||||
@overload
|
@overload
|
||||||
|
async def get_summaries(self, summaries_type: Literal['league']) -> LeagueSuccessModel: ...
|
||||||
|
@overload
|
||||||
async def get_summaries(self, summaries_type: Literal['achievements']) -> AchievementsSuccessModel: ...
|
async def get_summaries(self, summaries_type: Literal['achievements']) -> AchievementsSuccessModel: ...
|
||||||
|
|
||||||
async def get_summaries(self, summaries_type: Summaries) -> SummariesModel:
|
async def get_summaries(self, summaries_type: Summaries) -> SummariesModel:
|
||||||
if summaries_type not in self._summaries:
|
if summaries_type not in self._summaries:
|
||||||
raw_summaries = await Cache.get(
|
raw_summaries = await Cache.get(
|
||||||
splice_url([BASE_URL, 'users/', f'{self._request_user_parameter}/', 'summaries/', summaries_type])
|
BASE_URL / 'users' / self._request_user_parameter / 'summaries' / summaries_type
|
||||||
)
|
)
|
||||||
summaries: SummariesModel | FailedModel = type_validate_json(
|
summaries: SummariesModel | FailedModel = type_validate_json(
|
||||||
self.__SUMMARIES_MAPPING[summaries_type] | FailedModel, # type: ignore[arg-type]
|
self.__SUMMARIES_MAPPING[summaries_type] | FailedModel, # type: ignore[arg-type]
|
||||||
@@ -163,21 +163,34 @@ class Player:
|
|||||||
)
|
)
|
||||||
return self._summaries[summaries_type]
|
return self._summaries[summaries_type]
|
||||||
|
|
||||||
|
async def get_leagueflow(self) -> LeagueFlowSuccess:
|
||||||
|
if self._leagueflow is None:
|
||||||
|
leagueflow: LeagueFlow = type_validate_json(
|
||||||
|
LeagueFlow, # type: ignore[arg-type]
|
||||||
|
await Cache.get(BASE_URL / 'labs/leagueflow' / self._request_user_parameter),
|
||||||
|
)
|
||||||
|
if isinstance(leagueflow, FailedModel):
|
||||||
|
msg = f'League 历史记录请求错误:\n{leagueflow.error}'
|
||||||
|
raise RequestError(msg)
|
||||||
|
self._leagueflow = leagueflow
|
||||||
|
return self._leagueflow
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@alru_cache
|
|
||||||
async def sprint(self) -> SummariesSoloSuccessModel:
|
async def sprint(self) -> SummariesSoloSuccessModel:
|
||||||
return await self.get_summaries('40l')
|
return await self.get_summaries('40l')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@alru_cache
|
|
||||||
async def blitz(self) -> SummariesSoloSuccessModel:
|
async def blitz(self) -> SummariesSoloSuccessModel:
|
||||||
return await self.get_summaries('blitz')
|
return await self.get_summaries('blitz')
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@alru_cache
|
|
||||||
async def zen(self) -> ZenSuccessModel:
|
async def zen(self) -> ZenSuccessModel:
|
||||||
return await self.get_summaries('zen')
|
return await self.get_summaries('zen')
|
||||||
|
|
||||||
|
@property
|
||||||
|
async def league(self) -> LeagueSuccessModel:
|
||||||
|
return await self.get_summaries('league')
|
||||||
|
|
||||||
async def _get_local_summaries_user(self) -> SummariesUser | None:
|
async def _get_local_summaries_user(self) -> SummariesUser | None:
|
||||||
allow_summaries: set[Literal['40l', 'blitz', 'zenith', 'zenithex']] = {
|
allow_summaries: set[Literal['40l', 'blitz', 'zenith', 'zenithex']] = {
|
||||||
'40l',
|
'40l',
|
||||||
@@ -212,16 +225,7 @@ class Player:
|
|||||||
async def get_records(self, mode_type: RecordModeType, records_type: RecordType) -> RecordsSoloSuccessModel:
|
async def get_records(self, mode_type: RecordModeType, records_type: RecordType) -> RecordsSoloSuccessModel:
|
||||||
if (record_key := RecordKey(mode_type, records_type)) not in self._records:
|
if (record_key := RecordKey(mode_type, records_type)) not in self._records:
|
||||||
raw_records = await Cache.get(
|
raw_records = await Cache.get(
|
||||||
splice_url(
|
BASE_URL / 'users' / self._request_user_parameter / 'records' / mode_type / records_type,
|
||||||
[
|
|
||||||
BASE_URL,
|
|
||||||
'users/',
|
|
||||||
f'{self._request_user_parameter}/',
|
|
||||||
'records/',
|
|
||||||
f'{mode_type}/',
|
|
||||||
records_type,
|
|
||||||
]
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
records: RecordsSoloSuccessModel | FailedModel = type_validate_json(SoloRecord, raw_records) # type: ignore[arg-type]
|
records: RecordsSoloSuccessModel | FailedModel = type_validate_json(SoloRecord, raw_records) # type: ignore[arg-type]
|
||||||
if isinstance(records, FailedModel):
|
if isinstance(records, FailedModel):
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from ...typing import Prisecter
|
||||||
|
|
||||||
|
|
||||||
class AggregateStats(BaseModel):
|
class AggregateStats(BaseModel):
|
||||||
@@ -39,11 +41,31 @@ class Garbage(BaseModel):
|
|||||||
cleared: int
|
cleared: int
|
||||||
|
|
||||||
|
|
||||||
class P(BaseModel): # what is P
|
class P(BaseModel):
|
||||||
pri: float
|
pri: float
|
||||||
sec: float
|
sec: float
|
||||||
ter: float
|
ter: float
|
||||||
|
|
||||||
|
def to_prisecter(self) -> Prisecter:
|
||||||
|
return Prisecter(f'{self.pri}:{self.sec}:{self.ter}')
|
||||||
|
|
||||||
|
|
||||||
|
# fmt: off
|
||||||
|
class ArCounts(BaseModel):
|
||||||
|
bronze: int | None = Field(default=None, alias='1') # pyright: ignore [reportGeneralTypeIssues]
|
||||||
|
silver: int | None = Field(default=None, alias='2') # pyright: ignore [reportGeneralTypeIssues]
|
||||||
|
gold: int | None = Field(default=None, alias='3') # pyright: ignore [reportGeneralTypeIssues]
|
||||||
|
platinum: int | None = Field(default=None, alias='4') # pyright: ignore [reportGeneralTypeIssues]
|
||||||
|
diamond: int | None = Field(default=None, alias='5') # pyright: ignore [reportGeneralTypeIssues]
|
||||||
|
issued: int | None = Field(default=None, alias='100') # pyright: ignore [reportGeneralTypeIssues]
|
||||||
|
top3: int | None = Field(default=None, alias='t3')
|
||||||
|
top5: int | None = Field(default=None, alias='t5')
|
||||||
|
top10: int | None = Field(default=None, alias='t10')
|
||||||
|
top25: int | None = Field(default=None, alias='t25')
|
||||||
|
top50: int | None = Field(default=None, alias='t50')
|
||||||
|
top100: int | None = Field(default=None, alias='t100')
|
||||||
|
# fmt: on
|
||||||
|
|
||||||
|
|
||||||
class Cache(BaseModel):
|
class Cache(BaseModel):
|
||||||
status: str
|
status: str
|
||||||
|
|||||||
@@ -21,7 +21,7 @@ class Stats(BaseModel):
|
|||||||
level_lines: int
|
level_lines: int
|
||||||
level_lines_needed: int
|
level_lines_needed: int
|
||||||
inputs: int
|
inputs: int
|
||||||
holds: int
|
holds: int = 0
|
||||||
time: Time | None = None # ?: 不知道是之后都没有了还是还会有
|
time: Time | None = None # ?: 不知道是之后都没有了还是还会有
|
||||||
score: int
|
score: int
|
||||||
zenlevel: int
|
zenlevel: int
|
||||||
|
|||||||
@@ -0,0 +1,43 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from enum import IntEnum
|
||||||
|
from typing import Literal, NamedTuple
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from ..base import FailedModel
|
||||||
|
from ..base import SuccessModel as BaseSuccessModel
|
||||||
|
|
||||||
|
|
||||||
|
class Result(IntEnum):
|
||||||
|
VICTORY = 1
|
||||||
|
DEFEAT = 2
|
||||||
|
VICTORY_BY_DISQUALIFICATION = 3
|
||||||
|
DEFEAT_BY_DISQUALIFICATION = 4
|
||||||
|
TIE = 5
|
||||||
|
NO_CONTEST = 6
|
||||||
|
MATCH_NULLIFIED = 7
|
||||||
|
|
||||||
|
|
||||||
|
class Point(NamedTuple):
|
||||||
|
timestamp_offset: int
|
||||||
|
result: Result
|
||||||
|
post_match_tr: int
|
||||||
|
opponent_pre_match_tr: int
|
||||||
|
"""If the opponent was unranked, same as post_match_tr."""
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
start_time: datetime = Field(..., alias='startTime')
|
||||||
|
points: list[Point] = Field(..., min_length=1)
|
||||||
|
|
||||||
|
|
||||||
|
class Empty(BaseModel):
|
||||||
|
start_time: Literal[9007199254740991] = Field(..., alias='startTime')
|
||||||
|
points: list = Field(..., max_length=0)
|
||||||
|
|
||||||
|
|
||||||
|
class LeagueFlowSuccess(BaseSuccessModel):
|
||||||
|
data: Data | Empty
|
||||||
|
|
||||||
|
|
||||||
|
LeagueFlow = LeagueFlowSuccess | FailedModel
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from nonebot.compat import PYDANTIC_V2
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from ...typing import Prisecter
|
||||||
|
|
||||||
|
|
||||||
|
class Parameter(BaseModel):
|
||||||
|
after: Prisecter | None = None
|
||||||
|
before: Prisecter | None = None
|
||||||
|
limit: int = Field(default=25, ge=1, le=100)
|
||||||
|
country: str | None = None
|
||||||
|
|
||||||
|
def to_params(self) -> dict[str, Any]:
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
return self.model_dump(exclude_defaults=True)
|
||||||
|
return self.dict(exclude_defaults=True)
|
||||||
@@ -1,27 +0,0 @@
|
|||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
from ..base import SuccessModel
|
|
||||||
from .base import Entry as BaseEntry
|
|
||||||
|
|
||||||
|
|
||||||
class ArCounts(BaseModel):
|
|
||||||
bronze: int | None = Field(None, alias='1')
|
|
||||||
silver: int | None = Field(None, alias='2')
|
|
||||||
gold: int | None = Field(None, alias='3')
|
|
||||||
platinum: int | None = Field(None, alias='4')
|
|
||||||
diamond: int | None = Field(None, alias='5')
|
|
||||||
issued: int | None = Field(None, alias='100')
|
|
||||||
top10: int | None = Field(None, alias='t10')
|
|
||||||
|
|
||||||
|
|
||||||
class Entry(BaseEntry):
|
|
||||||
ar: int
|
|
||||||
ar_counts: ArCounts
|
|
||||||
|
|
||||||
|
|
||||||
class Data(BaseModel):
|
|
||||||
entries: list[Entry]
|
|
||||||
|
|
||||||
|
|
||||||
class ArSuccessModel(SuccessModel):
|
|
||||||
data: Data
|
|
||||||
@@ -1,30 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
from ...typing import Rank
|
|
||||||
from ..base import P
|
|
||||||
|
|
||||||
|
|
||||||
class League(BaseModel):
|
|
||||||
gamesplayed: int
|
|
||||||
gameswon: int
|
|
||||||
rating: int
|
|
||||||
rank: Rank
|
|
||||||
decaying: bool
|
|
||||||
|
|
||||||
|
|
||||||
class Entry(BaseModel):
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
username: str
|
|
||||||
role: str
|
|
||||||
xp: float
|
|
||||||
league: League
|
|
||||||
supporter: bool | None = None
|
|
||||||
verified: bool
|
|
||||||
country: str | None = None
|
|
||||||
ts: datetime
|
|
||||||
gamesplayed: int
|
|
||||||
gameswon: int
|
|
||||||
gametime: float
|
|
||||||
p: P
|
|
||||||
@@ -0,0 +1,50 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from ...typing import Rank, ValidRank
|
||||||
|
from ..base import ArCounts, FailedModel, P, SuccessModel
|
||||||
|
|
||||||
|
|
||||||
|
class League(BaseModel):
|
||||||
|
gamesplayed: int
|
||||||
|
gameswon: int
|
||||||
|
tr: float
|
||||||
|
gxe: float
|
||||||
|
rank: Rank
|
||||||
|
bestrank: ValidRank
|
||||||
|
glicko: float
|
||||||
|
rd: float
|
||||||
|
apm: float
|
||||||
|
pps: float
|
||||||
|
vs: float
|
||||||
|
decaying: bool
|
||||||
|
|
||||||
|
|
||||||
|
class Entry(BaseModel):
|
||||||
|
id: str = Field(..., alias='_id')
|
||||||
|
username: str
|
||||||
|
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop']
|
||||||
|
ts: datetime | None = None
|
||||||
|
xp: float
|
||||||
|
country: str | None = None
|
||||||
|
supporter: bool | None = None
|
||||||
|
league: League
|
||||||
|
gamesplayed: int
|
||||||
|
gameswon: int
|
||||||
|
gametime: float
|
||||||
|
ar: int
|
||||||
|
ar_counts: ArCounts
|
||||||
|
p: P
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
entries: list[Entry]
|
||||||
|
|
||||||
|
|
||||||
|
class BySuccessModel(SuccessModel):
|
||||||
|
data: Data
|
||||||
|
|
||||||
|
|
||||||
|
By = BySuccessModel | FailedModel
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from ..base import SuccessModel
|
from ..base import FailedModel, SuccessModel
|
||||||
from ..summaries.solo import Record
|
from ..summaries.solo import Record
|
||||||
|
|
||||||
|
|
||||||
@@ -10,3 +10,6 @@ class Data(BaseModel):
|
|||||||
|
|
||||||
class SoloSuccessModel(SuccessModel):
|
class SoloSuccessModel(SuccessModel):
|
||||||
data: Data
|
data: Data
|
||||||
|
|
||||||
|
|
||||||
|
Solo = SoloSuccessModel | FailedModel
|
||||||
|
|||||||
@@ -1,12 +0,0 @@
|
|||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
from ..base import SuccessModel
|
|
||||||
from .base import Entry
|
|
||||||
|
|
||||||
|
|
||||||
class Data(BaseModel):
|
|
||||||
entries: list[Entry]
|
|
||||||
|
|
||||||
|
|
||||||
class XpSuccessModel(SuccessModel):
|
|
||||||
data: Data
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from ..base import SuccessModel
|
from ..base import FailedModel, SuccessModel
|
||||||
from ..summaries.zenith import Record
|
from ..summaries.zenith import Record
|
||||||
|
|
||||||
|
|
||||||
@@ -10,3 +10,6 @@ class Data(BaseModel):
|
|||||||
|
|
||||||
class ZenithSuccessModel(SuccessModel):
|
class ZenithSuccessModel(SuccessModel):
|
||||||
data: Data
|
data: Data
|
||||||
|
|
||||||
|
|
||||||
|
Zenith = ZenithSuccessModel | FailedModel
|
||||||
|
|||||||
@@ -1,19 +1,21 @@
|
|||||||
from .achievements import Achievements, AchievementsSuccessModel
|
from .achievements import Achievements, AchievementsSuccessModel
|
||||||
|
from .league import LeagueSuccessModel
|
||||||
from .solo import Solo, SoloSuccessModel
|
from .solo import Solo, SoloSuccessModel
|
||||||
from .zen import Zen, ZenSuccessModel
|
from .zen import Zen, ZenSuccessModel
|
||||||
from .zenith import Zenith, ZenithEx, ZenithSuccessModel
|
from .zenith import Zenith, ZenithEx, ZenithSuccessModel
|
||||||
|
|
||||||
SummariesModel = AchievementsSuccessModel | SoloSuccessModel | ZenSuccessModel | ZenithSuccessModel
|
SummariesModel = AchievementsSuccessModel | SoloSuccessModel | ZenSuccessModel | LeagueSuccessModel | ZenithSuccessModel
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
'Achievements',
|
'Achievements',
|
||||||
'AchievementsSuccessModel',
|
'AchievementsSuccessModel',
|
||||||
|
'LeagueSuccessModel',
|
||||||
'Solo',
|
'Solo',
|
||||||
'SoloSuccessModel',
|
'SoloSuccessModel',
|
||||||
|
'SummariesModel',
|
||||||
'Zen',
|
'Zen',
|
||||||
'ZenSuccessModel',
|
|
||||||
'Zenith',
|
'Zenith',
|
||||||
'ZenithEx',
|
'ZenithEx',
|
||||||
'ZenithSuccessModel',
|
'ZenithSuccessModel',
|
||||||
'SummariesModel',
|
'ZenSuccessModel',
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -7,5 +7,4 @@ class User(BaseModel):
|
|||||||
avatar_revision: int | None
|
avatar_revision: int | None
|
||||||
banner_revision: int | None
|
banner_revision: int | None
|
||||||
country: str | None
|
country: str | None
|
||||||
verified: int
|
|
||||||
supporter: int
|
supporter: int
|
||||||
|
|||||||
@@ -0,0 +1,130 @@
|
|||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from nonebot.compat import PYDANTIC_V2
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from ...typing import Rank, S1Rank, S1ValidRank
|
||||||
|
from ..base import SuccessModel
|
||||||
|
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
from pydantic import field_validator
|
||||||
|
else:
|
||||||
|
from pydantic import validator
|
||||||
|
|
||||||
|
|
||||||
|
class PastInner(BaseModel):
|
||||||
|
season: str
|
||||||
|
username: str
|
||||||
|
country: str | None = None
|
||||||
|
placement: int | None = None
|
||||||
|
gamesplayed: int
|
||||||
|
gameswon: int
|
||||||
|
glicko: float
|
||||||
|
gxe: float
|
||||||
|
tr: float
|
||||||
|
rd: float
|
||||||
|
rank: S1Rank
|
||||||
|
bestrank: S1ValidRank
|
||||||
|
ranked: bool
|
||||||
|
apm: float
|
||||||
|
pps: float
|
||||||
|
vs: float
|
||||||
|
|
||||||
|
|
||||||
|
class Past(BaseModel):
|
||||||
|
first: PastInner | None = Field(default=None, alias='1') # pyright: ignore [reportGeneralTypeIssues]
|
||||||
|
|
||||||
|
|
||||||
|
class BaseData(BaseModel):
|
||||||
|
decaying: bool
|
||||||
|
past: Past
|
||||||
|
|
||||||
|
|
||||||
|
class NeverPlayedData(BaseData):
|
||||||
|
gamesplayed: Literal[0]
|
||||||
|
gameswon: Literal[0]
|
||||||
|
glicko: Literal[-1]
|
||||||
|
rd: Literal[-1]
|
||||||
|
gxe: Literal[-1]
|
||||||
|
tr: Literal[-1]
|
||||||
|
rank: Literal['z']
|
||||||
|
apm: None = None
|
||||||
|
pps: None = None
|
||||||
|
vs: None = None
|
||||||
|
standing: Literal[-1]
|
||||||
|
standing_local: Literal[-1]
|
||||||
|
prev_rank: None
|
||||||
|
prev_at: Literal[-1]
|
||||||
|
next_rank: None
|
||||||
|
next_at: Literal[-1]
|
||||||
|
percentile: Literal[-1]
|
||||||
|
percentile_rank: Literal['z']
|
||||||
|
|
||||||
|
|
||||||
|
class NeverRatedData(BaseData):
|
||||||
|
gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||||
|
gameswon: int
|
||||||
|
glicko: Literal[-1]
|
||||||
|
rd: Literal[-1]
|
||||||
|
gxe: Literal[-1]
|
||||||
|
tr: Literal[-1]
|
||||||
|
apm: float
|
||||||
|
pps: float
|
||||||
|
vs: float
|
||||||
|
rank: Literal['z']
|
||||||
|
standing: Literal[-1]
|
||||||
|
standing_local: Literal[-1]
|
||||||
|
prev_rank: None
|
||||||
|
prev_at: Literal[-1]
|
||||||
|
next_rank: None
|
||||||
|
next_at: Literal[-1]
|
||||||
|
percentile: Literal[-1]
|
||||||
|
percentile_rank: Literal['z']
|
||||||
|
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
|
||||||
|
@field_validator('apm', 'pps', 'vs', mode='before')
|
||||||
|
@classmethod
|
||||||
|
def _(cls, value: float | None) -> float:
|
||||||
|
if value is None:
|
||||||
|
return 0
|
||||||
|
return value
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
@validator('apm', 'pps', 'vs', pre=True, always=True)
|
||||||
|
@classmethod
|
||||||
|
def _(cls, value: float | None) -> float:
|
||||||
|
if value is None:
|
||||||
|
return 0
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class RatedData(BaseData):
|
||||||
|
gamesplayed: int
|
||||||
|
gameswon: int
|
||||||
|
glicko: float
|
||||||
|
rd: float
|
||||||
|
gxe: float
|
||||||
|
tr: float
|
||||||
|
rank: Rank
|
||||||
|
bestrank: Rank
|
||||||
|
standing: int
|
||||||
|
apm: float
|
||||||
|
pps: float
|
||||||
|
vs: float
|
||||||
|
standing_local: int
|
||||||
|
prev_rank: Rank | None = None
|
||||||
|
prev_at: int
|
||||||
|
next_rank: Rank | None = None
|
||||||
|
next_at: int
|
||||||
|
percentile: float
|
||||||
|
percentile_rank: str
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidData(BaseModel):
|
||||||
|
"""I don't know what osk is doing, but the return value is an empty dictionary"""
|
||||||
|
|
||||||
|
|
||||||
|
class LeagueSuccessModel(SuccessModel):
|
||||||
|
data: NeverPlayedData | NeverRatedData | RatedData | InvalidData
|
||||||
@@ -1,59 +0,0 @@
|
|||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
from ..typing import Rank
|
|
||||||
from .base import FailedModel
|
|
||||||
from .base import SuccessModel as BaseSuccessModel
|
|
||||||
|
|
||||||
|
|
||||||
class _User(BaseModel):
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
username: str
|
|
||||||
role: str
|
|
||||||
xp: float
|
|
||||||
supporter: bool | None = None
|
|
||||||
verified: bool
|
|
||||||
country: str | None = None
|
|
||||||
|
|
||||||
|
|
||||||
class _League(BaseModel):
|
|
||||||
gamesplayed: int
|
|
||||||
gameswon: int
|
|
||||||
rating: float
|
|
||||||
rank: Rank
|
|
||||||
bestrank: Rank
|
|
||||||
decaying: bool
|
|
||||||
|
|
||||||
|
|
||||||
class ValidLeague(_League):
|
|
||||||
glicko: float
|
|
||||||
rd: float
|
|
||||||
apm: float
|
|
||||||
pps: float
|
|
||||||
vs: float
|
|
||||||
|
|
||||||
|
|
||||||
class ValidUser(_User):
|
|
||||||
league: ValidLeague
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidLeague(_League):
|
|
||||||
glicko: float | None = None
|
|
||||||
rd: float | None = None
|
|
||||||
apm: float | None = None
|
|
||||||
pps: float | None = None
|
|
||||||
vs: float | None = None
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidUser(_User):
|
|
||||||
league: InvalidLeague
|
|
||||||
|
|
||||||
|
|
||||||
class Data(BaseModel):
|
|
||||||
users: list[ValidUser | InvalidUser]
|
|
||||||
|
|
||||||
|
|
||||||
class TetraLeagueSuccess(BaseSuccessModel):
|
|
||||||
data: Data
|
|
||||||
|
|
||||||
|
|
||||||
TetraLeague = TetraLeagueSuccess | FailedModel
|
|
||||||
@@ -3,7 +3,7 @@ from typing import Literal
|
|||||||
|
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
from .base import FailedModel
|
from .base import ArCounts, FailedModel
|
||||||
from .base import SuccessModel as BaseSuccessModel
|
from .base import SuccessModel as BaseSuccessModel
|
||||||
|
|
||||||
|
|
||||||
@@ -14,13 +14,19 @@ class Badge(BaseModel):
|
|||||||
ts: datetime | Literal[False] | None = None
|
ts: datetime | Literal[False] | None = None
|
||||||
|
|
||||||
|
|
||||||
class Discord(BaseModel):
|
class Connection(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
username: str
|
username: str
|
||||||
|
display_username: str
|
||||||
|
|
||||||
|
|
||||||
class Connections(BaseModel):
|
class Connections(BaseModel):
|
||||||
discord: Discord | None = None
|
discord: Connection | None = None
|
||||||
|
twitch: Connection | None = None
|
||||||
|
twitter: Connection | None = None
|
||||||
|
reddit: Connection | None = None
|
||||||
|
youtube: Connection | None = None
|
||||||
|
steam: Connection | None = None
|
||||||
|
|
||||||
|
|
||||||
class Distinguishment(BaseModel):
|
class Distinguishment(BaseModel):
|
||||||
@@ -28,9 +34,9 @@ class Distinguishment(BaseModel):
|
|||||||
|
|
||||||
|
|
||||||
class Data(BaseModel):
|
class Data(BaseModel):
|
||||||
id: str = Field(..., alias='_id')
|
id: str = Field(default=..., alias='_id')
|
||||||
username: str
|
username: str
|
||||||
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'banned']
|
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'hidden', 'banned']
|
||||||
ts: datetime | None = None
|
ts: datetime | None = None
|
||||||
botmaster: str | None = None
|
botmaster: str | None = None
|
||||||
badges: list[Badge]
|
badges: list[Badge]
|
||||||
@@ -42,7 +48,6 @@ class Data(BaseModel):
|
|||||||
badstanding: bool | None = None
|
badstanding: bool | None = None
|
||||||
supporter: bool | None = None # osk说是必有, 但实际上不是 fkosk
|
supporter: bool | None = None # osk说是必有, 但实际上不是 fkosk
|
||||||
supporter_tier: int
|
supporter_tier: int
|
||||||
verified: bool
|
|
||||||
avatar_revision: int | None = None
|
avatar_revision: int | None = None
|
||||||
"""This user's avatar ID. Get their avatar at
|
"""This user's avatar ID. Get their avatar at
|
||||||
|
|
||||||
@@ -57,6 +62,9 @@ class Data(BaseModel):
|
|||||||
connections: Connections
|
connections: Connections
|
||||||
friend_count: int | None = None
|
friend_count: int | None = None
|
||||||
distinguishment: Distinguishment | None = None
|
distinguishment: Distinguishment | None = None
|
||||||
|
achievements: list[int]
|
||||||
|
ar: int
|
||||||
|
ar_counts: ArCounts
|
||||||
|
|
||||||
|
|
||||||
class UserInfoSuccess(BaseSuccessModel):
|
class UserInfoSuccess(BaseSuccessModel):
|
||||||
|
|||||||
@@ -1,55 +0,0 @@
|
|||||||
from typing import Literal, NamedTuple, TypedDict, overload
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from nonebot.compat import type_validate_json
|
|
||||||
|
|
||||||
from ....utils.exception import RequestError
|
|
||||||
from ....utils.request import splice_url
|
|
||||||
from ..constant import BASE_URL
|
|
||||||
from .cache import Cache
|
|
||||||
from .schemas.base import FailedModel
|
|
||||||
from .schemas.tetra_league import TetraLeague, TetraLeagueSuccess
|
|
||||||
|
|
||||||
|
|
||||||
class Parameter(TypedDict, total=False):
|
|
||||||
after: float
|
|
||||||
before: float
|
|
||||||
limit: int
|
|
||||||
country: str
|
|
||||||
|
|
||||||
|
|
||||||
async def leaderboard(parameter: Parameter | None = None) -> TetraLeagueSuccess:
|
|
||||||
league: TetraLeague = type_validate_json(
|
|
||||||
TetraLeague, # type: ignore[arg-type]
|
|
||||||
(await Cache.get(splice_url([BASE_URL, 'users/lists/league', f'?{urlencode(parameter or {})}']))),
|
|
||||||
)
|
|
||||||
if isinstance(league, FailedModel):
|
|
||||||
msg = f'排行榜数据请求错误:\n{league.error}'
|
|
||||||
raise RequestError(msg)
|
|
||||||
return league
|
|
||||||
|
|
||||||
|
|
||||||
class FullExport(NamedTuple):
|
|
||||||
model: TetraLeagueSuccess
|
|
||||||
original: bytes
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
|
||||||
async def full_export(*, with_original: Literal[False]) -> TetraLeagueSuccess: ...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
|
||||||
async def full_export(*, with_original: Literal[True]) -> FullExport: ...
|
|
||||||
|
|
||||||
|
|
||||||
async def full_export(*, with_original: bool) -> TetraLeagueSuccess | FullExport:
|
|
||||||
full: TetraLeague = type_validate_json(
|
|
||||||
TetraLeague, # type: ignore[arg-type]
|
|
||||||
(data := await Cache.get(splice_url([BASE_URL, 'users/lists/league/all']))),
|
|
||||||
)
|
|
||||||
if isinstance(full, FailedModel):
|
|
||||||
msg = f'排行榜数据请求错误:\n{full.error}'
|
|
||||||
raise RequestError(msg)
|
|
||||||
if with_original:
|
|
||||||
return FullExport(full, data)
|
|
||||||
return full
|
|
||||||
@@ -1,6 +1,7 @@
|
|||||||
from typing import Literal
|
from typing import Literal, NewType
|
||||||
|
|
||||||
ValidRank = Literal[
|
S1ValidRank = Literal[
|
||||||
|
'x+',
|
||||||
'x',
|
'x',
|
||||||
'u',
|
'u',
|
||||||
'ss',
|
'ss',
|
||||||
@@ -19,7 +20,9 @@ ValidRank = Literal[
|
|||||||
'd+',
|
'd+',
|
||||||
'd',
|
'd',
|
||||||
]
|
]
|
||||||
|
S1Rank = S1ValidRank | Literal['z']
|
||||||
|
|
||||||
|
ValidRank = Literal['x+'] | S1ValidRank
|
||||||
Rank = ValidRank | Literal['z'] # 未定级
|
Rank = ValidRank | Literal['z'] # 未定级
|
||||||
|
|
||||||
Summaries = Literal[
|
Summaries = Literal[
|
||||||
@@ -27,7 +30,7 @@ Summaries = Literal[
|
|||||||
'blitz',
|
'blitz',
|
||||||
'zenith',
|
'zenith',
|
||||||
'zenithex',
|
'zenithex',
|
||||||
# 'league', # 等待正式赛季开始
|
'league',
|
||||||
'zen',
|
'zen',
|
||||||
'achievements',
|
'achievements',
|
||||||
]
|
]
|
||||||
@@ -40,3 +43,5 @@ Records = Literal[
|
|||||||
'blitz_recent',
|
'blitz_recent',
|
||||||
'blitz_progression',
|
'blitz_progression',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
Prisecter = NewType('Prisecter', str)
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
from hashlib import md5
|
from hashlib import md5
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from arclet.alconna import Arg, ArgFlag
|
from arclet.alconna import Arg, ArgFlag
|
||||||
from nonebot_plugin_alconna import Args, Subcommand
|
from nonebot_plugin_alconna import Args, Subcommand
|
||||||
@@ -9,6 +8,7 @@ from nonebot_plugin_session import EventSession
|
|||||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
from nonebot_plugin_user import User
|
from nonebot_plugin_user import User
|
||||||
from nonebot_plugin_userinfo import BotUserInfo, UserInfo
|
from nonebot_plugin_userinfo import BotUserInfo, UserInfo
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
from ...db import BindStatus, create_or_update_bind, trigger
|
from ...db import BindStatus, create_or_update_bind, trigger
|
||||||
from ...utils.host import HostPage, get_self_netloc
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
@@ -67,7 +67,10 @@ async def _(nb_user: User, account: Player, event_session: EventSession, bot_inf
|
|||||||
platform='TETR.IO',
|
platform='TETR.IO',
|
||||||
status='unknown',
|
status='unknown',
|
||||||
user=People(
|
user=People(
|
||||||
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": avatar_revision})}'
|
avatar=str(
|
||||||
|
URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}')
|
||||||
|
% {'revision': avatar_revision}
|
||||||
|
)
|
||||||
if (avatar_revision := (await account.avatar_revision)) is not None and avatar_revision != 0
|
if (avatar_revision := (await account.avatar_revision)) is not None and avatar_revision != 0
|
||||||
else Avatar(type='identicon', hash=md5(user.ID.encode()).hexdigest()), # noqa: S324
|
else Avatar(type='identicon', hash=md5(user.ID.encode()).hexdigest()), # noqa: S324
|
||||||
name=user.name.upper(),
|
name=user.name.upper(),
|
||||||
|
|||||||
@@ -1,13 +1,16 @@
|
|||||||
from re import compile
|
from re import compile
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
from .api.typing import ValidRank
|
from .api.typing import ValidRank
|
||||||
|
|
||||||
GAME_TYPE: Literal['IO'] = 'IO'
|
GAME_TYPE: Literal['IO'] = 'IO'
|
||||||
|
|
||||||
BASE_URL = 'https://ch.tetr.io/api/'
|
BASE_URL = URL('https://ch.tetr.io/api/')
|
||||||
|
|
||||||
RANK_PERCENTILE: dict[ValidRank, float] = {
|
RANK_PERCENTILE: dict[ValidRank, float] = {
|
||||||
|
'x+': 0.2,
|
||||||
'x': 1,
|
'x': 1,
|
||||||
'u': 5,
|
'u': 5,
|
||||||
'ss': 11,
|
'ss': 11,
|
||||||
|
|||||||
91
nonebot_plugin_tetris_stats/games/tetrio/list.py
Normal file
91
nonebot_plugin_tetris_stats/games/tetrio/list.py
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
from nonebot_plugin_alconna import Args, Option, Subcommand
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_session import EventSession
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from ...db import trigger
|
||||||
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
|
from ...utils.metrics import get_metrics
|
||||||
|
from ...utils.render import render
|
||||||
|
from ...utils.render.schemas.tetrio.user.list_v2 import List, TetraLeague, User
|
||||||
|
from ...utils.screenshot import screenshot
|
||||||
|
from .. import alc
|
||||||
|
from . import command
|
||||||
|
from .api.leaderboards import by
|
||||||
|
from .api.schemas.base import P
|
||||||
|
from .api.schemas.leaderboards import Parameter
|
||||||
|
from .constant import GAME_TYPE
|
||||||
|
|
||||||
|
command.add(
|
||||||
|
Subcommand(
|
||||||
|
'list',
|
||||||
|
Option('--max-tr', Args['max_tr', float], help_text='TR的上限'),
|
||||||
|
Option('--min-tr', Args['min_tr', float], help_text='TR的下限'),
|
||||||
|
Option('--limit', Args['limit', int], help_text='查询数量'),
|
||||||
|
Option('--country', Args['country', str], help_text='国家代码'),
|
||||||
|
help_text='查询 TETR.IO 段位排行榜',
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('TETRIO.list')
|
||||||
|
async def _(
|
||||||
|
event_session: EventSession,
|
||||||
|
max_tr: float | None = None,
|
||||||
|
min_tr: float | None = None,
|
||||||
|
limit: int | None = None,
|
||||||
|
country: str | None = None,
|
||||||
|
):
|
||||||
|
country = country.upper() if country is not None else None
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='list',
|
||||||
|
command_args=[
|
||||||
|
f'{key} {value}'
|
||||||
|
for key, value in zip(
|
||||||
|
('--max-tr', '--min-tr', '--limit', '--country'), (max_tr, min_tr, limit, country), strict=True
|
||||||
|
)
|
||||||
|
if value is not None
|
||||||
|
],
|
||||||
|
):
|
||||||
|
parameter = Parameter(
|
||||||
|
# ?: 似乎是只需要 pri 至少 league 榜的返回值只有 pri
|
||||||
|
after=P(pri=max_tr, sec=0, ter=0).to_prisecter() if max_tr is not None else None,
|
||||||
|
before=P(pri=min_tr, sec=0, ter=0).to_prisecter() if min_tr is not None else None,
|
||||||
|
limit=limit or 25,
|
||||||
|
country=country,
|
||||||
|
)
|
||||||
|
league = await by('league', parameter)
|
||||||
|
async with HostPage(
|
||||||
|
await render(
|
||||||
|
'v2/tetrio/user/list',
|
||||||
|
List(
|
||||||
|
show_index=True,
|
||||||
|
users=[
|
||||||
|
User(
|
||||||
|
id=i.id,
|
||||||
|
name=i.username.upper(),
|
||||||
|
avatar=f'https://tetr.io/user-content/avatars/{i.id}.jpg',
|
||||||
|
country=i.country,
|
||||||
|
tetra_league=TetraLeague(
|
||||||
|
rank=i.league.rank,
|
||||||
|
tr=round(i.league.tr, 2),
|
||||||
|
glicko=round(i.league.glicko, 2),
|
||||||
|
rd=round(i.league.rd, 2),
|
||||||
|
decaying=i.league.decaying,
|
||||||
|
pps=(metrics := get_metrics(pps=i.league.pps, apm=i.league.apm, vs=i.league.vs)).pps,
|
||||||
|
apm=metrics.apm,
|
||||||
|
apl=metrics.apl,
|
||||||
|
vs=metrics.vs,
|
||||||
|
adpl=metrics.adpl,
|
||||||
|
),
|
||||||
|
xp=i.xp,
|
||||||
|
join_at=None,
|
||||||
|
)
|
||||||
|
for i in league.data.entries
|
||||||
|
],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
) as page_hash:
|
||||||
|
await UniMessage.image(raw=await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')).finish()
|
||||||
@@ -1,10 +1,53 @@
|
|||||||
from nonebot_plugin_orm import Model
|
from datetime import datetime
|
||||||
from sqlalchemy import String
|
from uuid import UUID
|
||||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
|
||||||
|
|
||||||
|
from nonebot_plugin_orm import Model
|
||||||
|
from sqlalchemy import DateTime, ForeignKey, String
|
||||||
|
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column, relationship
|
||||||
|
|
||||||
|
from ...db.models import PydanticType
|
||||||
|
from .api.schemas.leaderboards.by import BySuccessModel, Entry
|
||||||
|
from .api.typing import ValidRank
|
||||||
from .typing import Template
|
from .typing import Template
|
||||||
|
|
||||||
|
|
||||||
class TETRIOUserConfig(MappedAsDataclass, Model):
|
class TETRIOUserConfig(MappedAsDataclass, Model):
|
||||||
id: Mapped[int] = mapped_column(primary_key=True)
|
id: Mapped[int] = mapped_column(primary_key=True)
|
||||||
query_template: Mapped[Template] = mapped_column(String(2))
|
query_template: Mapped[Template] = mapped_column(String(2))
|
||||||
|
|
||||||
|
|
||||||
|
class TETRIOLeagueStats(MappedAsDataclass, Model):
|
||||||
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
|
raw: Mapped[list['TETRIOLeagueHistorical']] = relationship(back_populates='stats', lazy='noload')
|
||||||
|
fields: Mapped[list['TETRIOLeagueStatsField']] = relationship(back_populates='stats')
|
||||||
|
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||||
|
|
||||||
|
|
||||||
|
class TETRIOLeagueHistorical(MappedAsDataclass, Model):
|
||||||
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
|
request_id: Mapped[UUID] = mapped_column(index=True)
|
||||||
|
data: Mapped[BySuccessModel] = mapped_column(PydanticType([], {BySuccessModel}))
|
||||||
|
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||||
|
stats_id: Mapped[int] = mapped_column(ForeignKey('nonebot_plugin_tetris_stats_tetrioleaguestats.id'), init=False)
|
||||||
|
stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='raw')
|
||||||
|
|
||||||
|
|
||||||
|
entry_type = PydanticType([], {Entry})
|
||||||
|
|
||||||
|
|
||||||
|
class TETRIOLeagueStatsField(MappedAsDataclass, Model):
|
||||||
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
|
rank: Mapped[ValidRank] = mapped_column(String(2), index=True)
|
||||||
|
tr_line: Mapped[float]
|
||||||
|
player_count: Mapped[int]
|
||||||
|
low_pps: Mapped[Entry] = mapped_column(entry_type)
|
||||||
|
low_apm: Mapped[Entry] = mapped_column(entry_type)
|
||||||
|
low_vs: Mapped[Entry] = mapped_column(entry_type)
|
||||||
|
avg_pps: Mapped[float]
|
||||||
|
avg_apm: Mapped[float]
|
||||||
|
avg_vs: Mapped[float]
|
||||||
|
high_pps: Mapped[Entry] = mapped_column(entry_type)
|
||||||
|
high_apm: Mapped[Entry] = mapped_column(entry_type)
|
||||||
|
high_vs: Mapped[Entry] = mapped_column(entry_type)
|
||||||
|
stats_id: Mapped[int] = mapped_column(ForeignKey('nonebot_plugin_tetris_stats_tetrioleaguestats.id'), init=False)
|
||||||
|
stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='fields')
|
||||||
|
|||||||
@@ -1,241 +0,0 @@
|
|||||||
from asyncio import gather
|
|
||||||
from datetime import datetime, timedelta, timezone
|
|
||||||
from hashlib import md5
|
|
||||||
from typing import TYPE_CHECKING, TypeVar
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from arclet.alconna import Arg, ArgFlag
|
|
||||||
from nonebot import get_driver
|
|
||||||
from nonebot.adapters import Event
|
|
||||||
from nonebot.matcher import Matcher
|
|
||||||
from nonebot_plugin_alconna import Args, At, Option, Subcommand
|
|
||||||
from nonebot_plugin_alconna.uniseg import UniMessage
|
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
from nonebot_plugin_session import EventSession
|
|
||||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
|
||||||
from nonebot_plugin_user import User as NBUser
|
|
||||||
from nonebot_plugin_user import get_user
|
|
||||||
from sqlalchemy import select
|
|
||||||
|
|
||||||
from ...db import query_bind_info, trigger
|
|
||||||
from ...utils.host import HostPage, get_self_netloc
|
|
||||||
from ...utils.render import render
|
|
||||||
from ...utils.render.schemas.base import Avatar
|
|
||||||
from ...utils.render.schemas.tetrio.user.info_v2 import Badge, Blitz, Sprint, Statistic, Zen
|
|
||||||
from ...utils.render.schemas.tetrio.user.info_v2 import Info as V2TemplateInfo
|
|
||||||
from ...utils.render.schemas.tetrio.user.info_v2 import User as V2TemplateUser
|
|
||||||
from ...utils.screenshot import screenshot
|
|
||||||
from ...utils.typing import Me
|
|
||||||
from .. import add_block_handlers, alc
|
|
||||||
from ..constant import CANT_VERIFY_MESSAGE
|
|
||||||
from . import command, get_player
|
|
||||||
from .api import Player
|
|
||||||
from .constant import GAME_TYPE
|
|
||||||
from .models import TETRIOUserConfig
|
|
||||||
from .typing import Template
|
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
|
||||||
from .api.schemas.summaries import SoloSuccessModel, ZenSuccessModel
|
|
||||||
from .api.schemas.user import User
|
|
||||||
from .api.schemas.user_info import UserInfoSuccess
|
|
||||||
|
|
||||||
UTC = timezone.utc
|
|
||||||
|
|
||||||
driver = get_driver()
|
|
||||||
|
|
||||||
command.add(
|
|
||||||
Subcommand(
|
|
||||||
'query',
|
|
||||||
Args(
|
|
||||||
Arg(
|
|
||||||
'target',
|
|
||||||
At | Me,
|
|
||||||
notice='@想要查询的人 / 自己',
|
|
||||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
|
||||||
),
|
|
||||||
Arg(
|
|
||||||
'account',
|
|
||||||
get_player,
|
|
||||||
notice='TETR.IO 用户名 / ID',
|
|
||||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
|
||||||
),
|
|
||||||
),
|
|
||||||
Option(
|
|
||||||
'--template',
|
|
||||||
Arg('template', Template),
|
|
||||||
alias=['-T'],
|
|
||||||
help_text='要使用的查询模板',
|
|
||||||
),
|
|
||||||
help_text='查询 TETR.IO 游戏信息',
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
alc.shortcut(
|
|
||||||
'(?i:io)(?i:查询|查|query|stats)',
|
|
||||||
command='tstats TETR.IO query',
|
|
||||||
humanized='io查',
|
|
||||||
)
|
|
||||||
alc.shortcut(
|
|
||||||
'fkosk',
|
|
||||||
command='tstats TETR.IO query',
|
|
||||||
arguments=['我'],
|
|
||||||
fuzzy=False,
|
|
||||||
humanized='An Easter egg!',
|
|
||||||
)
|
|
||||||
|
|
||||||
add_block_handlers(alc.assign('TETRIO.query'))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('TETRIO.query')
|
|
||||||
async def _( # noqa: PLR0913
|
|
||||||
user: NBUser,
|
|
||||||
event: Event,
|
|
||||||
matcher: Matcher,
|
|
||||||
target: At | Me,
|
|
||||||
event_session: EventSession,
|
|
||||||
template: Template | None = None,
|
|
||||||
):
|
|
||||||
async with trigger(
|
|
||||||
session_persist_id=await get_session_persist_id(event_session),
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
command_type='query',
|
|
||||||
command_args=[f'--default-template {template}'] if template is not None else [],
|
|
||||||
):
|
|
||||||
async with get_session() as session:
|
|
||||||
bind = await query_bind_info(
|
|
||||||
session=session,
|
|
||||||
user=await get_user(
|
|
||||||
event_session.platform, target.target if isinstance(target, At) else event.get_user_id()
|
|
||||||
),
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
)
|
|
||||||
if template is None:
|
|
||||||
template = await session.scalar(
|
|
||||||
select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
|
|
||||||
)
|
|
||||||
if bind is None:
|
|
||||||
await matcher.finish('未查询到绑定信息')
|
|
||||||
message = UniMessage(CANT_VERIFY_MESSAGE)
|
|
||||||
player = Player(user_id=bind.game_account, trust=True)
|
|
||||||
await (message + UniMessage.image(raw=await make_query_image_v2(player))).finish()
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('TETRIO.query')
|
|
||||||
async def _(user: NBUser, account: Player, event_session: EventSession, template: Template | None = None):
|
|
||||||
async with trigger(
|
|
||||||
session_persist_id=await get_session_persist_id(event_session),
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
command_type='query',
|
|
||||||
command_args=[f'--default-template {template}'] if template is not None else [],
|
|
||||||
):
|
|
||||||
async with get_session() as session:
|
|
||||||
if template is None:
|
|
||||||
template = await session.scalar(
|
|
||||||
select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
|
|
||||||
)
|
|
||||||
await (UniMessage.image(raw=await make_query_image_v2(account))).finish()
|
|
||||||
|
|
||||||
|
|
||||||
N = TypeVar('N', int, float)
|
|
||||||
|
|
||||||
|
|
||||||
def handling_special_value(value: N) -> N | None:
|
|
||||||
return value if value != -1 else None
|
|
||||||
|
|
||||||
|
|
||||||
async def make_query_image_v2(player: Player) -> bytes:
|
|
||||||
user: User
|
|
||||||
user_info: UserInfoSuccess
|
|
||||||
sprint: SoloSuccessModel
|
|
||||||
blitz: SoloSuccessModel
|
|
||||||
zen: ZenSuccessModel
|
|
||||||
avatar_revision: int | None
|
|
||||||
banner_revision: int | None
|
|
||||||
# TODO)) 有没有什么办法能让这类型推导成功)
|
|
||||||
user, user_info, sprint, blitz, zen, avatar_revision, banner_revision = await gather( # type: ignore[assignment]
|
|
||||||
player.user,
|
|
||||||
player.get_info(),
|
|
||||||
player.sprint,
|
|
||||||
player.blitz,
|
|
||||||
player.zen,
|
|
||||||
player.avatar_revision,
|
|
||||||
player.banner_revision,
|
|
||||||
)
|
|
||||||
|
|
||||||
if sprint.data.record is not None:
|
|
||||||
duration = timedelta(milliseconds=sprint.data.record.results.stats.finaltime).total_seconds()
|
|
||||||
sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s' # noqa: PLR2004
|
|
||||||
else:
|
|
||||||
sprint_value = 'N/A'
|
|
||||||
|
|
||||||
play_time: str | None
|
|
||||||
if (game_time := handling_special_value(user_info.data.gametime)) is not None:
|
|
||||||
if game_time // 3600 > 0:
|
|
||||||
play_time = f'{game_time//3600:.0f}h {game_time % 3600 // 60:.0f}m {game_time % 60:.0f}s'
|
|
||||||
elif game_time // 60 > 0:
|
|
||||||
play_time = f'{game_time//60:.0f}m {game_time % 60:.0f}s'
|
|
||||||
else:
|
|
||||||
play_time = f'{game_time:.0f}s'
|
|
||||||
else:
|
|
||||||
play_time = game_time
|
|
||||||
netloc = get_self_netloc()
|
|
||||||
async with HostPage(
|
|
||||||
await render(
|
|
||||||
'v2/tetrio/user/info',
|
|
||||||
V2TemplateInfo(
|
|
||||||
user=V2TemplateUser(
|
|
||||||
id=user.ID,
|
|
||||||
name=user.name.upper(),
|
|
||||||
bio=user_info.data.bio,
|
|
||||||
banner=f'http://{netloc}/host/resource/tetrio/banners/{user.ID}?{urlencode({"revision": banner_revision})}'
|
|
||||||
if banner_revision is not None and banner_revision != 0
|
|
||||||
else None,
|
|
||||||
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": avatar_revision})}'
|
|
||||||
if avatar_revision is not None and avatar_revision != 0
|
|
||||||
else Avatar(
|
|
||||||
type='identicon',
|
|
||||||
hash=md5(user.ID.encode()).hexdigest(), # noqa: S324
|
|
||||||
),
|
|
||||||
badges=[
|
|
||||||
Badge(
|
|
||||||
id=i.id,
|
|
||||||
description=i.label,
|
|
||||||
group=i.group,
|
|
||||||
receive_at=i.ts if isinstance(i.ts, datetime) else None,
|
|
||||||
)
|
|
||||||
for i in user_info.data.badges
|
|
||||||
],
|
|
||||||
country=user_info.data.country,
|
|
||||||
role=user_info.data.role,
|
|
||||||
xp=user_info.data.xp,
|
|
||||||
friend_count=user_info.data.friend_count,
|
|
||||||
supporter_tier=user_info.data.supporter_tier,
|
|
||||||
bad_standing=user_info.data.badstanding or False,
|
|
||||||
verified=user_info.data.verified,
|
|
||||||
playtime=play_time,
|
|
||||||
join_at=user_info.data.ts,
|
|
||||||
),
|
|
||||||
tetra_league=None,
|
|
||||||
statistic=Statistic(
|
|
||||||
total=handling_special_value(user_info.data.gamesplayed),
|
|
||||||
wins=handling_special_value(user_info.data.gameswon),
|
|
||||||
),
|
|
||||||
sprint=Sprint(
|
|
||||||
time=sprint_value,
|
|
||||||
global_rank=sprint.data.rank,
|
|
||||||
play_at=sprint.data.record.ts,
|
|
||||||
)
|
|
||||||
if sprint.data.record is not None
|
|
||||||
else None,
|
|
||||||
blitz=Blitz(
|
|
||||||
score=blitz.data.record.results.stats.score,
|
|
||||||
global_rank=blitz.data.rank,
|
|
||||||
play_at=blitz.data.record.ts,
|
|
||||||
)
|
|
||||||
if blitz.data.record is not None
|
|
||||||
else None,
|
|
||||||
zen=Zen(level=zen.data.level, score=zen.data.score),
|
|
||||||
),
|
|
||||||
),
|
|
||||||
) as page_hash:
|
|
||||||
return await screenshot(f'http://{netloc}/host/{page_hash}.html')
|
|
||||||
135
nonebot_plugin_tetris_stats/games/tetrio/query/__init__.py
Normal file
135
nonebot_plugin_tetris_stats/games/tetrio/query/__init__.py
Normal file
@@ -0,0 +1,135 @@
|
|||||||
|
from datetime import timezone
|
||||||
|
|
||||||
|
from arclet.alconna import Arg, ArgFlag
|
||||||
|
from nonebot import get_driver
|
||||||
|
from nonebot.adapters import Event
|
||||||
|
from nonebot.matcher import Matcher
|
||||||
|
from nonebot_plugin_alconna import Args, At, Option, Subcommand
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_user import User as NBUser
|
||||||
|
from nonebot_plugin_user import get_user
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
from ....db import query_bind_info, trigger
|
||||||
|
from ....i18n import Lang
|
||||||
|
from ....utils.exception import FallbackError
|
||||||
|
from ....utils.typing import Me
|
||||||
|
from ... import add_block_handlers, alc
|
||||||
|
from .. import command, get_player
|
||||||
|
from ..api import Player
|
||||||
|
from ..constant import GAME_TYPE
|
||||||
|
from ..models import TETRIOUserConfig
|
||||||
|
from ..typing import Template
|
||||||
|
from .v1 import make_query_image_v1
|
||||||
|
from .v2 import make_query_image_v2
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
driver = get_driver()
|
||||||
|
|
||||||
|
command.add(
|
||||||
|
Subcommand(
|
||||||
|
'query',
|
||||||
|
Args(
|
||||||
|
Arg(
|
||||||
|
'target',
|
||||||
|
At | Me,
|
||||||
|
notice='@想要查询的人 / 自己',
|
||||||
|
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||||
|
),
|
||||||
|
Arg(
|
||||||
|
'account',
|
||||||
|
get_player,
|
||||||
|
notice='TETR.IO 用户名 / ID',
|
||||||
|
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
Option(
|
||||||
|
'--template',
|
||||||
|
Arg('template', Template),
|
||||||
|
alias=['-T'],
|
||||||
|
help_text='要使用的查询模板',
|
||||||
|
),
|
||||||
|
help_text='查询 TETR.IO 游戏信息',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
alc.shortcut(
|
||||||
|
'(?i:io)(?i:查询|查|query|stats)',
|
||||||
|
command='tstats TETR.IO query',
|
||||||
|
humanized='io查',
|
||||||
|
)
|
||||||
|
alc.shortcut(
|
||||||
|
'fkosk',
|
||||||
|
command='tstats TETR.IO query',
|
||||||
|
arguments=['我'],
|
||||||
|
fuzzy=False,
|
||||||
|
humanized='An Easter egg!',
|
||||||
|
)
|
||||||
|
|
||||||
|
add_block_handlers(alc.assign('TETRIO.query'))
|
||||||
|
|
||||||
|
|
||||||
|
async def make_query_result(player: Player, template: Template) -> UniMessage:
    """Render the stats image for *player* with the requested *template*.

    Falls back from the v1 renderer to v2 when v1 raises FallbackError
    (e.g. the player lacks the data v1 needs).

    NOTE(review): the trailing ``return None`` contradicts the declared
    ``UniMessage`` return type; it is only reachable if Template can hold
    a value other than 'v1'/'v2' -- confirm.
    """
    if template == 'v1':
        try:
            return UniMessage.image(raw=await make_query_image_v1(player))
        except FallbackError:
            # v1 could not render this player; retry with v2 below.
            template = 'v2'
    if template == 'v2':
        return UniMessage.image(raw=await make_query_image_v2(player))
    return None
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('TETRIO.query')
async def _(  # noqa: PLR0913
    user: NBUser,
    event: Event,
    matcher: Matcher,
    target: At | Me,
    event_session: EventSession,
    template: Template | None = None,
):
    # Query branch for an @-mentioned user (or the sender via "me"):
    # resolve that user's bound TETR.IO account, then reply with the image.
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            # Look up the bind record of the targeted platform user.
            bind = await query_bind_info(
                session=session,
                user=await get_user(
                    event_session.platform, target.target if isinstance(target, At) else event.get_user_id()
                ),
                game_platform=GAME_TYPE,
            )
            if template is None:
                # No explicit --template: use the command user's saved preference
                # (may still be None, handled by the `template or 'v1'` below).
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        if bind is None:
            await matcher.finish('未查询到绑定信息')
        # trust=True: the stored account id is used as-is, skipping re-resolution.
        player = Player(user_id=bind.game_account, trust=True)
        # Binds are unverified, so prefix the unverified-identity warning.
        await (
            UniMessage.i18n(Lang.interaction.warning.unverified) + await make_query_result(player, template or 'v1')
        ).finish()
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('TETRIO.query')
async def _(user: NBUser, account: Player, event_session: EventSession, template: Template | None = None):
    # Query branch for an explicitly named account (username / ID argument):
    # no bind lookup needed, the Player is already resolved by `get_player`.
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            if template is None:
                # Fall back to the command user's saved template preference.
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        await (await make_query_result(account, template or 'v1')).finish()
|
||||||
56
nonebot_plugin_tetris_stats/games/tetrio/query/tools.py
Normal file
56
nonebot_plugin_tetris_stats/games/tetrio/query/tools.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
from collections.abc import Callable
|
||||||
|
from datetime import timedelta
|
||||||
|
from typing import TypeVar, overload
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
from ....utils.exception import FallbackError
|
||||||
|
from ....utils.render.schemas.tetrio.user.base import TetraLeagueHistoryData
|
||||||
|
from ..api.schemas.labs.leagueflow import Empty, LeagueFlowSuccess
|
||||||
|
from ..api.schemas.summaries.league import InvalidData, LeagueSuccessModel, NeverPlayedData, NeverRatedData, RatedData
|
||||||
|
|
||||||
|
|
||||||
|
def flow_to_history(
    leagueflow: LeagueFlowSuccess,
    handle: Callable[[list[TetraLeagueHistoryData]], list[TetraLeagueHistoryData]] | None = None,
) -> list[TetraLeagueHistoryData]:
    """Convert a /labs/leagueflow response into a list of TR history points.

    Args:
        leagueflow: league-flow API response for one player.
        handle: optional post-processor applied to the resulting list.

    Returns:
        One history point per match, or ``handle(points)`` if given.

    Raises:
        FallbackError: when the response carries no data.
    """
    if isinstance(leagueflow.data, Empty):
        raise FallbackError
    # Point offsets are milliseconds relative to the flow's start time.
    start_time = leagueflow.data.start_time.astimezone(ZoneInfo('Asia/Shanghai'))
    # Fix: the original comprehension carried the filter
    # `if start_time + timedelta(milliseconds=i.timestamp_offset)`, which is
    # always true (datetime objects are always truthy) -- dead condition removed.
    ret = [
        TetraLeagueHistoryData(
            record_at=start_time + timedelta(milliseconds=i.timestamp_offset),
            tr=i.post_match_tr,
        )
        for i in leagueflow.data.points
    ]
    return ret if handle is None else handle(ret)
|
||||||
|
|
||||||
|
|
||||||
|
N = TypeVar('N', int, float)
|
||||||
|
|
||||||
|
|
||||||
|
def handling_special_value(value: N) -> N | None:
|
||||||
|
return value if value != -1 else None
|
||||||
|
|
||||||
|
|
||||||
|
L = TypeVar('L', NeverPlayedData, NeverRatedData, RatedData)


@overload
def get_league_data(user_info: LeagueSuccessModel, league_type: type[L]) -> L: ...
@overload
def get_league_data(
    user_info: LeagueSuccessModel, league_type: None = None
) -> NeverPlayedData | NeverRatedData | RatedData: ...
def get_league_data(
    user_info: LeagueSuccessModel, league_type: type[L] | None = None
) -> L | NeverPlayedData | NeverRatedData | RatedData:
    """Extract the league payload from a summaries response.

    When ``league_type`` is given, the payload must be of that concrete
    type; otherwise any non-invalid payload is returned.

    Raises:
        FallbackError: if the payload is invalid, or not of ``league_type``.
    """
    data = user_info.data
    if isinstance(data, InvalidData):
        raise FallbackError
    if league_type is None or isinstance(data, league_type):
        return data
    raise FallbackError
|
||||||
197
nonebot_plugin_tetris_stats/games/tetrio/query/v1.py
Normal file
197
nonebot_plugin_tetris_stats/games/tetrio/query/v1.py
Normal file
@@ -0,0 +1,197 @@
|
|||||||
|
from asyncio import gather
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from hashlib import md5
|
||||||
|
from math import ceil, floor
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
|
from ....utils.exception import FallbackError, WhatTheFuckError
|
||||||
|
from ....utils.host import HostPage, get_self_netloc
|
||||||
|
from ....utils.render import render
|
||||||
|
from ....utils.render.schemas.base import Avatar, Ranking
|
||||||
|
from ....utils.render.schemas.tetrio.user.base import TetraLeagueHistoryData
|
||||||
|
from ....utils.render.schemas.tetrio.user.info_v1 import Info, Radar, TetraLeague, TetraLeagueHistory, User
|
||||||
|
from ....utils.screenshot import screenshot
|
||||||
|
from ..api import Player
|
||||||
|
from ..api.schemas.summaries.league import RatedData
|
||||||
|
from ..constant import TR_MAX, TR_MIN
|
||||||
|
from .tools import flow_to_history, get_league_data
|
||||||
|
|
||||||
|
|
||||||
|
def get_value_bounds(values: list[int | float]) -> tuple[int, int]:
|
||||||
|
value_max = 10 * ceil(max(values) / 10)
|
||||||
|
value_min = 10 * floor(min(values) / 10)
|
||||||
|
return value_max, value_min
|
||||||
|
|
||||||
|
|
||||||
|
def get_split(value_max: int, value_min: int) -> tuple[int, int]:
    """Widen ``[value_min, value_max]`` symmetrically until the span is a
    multiple of 40 (so the chart axis divides evenly), while keeping both
    bounds inside ``[TR_MIN, TR_MAX]``.

    Returns:
        ``(split_value, shift)``: the per-segment interval (span / 4) and
        the net adjustment (symmetric widening plus clamp correction)
        applied to the bounds.
    """
    # `offset` widens both bounds symmetrically; `overflow` shifts both
    # bounds together when a bound would leave the allowed TR range.
    offset = 0
    overflow = 0

    while True:
        if (new_max_value := value_max + offset + overflow) > TR_MAX:
            overflow -= 1
            continue
        if (new_min_value := value_min - offset + overflow) < TR_MIN:
            overflow += 1
            continue
        # The overflow terms cancel in the difference, so divisibility
        # depends only on the symmetric widening `offset`.
        if ((new_max_value - new_min_value) / 40).is_integer():
            split_value = int((value_max + offset - (value_min - offset)) / 4)
            break
        offset += 1
    return split_value, offset + overflow
|
||||||
|
|
||||||
|
|
||||||
|
def get_specified_point(
    previous_point: TetraLeagueHistoryData,
    behind_point: TetraLeagueHistoryData,
    point_time: datetime,
) -> TetraLeagueHistoryData:
    """Linearly interpolate the data at ``point_time`` from the two
    surrounding points ``previous_point`` and ``behind_point``.

    Args:
        previous_point: the data point before ``point_time``.
        behind_point: the data point after ``point_time``.
        point_time: the instant to interpolate at.

    Returns:
        The interpolated data point at ``point_time``.
    """
    # Slope of TR over time (TR per second) between the two points.
    slope = (behind_point.tr - previous_point.tr) / (
        datetime.timestamp(behind_point.record_at) - datetime.timestamp(previous_point.record_at)
    )
    return TetraLeagueHistoryData(
        record_at=point_time,
        tr=previous_point.tr + slope * (datetime.timestamp(point_time) - datetime.timestamp(previous_point.record_at)),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def handle_history_data(data: list[TetraLeagueHistoryData]) -> list[TetraLeagueHistoryData]:  # noqa: C901, PLR0912
    """Clip the TR history to a fixed 9-day window, synthesizing boundary
    points so the returned list always starts exactly at the left border
    and ends exactly at the right border.

    Raises:
        WhatTheFuckError: if ``data`` is empty (no point in any region).
    """
    # Sort by record time.
    data.sort(key=lambda x: x.record_at)

    # Time borders: the right border is today's midnight (Asia/Shanghai),
    # the left border is 9 days earlier. The result's [0] and [-1] sit
    # exactly on left_border and right_border respectively.
    zero = datetime.now(ZoneInfo('Asia/Shanghai')).replace(hour=0, minute=0, second=0, microsecond=0)
    left_border = zero - timedelta(days=9)
    # microsecond=1000 nudges the right border past midnight-exact records.
    right_border = zero.replace(microsecond=1000)

    lefts: list[TetraLeagueHistoryData] = []
    in_border: list[TetraLeagueHistoryData] = []
    rights: list[TetraLeagueHistoryData] = []

    # Bucket each point by which side of the window it falls on.
    for i in data:
        if i.record_at < left_border:
            lefts.append(i)
        elif i.record_at < right_border:
            in_border.append(i)
        else:
            rights.append(i)

    ret: list[TetraLeagueHistoryData] = []

    # Left border point:
    if lefts and in_border:  # points on both sides -> interpolate
        ret.append(get_specified_point(lefts[-1], in_border[0], left_border))
    elif lefts and not in_border:  # only points before -> carry the last one
        ret.append(TetraLeagueHistoryData(tr=lefts[-1].tr, record_at=left_border))
    elif not lefts and in_border:  # only points inside -> carry the first one
        ret.append(TetraLeagueHistoryData(tr=in_border[0].tr, record_at=left_border))
    elif not lefts and not in_border and rights:  # only points after -> carry the first one
        ret.append(TetraLeagueHistoryData(tr=rights[0].tr, record_at=left_border))
    else:  # no points at all
        raise WhatTheFuckError

    # In-window points pass through unchanged.
    ret.extend(in_border)

    # Right border point (mirror of the left-border logic):
    if in_border and rights:  # points on both sides -> interpolate
        ret.append(get_specified_point(in_border[-1], rights[0], right_border))
    elif not in_border and rights:  # only points after -> carry the first one
        ret.append(TetraLeagueHistoryData(tr=rights[0].tr, record_at=right_border))
    elif in_border and not rights:  # only points inside -> carry the last one
        ret.append(TetraLeagueHistoryData(tr=in_border[-1].tr, record_at=right_border))
    elif not in_border and not rights and lefts:  # only points before -> carry the last one
        ret.append(TetraLeagueHistoryData(tr=lefts[-1].tr, record_at=right_border))
    else:  # no points at all
        raise WhatTheFuckError
    return ret
|
||||||
|
|
||||||
|
|
||||||
|
async def make_query_image_v1(player: Player) -> bytes:
    """Render the v1 stats card for *player* and return it as PNG bytes.

    Raises:
        FallbackError: when the player has no rated league data or no VS
            stat (propagated from get_league_data / raised here), so the
            caller can fall back to the v2 template.
    """
    # Fetch profile, summaries and league flow concurrently.
    (
        (user, user_info, league, sprint, blitz, leagueflow),
        (avatar_revision,),
    ) = await gather(
        gather(player.user, player.get_info(), player.league, player.sprint, player.blitz, player.get_leagueflow()),
        gather(player.avatar_revision),
    )
    # v1 requires fully rated league data including VS.
    league_data = get_league_data(league, RatedData)
    if league_data.vs is None:
        raise FallbackError
    # Build the 9-day TR history chart inputs.
    histories = flow_to_history(leagueflow, handle_history_data)
    value_max, value_min = get_value_bounds([i.tr for i in histories])
    split_value, offset = get_split(value_max, value_min)
    # 40L sprint time, formatted as "M m S.sss s" above one minute.
    if sprint.data.record is not None:
        duration = timedelta(milliseconds=sprint.data.record.results.stats.finaltime).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'
    blitz_value = f'{blitz.data.record.results.stats.score:,}' if blitz.data.record is not None else 'N/A'
    netloc = get_self_netloc()
    async with HostPage(
        page=await render(
            'v1/tetrio/info',
            Info(
                user=User(
                    # Use the hosted avatar when one exists (revision != 0),
                    # otherwise fall back to a deterministic identicon.
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if avatar_revision is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                    name=user.name.upper(),
                    bio=user_info.data.bio,
                ),
                ranking=Ranking(
                    rating=round(league_data.glicko, 2),
                    rd=round(league_data.rd, 2),
                ),
                # Derived metrics: lpm = pps * 24 (40 pieces -> lines/min),
                # adpm = vs * 0.6; apl/adpl are per-line rates.
                tetra_league=TetraLeague(
                    rank=league_data.rank,
                    tr=round(league_data.tr, 2),
                    global_rank=league_data.standing,
                    pps=league_data.pps,
                    lpm=round(lpm := (league_data.pps * 24), 2),
                    apm=league_data.apm,
                    apl=round(league_data.apm / lpm, 2),
                    vs=league_data.vs,
                    adpm=round(adpm := (league_data.vs * 0.6), 2),
                    adpl=round(adpm / lpm, 2),
                ),
                tetra_league_history=TetraLeagueHistory(
                    data=histories,
                    split_interval=split_value,
                    min_tr=value_min,
                    max_tr=value_max,
                    offset=offset,
                ),
                # Radar-chart style indicators derived from pps/apm/vs.
                radar=Radar(
                    app=(app := (league_data.apm / (60 * league_data.pps))),
                    dsps=(dsps := ((league_data.vs / 100) - (league_data.apm / 60))),
                    dspp=(dspp := (dsps / league_data.pps)),
                    ci=150 * dspp - 125 * app + 50 * (league_data.vs / league_data.apm) - 25,
                    ge=2 * ((app * dsps) / league_data.pps),
                ),
                sprint=sprint_value,
                blitz=blitz_value,
            ),
        )
    ) as page_hash:
        # Host the rendered page locally and screenshot it.
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
|
||||||
138
nonebot_plugin_tetris_stats/games/tetrio/query/v2.py
Normal file
138
nonebot_plugin_tetris_stats/games/tetrio/query/v2.py
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
from asyncio import gather
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from hashlib import md5
|
||||||
|
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
|
from ....utils.exception import FallbackError
|
||||||
|
from ....utils.host import HostPage, get_self_netloc
|
||||||
|
from ....utils.metrics import get_metrics
|
||||||
|
from ....utils.render import render
|
||||||
|
from ....utils.render.schemas.base import Avatar
|
||||||
|
from ....utils.render.schemas.tetrio.user.info_v2 import (
|
||||||
|
Badge,
|
||||||
|
Blitz,
|
||||||
|
Info,
|
||||||
|
Sprint,
|
||||||
|
Statistic,
|
||||||
|
TetraLeague,
|
||||||
|
TetraLeagueStatistic,
|
||||||
|
User,
|
||||||
|
Zen,
|
||||||
|
)
|
||||||
|
from ....utils.screenshot import screenshot
|
||||||
|
from ..api import Player
|
||||||
|
from ..api.schemas.summaries.league import InvalidData, NeverPlayedData, NeverRatedData
|
||||||
|
from .tools import flow_to_history, handling_special_value
|
||||||
|
|
||||||
|
|
||||||
|
async def make_query_image_v2(player: Player) -> bytes:
    """Render the v2 stats card for *player* and return it as PNG bytes.

    Unlike v1, this template tolerates missing league data: unrated or
    never-played players render with the league section omitted.
    """
    # Fetch profile, summaries, media revisions and league flow concurrently.
    (
        (user, user_info, league, sprint, blitz, zen),
        (avatar_revision, banner_revision, leagueflow),
    ) = await gather(
        gather(player.user, player.get_info(), player.league, player.sprint, player.blitz, player.zen),
        gather(player.avatar_revision, player.banner_revision, player.get_leagueflow()),
    )
    # 40L sprint time, formatted as "M m S.sss s" above one minute.
    if sprint.data.record is not None:
        duration = timedelta(milliseconds=sprint.data.record.results.stats.finaltime).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'

    # Total play time in "Hh Mm Ss" form; None when the API reports -1.
    # NOTE(review): gametime is presumably in seconds -- confirm against the API schema.
    play_time: str | None
    if (game_time := handling_special_value(user_info.data.gametime)) is not None:
        if game_time // 3600 > 0:
            play_time = f'{game_time//3600:.0f}h {game_time % 3600 // 60:.0f}m {game_time % 60:.0f}s'
        elif game_time // 60 > 0:
            play_time = f'{game_time//60:.0f}m {game_time % 60:.0f}s'
        else:
            play_time = f'{game_time:.0f}s'
    else:
        play_time = game_time
    # Last 100 TR history points; None when the player has no league flow.
    try:
        history = flow_to_history(leagueflow, lambda x: x[-100:])
    except FallbackError:
        history = None
    netloc = get_self_netloc()
    async with HostPage(
        await render(
            'v2/tetrio/user/info',
            Info(
                user=User(
                    id=user.ID,
                    name=user.name.upper(),
                    bio=user_info.data.bio,
                    # Hosted banner/avatar only when a non-zero revision exists;
                    # avatar otherwise falls back to a deterministic identicon.
                    banner=str(
                        URL(f'http://{netloc}/host/resource/tetrio/banners/{user.ID}') % {'revision': banner_revision}
                    )
                    if banner_revision is not None and banner_revision != 0
                    else None,
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if avatar_revision is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                    badges=[
                        Badge(
                            id=i.id,
                            description=i.label,
                            group=i.group,
                            # i.ts may be a non-datetime sentinel; only keep real timestamps.
                            receive_at=i.ts if isinstance(i.ts, datetime) else None,
                        )
                        for i in user_info.data.badges
                    ],
                    country=user_info.data.country,
                    role=user_info.data.role,
                    xp=user_info.data.xp,
                    friend_count=user_info.data.friend_count,
                    supporter_tier=user_info.data.supporter_tier,
                    bad_standing=user_info.data.badstanding or False,
                    playtime=play_time,
                    join_at=user_info.data.ts,
                ),
                # League section is omitted (None) for never-played/invalid data.
                tetra_league=TetraLeague(
                    rank=league.data.rank,
                    highest_rank='z' if isinstance(league.data, NeverRatedData) else league.data.bestrank,
                    tr=round(league.data.tr, 2),
                    glicko=round(league.data.glicko, 2),
                    rd=round(league.data.rd, 2),
                    global_rank=league.data.standing,
                    country_rank=league.data.standing_local,
                    pps=(metrics := get_metrics(pps=league.data.pps, apm=league.data.apm, vs=league.data.vs)).pps,
                    apm=metrics.apm,
                    apl=metrics.apl,
                    vs=metrics.vs,
                    adpl=metrics.adpl,
                    statistic=TetraLeagueStatistic(total=league.data.gamesplayed, wins=league.data.gameswon),
                    decaying=league.data.decaying,
                    history=history,
                )
                if not isinstance(league.data, NeverPlayedData | InvalidData)
                else None,
                statistic=Statistic(
                    total=handling_special_value(user_info.data.gamesplayed),
                    wins=handling_special_value(user_info.data.gameswon),
                ),
                sprint=Sprint(
                    time=sprint_value,
                    global_rank=sprint.data.rank,
                    play_at=sprint.data.record.ts,
                )
                if sprint.data.record is not None
                else None,
                blitz=Blitz(
                    score=blitz.data.record.results.stats.score,
                    global_rank=blitz.data.rank,
                    play_at=blitz.data.record.ts,
                )
                if blitz.data.record is not None
                else None,
                zen=Zen(level=zen.data.level, score=zen.data.score),
            ),
        ),
    ) as page_hash:
        # Host the rendered page locally and screenshot it.
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
|
||||||
153
nonebot_plugin_tetris_stats/games/tetrio/rank/__init__.py
Normal file
153
nonebot_plugin_tetris_stats/games/tetrio/rank/__init__.py
Normal file
@@ -0,0 +1,153 @@
|
|||||||
|
from collections import defaultdict
|
||||||
|
from collections.abc import Callable, Sequence
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from math import floor
|
||||||
|
from statistics import mean
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
from uuid import uuid4
|
||||||
|
|
||||||
|
from nonebot import get_driver
|
||||||
|
from nonebot_plugin_alconna import Subcommand
|
||||||
|
from nonebot_plugin_apscheduler import scheduler
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from sqlalchemy import select
|
||||||
|
|
||||||
|
from ....utils.exception import RequestError
|
||||||
|
from ....utils.retry import retry
|
||||||
|
from .. import alc
|
||||||
|
from .. import command as base_command
|
||||||
|
from ..api.leaderboards import by
|
||||||
|
from ..api.schemas.base import P
|
||||||
|
from ..api.schemas.leaderboards import Parameter
|
||||||
|
from ..api.schemas.leaderboards.by import Entry
|
||||||
|
from ..constant import RANK_PERCENTILE
|
||||||
|
from ..models import TETRIOLeagueHistorical, TETRIOLeagueStats, TETRIOLeagueStatsField
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ..api.schemas.leaderboards.by import BySuccessModel
|
||||||
|
from ..api.typing import Rank
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
driver = get_driver()
|
||||||
|
|
||||||
|
|
||||||
|
command = Subcommand('rank', help_text='查询 TETR.IO 段位信息')
|
||||||
|
|
||||||
|
|
||||||
|
def wrapper(slot: int | str, content: str | None) -> str | None:
|
||||||
|
if slot == 'rank' and not content:
|
||||||
|
return '--all'
|
||||||
|
if content is not None:
|
||||||
|
return f'--detail {content.lower()}'
|
||||||
|
return content
|
||||||
|
|
||||||
|
|
||||||
|
alc.shortcut(
|
||||||
|
r'(?i:io)(?i:段位|段|rank)\s*(?P<rank>[a-zA-Z+-]{0,2})',
|
||||||
|
command='tstats TETR.IO rank {rank}',
|
||||||
|
humanized='iorank',
|
||||||
|
fuzzy=False,
|
||||||
|
wrapper=wrapper,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _pps(user: Entry) -> float:
    # Key function: pieces per second from the entry's league stats.
    return user.league.pps


def _apm(user: Entry) -> float:
    # Key function: attacks per minute from the entry's league stats.
    return user.league.apm


def _vs(user: Entry) -> float:
    # Key function: VS score from the entry's league stats.
    return user.league.vs
|
||||||
|
|
||||||
|
|
||||||
|
def _min(users: Sequence[Entry], field: Callable[[Entry], float]) -> Entry:
    # Entry with the smallest value of `field`.
    return min(users, key=field)


def _max(users: Sequence[Entry], field: Callable[[Entry], float]) -> Entry:
    # Entry with the largest value of `field`.
    return max(users, key=field)


def find_special_player(
    users: Sequence[Entry],
    field: Callable[[Entry], float],
    sort: Callable[[Sequence[Entry], Callable[[Entry], float]], Entry],
) -> Entry:
    """Select one entry from *users* by applying selector *sort*
    (e.g. ``_min`` / ``_max``) with key function *field* (e.g. ``_pps``)."""
    return sort(users, field)
|
||||||
|
|
||||||
|
|
||||||
|
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
async def get_tetra_league_data() -> None:
    """Snapshot the full Tetra League leaderboard (every 6 hours) and store
    per-rank aggregate statistics plus the raw pages in the database."""
    x_session_id = uuid4()
    # Retry transient request failures up to 10 times per page.
    retry_by = retry(max_attempts=10, exception_type=RequestError)(by)
    # Start pagination from the maximum prisecter (2**53 - 1). # * from ch.tetr.io
    prisecter = P(pri=9007199254740991, sec=9007199254740991, ter=9007199254740991)
    results: list[BySuccessModel] = []
    # Page through the leaderboard until a short (final) page is returned.
    while True:
        model = await retry_by('league', Parameter(after=prisecter.to_prisecter(), limit=100), x_session_id)
        prisecter = model.data.entries[-1].p
        results.append(model)
        if len(model.data.entries) < 100:  # page size  # noqa: PLR2004
            break

    # Flatten all pages and order players by TR, best first.
    players: list[Entry] = []
    for result in results:
        players.extend(result.data.entries)
    players.sort(key=lambda x: x.league.tr, reverse=True)

    # Group players by their current rank letter.
    rank_player_mapping: defaultdict[Rank, list[Entry]] = defaultdict(list)
    for player in players:
        rank_player_mapping[player.league.rank].append(player)

    stats = TETRIOLeagueStats(raw=[], fields=[], update_time=datetime.now(UTC))
    fields: list[TETRIOLeagueStatsField] = []
    # For each rank, compute the TR cutoff (from its percentile in the
    # sorted player list) and min/avg/max pps/apm/vs of its players.
    for rank, percentile in RANK_PERCENTILE.items():
        offset = floor((percentile / 100) * len(players)) - 1
        tr_line = players[offset].league.tr
        rank_players = rank_player_mapping[rank]
        fields.append(
            TETRIOLeagueStatsField(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_players),
                low_pps=find_special_player(rank_players, _pps, _min),
                low_apm=find_special_player(rank_players, _apm, _min),
                low_vs=find_special_player(rank_players, _vs, _min),
                avg_pps=mean(_pps(i) for i in rank_players),
                avg_apm=mean(_apm(i) for i in rank_players),
                avg_vs=mean(_vs(i) for i in rank_players),
                high_pps=find_special_player(rank_players, _pps, _max),
                high_apm=find_special_player(rank_players, _apm, _max),
                high_vs=find_special_player(rank_players, _vs, _max),
                stats=stats,
            )
        )
    # Keep the raw API pages alongside the aggregates for later inspection.
    historicals = [
        TETRIOLeagueHistorical(request_id=x_session_id, data=model, update_time=model.cache.cached_at, stats=stats)
        for model in results
    ]
    stats.raw = historicals
    stats.fields = fields
    async with get_session() as session:
        session.add(stats)
        await session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@driver.on_startup
async def _() -> None:
    # On startup, refresh the leaderboard snapshot if the most recent one
    # is missing or older than the 6-hour cron interval.
    async with get_session() as session:
        latest_time = await session.scalar(
            select(TETRIOLeagueStats.update_time).order_by(TETRIOLeagueStats.id.desc()).limit(1)
        )
        # Stored times are naive; they are written from datetime.now(UTC),
        # so reattach UTC before comparing.
        if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
            await get_tetra_league_data()
|
||||||
|
|
||||||
|
|
||||||
|
from . import all, detail # noqa: E402
|
||||||
|
|
||||||
|
base_command.add(command)
|
||||||
|
|
||||||
|
__all__ = ['all', 'detail']
|
||||||
115
nonebot_plugin_tetris_stats/games/tetrio/rank/all.py
Normal file
115
nonebot_plugin_tetris_stats/games/tetrio/rank/all.py
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
from datetime import timedelta
|
||||||
|
|
||||||
|
from arclet.alconna import Arg
|
||||||
|
from nonebot_plugin_alconna import Option, Subcommand, UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from sqlalchemy import func, select
|
||||||
|
from sqlalchemy.orm import selectinload
|
||||||
|
|
||||||
|
from ....db import trigger
|
||||||
|
from ....utils.host import HostPage, get_self_netloc
|
||||||
|
from ....utils.metrics import get_metrics
|
||||||
|
from ....utils.render import render
|
||||||
|
from ....utils.render.schemas.tetrio.rank.v1 import Data as DataV1
|
||||||
|
from ....utils.render.schemas.tetrio.rank.v1 import ItemData as ItemDataV1
|
||||||
|
from ....utils.render.schemas.tetrio.rank.v2 import AverageData as AverageDataV2
|
||||||
|
from ....utils.render.schemas.tetrio.rank.v2 import Data as DataV2
|
||||||
|
from ....utils.render.schemas.tetrio.rank.v2 import ItemData as ItemDataV2
|
||||||
|
from ....utils.screenshot import screenshot
|
||||||
|
from .. import alc
|
||||||
|
from ..constant import GAME_TYPE
|
||||||
|
from ..models import TETRIOLeagueStats
|
||||||
|
from ..typing import Template
|
||||||
|
from . import command
|
||||||
|
|
||||||
|
command.add(
|
||||||
|
Subcommand(
|
||||||
|
'--all', Option('--template', Arg('template', Template), alias=['-T'], help_text='要使用的查询模板'), dest='all'
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('TETRIO.rank.all')
async def _(event_session: EventSession, template: Template | None = None):
    # `rank --all`: render the whole per-rank statistics table, comparing
    # the latest snapshot with the one closest to 24 hours earlier.
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=['--all'] + ([f'--template {template}'] if template is not None else []),
    ):
        async with get_session() as session:
            # Newest snapshot, with its per-rank fields eagerly loaded.
            latest_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(TETRIOLeagueStats.id.desc())
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
            # Snapshot whose update_time is nearest to latest - 24h,
            # used to compute the 24-hour trend per rank.
            compare_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(
                        func.abs(
                            func.julianday(TETRIOLeagueStats.update_time)
                            - func.julianday(latest_data.update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
        match template:
            case 'v1' | None:
                await UniMessage.image(raw=await make_image_v1(latest_data, compare_data)).finish()
            case 'v2':
                await UniMessage.image(raw=await make_image_v2(latest_data, compare_data)).finish()
|
||||||
|
|
||||||
|
|
||||||
|
async def make_image_v1(latest_data: TETRIOLeagueStats, compare_data: TETRIOLeagueStats) -> bytes:
    """Render the v1 rank table (TR cutoff, 24h trend, player count per
    rank) and return it as PNG bytes."""
    async with HostPage(
        await render(
            'v1/tetrio/rank',
            DataV1(
                # Fields are assumed to be in the same rank order in both
                # snapshots (strict zip enforces equal length).
                items={
                    i[0].rank: ItemDataV1(
                        trending=round(i[0].tr_line - i[1].tr_line, 2),
                        require_tr=round(i[0].tr_line, 2),
                        players=i[0].player_count,
                    )
                    for i in zip(latest_data.fields, compare_data.fields, strict=True)
                },
                updated_at=latest_data.update_time,
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
|
||||||
|
|
||||||
|
|
||||||
|
async def make_image_v2(latest_data: TETRIOLeagueStats, compare_data: TETRIOLeagueStats) -> bytes:
    """Render the v2 rank table (adds per-rank average pps/apm/vs metrics
    to the v1 contents) and return it as PNG bytes."""
    async with HostPage(
        await render(
            'v2/tetrio/rank',
            DataV2(
                # Fields are assumed to be in the same rank order in both
                # snapshots (strict zip enforces equal length).
                items={
                    i[0].rank: ItemDataV2(
                        require_tr=round(i[0].tr_line, 2),
                        trending=round(i[0].tr_line - i[1].tr_line, 2),
                        average_data=AverageDataV2(
                            pps=(metrics := get_metrics(pps=i[0].avg_pps, apm=i[0].avg_apm, vs=i[0].avg_vs)).pps,
                            apm=metrics.apm,
                            apl=metrics.apl,
                            vs=metrics.vs,
                            adpl=metrics.adpl,
                        ),
                        players=i[0].player_count,
                    )
                    for i in zip(latest_data.fields, compare_data.fields, strict=True)
                },
                updated_at=latest_data.update_time,
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
|
||||||
128
nonebot_plugin_tetris_stats/games/tetrio/rank/detail.py
Normal file
128
nonebot_plugin_tetris_stats/games/tetrio/rank/detail.py
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
from datetime import timedelta, timezone
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
from arclet.alconna import Arg
|
||||||
|
from nonebot import get_driver
|
||||||
|
from nonebot_plugin_alconna import Option, UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from sqlalchemy import func, select
|
||||||
|
from sqlalchemy.orm import selectinload
|
||||||
|
|
||||||
|
from ....db import trigger
|
||||||
|
from ....utils.host import HostPage, get_self_netloc
|
||||||
|
from ....utils.metrics import get_metrics
|
||||||
|
from ....utils.render import render
|
||||||
|
from ....utils.render.schemas.tetrio.rank.detail import Data, SpecialData
|
||||||
|
from ....utils.screenshot import screenshot
|
||||||
|
from .. import alc
|
||||||
|
from ..api.typing import ValidRank
|
||||||
|
from ..constant import GAME_TYPE
|
||||||
|
from ..models import TETRIOLeagueStats
|
||||||
|
from . import command
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
driver = get_driver()
|
||||||
|
|
||||||
|
command.add(Option('--detail', Arg('rank', ValidRank), alias=['-D']))
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('TETRIO.rank')
|
||||||
|
async def _(rank: ValidRank, event_session: EventSession):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='rank',
|
||||||
|
command_args=[f'--detail {rank}'],
|
||||||
|
):
|
||||||
|
async with get_session() as session:
|
||||||
|
latest_data = (
|
||||||
|
await session.scalars(
|
||||||
|
select(TETRIOLeagueStats)
|
||||||
|
.order_by(TETRIOLeagueStats.id.desc())
|
||||||
|
.limit(1)
|
||||||
|
.options(selectinload(TETRIOLeagueStats.fields))
|
||||||
|
)
|
||||||
|
).one()
|
||||||
|
compare_data = (
|
||||||
|
await session.scalars(
|
||||||
|
select(TETRIOLeagueStats)
|
||||||
|
.order_by(
|
||||||
|
func.abs(
|
||||||
|
func.julianday(TETRIOLeagueStats.update_time)
|
||||||
|
- func.julianday(latest_data.update_time - timedelta(hours=24))
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.limit(1)
|
||||||
|
.options(selectinload(TETRIOLeagueStats.fields))
|
||||||
|
)
|
||||||
|
).one()
|
||||||
|
await UniMessage.image(
|
||||||
|
raw=await make_image(
|
||||||
|
rank,
|
||||||
|
latest_data,
|
||||||
|
compare_data,
|
||||||
|
)
|
||||||
|
).finish()
|
||||||
|
|
||||||
|
|
||||||
|
async def make_image(rank: ValidRank, latest: TETRIOLeagueStats, compare: TETRIOLeagueStats) -> bytes:
|
||||||
|
latest_data = next(filter(lambda x: x.rank == rank, latest.fields))
|
||||||
|
compare_data = next(filter(lambda x: x.rank == rank, compare.fields))
|
||||||
|
avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
|
||||||
|
low_pps = get_metrics(
|
||||||
|
pps=latest_data.low_pps.league.pps, apm=latest_data.low_pps.league.apm, vs=latest_data.low_pps.league.vs
|
||||||
|
)
|
||||||
|
low_apm = get_metrics(
|
||||||
|
pps=latest_data.low_apm.league.pps, apm=latest_data.low_apm.league.apm, vs=latest_data.low_apm.league.vs
|
||||||
|
)
|
||||||
|
low_vs = get_metrics(
|
||||||
|
pps=latest_data.low_vs.league.pps, apm=latest_data.low_vs.league.apm, vs=latest_data.low_vs.league.vs
|
||||||
|
)
|
||||||
|
max_pps = get_metrics(
|
||||||
|
pps=latest_data.high_pps.league.pps, apm=latest_data.high_pps.league.apm, vs=latest_data.high_pps.league.vs
|
||||||
|
)
|
||||||
|
max_apm = get_metrics(
|
||||||
|
pps=latest_data.high_apm.league.pps, apm=latest_data.high_apm.league.apm, vs=latest_data.high_apm.league.vs
|
||||||
|
)
|
||||||
|
max_vs = get_metrics(
|
||||||
|
pps=latest_data.high_vs.league.pps, apm=latest_data.high_vs.league.apm, vs=latest_data.high_vs.league.vs
|
||||||
|
)
|
||||||
|
async with HostPage(
|
||||||
|
await render(
|
||||||
|
'v2/tetrio/rank/detail',
|
||||||
|
Data(
|
||||||
|
name=latest_data.rank,
|
||||||
|
trending=round(latest_data.tr_line - compare_data.tr_line, 2),
|
||||||
|
require_tr=round(latest_data.tr_line, 2),
|
||||||
|
players=latest_data.player_count,
|
||||||
|
minimum_data=SpecialData(
|
||||||
|
apm=low_apm.apm,
|
||||||
|
pps=low_pps.pps,
|
||||||
|
lpm=low_pps.lpm,
|
||||||
|
vs=low_vs.vs,
|
||||||
|
adpm=low_vs.adpm,
|
||||||
|
apm_holder=latest_data.low_apm.username.upper(),
|
||||||
|
pps_holder=latest_data.low_pps.username.upper(),
|
||||||
|
vs_holder=latest_data.low_vs.username.upper(),
|
||||||
|
),
|
||||||
|
average_data=SpecialData(
|
||||||
|
apm=avg.apm, pps=avg.pps, lpm=avg.lpm, vs=avg.vs, adpm=avg.adpm, apl=avg.apl, adpl=avg.adpl
|
||||||
|
),
|
||||||
|
maximum_data=SpecialData(
|
||||||
|
apm=max_apm.apm,
|
||||||
|
pps=max_pps.pps,
|
||||||
|
lpm=max_pps.lpm,
|
||||||
|
vs=max_vs.vs,
|
||||||
|
adpm=max_vs.adpm,
|
||||||
|
apm_holder=latest_data.high_apm.username.upper(),
|
||||||
|
pps_holder=latest_data.high_pps.username.upper(),
|
||||||
|
vs_holder=latest_data.high_vs.username.upper(),
|
||||||
|
),
|
||||||
|
updated_at=latest.update_time.replace(tzinfo=UTC).astimezone(ZoneInfo('Asia/Shanghai')),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
) as page_hash:
|
||||||
|
return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
|
||||||
@@ -1,7 +1,6 @@
|
|||||||
from asyncio import gather
|
from asyncio import gather
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from hashlib import md5
|
from hashlib import md5
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from nonebot.adapters import Event
|
from nonebot.adapters import Event
|
||||||
from nonebot.matcher import Matcher
|
from nonebot.matcher import Matcher
|
||||||
@@ -11,8 +10,10 @@ from nonebot_plugin_orm import get_session
|
|||||||
from nonebot_plugin_session import EventSession
|
from nonebot_plugin_session import EventSession
|
||||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
from nonebot_plugin_user import get_user
|
from nonebot_plugin_user import get_user
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
from ....db import query_bind_info, trigger
|
from ....db import query_bind_info, trigger
|
||||||
|
from ....i18n import Lang
|
||||||
from ....utils.exception import RecordNotFoundError
|
from ....utils.exception import RecordNotFoundError
|
||||||
from ....utils.host import HostPage, get_self_netloc
|
from ....utils.host import HostPage, get_self_netloc
|
||||||
from ....utils.metrics import get_metrics
|
from ....utils.metrics import get_metrics
|
||||||
@@ -22,7 +23,6 @@ from ....utils.render.schemas.tetrio.record.base import Finesse, Max, Mini, Tspi
|
|||||||
from ....utils.render.schemas.tetrio.record.blitz import Record, Statistic
|
from ....utils.render.schemas.tetrio.record.blitz import Record, Statistic
|
||||||
from ....utils.screenshot import screenshot
|
from ....utils.screenshot import screenshot
|
||||||
from ....utils.typing import Me
|
from ....utils.typing import Me
|
||||||
from ...constant import CANT_VERIFY_MESSAGE
|
|
||||||
from .. import alc
|
from .. import alc
|
||||||
from ..api.player import Player
|
from ..api.player import Player
|
||||||
from ..constant import GAME_TYPE
|
from ..constant import GAME_TYPE
|
||||||
@@ -60,9 +60,10 @@ async def _(
|
|||||||
)
|
)
|
||||||
if bind is None:
|
if bind is None:
|
||||||
await matcher.finish('未查询到绑定信息')
|
await matcher.finish('未查询到绑定信息')
|
||||||
message = UniMessage(CANT_VERIFY_MESSAGE)
|
|
||||||
player = Player(user_id=bind.game_account, trust=True)
|
player = Player(user_id=bind.game_account, trust=True)
|
||||||
await (message + UniMessage.image(raw=await make_blitz_image(player))).finish()
|
await (
|
||||||
|
UniMessage.i18n(Lang.interaction.warning.unverified) + UniMessage.image(raw=await make_blitz_image(player))
|
||||||
|
).finish()
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('TETRIO.record.blitz')
|
@alc.assign('TETRIO.record.blitz')
|
||||||
@@ -94,7 +95,9 @@ async def make_blitz_image(player: Player) -> bytes:
|
|||||||
user=User(
|
user=User(
|
||||||
id=user.ID,
|
id=user.ID,
|
||||||
name=user.name.upper(),
|
name=user.name.upper(),
|
||||||
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": avatar_revision})}'
|
avatar=str(
|
||||||
|
URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
|
||||||
|
)
|
||||||
if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
|
if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
|
||||||
else Avatar(
|
else Avatar(
|
||||||
type='identicon',
|
type='identicon',
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
from asyncio import gather
|
from asyncio import gather
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
from hashlib import md5
|
from hashlib import md5
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from nonebot.adapters import Event
|
from nonebot.adapters import Event
|
||||||
from nonebot.matcher import Matcher
|
from nonebot.matcher import Matcher
|
||||||
@@ -11,8 +10,10 @@ from nonebot_plugin_orm import get_session
|
|||||||
from nonebot_plugin_session import EventSession
|
from nonebot_plugin_session import EventSession
|
||||||
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
from nonebot_plugin_user import get_user
|
from nonebot_plugin_user import get_user
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
from ....db import query_bind_info, trigger
|
from ....db import query_bind_info, trigger
|
||||||
|
from ....i18n import Lang
|
||||||
from ....utils.exception import RecordNotFoundError
|
from ....utils.exception import RecordNotFoundError
|
||||||
from ....utils.host import HostPage, get_self_netloc
|
from ....utils.host import HostPage, get_self_netloc
|
||||||
from ....utils.metrics import get_metrics
|
from ....utils.metrics import get_metrics
|
||||||
@@ -22,7 +23,6 @@ from ....utils.render.schemas.tetrio.record.base import Finesse, Max, Mini, Stat
|
|||||||
from ....utils.render.schemas.tetrio.record.sprint import Record
|
from ....utils.render.schemas.tetrio.record.sprint import Record
|
||||||
from ....utils.screenshot import screenshot
|
from ....utils.screenshot import screenshot
|
||||||
from ....utils.typing import Me
|
from ....utils.typing import Me
|
||||||
from ...constant import CANT_VERIFY_MESSAGE
|
|
||||||
from .. import alc
|
from .. import alc
|
||||||
from ..api.player import Player
|
from ..api.player import Player
|
||||||
from ..constant import GAME_TYPE
|
from ..constant import GAME_TYPE
|
||||||
@@ -60,9 +60,10 @@ async def _(
|
|||||||
)
|
)
|
||||||
if bind is None:
|
if bind is None:
|
||||||
await matcher.finish('未查询到绑定信息')
|
await matcher.finish('未查询到绑定信息')
|
||||||
message = UniMessage(CANT_VERIFY_MESSAGE)
|
|
||||||
player = Player(user_id=bind.game_account, trust=True)
|
player = Player(user_id=bind.game_account, trust=True)
|
||||||
await (message + UniMessage.image(raw=await make_sprint_image(player))).finish()
|
await (
|
||||||
|
UniMessage.i18n(Lang.interaction.warning.unverified) + UniMessage.image(raw=await make_sprint_image(player))
|
||||||
|
).finish()
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('TETRIO.record.sprint')
|
@alc.assign('TETRIO.record.sprint')
|
||||||
@@ -95,7 +96,9 @@ async def make_sprint_image(player: Player) -> bytes:
|
|||||||
user=User(
|
user=User(
|
||||||
id=user.ID,
|
id=user.ID,
|
||||||
name=user.name.upper(),
|
name=user.name.upper(),
|
||||||
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": avatar_revision})}'
|
avatar=str(
|
||||||
|
URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
|
||||||
|
)
|
||||||
if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
|
if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
|
||||||
else Avatar(
|
else Avatar(
|
||||||
type='identicon',
|
type='identicon',
|
||||||
|
|||||||
@@ -1,13 +1,13 @@
|
|||||||
from contextlib import suppress
|
from contextlib import suppress
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from lxml import etree
|
from lxml import etree
|
||||||
from pandas import read_html
|
from pandas import read_html
|
||||||
|
|
||||||
|
from ....config.config import config
|
||||||
from ....db import anti_duplicate_add
|
from ....db import anti_duplicate_add
|
||||||
from ....utils.request import Request, splice_url
|
from ....utils.request import Request
|
||||||
from ..constant import BASE_URL, USER_NAME
|
from ..constant import BASE_URL, USER_NAME
|
||||||
from .models import TOPHistoricalData
|
from .models import TOPHistoricalData
|
||||||
from .schemas.user import User
|
from .schemas.user import User
|
||||||
@@ -15,6 +15,8 @@ from .schemas.user_profile import Data, UserProfile
|
|||||||
|
|
||||||
UTC = timezone.utc
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
request = Request(config.tetris.proxy.top or config.tetris.proxy.main)
|
||||||
|
|
||||||
|
|
||||||
class Player:
|
class Player:
|
||||||
def __init__(self, *, user_name: str, trust: bool = False) -> None:
|
def __init__(self, *, user_name: str, trust: bool = False) -> None:
|
||||||
@@ -35,8 +37,7 @@ class Player:
|
|||||||
async def get_profile(self) -> UserProfile:
|
async def get_profile(self) -> UserProfile:
|
||||||
"""获取用户信息"""
|
"""获取用户信息"""
|
||||||
if self._user_profile is None:
|
if self._user_profile is None:
|
||||||
url = splice_url([BASE_URL, 'profile.php', f'?{urlencode({"user":self.user_name})}'])
|
raw_user_profile = await request.request(BASE_URL / 'profile.php' % {'user': self.user_name}, is_json=False)
|
||||||
raw_user_profile = await Request.request(url, is_json=False)
|
|
||||||
self._user_profile = self._parse_profile(raw_user_profile)
|
self._user_profile = self._parse_profile(raw_user_profile)
|
||||||
await anti_duplicate_add(
|
await anti_duplicate_add(
|
||||||
TOPHistoricalData(
|
TOPHistoricalData(
|
||||||
@@ -48,7 +49,8 @@ class Player:
|
|||||||
)
|
)
|
||||||
return self._user_profile
|
return self._user_profile
|
||||||
|
|
||||||
def _parse_profile(self, original_user_profile: bytes) -> UserProfile:
|
@staticmethod
|
||||||
|
def _parse_profile(original_user_profile: bytes) -> UserProfile:
|
||||||
html = etree.HTML(original_user_profile)
|
html = etree.HTML(original_user_profile)
|
||||||
user_name = html.xpath('//div[@class="mycontent"]/h1/text()')[0].replace("'s profile", '')
|
user_name = html.xpath('//div[@class="mycontent"]/h1/text()')[0].replace("'s profile", '')
|
||||||
today = None
|
today = None
|
||||||
@@ -67,4 +69,4 @@ class Player:
|
|||||||
total: list[Data] = []
|
total: list[Data] = []
|
||||||
for _, value in dataframe.iterrows():
|
for _, value in dataframe.iterrows():
|
||||||
total.append(Data(lpm=value['lpm'], apm=value['apm']))
|
total.append(Data(lpm=value['lpm'], apm=value['apm']))
|
||||||
return UserProfile(user_name=user_name, today=today, total=total)
|
return UserProfile(user_name=user_name, today=today, total=total or None)
|
||||||
|
|||||||
@@ -1,8 +1,10 @@
|
|||||||
from re import compile
|
from re import compile
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
GAME_TYPE: Literal['TOP'] = 'TOP'
|
GAME_TYPE: Literal['TOP'] = 'TOP'
|
||||||
|
|
||||||
BASE_URL = 'http://tetrisonline.pl/top/'
|
BASE_URL = URL('http://tetrisonline.pl/top/')
|
||||||
|
|
||||||
USER_NAME = compile(r'^[a-zA-Z0-9_]{1,16}$')
|
USER_NAME = compile(r'^[a-zA-Z0-9_]{1,16}$')
|
||||||
|
|||||||
@@ -8,12 +8,20 @@ from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[im
|
|||||||
from nonebot_plugin_user import get_user
|
from nonebot_plugin_user import get_user
|
||||||
|
|
||||||
from ...db import query_bind_info, trigger
|
from ...db import query_bind_info, trigger
|
||||||
from ...utils.metrics import get_metrics
|
from ...i18n import Lang
|
||||||
|
from ...utils.exception import FallbackError
|
||||||
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
|
from ...utils.metrics import TetrisMetricsBasicWithLPM, get_metrics
|
||||||
|
from ...utils.render import render
|
||||||
|
from ...utils.render.avatar import get_avatar
|
||||||
|
from ...utils.render.schemas.base import People
|
||||||
|
from ...utils.render.schemas.top_info import Data as InfoData
|
||||||
|
from ...utils.render.schemas.top_info import Info
|
||||||
|
from ...utils.screenshot import screenshot
|
||||||
from ...utils.typing import Me
|
from ...utils.typing import Me
|
||||||
from ..constant import CANT_VERIFY_MESSAGE
|
|
||||||
from . import alc
|
from . import alc
|
||||||
from .api import Player
|
from .api import Player
|
||||||
from .api.schemas.user_profile import UserProfile
|
from .api.schemas.user_profile import Data, UserProfile
|
||||||
from .constant import GAME_TYPE
|
from .constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
@@ -35,8 +43,10 @@ async def _(event: Event, matcher: Matcher, target: At | Me, event_session: Even
|
|||||||
)
|
)
|
||||||
if bind is None:
|
if bind is None:
|
||||||
await matcher.finish('未查询到绑定信息')
|
await matcher.finish('未查询到绑定信息')
|
||||||
message = CANT_VERIFY_MESSAGE
|
await (
|
||||||
await (message + make_query_text(await Player(user_name=bind.game_account, trust=True).get_profile())).finish()
|
UniMessage.i18n(Lang.interaction.warning.unverified)
|
||||||
|
+ await make_query_result(await Player(user_name=bind.game_account, trust=True).get_profile())
|
||||||
|
).finish()
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('TOP.query')
|
@alc.assign('TOP.query')
|
||||||
@@ -47,7 +57,34 @@ async def _(account: Player, event_session: EventSession):
|
|||||||
command_type='query',
|
command_type='query',
|
||||||
command_args=[],
|
command_args=[],
|
||||||
):
|
):
|
||||||
await (make_query_text(await account.get_profile())).finish()
|
await (await make_query_result(await account.get_profile())).finish()
|
||||||
|
|
||||||
|
|
||||||
|
def get_avg_metrics(data: list[Data]) -> TetrisMetricsBasicWithLPM:
|
||||||
|
total_lpm = total_apm = 0.0
|
||||||
|
for value in data:
|
||||||
|
total_lpm += value.lpm
|
||||||
|
total_apm += value.apm
|
||||||
|
num = len(data)
|
||||||
|
return get_metrics(lpm=total_lpm / num, apm=total_apm / num)
|
||||||
|
|
||||||
|
|
||||||
|
async def make_query_image(profile: UserProfile) -> bytes:
|
||||||
|
if profile.today is None or profile.total is None:
|
||||||
|
raise FallbackError
|
||||||
|
today = get_metrics(lpm=profile.today.lpm, apm=profile.today.apm)
|
||||||
|
history = get_avg_metrics(profile.total)
|
||||||
|
async with HostPage(
|
||||||
|
await render(
|
||||||
|
'v1/top/info',
|
||||||
|
Info(
|
||||||
|
user=People(avatar=get_avatar(profile.user_name), name=profile.user_name),
|
||||||
|
today=InfoData(pps=today.pps, lpm=today.lpm, apm=today.apm, apl=today.apl),
|
||||||
|
history=InfoData(pps=history.pps, lpm=history.lpm, apm=history.apm, apl=history.apl),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
) as page_hash:
|
||||||
|
return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
|
||||||
|
|
||||||
|
|
||||||
def make_query_text(profile: UserProfile) -> UniMessage:
|
def make_query_text(profile: UserProfile) -> UniMessage:
|
||||||
@@ -60,15 +97,18 @@ def make_query_text(profile: UserProfile) -> UniMessage:
|
|||||||
else:
|
else:
|
||||||
message += f'用户 {profile.user_name} 暂无24小时内统计数据'
|
message += f'用户 {profile.user_name} 暂无24小时内统计数据'
|
||||||
if profile.total is not None:
|
if profile.total is not None:
|
||||||
total_lpm = total_apm = 0.0
|
total = get_avg_metrics(profile.total)
|
||||||
for value in profile.total:
|
|
||||||
total_lpm += value.lpm
|
|
||||||
total_apm += value.apm
|
|
||||||
num = len(profile.total)
|
|
||||||
total = get_metrics(lpm=total_lpm / num, apm=total_apm / num)
|
|
||||||
message += '\n历史统计数据为: '
|
message += '\n历史统计数据为: '
|
||||||
message += f"\nL'PM: {total.lpm} ( {total.pps} pps )"
|
message += f"\nL'PM: {total.lpm} ( {total.pps} pps )"
|
||||||
message += f'\nAPM: {total.apm} ( x{total.apl} )'
|
message += f'\nAPM: {total.apm} ( x{total.apl} )'
|
||||||
else:
|
else:
|
||||||
message += '\n暂无历史统计数据'
|
message += '\n暂无历史统计数据'
|
||||||
return UniMessage(message)
|
return UniMessage(message)
|
||||||
|
|
||||||
|
|
||||||
|
async def make_query_result(profile: UserProfile) -> UniMessage:
|
||||||
|
try:
|
||||||
|
return UniMessage.image(raw=await make_query_image(profile))
|
||||||
|
except FallbackError:
|
||||||
|
...
|
||||||
|
return make_query_text(profile)
|
||||||
|
|||||||
@@ -1,13 +1,14 @@
|
|||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from typing import overload
|
from typing import cast, overload
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from httpx import TimeoutException
|
from httpx import TimeoutException
|
||||||
from nonebot.compat import type_validate_json
|
from nonebot.compat import type_validate_json
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
|
from ....config.config import config
|
||||||
from ....db import anti_duplicate_add
|
from ....db import anti_duplicate_add
|
||||||
from ....utils.exception import RequestError
|
from ....utils.exception import RequestError
|
||||||
from ....utils.request import Request, splice_url
|
from ....utils.request import Request
|
||||||
from ..constant import BASE_URL, USER_NAME
|
from ..constant import BASE_URL, USER_NAME
|
||||||
from .models import TOSHistoricalData
|
from .models import TOSHistoricalData
|
||||||
from .schemas.user import User
|
from .schemas.user import User
|
||||||
@@ -16,6 +17,8 @@ from .schemas.user_profile import UserProfile
|
|||||||
|
|
||||||
UTC = timezone.utc
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
request = Request(config.tetris.proxy.tos or config.tetris.proxy.main)
|
||||||
|
|
||||||
|
|
||||||
class Player:
|
class Player:
|
||||||
@overload
|
@overload
|
||||||
@@ -57,28 +60,14 @@ class Player:
|
|||||||
"""获取用户信息"""
|
"""获取用户信息"""
|
||||||
if self._user_info is None:
|
if self._user_info is None:
|
||||||
if self.teaid is not None:
|
if self.teaid is not None:
|
||||||
url = [
|
path = 'getTeaIdInfo'
|
||||||
splice_url(
|
query = {'teaId': self.teaid}
|
||||||
[
|
|
||||||
i,
|
|
||||||
'getTeaIdInfo',
|
|
||||||
f'?{urlencode({"teaId":self.teaid})}',
|
|
||||||
]
|
|
||||||
)
|
|
||||||
for i in BASE_URL
|
|
||||||
]
|
|
||||||
else:
|
else:
|
||||||
url = [
|
path = 'getUsernameInfo'
|
||||||
splice_url(
|
query = {'username': cast(str, self.user_name)}
|
||||||
[
|
raw_user_info = await request.failover_request(
|
||||||
i,
|
[i / path % query for i in BASE_URL], failover_code=[502], failover_exc=(TimeoutException,)
|
||||||
'getUsernameInfo',
|
)
|
||||||
f'?{urlencode({"username":self.user_name})}',
|
|
||||||
]
|
|
||||||
)
|
|
||||||
for i in BASE_URL
|
|
||||||
]
|
|
||||||
raw_user_info = await Request.failover_request(url, failover_code=[502], failover_exc=(TimeoutException,))
|
|
||||||
user_info: UserInfo = type_validate_json(UserInfo, raw_user_info) # type: ignore[arg-type]
|
user_info: UserInfo = type_validate_json(UserInfo, raw_user_info) # type: ignore[arg-type]
|
||||||
if not isinstance(user_info, UserInfoSuccess):
|
if not isinstance(user_info, UserInfoSuccess):
|
||||||
msg = f'用户信息请求错误:\n{user_info.error}'
|
msg = f'用户信息请求错误:\n{user_info.error}'
|
||||||
@@ -98,17 +87,11 @@ class Player:
|
|||||||
"""获取用户数据"""
|
"""获取用户数据"""
|
||||||
if other_parameter is None:
|
if other_parameter is None:
|
||||||
other_parameter = {}
|
other_parameter = {}
|
||||||
params = urlencode(dict(sorted(other_parameter.items())))
|
params = (URL('') % dict(sorted(other_parameter.items()))).human_repr()
|
||||||
if self._user_profile.get(params) is None:
|
if self._user_profile.get(params) is None:
|
||||||
raw_user_profile = await Request.failover_request(
|
raw_user_profile = await request.failover_request(
|
||||||
[
|
[
|
||||||
splice_url(
|
i / 'getProfile' % {'id': self.teaid or cast(str, self.user_name), **other_parameter}
|
||||||
[
|
|
||||||
i,
|
|
||||||
'getProfile',
|
|
||||||
f'?{urlencode({"id":self.teaid or self.user_name,**other_parameter})}',
|
|
||||||
]
|
|
||||||
)
|
|
||||||
for i in BASE_URL
|
for i in BASE_URL
|
||||||
],
|
],
|
||||||
failover_code=[502],
|
failover_code=[502],
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
from re import compile
|
from re import compile
|
||||||
from typing import Literal
|
from typing import Literal
|
||||||
|
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
GAME_TYPE: Literal['TOS'] = 'TOS'
|
GAME_TYPE: Literal['TOS'] = 'TOS'
|
||||||
|
|
||||||
BASE_URL = {
|
BASE_URL = {
|
||||||
'https://teatube.cn:8888/',
|
URL('https://teatube.cn:8888/'),
|
||||||
'http://cafuuchino1.studio26f.org:19970',
|
URL('http://cafuuchino1.studio26f.org:19970'),
|
||||||
}
|
}
|
||||||
|
|
||||||
USER_NAME = compile(
|
USER_NAME = compile(
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ from nonebot_plugin_user import get_user
|
|||||||
from nonebot_plugin_userinfo import EventUserInfo, UserInfo
|
from nonebot_plugin_userinfo import EventUserInfo, UserInfo
|
||||||
|
|
||||||
from ...db import query_bind_info, trigger
|
from ...db import query_bind_info, trigger
|
||||||
|
from ...i18n import Lang
|
||||||
from ...utils.exception import RequestError
|
from ...utils.exception import RequestError
|
||||||
from ...utils.host import HostPage, get_self_netloc
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
from ...utils.image import get_avatar
|
from ...utils.image import get_avatar
|
||||||
@@ -24,7 +25,6 @@ from ...utils.render.schemas.base import People, Ranking
|
|||||||
from ...utils.render.schemas.tos_info import Info, Multiplayer, Radar
|
from ...utils.render.schemas.tos_info import Info, Multiplayer, Radar
|
||||||
from ...utils.screenshot import screenshot
|
from ...utils.screenshot import screenshot
|
||||||
from ...utils.typing import Me, Number
|
from ...utils.typing import Me, Number
|
||||||
from ..constant import CANT_VERIFY_MESSAGE
|
|
||||||
from . import alc
|
from . import alc
|
||||||
from .api import Player
|
from .api import Player
|
||||||
from .api.schemas.user_info import UserInfoSuccess
|
from .api.schemas.user_info import UserInfoSuccess
|
||||||
@@ -124,7 +124,7 @@ async def _(
|
|||||||
)
|
)
|
||||||
if bind is None:
|
if bind is None:
|
||||||
await matcher.finish('未查询到绑定信息')
|
await matcher.finish('未查询到绑定信息')
|
||||||
message = CANT_VERIFY_MESSAGE
|
message = UniMessage.i18n(Lang.interaction.warning.unverified)
|
||||||
player = Player(teaid=bind.game_account, trust=True)
|
player = Player(teaid=bind.game_account, trust=True)
|
||||||
user_info, game_data = await gather(player.get_info(), get_game_data(player))
|
user_info, game_data = await gather(player.get_info(), get_game_data(player))
|
||||||
if game_data is not None:
|
if game_data is not None:
|
||||||
@@ -223,7 +223,7 @@ async def make_query_image(user_info: UserInfoSuccess, game_data: GameData, even
|
|||||||
user=People(
|
user=People(
|
||||||
avatar=await get_avatar(event_user_info, 'Data URI', None)
|
avatar=await get_avatar(event_user_info, 'Data URI', None)
|
||||||
if event_user_info is not None
|
if event_user_info is not None
|
||||||
else get_random_avatar(),
|
else get_random_avatar(user_info.data.teaid),
|
||||||
name=user_info.data.name,
|
name=user_info.data.name,
|
||||||
),
|
),
|
||||||
ranking=Ranking(rating=float(user_info.data.ranking), rd=round(float(user_info.data.rd_now), 2)),
|
ranking=Ranking(rating=float(user_info.data.ranking), rd=round(float(user_info.data.rd_now), 2)),
|
||||||
|
|||||||
5
nonebot_plugin_tetris_stats/i18n/.config.json
Normal file
5
nonebot_plugin_tetris_stats/i18n/.config.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"default": "en-US",
|
||||||
|
"frozen": [],
|
||||||
|
"require": []
|
||||||
|
}
|
||||||
72
nonebot_plugin_tetris_stats/i18n/.lang.schema.json
Normal file
72
nonebot_plugin_tetris_stats/i18n/.lang.schema.json
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
{
|
||||||
|
"title": "Lang Schema",
|
||||||
|
"description": "Schema for lang file",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"interaction": {
|
||||||
|
"title": "Interaction",
|
||||||
|
"description": "Scope 'interaction' of lang item",
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": false,
|
||||||
|
"properties": {
|
||||||
|
"wrong": {
|
||||||
|
"title": "Wrong",
|
||||||
|
"description": "Scope 'wrong' of lang item",
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": false,
|
||||||
|
"properties": {
|
||||||
|
"query_bot": {
|
||||||
|
"title": "query_bot",
|
||||||
|
"description": "value of lang item type 'query_bot'",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"warning": {
|
||||||
|
"title": "Warning",
|
||||||
|
"description": "Scope 'warning' of lang item",
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": false,
|
||||||
|
"properties": {
|
||||||
|
"unverified": {
|
||||||
|
"title": "unverified",
|
||||||
|
"description": "value of lang item type 'unverified'",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"error": {
|
||||||
|
"title": "Error",
|
||||||
|
"description": "Scope 'error' of lang item",
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": false,
|
||||||
|
"properties": {
|
||||||
|
"MessageFormatError": {
|
||||||
|
"title": "Messageformaterror",
|
||||||
|
"description": "Scope 'MessageFormatError' of lang item",
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": false,
|
||||||
|
"properties": {
|
||||||
|
"TETR.IO": {
|
||||||
|
"title": "TETR.IO",
|
||||||
|
"description": "value of lang item type 'TETR.IO'",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"TOS": {
|
||||||
|
"title": "TOS",
|
||||||
|
"description": "value of lang item type 'TOS'",
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"TOP": {
|
||||||
|
"title": "TOP",
|
||||||
|
"description": "value of lang item type 'TOP'",
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
16
nonebot_plugin_tetris_stats/i18n/.template.json
Normal file
16
nonebot_plugin_tetris_stats/i18n/.template.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"$schema": ".template.schema.json",
|
||||||
|
"scopes": [
|
||||||
|
{
|
||||||
|
"scope": "interaction",
|
||||||
|
"types": [
|
||||||
|
{ "subtype": "wrong", "types": ["query_bot"] },
|
||||||
|
{ "subtype": "warning", "types": ["unverified"] }
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"scope": "error",
|
||||||
|
"types": [{ "subtype": "MessageFormatError", "types": ["TETR.IO", "TOS", "TOP"] }]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
54
nonebot_plugin_tetris_stats/i18n/.template.schema.json
Normal file
54
nonebot_plugin_tetris_stats/i18n/.template.schema.json
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
{
|
||||||
|
"title": "Template",
|
||||||
|
"description": "Template for lang items to generate schema for lang files",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"scopes": {
|
||||||
|
"title": "Scopes",
|
||||||
|
"description": "All scopes of lang items",
|
||||||
|
"type": "array",
|
||||||
|
"uniqueItems": true,
|
||||||
|
"items": {
|
||||||
|
"title": "Scope",
|
||||||
|
"description": "First level of all lang items",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"scope": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Scope name"
|
||||||
|
},
|
||||||
|
"types": {
|
||||||
|
"type": "array",
|
||||||
|
"description": "All types of lang items",
|
||||||
|
"uniqueItems": true,
|
||||||
|
"items": {
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"type": "string",
|
||||||
|
"description": "Value of lang item"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"subtype": {
|
||||||
|
"type": "string",
|
||||||
|
"description": "Subtype name of lang item"
|
||||||
|
},
|
||||||
|
"types": {
|
||||||
|
"type": "array",
|
||||||
|
"description": "All subtypes of lang items",
|
||||||
|
"uniqueItems": true,
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/properties/scopes/items/properties/types/items"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
12
nonebot_plugin_tetris_stats/i18n/__init__.py
Normal file
12
nonebot_plugin_tetris_stats/i18n/__init__.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# This file is @generated by tarina.lang CLI tool
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
|
||||||
|
# ruff: noqa: E402, F401, PLC0414
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from tarina.lang import lang # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
lang.load(Path(__file__).parent)
|
||||||
|
|
||||||
|
from .model import Lang as Lang
|
||||||
16
nonebot_plugin_tetris_stats/i18n/en-US.json
Normal file
16
nonebot_plugin_tetris_stats/i18n/en-US.json
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"$schema": ".lang.schema.json",
|
||||||
|
"interaction": {
|
||||||
|
"wrong": { "query_bot": "Can't query bot's information" },
|
||||||
|
"warning": {
|
||||||
|
"unverified": "* Because I can't verify account linking information, I can't guarantee the info I found is yourself/themself."
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"error": {
|
||||||
|
"MessageFormatError": {
|
||||||
|
"TETR.IO": "Username/ID is invalid",
|
||||||
|
"TOS": "Username/ID is invalid",
|
||||||
|
"TOP": "Username is invalid"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
32
nonebot_plugin_tetris_stats/i18n/model.py
Normal file
32
nonebot_plugin_tetris_stats/i18n/model.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
# This file is @generated by tarina.lang CLI tool
|
||||||
|
# It is not intended for manual editing.
|
||||||
|
|
||||||
|
from tarina.lang.model import LangItem, LangModel # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
|
||||||
|
class InteractionWrong:
|
||||||
|
query_bot: LangItem = LangItem('interaction', 'wrong.query_bot')
|
||||||
|
|
||||||
|
|
||||||
|
class InteractionWarning:
|
||||||
|
unverified: LangItem = LangItem('interaction', 'warning.unverified')
|
||||||
|
|
||||||
|
|
||||||
|
class Interaction:
|
||||||
|
wrong = InteractionWrong
|
||||||
|
warning = InteractionWarning
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorMessageformaterror:
|
||||||
|
TETR_IO: LangItem = LangItem('error', 'MessageFormatError.TETR.IO')
|
||||||
|
TOS: LangItem = LangItem('error', 'MessageFormatError.TOS')
|
||||||
|
TOP: LangItem = LangItem('error', 'MessageFormatError.TOP')
|
||||||
|
|
||||||
|
|
||||||
|
class Error:
|
||||||
|
MessageFormatError = ErrorMessageformaterror
|
||||||
|
|
||||||
|
|
||||||
|
class Lang(LangModel):
|
||||||
|
interaction = Interaction
|
||||||
|
error = Error
|
||||||
14
nonebot_plugin_tetris_stats/i18n/zh-CN.json
Normal file
14
nonebot_plugin_tetris_stats/i18n/zh-CN.json
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"$schema": ".lang.schema.json",
|
||||||
|
"interaction": {
|
||||||
|
"wrong": { "query_bot": "不能查询bot的信息" },
|
||||||
|
"warning": { "unverified": "* 由于无法验证绑定信息, 不能保证查询到的用户为本人" }
|
||||||
|
},
|
||||||
|
"error": {
|
||||||
|
"MessageFormatError": {
|
||||||
|
"TETR.IO": "用户名/ID不合法",
|
||||||
|
"TOS": "用户名/ID不合法",
|
||||||
|
"TOP": "用户名不合法"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,12 +1,14 @@
|
|||||||
import sys
|
import sys
|
||||||
|
from collections.abc import Callable, Coroutine
|
||||||
from os import environ
|
from os import environ
|
||||||
from platform import system
|
from platform import system
|
||||||
from re import sub
|
from re import sub
|
||||||
|
from typing import Any, ClassVar
|
||||||
|
|
||||||
from nonebot import get_driver
|
from nonebot import get_driver
|
||||||
from nonebot.log import logger
|
from nonebot.log import logger
|
||||||
from playwright.__main__ import main
|
from playwright.__main__ import main
|
||||||
from playwright.async_api import Browser, async_playwright
|
from playwright.async_api import Browser, BrowserContext, async_playwright
|
||||||
|
|
||||||
driver = get_driver()
|
driver = get_driver()
|
||||||
|
|
||||||
@@ -27,6 +29,7 @@ class BrowserManager:
|
|||||||
"""浏览器管理类"""
|
"""浏览器管理类"""
|
||||||
|
|
||||||
_browser: Browser | None = None
|
_browser: Browser | None = None
|
||||||
|
_contexts: ClassVar[dict[str, BrowserContext]] = {}
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def init_playwright(cls) -> None:
|
async def init_playwright(cls) -> None:
|
||||||
@@ -72,7 +75,11 @@ class BrowserManager:
|
|||||||
async def _start_browser(cls) -> Browser:
|
async def _start_browser(cls) -> Browser:
|
||||||
"""启动浏览器实例"""
|
"""启动浏览器实例"""
|
||||||
playwright = await async_playwright().start()
|
playwright = await async_playwright().start()
|
||||||
cls._browser = await playwright.firefox.launch()
|
cls._browser = await playwright.firefox.launch(
|
||||||
|
firefox_user_prefs={
|
||||||
|
'network.http.max-persistent-connections-per-server': 64,
|
||||||
|
},
|
||||||
|
)
|
||||||
return cls._browser
|
return cls._browser
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -80,8 +87,26 @@ class BrowserManager:
|
|||||||
"""获取浏览器实例"""
|
"""获取浏览器实例"""
|
||||||
return cls._browser or await cls._start_browser()
|
return cls._browser or await cls._start_browser()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def get_context(
|
||||||
|
cls, context_id: str = 'default', factory: Callable[[], Coroutine[Any, Any, BrowserContext]] | None = None
|
||||||
|
) -> BrowserContext:
|
||||||
|
"""获取浏览器上下文"""
|
||||||
|
return cls._contexts.setdefault(
|
||||||
|
context_id, await factory() if factory is not None else await (await cls.get_browser()).new_context()
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def del_context(cls, context_id: str) -> None:
|
||||||
|
"""删除浏览器上下文"""
|
||||||
|
if context_id in cls._contexts:
|
||||||
|
await cls._contexts[context_id].close()
|
||||||
|
del cls._contexts[context_id]
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def close_browser(cls) -> None:
|
async def close_browser(cls) -> None:
|
||||||
"""关闭浏览器实例"""
|
"""关闭浏览器实例"""
|
||||||
|
for i in cls._contexts.values():
|
||||||
|
await i.close()
|
||||||
if isinstance(cls._browser, Browser):
|
if isinstance(cls._browser, Browser):
|
||||||
await cls._browser.close()
|
await cls._browser.close()
|
||||||
|
|||||||
@@ -1,16 +1,20 @@
|
|||||||
|
from functools import cache
|
||||||
from hashlib import sha256
|
from hashlib import sha256
|
||||||
from ipaddress import IPv4Address, IPv6Address
|
from ipaddress import IPv4Address, IPv6Address
|
||||||
|
from pathlib import Path as FilePath
|
||||||
from typing import TYPE_CHECKING, ClassVar, Literal
|
from typing import TYPE_CHECKING, ClassVar, Literal
|
||||||
|
|
||||||
from fastapi import FastAPI, Path, status
|
from aiofiles import open
|
||||||
|
from fastapi import BackgroundTasks, FastAPI, Path, status
|
||||||
from fastapi.responses import FileResponse, HTMLResponse, Response
|
from fastapi.responses import FileResponse, HTMLResponse, Response
|
||||||
from fastapi.staticfiles import StaticFiles
|
from fastapi.staticfiles import StaticFiles
|
||||||
from nonebot import get_app, get_driver
|
from nonebot import get_app, get_driver
|
||||||
from nonebot.log import logger
|
from nonebot.log import logger
|
||||||
|
from yarl import URL
|
||||||
|
|
||||||
from ..config.config import CACHE_PATH
|
from ..config.config import CACHE_PATH
|
||||||
|
from ..games.tetrio.api.cache import request
|
||||||
from .image import img_to_png
|
from .image import img_to_png
|
||||||
from .request import Request
|
|
||||||
from .templates import TEMPLATES_DIR
|
from .templates import TEMPLATES_DIR
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
@@ -22,6 +26,7 @@ driver = get_driver()
|
|||||||
|
|
||||||
global_config = driver.config
|
global_config = driver.config
|
||||||
|
|
||||||
|
BASE_URL = URL('https://tetr.io/user-content/')
|
||||||
|
|
||||||
if not isinstance(app, FastAPI):
|
if not isinstance(app, FastAPI):
|
||||||
msg = '本插件需要 FastAPI 驱动器才能运行'
|
msg = '本插件需要 FastAPI 驱动器才能运行'
|
||||||
@@ -55,7 +60,7 @@ def _():
|
|||||||
|
|
||||||
|
|
||||||
@app.get('/host/{page_hash}.html', status_code=status.HTTP_200_OK)
|
@app.get('/host/{page_hash}.html', status_code=status.HTTP_200_OK)
|
||||||
async def _(page_hash: str) -> HTMLResponse:
|
def _(page_hash: str) -> HTMLResponse:
|
||||||
if page_hash in HostPage.pages:
|
if page_hash in HostPage.pages:
|
||||||
return HTMLResponse(HostPage.pages[page_hash])
|
return HTMLResponse(HostPage.pages[page_hash])
|
||||||
return NOT_FOUND
|
return NOT_FOUND
|
||||||
@@ -63,20 +68,30 @@ async def _(page_hash: str) -> HTMLResponse:
|
|||||||
|
|
||||||
@app.get('/host/resource/tetrio/{resource_type}/{user_id}', status_code=status.HTTP_200_OK)
|
@app.get('/host/resource/tetrio/{resource_type}/{user_id}', status_code=status.HTTP_200_OK)
|
||||||
async def _(
|
async def _(
|
||||||
resource_type: Literal['avatars', 'banners'], revision: int, user_id: str = Path(regex=r'^[a-f0-9]{24}$')
|
resource_type: Literal['avatars', 'banners'],
|
||||||
|
revision: int,
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
user_id: str = Path(regex=r'^[a-f0-9]{24}$'),
|
||||||
) -> Response:
|
) -> Response:
|
||||||
if not (path := CACHE_PATH / 'tetrio' / resource_type / f'{user_id}_{revision}.png').exists():
|
if not (path := CACHE_PATH / 'tetrio' / resource_type / f'{user_id}_{revision}.png').exists():
|
||||||
path.parent.mkdir(parents=True, exist_ok=True)
|
image = img_to_png(
|
||||||
path.write_bytes(
|
await request.request(
|
||||||
img_to_png(
|
BASE_URL / resource_type / f'{user_id}.jpg' % {'rv': revision},
|
||||||
await Request.request(
|
is_json=False,
|
||||||
f'https://tetr.io/user-content/{resource_type}/{user_id}.jpg?rv={revision}', is_json=False
|
|
||||||
)
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
background_tasks.add_task(write_cache, path=path, data=image)
|
||||||
|
return Response(content=image, media_type='image/png')
|
||||||
return FileResponse(path)
|
return FileResponse(path)
|
||||||
|
|
||||||
|
|
||||||
|
async def write_cache(path: FilePath, data: bytes) -> None:
|
||||||
|
path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
async with open(path, 'wb') as file:
|
||||||
|
await file.write(data)
|
||||||
|
|
||||||
|
|
||||||
|
@cache
|
||||||
def get_self_netloc() -> str:
|
def get_self_netloc() -> str:
|
||||||
host: IPv4Address | IPv6Address | IPvAnyAddress = global_config.host
|
host: IPv4Address | IPv6Address | IPvAnyAddress = global_config.host
|
||||||
if isinstance(host, IPv4Address):
|
if isinstance(host, IPv4Address):
|
||||||
|
|||||||
33
nonebot_plugin_tetris_stats/utils/limit.py
Normal file
33
nonebot_plugin_tetris_stats/utils/limit.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from asyncio import Lock, sleep
|
||||||
|
from collections.abc import Callable, Coroutine
|
||||||
|
from datetime import timedelta
|
||||||
|
from functools import wraps
|
||||||
|
from time import time
|
||||||
|
from typing import Any, ParamSpec, TypeVar
|
||||||
|
|
||||||
|
from nonebot.log import logger
|
||||||
|
|
||||||
|
P = ParamSpec('P')
|
||||||
|
T = TypeVar('T')
|
||||||
|
|
||||||
|
|
||||||
|
def limit(limit: timedelta) -> Callable[[Callable[P, Coroutine[Any, Any, T]]], Callable[P, Coroutine[Any, Any, T]]]:
|
||||||
|
limit_seconds = limit.total_seconds()
|
||||||
|
|
||||||
|
def decorator(func: Callable[P, Coroutine[Any, Any, T]]) -> Callable[P, Coroutine[Any, Any, T]]:
|
||||||
|
last_call = 0.0
|
||||||
|
lock = Lock()
|
||||||
|
|
||||||
|
@wraps(func)
|
||||||
|
async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
|
||||||
|
nonlocal last_call
|
||||||
|
async with lock:
|
||||||
|
if (diff := (time() - last_call)) < limit_seconds:
|
||||||
|
logger.debug(f'func: {func.__name__} trigger limit, wait {(limit_time:=limit_seconds-diff):.3f}s')
|
||||||
|
await sleep(limit_time)
|
||||||
|
last_call = time()
|
||||||
|
return await func(*args, **kwargs)
|
||||||
|
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
return decorator
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
from base64 import b64encode
|
from base64 import b64encode
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from random import choice, randint
|
from random import Random
|
||||||
|
|
||||||
from PIL import Image
|
from PIL import Image
|
||||||
from PIL.Image import Resampling
|
from PIL.Image import Resampling
|
||||||
@@ -8,12 +8,13 @@ from PIL.Image import Resampling
|
|||||||
from .draw import PIECE_MEMBERS, SkinManager
|
from .draw import PIECE_MEMBERS, SkinManager
|
||||||
|
|
||||||
|
|
||||||
def get_avatar() -> str:
|
def get_avatar(send: float | str | bytes | bytearray | None = None) -> str:
|
||||||
|
random = Random(send) # noqa: S311
|
||||||
skin = (
|
skin = (
|
||||||
SkinManager.get_skin()
|
SkinManager.get_skin(send)
|
||||||
.get_piece(choice(PIECE_MEMBERS)) # noqa: S311
|
.get_piece(random.choice(PIECE_MEMBERS))
|
||||||
.rotate(
|
.rotate(
|
||||||
randint(-360, 360), # noqa: S311
|
random.randint(-360, 360),
|
||||||
expand=True,
|
expand=True,
|
||||||
resample=Resampling.BICUBIC,
|
resample=Resampling.BICUBIC,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
from enum import Enum
|
from enum import Enum
|
||||||
from random import choice
|
from random import Random
|
||||||
from typing import Any, ClassVar
|
from typing import Any, ClassVar
|
||||||
|
|
||||||
from PIL.Image import Image
|
from PIL.Image import Image
|
||||||
@@ -151,8 +151,8 @@ class SkinManager:
|
|||||||
cls.skin.append(skin)
|
cls.skin.append(skin)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_skin(cls) -> 'Skin':
|
def get_skin(cls, send: float | str | bytes | bytearray | None = None) -> 'Skin':
|
||||||
return choice(cls.skin) # noqa: S311
|
return Random(send).choice(cls.skin) # noqa: S311
|
||||||
|
|
||||||
|
|
||||||
class Skin(ABC):
|
class Skin(ABC):
|
||||||
|
|||||||
@@ -90,5 +90,5 @@ class TechSkin(Skin):
|
|||||||
@driver.on_startup
|
@driver.on_startup
|
||||||
def _():
|
def _():
|
||||||
path = Path(__file__).parent / 'skins'
|
path = Path(__file__).parent / 'skins'
|
||||||
for i in path.iterdir():
|
for i in sorted(path.iterdir()):
|
||||||
TechSkin(i)
|
TechSkin(i)
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ from typing import Literal
|
|||||||
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from ......games.tetrio.api.typing import Rank, ValidRank
|
from ......games.tetrio.api.typing import Rank
|
||||||
from .....typing import Number
|
from .....typing import Number
|
||||||
from ...base import Avatar
|
from ...base import Avatar
|
||||||
from .base import TetraLeagueHistoryData
|
from .base import TetraLeagueHistoryData
|
||||||
@@ -21,7 +21,7 @@ class User(BaseModel):
|
|||||||
name: str
|
name: str
|
||||||
country: str | None
|
country: str | None
|
||||||
|
|
||||||
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'banned']
|
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'hidden', 'banned']
|
||||||
|
|
||||||
avatar: str | Avatar
|
avatar: str | Avatar
|
||||||
banner: str | None
|
banner: str | None
|
||||||
@@ -31,7 +31,6 @@ class User(BaseModel):
|
|||||||
friend_count: int | None
|
friend_count: int | None
|
||||||
supporter_tier: int
|
supporter_tier: int
|
||||||
|
|
||||||
verified: bool
|
|
||||||
bad_standing: bool
|
bad_standing: bool
|
||||||
|
|
||||||
badges: list[Badge]
|
badges: list[Badge]
|
||||||
@@ -53,7 +52,7 @@ class TetraLeagueStatistic(BaseModel):
|
|||||||
|
|
||||||
class TetraLeague(BaseModel):
|
class TetraLeague(BaseModel):
|
||||||
rank: Rank
|
rank: Rank
|
||||||
highest_rank: ValidRank
|
highest_rank: Rank
|
||||||
|
|
||||||
tr: Number
|
tr: Number
|
||||||
|
|
||||||
|
|||||||
@@ -26,7 +26,6 @@ class User(BaseModel):
|
|||||||
name: str
|
name: str
|
||||||
avatar: str | Avatar
|
avatar: str | Avatar
|
||||||
country: str | None
|
country: str | None
|
||||||
verified: bool
|
|
||||||
tetra_league: TetraLeague
|
tetra_league: TetraLeague
|
||||||
xp: Number
|
xp: Number
|
||||||
join_at: datetime | None
|
join_at: datetime | None
|
||||||
|
|||||||
@@ -1,54 +1,79 @@
|
|||||||
from collections.abc import Sequence
|
from collections.abc import Sequence
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
from urllib.parse import urljoin, urlparse
|
from typing import Any
|
||||||
|
|
||||||
from aiofiles import open
|
|
||||||
from httpx import AsyncClient, HTTPError
|
from httpx import AsyncClient, HTTPError
|
||||||
from nonebot import get_driver, get_plugin_config
|
from msgspec import DecodeError, Struct, json
|
||||||
|
from nonebot import get_driver
|
||||||
from nonebot.log import logger
|
from nonebot.log import logger
|
||||||
from playwright.async_api import Response
|
from playwright.async_api import Response
|
||||||
from ujson import JSONDecodeError, dumps, loads
|
from yarl import URL
|
||||||
|
|
||||||
from ..config.config import CACHE_PATH, Config
|
from ..config.config import CACHE_PATH, config
|
||||||
from .browser import BrowserManager
|
from .browser import BrowserManager
|
||||||
from .exception import RequestError
|
from .exception import RequestError
|
||||||
|
|
||||||
driver = get_driver()
|
driver = get_driver()
|
||||||
config = get_plugin_config(Config)
|
|
||||||
|
|
||||||
|
|
||||||
@driver.on_startup
|
class CloudflareCache(Struct):
|
||||||
async def _():
|
headers: dict[str, Any] | None = None
|
||||||
await Request.init_cache()
|
cookies: dict[str, Any] | None = None
|
||||||
await Request.read_cache()
|
|
||||||
|
|
||||||
|
|
||||||
@driver.on_shutdown
|
encoder = json.Encoder()
|
||||||
async def _():
|
decoder = json.Decoder()
|
||||||
await Request.write_cache()
|
|
||||||
|
|
||||||
|
|
||||||
def splice_url(url_list: list[str]) -> str:
|
class AntiCloudflare:
|
||||||
url = ''
|
cache_decoder = json.Decoder(type=CloudflareCache)
|
||||||
if len(url_list):
|
|
||||||
url = url_list.pop(0)
|
|
||||||
for i in url_list:
|
|
||||||
url = urljoin(url, i)
|
|
||||||
return url
|
|
||||||
|
|
||||||
|
def __init__(self, domain_suffix: str) -> None:
|
||||||
|
self.domain_suffix = domain_suffix
|
||||||
|
self.cache_path = CACHE_PATH / f'{self.domain_suffix}_cloudflare_cache.json'
|
||||||
|
self._headers: dict | None = None
|
||||||
|
self._cookies: dict | None = None
|
||||||
|
self.read_cache()
|
||||||
|
|
||||||
class Request:
|
def read_cache(self) -> None:
|
||||||
"""网络请求相关类"""
|
"""读取缓存文件"""
|
||||||
|
try:
|
||||||
|
cache: CloudflareCache = self.cache_decoder.decode(self.cache_path.read_text(encoding='UTF-8'))
|
||||||
|
self._headers = cache.headers
|
||||||
|
self._cookies = cache.cookies
|
||||||
|
except (OSError, DecodeError):
|
||||||
|
self.cache_path.unlink()
|
||||||
|
self.write_cache()
|
||||||
|
|
||||||
_CACHE_FILE = CACHE_PATH / 'cloudflare_cache.json'
|
def write_cache(self) -> None:
|
||||||
_headers: dict | None = None
|
"""写入缓存文件"""
|
||||||
_cookies: dict | None = None
|
self.cache_path.write_bytes(json.encode(CloudflareCache(headers=self.headers, cookies=self.cookies)))
|
||||||
|
|
||||||
@classmethod
|
@property
|
||||||
async def _anti_cloudflare(cls, url: str) -> bytes:
|
def headers(self) -> dict | None:
|
||||||
|
return self._headers
|
||||||
|
|
||||||
|
@headers.setter
|
||||||
|
def headers(self, value: dict | None) -> None:
|
||||||
|
self._headers = value
|
||||||
|
self.write_cache()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def cookies(self) -> dict | None:
|
||||||
|
return self._cookies
|
||||||
|
|
||||||
|
@cookies.setter
|
||||||
|
def cookies(self, value: dict | None) -> None:
|
||||||
|
self._cookies = value
|
||||||
|
self.write_cache()
|
||||||
|
|
||||||
|
async def __call__(self, url: str, proxy: str | None = None) -> bytes:
|
||||||
"""用firefox硬穿五秒盾"""
|
"""用firefox硬穿五秒盾"""
|
||||||
browser = await BrowserManager.get_browser()
|
browser = await BrowserManager.get_browser()
|
||||||
async with await browser.new_context() as context, await context.new_page() as page:
|
async with (
|
||||||
|
await browser.new_context(proxy={'server': proxy} if proxy is not None else None) as context,
|
||||||
|
await context.new_page() as page,
|
||||||
|
):
|
||||||
response = await page.goto(url)
|
response = await page.goto(url)
|
||||||
attempts = 0
|
attempts = 0
|
||||||
while attempts < 60: # noqa: PLR2004
|
while attempts < 60: # noqa: PLR2004
|
||||||
@@ -61,84 +86,70 @@ class Request:
|
|||||||
logger.warning('疑似触发了 Cloudflare 的验证码')
|
logger.warning('疑似触发了 Cloudflare 的验证码')
|
||||||
break
|
break
|
||||||
try:
|
try:
|
||||||
loads(text)
|
decoder.decode(text)
|
||||||
except JSONDecodeError:
|
except DecodeError:
|
||||||
await page.wait_for_timeout(1000)
|
await page.wait_for_timeout(1000)
|
||||||
else:
|
else:
|
||||||
if not isinstance(response, Response):
|
if not isinstance(response, Response):
|
||||||
msg = 'api请求失败'
|
msg = 'api请求失败'
|
||||||
raise RequestError(msg)
|
raise RequestError(msg)
|
||||||
cls._headers = await response.request.all_headers()
|
self.headers = await response.request.all_headers()
|
||||||
try:
|
try:
|
||||||
cls._cookies = {
|
self.cookies = {
|
||||||
name: value
|
name: value
|
||||||
for i in await context.cookies()
|
for i in await context.cookies()
|
||||||
if (name := i.get('name')) is not None and (value := i.get('value')) is not None
|
if (name := i.get('name')) is not None and (value := i.get('value')) is not None
|
||||||
}
|
}
|
||||||
except KeyError:
|
except KeyError:
|
||||||
cls._cookies = None
|
self.cookies = None
|
||||||
return await response.body()
|
return await response.body()
|
||||||
msg = '绕过五秒盾失败'
|
msg = '绕过五秒盾失败'
|
||||||
raise RequestError(msg)
|
raise RequestError(msg)
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def init_cache(cls) -> None:
|
|
||||||
"""初始化缓存文件"""
|
|
||||||
if not cls._CACHE_FILE.exists():
|
|
||||||
async with open(file=cls._CACHE_FILE, mode='w', encoding='UTF-8') as file:
|
|
||||||
await file.write(dumps({'headers': cls._headers, 'cookies': cls._cookies}))
|
|
||||||
|
|
||||||
@classmethod
|
class Request:
|
||||||
async def read_cache(cls) -> None:
|
"""网络请求相关类"""
|
||||||
"""读取缓存文件"""
|
|
||||||
try:
|
|
||||||
async with open(file=cls._CACHE_FILE, mode='r', encoding='UTF-8') as file:
|
|
||||||
json = loads(await file.read())
|
|
||||||
except FileNotFoundError:
|
|
||||||
await cls.init_cache()
|
|
||||||
except (PermissionError, JSONDecodeError):
|
|
||||||
cls._CACHE_FILE.unlink()
|
|
||||||
await cls.init_cache()
|
|
||||||
else:
|
|
||||||
cls._headers = json['headers']
|
|
||||||
cls._cookies = json['cookies']
|
|
||||||
|
|
||||||
@classmethod
|
def __init__(self, proxy: str | None) -> None:
|
||||||
async def write_cache(cls) -> None:
|
self.proxy = proxy
|
||||||
"""写入缓存文件"""
|
self.anti_cloudflares: dict[str, AntiCloudflare] = {}
|
||||||
try:
|
|
||||||
async with open(file=cls._CACHE_FILE, mode='r+', encoding='UTF-8') as file:
|
|
||||||
await file.write(dumps({'headers': cls._headers, 'cookies': cls._cookies}))
|
|
||||||
except FileNotFoundError:
|
|
||||||
await cls.init_cache()
|
|
||||||
except (PermissionError, JSONDecodeError):
|
|
||||||
cls._CACHE_FILE.unlink()
|
|
||||||
await cls.init_cache()
|
|
||||||
|
|
||||||
@classmethod
|
async def request(
|
||||||
async def request(cls, url: str, *, is_json: bool = True) -> bytes:
|
self,
|
||||||
|
url: URL,
|
||||||
|
extra_headers: dict | None = None,
|
||||||
|
*,
|
||||||
|
is_json: bool = True,
|
||||||
|
enable_anti_cloudflare: bool = False,
|
||||||
|
) -> bytes:
|
||||||
"""请求api"""
|
"""请求api"""
|
||||||
|
if (anti_cloudflare := self.anti_cloudflares.get(url.host or '')) is not None:
|
||||||
|
cookies = anti_cloudflare.cookies
|
||||||
|
headers = anti_cloudflare.headers
|
||||||
|
else:
|
||||||
|
cookies = None
|
||||||
|
headers = None
|
||||||
|
headers = headers if extra_headers is None else extra_headers if headers is None else headers | extra_headers
|
||||||
try:
|
try:
|
||||||
async with AsyncClient(cookies=cls._cookies, timeout=config.tetris_req_timeout) as session:
|
async with AsyncClient(cookies=cookies, timeout=config.tetris.request_timeout, proxy=self.proxy) as session:
|
||||||
response = await session.get(url, headers=cls._headers)
|
response = await session.get(str(url), headers=headers)
|
||||||
if response.status_code != HTTPStatus.OK:
|
if response.status_code != HTTPStatus.OK:
|
||||||
msg = f'请求错误 code: {response.status_code} {HTTPStatus(response.status_code).phrase}\n{response.text}'
|
msg = f'请求错误 code: {response.status_code} {HTTPStatus(response.status_code).phrase}\n{response.text}'
|
||||||
raise RequestError(msg, status_code=response.status_code)
|
raise RequestError(msg, status_code=response.status_code)
|
||||||
if is_json:
|
if is_json:
|
||||||
loads(response.content)
|
decoder.decode(response.content)
|
||||||
return response.content
|
return response.content
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
msg = f'请求错误 \n{e!r}'
|
msg = f'请求错误 \n{e!r}'
|
||||||
raise RequestError(msg) from e
|
raise RequestError(msg) from e
|
||||||
except JSONDecodeError:
|
except DecodeError: # 由于捕获的是 DecodeError 所以一定是 is_json = True
|
||||||
if urlparse(url).netloc.lower().endswith('tetr.io'):
|
if enable_anti_cloudflare and url.host is not None:
|
||||||
return await cls._anti_cloudflare(url)
|
return await self.anti_cloudflares.setdefault(url.host, AntiCloudflare(url.host))(str(url), self.proxy)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def failover_request(
|
async def failover_request(
|
||||||
cls,
|
self,
|
||||||
urls: Sequence[str],
|
urls: Sequence[URL],
|
||||||
*,
|
*,
|
||||||
failover_code: Sequence[int],
|
failover_code: Sequence[int],
|
||||||
failover_exc: tuple[type[BaseException], ...],
|
failover_exc: tuple[type[BaseException], ...],
|
||||||
@@ -148,7 +159,7 @@ class Request:
|
|||||||
for i in urls:
|
for i in urls:
|
||||||
logger.debug(f'尝试请求 {i}')
|
logger.debug(f'尝试请求 {i}')
|
||||||
try:
|
try:
|
||||||
return await cls.request(i, is_json=is_json)
|
return await self.request(i, is_json=is_json)
|
||||||
except RequestError as e:
|
except RequestError as e:
|
||||||
if e.status_code in failover_code: # 如果状态码在 failover_code 中, 则继续尝试下一个URL
|
if e.status_code in failover_code: # 如果状态码在 failover_code 中, 则继续尝试下一个URL
|
||||||
error_list.append(e)
|
error_list.append(e)
|
||||||
|
|||||||
@@ -1,23 +1,21 @@
|
|||||||
from nonebot import get_plugin_config
|
from playwright.async_api import BrowserContext, TimeoutError, ViewportSize
|
||||||
from playwright.async_api import TimeoutError, ViewportSize
|
|
||||||
|
|
||||||
from ..config.config import Config
|
from ..config.config import config
|
||||||
from .browser import BrowserManager
|
from .browser import BrowserManager
|
||||||
from .retry import retry
|
from .retry import retry
|
||||||
from .time_it import time_it
|
from .time_it import time_it
|
||||||
|
|
||||||
config = get_plugin_config(Config)
|
|
||||||
|
async def context_factory() -> BrowserContext:
|
||||||
|
return await (await BrowserManager.get_browser()).new_context(device_scale_factor=config.tetris.screenshot_quality)
|
||||||
|
|
||||||
|
|
||||||
@retry(exception_type=TimeoutError, reply='截图失败, 重试中')
|
@retry(exception_type=TimeoutError, reply='截图失败, 重试中')
|
||||||
@time_it
|
@time_it
|
||||||
async def screenshot(url: str) -> bytes:
|
async def screenshot(url: str) -> bytes:
|
||||||
browser = await BrowserManager.get_browser()
|
context = await BrowserManager.get_context('screenshot', factory=context_factory)
|
||||||
async with (
|
async with await context.new_page() as page:
|
||||||
await browser.new_page(device_scale_factor=config.tetris_screenshot_quality) as page,
|
|
||||||
):
|
|
||||||
await page.goto(url)
|
await page.goto(url)
|
||||||
await page.wait_for_load_state('networkidle')
|
|
||||||
size: ViewportSize = await page.evaluate("""
|
size: ViewportSize = await page.evaluate("""
|
||||||
() => {
|
() => {
|
||||||
const element = document.querySelector('#content');
|
const element = document.querySelector('#content');
|
||||||
@@ -28,4 +26,5 @@ async def screenshot(url: str) -> bytes:
|
|||||||
};
|
};
|
||||||
""")
|
""")
|
||||||
await page.set_viewport_size(size)
|
await page.set_viewport_size(size)
|
||||||
return await page.locator('id=content').screenshot(timeout=5000, type='png')
|
await page.wait_for_load_state('networkidle')
|
||||||
|
return await page.locator('id=content').screenshot(animations='disabled', timeout=5000, type='png')
|
||||||
|
|||||||
@@ -11,19 +11,20 @@ from nonebot import get_driver
|
|||||||
from nonebot.log import logger
|
from nonebot.log import logger
|
||||||
from nonebot.permission import SUPERUSER
|
from nonebot.permission import SUPERUSER
|
||||||
from nonebot_plugin_alconna import Alconna, Args, Option, on_alconna
|
from nonebot_plugin_alconna import Alconna, Args, Option, on_alconna
|
||||||
from nonebot_plugin_localstore import get_cache_file, get_data_dir
|
|
||||||
from rich.progress import Progress
|
from rich.progress import Progress
|
||||||
|
|
||||||
|
from ..config.config import CACHE_PATH, DATA_PATH, config
|
||||||
|
|
||||||
driver = get_driver()
|
driver = get_driver()
|
||||||
|
|
||||||
TEMPLATES_DIR = get_data_dir('nonebot_plugin_tetris_stats') / 'templates'
|
TEMPLATES_DIR = DATA_PATH / 'templates'
|
||||||
|
|
||||||
alc = on_alconna(Alconna('更新模板', Option('--revision', Args['revision', str], alias={'-R'})), permission=SUPERUSER)
|
alc = on_alconna(Alconna('更新模板', Option('--revision', Args['revision', str], alias={'-R'})), permission=SUPERUSER)
|
||||||
|
|
||||||
|
|
||||||
async def download_templates(tag: str) -> Path:
|
async def download_templates(tag: str) -> Path:
|
||||||
logger.info(f'开始下载模板 {tag}')
|
logger.info(f'开始下载模板 {tag}')
|
||||||
async with AsyncClient() as client:
|
async with AsyncClient(proxy=config.tetris.proxy.github or config.tetris.proxy.main) as client:
|
||||||
if tag == 'latest':
|
if tag == 'latest':
|
||||||
logger.info('目标为 latest, 正在获取最新版本号')
|
logger.info('目标为 latest, 正在获取最新版本号')
|
||||||
tag = (
|
tag = (
|
||||||
@@ -36,7 +37,7 @@ async def download_templates(tag: str) -> Path:
|
|||||||
.rsplit('/', 1)[-1]
|
.rsplit('/', 1)[-1]
|
||||||
)
|
)
|
||||||
logger.success(f'获取到的最新版本号: {tag}')
|
logger.success(f'获取到的最新版本号: {tag}')
|
||||||
path = get_cache_file('nonebot_plugin_tetris_stats', f'dist_{time_ns()}.zip')
|
path = CACHE_PATH / f'dist_{time_ns()}.zip'
|
||||||
with Progress() as progress:
|
with Progress() as progress:
|
||||||
task_id = progress.add_task('[red]Downloading...', total=None)
|
task_id = progress.add_task('[red]Downloading...', total=None)
|
||||||
async with (
|
async with (
|
||||||
@@ -56,7 +57,7 @@ async def download_templates(tag: str) -> Path:
|
|||||||
return path
|
return path
|
||||||
|
|
||||||
|
|
||||||
async def unzip_templates(zip_path: Path) -> Path:
|
def unzip_templates(zip_path: Path) -> Path:
|
||||||
logger.info('开始解压模板')
|
logger.info('开始解压模板')
|
||||||
temp_path = TEMPLATES_DIR.parent / f'temp_{time_ns()}'
|
temp_path = TEMPLATES_DIR.parent / f'temp_{time_ns()}'
|
||||||
with ZipFile(zip_path) as zip_file:
|
with ZipFile(zip_path) as zip_file:
|
||||||
@@ -91,7 +92,7 @@ async def check_hash(hash_file_path: Path) -> bool:
|
|||||||
|
|
||||||
async def init_templates(tag: str) -> bool:
|
async def init_templates(tag: str) -> bool:
|
||||||
logger.info(f'开始初始化模板 {tag}')
|
logger.info(f'开始初始化模板 {tag}')
|
||||||
temp_path = await unzip_templates(await download_templates(tag))
|
temp_path = unzip_templates(await download_templates(tag))
|
||||||
if not await check_hash(temp_path / 'hash.sha256'):
|
if not await check_hash(temp_path / 'hash.sha256'):
|
||||||
rmtree(temp_path)
|
rmtree(temp_path)
|
||||||
return False
|
return False
|
||||||
@@ -104,7 +105,7 @@ async def init_templates(tag: str) -> bool:
|
|||||||
|
|
||||||
|
|
||||||
async def check_tag(tag: str) -> bool:
|
async def check_tag(tag: str) -> bool:
|
||||||
async with AsyncClient() as client:
|
async with AsyncClient(proxy=config.tetris.proxy.github or config.tetris.proxy.main) as client:
|
||||||
return (
|
return (
|
||||||
await client.get(f'https://github.com/A-Minos/tetris-stats-templates/releases/tag/{tag}')
|
await client.get(f'https://github.com/A-Minos/tetris-stats-templates/releases/tag/{tag}')
|
||||||
).status_code != HTTPStatus.NOT_FOUND
|
).status_code != HTTPStatus.NOT_FOUND
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
from typing import Literal
|
from typing import Literal, TypeAlias
|
||||||
|
|
||||||
Number = float | int
|
Number: TypeAlias = float | int
|
||||||
GameType = Literal['IO', 'TOP', 'TOS']
|
GameType: TypeAlias = Literal['IO', 'TOP', 'TOS']
|
||||||
BaseCommandType = Literal['bind', 'query']
|
BaseCommandType: TypeAlias = Literal['bind', 'query']
|
||||||
TETRIOCommandType = BaseCommandType | Literal['rank', 'config', 'list', 'record']
|
TETRIOCommandType: TypeAlias = BaseCommandType | Literal['rank', 'config', 'list', 'record']
|
||||||
AllCommandType = BaseCommandType | TETRIOCommandType
|
AllCommandType: TypeAlias = BaseCommandType | TETRIOCommandType
|
||||||
Me = Literal[
|
Me: TypeAlias = Literal[
|
||||||
'我',
|
'我',
|
||||||
'自己',
|
'自己',
|
||||||
'我等',
|
'我等',
|
||||||
|
|||||||
3743
poetry.lock
generated
3743
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
261
pyproject.toml
261
pyproject.toml
@@ -1,136 +1,167 @@
|
|||||||
[tool.poetry]
|
[project]
|
||||||
name = 'nonebot-plugin-tetris-stats'
|
name = "nonebot-plugin-tetris-stats"
|
||||||
version = '1.4.6'
|
version = "1.6.2"
|
||||||
description = '一款基于 NoneBot2 的用于查询 Tetris 相关游戏数据的插件'
|
description = "一款基于 NoneBot2 的用于查询 Tetris 相关游戏数据的插件"
|
||||||
authors = ['scdhh <wallfjjd@gmail.com>']
|
readme = "README.md"
|
||||||
readme = 'README.md'
|
authors = [{ name = "shoucandanghehe", email = "wallfjjd@gmail.com" }]
|
||||||
homepage = 'https://github.com/shoucandanghehe/nonebot-plugin-tetris-stats'
|
requires-python = ">=3.10"
|
||||||
repository = 'https://github.com/shoucandanghehe/nonebot-plugin-tetris-stats'
|
dependencies = [
|
||||||
license = 'AGPL-3.0'
|
"aiocache>=0.12.3",
|
||||||
|
"aiofiles>=24.1.0",
|
||||||
|
"arclet-alconna<2",
|
||||||
|
"async-lru>=2.0.4",
|
||||||
|
"httpx>=0.27.2",
|
||||||
|
"jinja2>=3.1.4",
|
||||||
|
"lxml>=5.3.0",
|
||||||
|
"msgspec>=0.18.6",
|
||||||
|
"nonebot-plugin-alconna>=0.53.1",
|
||||||
|
"nonebot-plugin-apscheduler>=0.5.0",
|
||||||
|
"nonebot-plugin-localstore>=0.7.1",
|
||||||
|
"nonebot-plugin-orm>=0.7.6",
|
||||||
|
"nonebot-plugin-session>=0.3.2",
|
||||||
|
"nonebot-plugin-session-orm>=0.2.0",
|
||||||
|
"nonebot-plugin-user>=0.4.4",
|
||||||
|
"nonebot-plugin-userinfo>=0.2.6",
|
||||||
|
"nonebot2[fastapi]>=2.3.3",
|
||||||
|
"pandas>=2.2.3",
|
||||||
|
"pillow>=11.0.0",
|
||||||
|
"playwright>=1.48.0",
|
||||||
|
"rich>=13.9.3",
|
||||||
|
"yarl>=1.16.0",
|
||||||
|
]
|
||||||
|
classifiers = [
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
|
]
|
||||||
|
keywords = ["nonebot2"]
|
||||||
|
|
||||||
[tool.poetry.dependencies]
|
[project.urls]
|
||||||
python = '^3.10'
|
Homepage = "https://github.com/A-Minos/nonebot-plugin-tetris-stats"
|
||||||
nonebot2 = { extras = ['fastapi'], version = '^2.3.0' }
|
Repository = "https://github.com/A-Minos/nonebot-plugin-tetris-stats"
|
||||||
nonebot-plugin-alconna = '>=0.48.0'
|
Issues = "https://github.com/A-Minos/nonebot-plugin-tetris-stats/issues"
|
||||||
nonebot-plugin-apscheduler = '>=0.4,<0.6'
|
|
||||||
nonebot-plugin-localstore = '>=0.6,<0.8'
|
|
||||||
nonebot-plugin-orm = '>=0.1.1,<0.8.0'
|
|
||||||
nonebot-plugin-session = '^0.3.1'
|
|
||||||
nonebot-plugin-session-orm = '^0.2.0'
|
|
||||||
nonebot-plugin-user = '>=0.2,<0.5'
|
|
||||||
nonebot-plugin-userinfo = '^0.2.4'
|
|
||||||
aiocache = '^0.12.2'
|
|
||||||
aiofiles = '>=23.2.1,<25.0.0'
|
|
||||||
async-lru = '^2.0.4'
|
|
||||||
httpx = '^0.27.0'
|
|
||||||
jinja2 = '^3.1.3'
|
|
||||||
lxml = '^5.1.0'
|
|
||||||
pandas = '>=1.4.3,<3.0.0'
|
|
||||||
pillow = '^10.3.0'
|
|
||||||
playwright = '^1.41.2'
|
|
||||||
rich = '^13.7.1'
|
|
||||||
ujson = '^5.9.0'
|
|
||||||
zstandard = '>=0.22,<0.24'
|
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
|
||||||
mypy = '>=1.9'
|
|
||||||
ruff = '>=0.3.0'
|
|
||||||
types-aiofiles = '>=23.2.0.20240106,<25.0.0.0'
|
|
||||||
types-lxml = '^2024.2.9'
|
|
||||||
types-pillow = '^10.2.0.20240423'
|
|
||||||
types-ujson = '^5.9.0'
|
|
||||||
pandas-stubs = '>=1.5.2,<3.0.0'
|
|
||||||
nonebot2 = { extras = ['all'], version = '^2.3.0' }
|
|
||||||
nonebot-adapter-discord = '^0.1.3'
|
|
||||||
nonebot-adapter-kaiheila = '^0.3.4'
|
|
||||||
nonebot-adapter-onebot = '^2.4.1'
|
|
||||||
nonebot-adapter-qq = '^1.4.4'
|
|
||||||
nonebot-adapter-satori = '>=0.11.4,<0.13.0'
|
|
||||||
nonebot-plugin-orm = { extras = ['default'], version = '>=0.3,<0.8' }
|
|
||||||
|
|
||||||
[tool.poetry.group.debug.dependencies]
|
|
||||||
memory-profiler = '^0.61.0'
|
|
||||||
objprint = '^0.2.2'
|
|
||||||
viztracer = '^0.16.2'
|
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ['poetry-core>=1.0.0']
|
requires = ["hatchling"]
|
||||||
build-backend = 'poetry.core.masonry.api'
|
build-backend = "hatchling.build"
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
dev = [
|
||||||
|
"basedpyright>=1.19.1",
|
||||||
|
"mypy>=1.13.0",
|
||||||
|
"nonebot-adapter-discord>=0.1.8",
|
||||||
|
"nonebot-adapter-kaiheila>=0.3.4",
|
||||||
|
"nonebot-adapter-onebot>=2.4.6",
|
||||||
|
"nonebot-adapter-qq>=1.5.3",
|
||||||
|
"ruff>=0.7.1",
|
||||||
|
]
|
||||||
|
typecheck = [
|
||||||
|
"pandas-stubs>=2.2.3.241009",
|
||||||
|
"types-aiofiles>=24.1.0.20240626",
|
||||||
|
"types-lxml>=2024.9.16",
|
||||||
|
"types-pillow>=10.2.0.20240822",
|
||||||
|
]
|
||||||
|
test = ["nonebot-adapter-satori>=0.12.6", "nonebot-plugin-orm[default]>=0.7.6", "nonebot2[aiohttp,fastapi]>=2.3.3"]
|
||||||
|
debug = ["matplotlib>=3.9.2", "memory-profiler>=0.61.0", "objprint>=0.2.3", "pyqt6>=6.7.1", "viztracer>=0.17.0"]
|
||||||
|
release = ["bump-my-version>=0.28.0"]
|
||||||
|
|
||||||
|
[tool.uv]
|
||||||
|
default-groups = ["dev", "typecheck"]
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
line-length = 120
|
line-length = 120
|
||||||
target-version = 'py310'
|
target-version = "py310"
|
||||||
|
|
||||||
[tool.ruff.lint]
|
[tool.ruff.lint]
|
||||||
select = [
|
select = [
|
||||||
'F', # pyflakes
|
"F", # pyflakes
|
||||||
'E', # pycodestyle errors
|
"E", # pycodestyle errors
|
||||||
'W', # pycodestyle warnings
|
"W", # pycodestyle warnings
|
||||||
'C90', # mccabe
|
"C90", # mccabe
|
||||||
'I', # isort
|
"I", # isort
|
||||||
'N', # PEP8-naming
|
"N", # PEP8-naming
|
||||||
'UP', # pyupgrade
|
"UP", # pyupgrade
|
||||||
'YTT', # flake8-2020
|
"YTT", # flake8-2020
|
||||||
'ANN', # flake8-annotations
|
"ANN", # flake8-annotations
|
||||||
'ASYNC', # flake8-async
|
"ASYNC", # flake8-async
|
||||||
'S', # flake8-bandit
|
"S", # flake8-bandit
|
||||||
'BLE', # flake8-blind-except
|
"BLE", # flake8-blind-except
|
||||||
'FBT', # flake8-boolean-trap
|
"FBT", # flake8-boolean-trap
|
||||||
'B', # flake8-bugbear
|
"B", # flake8-bugbear
|
||||||
'A', # flake8-builtins
|
"A", # flake8-builtins
|
||||||
'COM', # flake8-commas
|
"COM", # flake8-commas
|
||||||
'C4', # flake8-comprehensions
|
"C4", # flake8-comprehensions
|
||||||
'DTZ', # flake8-datetimez
|
"DTZ", # flake8-datetimez
|
||||||
'T10', # flake8-debugger
|
"T10", # flake8-debugger
|
||||||
'EM', # flake8-errmsg
|
"EM", # flake8-errmsg
|
||||||
'FA', # flake8-future-annotations
|
"FA", # flake8-future-annotations
|
||||||
'ISC', # flake8-implicit-str-concat
|
"ISC", # flake8-implicit-str-concat
|
||||||
'ICN', # flake8-import-conventions
|
"ICN", # flake8-import-conventions
|
||||||
'PIE', # flake8-pie
|
"PIE", # flake8-pie
|
||||||
'T20', # flake8-print
|
"T20", # flake8-print
|
||||||
'PYI', # flake8-pyi
|
"PYI", # flake8-pyi
|
||||||
'Q', # flake8-quotes
|
"Q", # flake8-quotes
|
||||||
'RSE', # flake8-raise
|
"RSE", # flake8-raise
|
||||||
'RET', # flake8-return
|
"RET", # flake8-return
|
||||||
'SLF', # flake8-self
|
"SLF", # flake8-self
|
||||||
'SLOT', # flake8-slots
|
"SLOT", # flake8-slots
|
||||||
'SIM', # flake8-simplify
|
"SIM", # flake8-simplify
|
||||||
'TID', # flake8-tidy-imports
|
"TID", # flake8-tidy-imports
|
||||||
'TCH', # flake8-type-checking
|
"TCH", # flake8-type-checking
|
||||||
'ARG', # flake8-unused-arguments
|
"ARG", # flake8-unused-arguments
|
||||||
'PTH', # flake8-use-pathlib
|
"PTH", # flake8-use-pathlib
|
||||||
'ERA', # eradicate
|
"ERA", # eradicate
|
||||||
'PD', # pandas-vet
|
"PD", # pandas-vet
|
||||||
'PGH', # pygrep-hooks
|
"PGH", # pygrep-hooks
|
||||||
'PL', # pylint
|
"PL", # pylint
|
||||||
'TRY', # tryceratops
|
"TRY", # tryceratops
|
||||||
'FLY', # flynt
|
"FLY", # flynt
|
||||||
'FAST', # FastAPI
|
"FAST", # FastAPI
|
||||||
'PERF', # Perflint
|
"PERF", # Perflint
|
||||||
'FURB', # refurb
|
"FURB", # refurb
|
||||||
'RUF', # Ruff-specific rules
|
"RUF", # Ruff-specific rules
|
||||||
]
|
]
|
||||||
ignore = [
|
ignore = [
|
||||||
'E501', # 过长的行由 ruff format 处理, 剩余的都是字符串
|
"E501", # 过长的行由 ruff format 处理, 剩余的都是字符串
|
||||||
'ANN101', # 由 type checker 自动推断
|
"ANN101", # 由 type checker 自动推断
|
||||||
'ANN102', # 由 type checker 自动推断
|
"ANN102", # 由 type checker 自动推断
|
||||||
'ANN202', # 向 NoneBot 注册的函数
|
"ANN202", # 向 NoneBot 注册的函数
|
||||||
'TRY003',
|
"TRY003",
|
||||||
'COM812', # 强制尾随逗号
|
"COM812", # 强制尾随逗号
|
||||||
'TID252', # 相对导入
|
"TID252", # 相对导入
|
||||||
'ISC001', # format warning
|
"ISC001", # format warning
|
||||||
]
|
]
|
||||||
flake8-quotes = { inline-quotes = 'single', multiline-quotes = 'double' }
|
flake8-quotes = { inline-quotes = "single", multiline-quotes = "double" }
|
||||||
|
|
||||||
[tool.ruff.lint.flake8-annotations]
|
[tool.ruff.lint.flake8-annotations]
|
||||||
mypy-init-return = true
|
mypy-init-return = true
|
||||||
|
|
||||||
[tool.ruff.lint.flake8-builtins]
|
[tool.ruff.lint.flake8-builtins]
|
||||||
builtins-ignorelist = ['id']
|
builtins-ignorelist = ["id"]
|
||||||
|
|
||||||
[tool.ruff.format]
|
[tool.ruff.format]
|
||||||
quote-style = 'single'
|
quote-style = "single"
|
||||||
|
|
||||||
|
[tool.basedpyright]
|
||||||
|
pythonVersion = "3.10"
|
||||||
|
pythonPlatform = "All"
|
||||||
|
defineConstant = { PYDANTIC_V2 = true }
|
||||||
|
typeCheckingMode = "standard"
|
||||||
|
|
||||||
|
[tool.bumpversion]
|
||||||
|
current_version = "1.6.2"
|
||||||
|
tag = true
|
||||||
|
sign_tags = true
|
||||||
|
tag_name = "{new_version}"
|
||||||
|
commit = true
|
||||||
|
message = ":bookmark: {new_version}"
|
||||||
|
|
||||||
|
[[tool.bumpversion.files]]
|
||||||
|
filename = "pyproject.toml"
|
||||||
|
search = "version = \"{current_version}\""
|
||||||
|
replace = "version = \"{new_version}\""
|
||||||
|
|
||||||
[tool.nonebot]
|
[tool.nonebot]
|
||||||
plugins = ['nonebot_plugin_tetris_stats']
|
plugins = ["nonebot_plugin_tetris_stats"]
|
||||||
# plugins = ['test_aps']
|
|
||||||
|
|||||||
21
renovate.json
Normal file
21
renovate.json
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
{
|
||||||
|
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||||
|
"labels": ["dependencies"],
|
||||||
|
"dependencyDashboardTitle": "📌 Dependency Dashboard",
|
||||||
|
"dependencyDashboardLabels": ["dependencies"],
|
||||||
|
"commitMessagePrefix": "⬆️",
|
||||||
|
"commitMessageAction": "Upgrade",
|
||||||
|
"packageRules": [
|
||||||
|
{
|
||||||
|
"matchUpdateTypes": "pin",
|
||||||
|
"commitMessagePrefix": "📌",
|
||||||
|
"commitMessageAction": "Pin"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"matchUpdateTypes": "rollback",
|
||||||
|
"commitMessagePrefix": "⬇️",
|
||||||
|
"commitMessageAction": "Downgrade"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"extends": ["config:recommended"]
|
||||||
|
}
|
||||||
Reference in New Issue
Block a user