Compare commits

..

2 Commits

Author SHA1 Message Date
800b9fd66e 🔖 1.0.0.a1.post1 2023-11-14 00:46:56 +08:00
ee058d4a88 🐛 修复排行榜 Users.League 的部分字段为 None 时 错误处理的错误 2023-11-14 00:45:38 +08:00
210 changed files with 4312 additions and 14185 deletions

12
.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/" # Location of package manifests
target-branch: "dev"
schedule:
interval: "daily"

View File

@@ -3,48 +3,23 @@ name: Release CI
on:
push:
tags:
- '*'
- "*"
jobs:
release:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: write
steps:
- uses: actions/checkout@v4
- uses: astral-sh/setup-uv@v6
name: Setup UV
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
enable-cache: true
- name: 'Set up Python'
uses: actions/setup-python@v5
with:
python-version-file: '.python-version'
- run: uv sync
python-version: '3.11'
- name: Install Poetry
shell: bash
- name: Get Version
id: version
run: |
echo "VERSION=$(uvx pdm show --version)" >> $GITHUB_OUTPUT
echo "TAG_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
echo "TAG_NAME=${GITHUB_REF#refs/tags/}" >> $GITHUB_OUTPUT
- name: Check Version
if: steps.version.outputs.VERSION != steps.version.outputs.TAG_VERSION
run: exit 1
- name: Build Package
run: uv build
- name: Publish Package to PyPI
run: uv publish
- name: Publish Package to GitHub Release
run: gh release create ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl -t "🔖 ${{ steps.version.outputs.TAG_NAME }}" --generate-notes
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
pip install poetry
- name: Build
shell: bash
run: |
poetry install
poetry env use python
poetry publish --build -u ${{ secrets.USERNAME }} -p ${{ secrets.PASSWORD }} -n

View File

@@ -1,58 +0,0 @@
name: Code Coverage
on:
push:
branches:
- 'main'
pull_request:
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
test:
name: Test
runs-on: ${{ matrix.os }}
strategy:
matrix:
# python-version: ['3.10', '3.11', '3.12', '3.13']
python-version: ['3.10', '3.11', '3.12']
os: [ubuntu-latest, windows-latest, macos-latest]
fail-fast: false
env:
OS: ${{ matrix.os }}
PYTHON_VERSION: ${{ matrix.python-version }}
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup uv
uses: astral-sh/setup-uv@v6
with:
enable-cache: true
cache-suffix: ${{ env.PYTHON_VERSION }}_${{ env.OS }}
- name: Install Dependencies
run: |
uv python pin ${{ env.PYTHON_VERSION }}
uv sync --group test
- name: Run tests
run: uv run pytest --cov=nonebot_plugin_tetris_stats --cov-report xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
env_vars: OS,PYTHON_VERSION
check:
if: always()
needs: test
runs-on: ubuntu-latest
steps:
- name: Decide whether the needed jobs succeeded or failed
uses: re-actors/alls-green@223e4bb7a751b91f43eda76992bcfbf23b8b0302
with:
jobs: ${{ toJSON(needs) }}

View File

@@ -1,33 +0,0 @@
name: TypeCheck
on:
push:
jobs:
TypeCheck:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: astral-sh/setup-uv@v6
name: Setup UV
with:
enable-cache: true
- name: 'Set up Python'
uses: actions/setup-python@v5
with:
python-version-file: '.python-version'
- run: uv sync
shell: bash
- name: Run Mypy
shell: bash
run: |
uv run mypy ./nonebot_plugin_tetris_stats
- name: Run BasedPyright
shell: bash
run: |
uv run basedpyright ./nonebot_plugin_tetris_stats/

View File

@@ -9,14 +9,14 @@
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: 'CodeQL'
name: "CodeQL"
on:
push:
branches: [main]
branches: [ main ]
pull_request:
# The branches below must be a subset of the branches above
branches: [main]
branches: [ main ]
schedule:
- cron: '17 6 * * 5'
@@ -32,40 +32,41 @@ jobs:
strategy:
fail-fast: false
matrix:
language: ['python']
language: [ 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v3
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2

536
.gitignore vendored
View File

@@ -1,528 +1,10 @@
# Created by https://www.toptal.com/developers/gitignore/api/linux,macos,python,pycharm,windows,visualstudiocode,node
# Edit at https://www.toptal.com/developers/gitignore?templates=linux,macos,python,pycharm,windows,visualstudiocode,node
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### macOS Patch ###
# iCloud generated files
*.icloud
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.cache
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
.idea
dist
# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
### Node Patch ###
# Serverless Webpack directories
.webpack/
# Optional stylelint cache
# SvelteKit build / generate output
.svelte-kit
### PyCharm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# SonarLint plugin
.idea/sonarlint/
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### PyCharm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
.idea/**/sonarlint/
# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
.idea/**/sonarIssues.xml
# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
.idea/**/markdown-navigator.xml
.idea/**/markdown-navigator-enh.xml
.idea/**/markdown-navigator/
# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
.idea/$CACHE_FILE$
# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
.idea/codestream.xml
# Azure Toolkit for IntelliJ plugin
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
.idea/**/azureSettings.xml
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
### Windows ###
# Windows thumbnail cache files
Thumbs.db
Thumbs.db:encryptable
ehthumbs.db
ehthumbs_vista.db
# Dump file
*.stackdump
# Folder config file
[Dd]esktop.ini
# Recycle Bin used on file shares
$RECYCLE.BIN/
# Windows Installer files
*.cab
*.msi
*.msix
*.msm
*.msp
# Windows shortcuts
*.lnk
# End of https://www.toptal.com/developers/gitignore/api/linux,macos,python,pycharm,windows,visualstudiocode,node
# NoneBot2
bot.py
.env*
# Misc
ignore_*
*.backup
TODO*
test_*
Untitled*
*copy*
.vscode
*dev*
*cache*
*backup*
*.pyc

View File

@@ -1,22 +0,0 @@
default_install_hook_types: [pre-commit, prepare-commit-msg]
ci:
autofix_commit_msg: ':rotating_light: auto fix by pre-commit hooks'
autofix_prs: true
autoupdate_branch: main
autoupdate_schedule: weekly
autoupdate_commit_msg: ':arrow_up: auto update by pre-commit hooks'
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.4
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
stages: [pre-commit]
- id: ruff-format
stages: [pre-commit]
- repo: https://github.com/nonebot/nonemoji
rev: v0.1.4
hooks:
- id: nonemoji
stages: [prepare-commit-msg]

View File

@@ -1 +0,0 @@
3.10

View File

@@ -1,58 +0,0 @@
# How to Contribute?
## Setting Up the Environment
### For Developers with Basic Python Knowledge
First, you need install [uv](https://docs.astral.sh/uv/).
Then:
```bash
# Set up the basic Python environment
uv python install 3.10
# Clone the repository
git clone https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
cd nonebot-plugin-tetris-stats
# Install dependencies
uv sync
```
## Development
### Code Development
1. For static code analysis, use [ruff](https://docs.astral.sh/ruff/). You can install the corresponding plugin for your IDE or use the command line with `ruff check ./nonebot_plugin_tetris_stats/` to check the code.
2. For code formatting, use [ruff](https://docs.astral.sh/ruff/). You can install the corresponding plugin for your IDE or use the command line with `ruff format ./nonebot_plugin_tetris_stats/` to format the code.
3. For type checking, use both [basedpyright](https://docs.basedpyright.com/latest/) and [mypy](https://www.mypy-lang.org/). You can install the corresponding plugins for your IDE or use the following commands in the terminal to check the code:
```bash
# basedpyright
basedpyright ./nonebot_plugin_tetris_stats/
# mypy
mypy ./nonebot_plugin_tetris_stats/
```
### Internationalization
This project uses [Tarina](https://github.com/ArcletProject/Tarina) for internationalization support.
#### Adding a New Language
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
2. Run `tarina-lang create {language_code}` \* Please note that the language code should preferably follow the [IETF language tag](https://en.wikipedia.org/wiki/IETF_language_tag) standard.
3. Edit the generated `./nonebot_plugin_tetris_stats/i18n/{language_code}.json` file.
#### Updating an Existing Language
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
2. Edit the corresponding `./nonebot_plugin_tetris_stats/i18n/{language_code}.json` file.
#### Adding New Entries
1. Navigate to the `./nonebot_plugin_tetris_stats/i18n/` directory.
2. Edit the `.template.json` file.
3. Run `tarina-lang schema && tarina-lang model`.
4. Modify the language files, adding new entries at least to `en-US.json`.

View File

@@ -1,57 +0,0 @@
# 我该如何参与开发?
## 配置环境
首先你需要安装 [uv](https://docs.astral.sh/uv/)。
然后:
```bash
# 配置基础 Python 环境
uv python install 3.10
# 克隆仓库
git clone https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
cd nonebot-plugin-tetris-stats
# 安装依赖
uv sync
```
## 开发
### 代码开发
1. 代码静态检查使用 [ruff](https://docs.astral.sh/ruff/)你可以为你的ide安装对应插件来使用也可以在命令行使用`ruff check ./nonebot_plugin_tetris_stats/`来检查代码。
2. 代码格式化使用 [ruff](https://docs.astral.sh/ruff/)你可以为你的ide安装对应插件来使用也可以在命令行使用`ruff format ./nonebot_plugin_tetris_stats/`来格式化代码。
3. 类型检查同时使用 [basedpyright](https://docs.basedpyright.com/latest/) 和 [mypy](https://www.mypy-lang.org/)你可以为你的ide安装对应插件来使用。
也可以在命令行使用下面的命令来检查代码:
```bash
# basedpyright
basedpyright ./nonebot_plugin_tetris_stats/
# mypy
mypy ./nonebot_plugin_tetris_stats/
```
### 国际化
本项目使用 [Tarina](https://github.com/ArcletProject/Tarina) 提供国际化支持。
#### 添加新的语言
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
2. 运行 `tarina-lang create {语言代码}` \* 请注意,语言代码最好符合 [IETF语言标签](https://zh.wikipedia.org/wiki/IETF%E8%AF%AD%E8%A8%80%E6%A0%87%E7%AD%BE) 的规范。
3. 编辑生成的 `./nonebot_plugin_tetris_stats/i18n/{语言代码}.json` 文件。
#### 更新已有语言
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
2. 编辑对应的 `./nonebot_plugin_tetris_stats/i18n/{语言代码}.json` 文件。
#### 添加新的条目
1. 进入 `./nonebot_plugin_tetris_stats/i18n/` 目录。
2. 编辑 `.template.json` 文件。
3. 运行 `tarina-lang schema && tarina-lang model`
4. 修改语言文件,至少为`en-US.json`添加新的条目。

View File

@@ -87,13 +87,3 @@ pip install nonebot-plugin-tetris-stats
## 📝 开源
本项目使用 [AGPL-3.0](https://github.com/shoucandanghehe/nonebot-plugin-tetris-stats/blob/main/LICENSE) 许可证开源
## 🤓☝ 给个 star 吧
<a href="https://star-history.com/#A-Minos/nonebot-plugin-tetris-stats&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=A-Minos/nonebot-plugin-tetris-stats&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=A-Minos/nonebot-plugin-tetris-stats&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=A-Minos/nonebot-plugin-tetris-stats&type=Date" />
</picture>
</a>

View File

@@ -1,38 +1,22 @@
from nonebot import require
from nonebot.plugin import PluginMetadata, inherit_supported_adapters
from nonebot.plugin import PluginMetadata
require_plugins = {
'nonebot_plugin_alconna',
'nonebot_plugin_apscheduler',
'nonebot_plugin_localstore',
'nonebot_plugin_orm',
'nonebot_plugin_uninfo',
'nonebot_plugin_user',
'nonebot_plugin_waiter',
}
require('nonebot_plugin_localstore')
require('nonebot_plugin_orm')
require('nonebot_plugin_alconna')
require('nonebot_plugin_apscheduler')
for i in require_plugins:
require(i)
from nonebot_plugin_alconna import namespace # noqa: E402
with namespace('tetris_stats') as ns:
ns.enable_message_cache = False
from .config import migrations # noqa: E402
from .config.config import Config # noqa: E402
from .config.config import migrations # noqa: E402
__plugin_meta__ = PluginMetadata(
name='Tetris Stats',
description='一个用于查询 Tetris 相关游戏玩家数据的插件',
usage='发送 tstats --help 查询使用方法',
usage='发送 {游戏名} --help 查询使用方法',
type='application',
homepage='https://github.com/A-minos/nonebot-plugin-tetris-stats',
config=Config,
supported_adapters=inherit_supported_adapters(*require_plugins),
homepage='https://github.com/shoucandanghehe/nonebot-plugin-tetris-stats',
extra={
'orm_version_location': migrations,
},
)
from . import games # noqa: F401, E402
from . import game_data_processor # noqa: F401, E402

View File

@@ -1,29 +1,14 @@
from nonebot import get_driver, get_plugin_config
from nonebot_plugin_localstore import get_plugin_cache_dir, get_plugin_data_dir
from pydantic import BaseModel, Field
from pathlib import Path
CACHE_PATH = get_plugin_cache_dir()
DATA_PATH = get_plugin_data_dir()
from nonebot_plugin_localstore import get_cache_dir # type: ignore[import-untyped]
from pydantic import BaseModel
from . import migrations # noqa: F401
class Proxy(BaseModel):
main: str | None = None
github: str | None = None
tetrio: str | None = None
tos: str | None = None
top: str | None = None
class ScopedConfig(BaseModel):
request_timeout: float = 30.0
screenshot_quality: float = 2
proxy: Proxy = Field(default_factory=Proxy)
development: bool = False
CACHE_PATH: Path = get_cache_dir('nonebot_plugin_tetris_stats')
class Config(BaseModel):
tetris: ScopedConfig = Field(default_factory=ScopedConfig)
"""配置类"""
config = get_plugin_config(Config)
global_config = get_driver().config
db_url: str = 'sqlite://data/nonebot_plugin_tetris_stats/data.db'

View File

@@ -1,58 +0,0 @@
"""Rename field
迁移 ID: 09d4bb60160d
父迁移: b9d65badc713
创建时间: 2024-04-23 23:42:04.541672
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '09d4bb60160d'
down_revision: str | Sequence[str] | None = 'b9d65badc713'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
if name:
return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.alter_column('create_time', new_column_name='update_time', existing_type=sa.DateTime())
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_create_time')
op.create_index(
batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_update_time'),
'nonebot_plugin_tetris_stats_iorank',
['update_time'],
unique=False,
)
# ### end Alembic commands ###
def downgrade(name: str = '') -> None:
if name:
return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.alter_column('update_time', new_column_name='create_time')
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_update_time'))
op.create_index(
'ix_nonebot_plugin_tetris_stats_iorank_create_time',
'nonebot_plugin_tetris_stats_iorank',
['create_time'],
unique=False,
)
# ### end Alembic commands ###

View File

@@ -1,50 +0,0 @@
"""add field
迁移 ID: 0d50142b780f
父迁移: 09d4bb60160d
创建时间: 2024-04-24 14:55:08.064098
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '0d50142b780f'
down_revision: str | Sequence[str] | None = '09d4bb60160d'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
if name:
return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.add_column(sa.Column('file_hash', sa.String(length=128), nullable=True))
batch_op.create_index(
batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_file_hash'), ['file_hash'], unique=False
)
# ### end Alembic commands ###
def downgrade(name: str = '') -> None:
if name:
return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_file_hash'))
batch_op.drop_column('file_hash')
# ### end Alembic commands ###

View File

@@ -1,286 +0,0 @@
"""Refactor Historical
迁移 ID: 3c25a5a8c050
父迁移: b7fbdafc339a
创建时间: 2024-05-14 09:16:35.193001
"""
from __future__ import annotations
from datetime import datetime
from typing import TYPE_CHECKING, Any
import sqlalchemy as sa
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, MofNCompleteColumn, Progress, TaskProgressColumn, TextColumn, TimeRemainingColumn
from sqlalchemy import desc, select
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '3c25a5a8c050'
down_revision: str | Sequence[str] | None = 'b7fbdafc339a'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def migrate_old_data() -> None: # noqa: C901
from json import dumps, loads # noqa: PLC0415
Base = automap_base() # noqa: N806
Base.prepare(autoload_with=op.get_bind())
OldHistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
TETRIOHistoricalData = Base.classes.nonebot_plugin_tetris_stats_tetriohistoricaldata # noqa: N806
TOSHistoricalData = Base.classes.nonebot_plugin_tetris_stats_toshistoricaldata # noqa: N806
with (
Session(op.get_bind()) as session,
Progress(
TextColumn('[progress.description]{task.description}'),
BarColumn(),
MofNCompleteColumn(),
TaskProgressColumn(),
TimeRemainingColumn(),
) as progress,
):
if session.query(OldHistoricalData).count() == 0:
logger.info('空表, 跳过')
return
task_id = progress.add_task('[cyan]Migrating:', total=session.query(OldHistoricalData).count())
pointer = 0
while pointer < session.query(OldHistoricalData).order_by(desc(OldHistoricalData.id)).limit(1).one().id:
result = session.scalars(
select(OldHistoricalData)
.where(OldHistoricalData.id > pointer)
.order_by(OldHistoricalData.id)
.limit(100)
).all()
for j in result:
processed_data: dict[str, Any] = loads(j.processed_data)
if j.game_platform == 'IO':
if (data := processed_data.get('user_info')) is not None:
session.add(
TETRIOHistoricalData(
user_unique_identifier=j.user_unique_identifier,
api_type='User Info',
data=dumps(data),
update_time=datetime.fromisoformat(data['cache']['cached_at']),
)
)
if (data := processed_data.get('user_records')) is not None:
session.add(
TETRIOHistoricalData(
user_unique_identifier=j.user_unique_identifier,
api_type='User Records',
data=dumps(data),
update_time=datetime.fromisoformat(data['cache']['cached_at']),
)
)
if j.game_platform == 'TOS' and not j.user_unique_identifier.isdigit():
if (data := processed_data.get('user_info')) is not None:
session.add(
TOSHistoricalData(
user_unique_identifier=j.user_unique_identifier,
api_type='User Info',
data=dumps(data),
update_time=j.finish_time,
)
)
if (data := processed_data.get('user_profile')) is not None:
for v in data.values():
session.add(
TOSHistoricalData(
user_unique_identifier=j.user_unique_identifier,
api_type='User Profile',
data=dumps(v),
update_time=j.finish_time,
)
)
progress.update(task_id, advance=1)
session.commit()
pointer = result[-1].id
logger.success('Migrate successfully')
def upgrade(name: str = '') -> None:
    """Split the combined HistoricalData table into per-platform tables.

    Creates the TETRIO/TOP/TOS historical-data tables plus a trigger-history
    table, copies the old rows over via ``migrate_old_data()``, then drops the
    old combined table.
    """
    # Alembic invokes every migration once per configured branch name; only
    # act on the default (unnamed) branch.
    if name:
        return
    # PostgreSQL deployments are skipped here — presumably handled elsewhere
    # (NOTE(review): confirm against the project's other migration branch).
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'nonebot_plugin_tetris_stats_tetriohistoricaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetriohistoricaldata')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'), ['api_type'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'), ['update_time'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'),
            ['user_unique_identifier'],
            unique=False,
        )
    op.create_table(
        'nonebot_plugin_tetris_stats_tophistoricaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tophistoricaldata')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tophistoricaldata', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_api_type'), ['api_type'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_update_time'), ['update_time'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_user_unique_identifier'),
            ['user_unique_identifier'],
            unique=False,
        )
    op.create_table(
        'nonebot_plugin_tetris_stats_toshistoricaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_toshistoricaldata')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_toshistoricaldata', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_api_type'), ['api_type'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_update_time'), ['update_time'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_user_unique_identifier'),
            ['user_unique_identifier'],
            unique=False,
        )
    op.create_table(
        'nonebot_plugin_tetris_stats_triggerhistoricaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('session_persist_id', sa.Integer(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_triggerhistoricaldata')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldata', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_command_type'),
            ['command_type'],
            unique=False,
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_game_platform'),
            ['game_platform'],
            unique=False,
        )
    # Copy rows out of the old combined table before it is dropped below.
    migrate_old_data()
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_command_type')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_account')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_type')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier')
    op.drop_table('nonebot_plugin_tetris_stats_historicaldata')
    # ### end Alembic commands ###
def downgrade(name: str = '') -> None:
    """Recreate the old combined HistoricalData table and drop the split tables.

    NOTE(review): rows migrated by ``upgrade()`` are NOT copied back — this
    downgrade restores only the schema, not the data.
    """
    # Only act on the default (unnamed) Alembic branch.
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): unlike the tables created in upgrade(), the recreated table
    # carries no info={'bind_key': ...} — confirm this is intentional.
    op.create_table(
        'nonebot_plugin_tetris_stats_historicaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('bot_platform', sa.String(length=32), nullable=True),
        sa.Column('bot_account', sa.String(), nullable=True),
        sa.Column('source_type', sa.String(length=32), nullable=True),
        sa.Column('source_account', sa.String(), nullable=True),
        sa.Column('message', sa.PickleType(), nullable=True),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('game_user', sa.JSON(), nullable=False),
        sa.Column('processed_data', sa.JSON(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=32), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'),
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier',
            ['user_unique_identifier'],
            unique=False,
        )
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_source_type', ['source_type'], unique=False
        )
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_source_account', ['source_account'], unique=False
        )
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_game_platform', ['game_platform'], unique=False
        )
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_command_type', ['command_type'], unique=False
        )
    # Drop the split tables in reverse creation order, indexes first.
    with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldata', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_game_platform'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_command_type'))
    op.drop_table('nonebot_plugin_tetris_stats_triggerhistoricaldata')
    with op.batch_alter_table('nonebot_plugin_tetris_stats_toshistoricaldata', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_update_time'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_api_type'))
    op.drop_table('nonebot_plugin_tetris_stats_toshistoricaldata')
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tophistoricaldata', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_update_time'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_api_type'))
    op.drop_table('nonebot_plugin_tetris_stats_tophistoricaldata')
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'))
    op.drop_table('nonebot_plugin_tetris_stats_tetriohistoricaldata')
    # ### end Alembic commands ###

View File

@@ -1,82 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_tetrioleaguestats
迁移 ID: 3d900bb0e8d4
父迁移: 405c6936a164
创建时间: 2025-07-18 02:22:03.771903
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '3d900bb0e8d4'
down_revision: str | Sequence[str] | None = '405c6936a164'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy rows from the old long-named league-stats table into ``nb_t_io_tl_stats``.

    Only ``id`` and ``update_time`` are copied (the old table's only columns);
    primary keys are preserved.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never created the old table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetrioleaguestats' not in table_names:
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetrioleaguestats  # noqa: N806
    New = Base.classes.nb_t_io_tl_stats  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream old rows one at a time to bound memory use.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 migrated rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
            db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) branch."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: this data migration is one-way and is not reversed."""
    if name:
        return

View File

@@ -1,85 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_tetrioleaguehistorical
迁移 ID: 405c6936a164
父迁移: bbbdfd94e6fa
创建时间: 2025-07-18 01:55:27.406032
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '405c6936a164'
down_revision: str | Sequence[str] | None = 'bbbdfd94e6fa'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy league-historical rows from the old long-named table into ``nb_t_io_tl_hist``.

    Every column is copied verbatim and primary keys are preserved so the
    foreign-key relationship to the stats table stays intact.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never created the old table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetrioleaguehistorical' not in table_names:
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetrioleaguehistorical  # noqa: N806
    New = Base.classes.nb_t_io_tl_hist  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream old rows one at a time to bound memory use.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        request_id=i.request_id,
                        data=i.data,
                        update_time=i.update_time,
                        stats_id=i.stats_id,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 migrated rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
            db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) branch."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: this data migration is one-way and is not reversed."""
    if name:
        return

View File

@@ -1,123 +0,0 @@
"""add TETRIOLeagueStats
迁移 ID: 5a1b93948494
父迁移: cfeab6961dce
创建时间: 2024-08-24 00:22:41.359500
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '5a1b93948494'
down_revision: str | Sequence[str] | None = 'cfeab6961dce'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Create the TETR.IO league stats tables.

    Creates the parent stats table, the historical snapshots table, and the
    per-rank stats-field table (the last two reference the parent via
    ``stats_id`` foreign keys), plus their indexes.
    """
    # Only act on the default (unnamed) Alembic branch.
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'nonebot_plugin_tetris_stats_tetrioleaguestats',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguestats')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestats', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestats_update_time'), ['update_time'], unique=False
        )
    op.create_table(
        'nonebot_plugin_tetris_stats_tetrioleaguehistorical',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('request_id', sa.Uuid(), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('stats_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['stats_id'],
            ['nonebot_plugin_tetris_stats_tetrioleaguestats.id'],
            name=op.f(
                'fk_nonebot_plugin_tetris_stats_tetrioleaguehistorical_stats_id_nonebot_plugin_tetris_stats_tetrioleaguestats'
            ),
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguehistorical')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_request_id'), ['request_id'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_update_time'),
            ['update_time'],
            unique=False,
        )
    op.create_table(
        'nonebot_plugin_tetris_stats_tetrioleaguestatsfield',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('rank', sa.String(length=2), nullable=False),
        sa.Column('tr_line', sa.Float(), nullable=False),
        sa.Column('player_count', sa.Integer(), nullable=False),
        sa.Column('low_pps', sa.JSON(), nullable=False),
        sa.Column('low_apm', sa.JSON(), nullable=False),
        sa.Column('low_vs', sa.JSON(), nullable=False),
        sa.Column('avg_pps', sa.Float(), nullable=False),
        sa.Column('avg_apm', sa.Float(), nullable=False),
        sa.Column('avg_vs', sa.Float(), nullable=False),
        sa.Column('high_pps', sa.JSON(), nullable=False),
        sa.Column('high_apm', sa.JSON(), nullable=False),
        sa.Column('high_vs', sa.JSON(), nullable=False),
        sa.Column('stats_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['stats_id'],
            ['nonebot_plugin_tetris_stats_tetrioleaguestats.id'],
            name=op.f(
                'fk_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_stats_id_nonebot_plugin_tetris_stats_tetrioleaguestats'
            ),
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetrioleaguestatsfield')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank'), ['rank'], unique=False
        )
    # ### end Alembic commands ###
def downgrade(name: str = '') -> None:
    """Drop the TETR.IO league stats tables.

    Child tables (statsfield, historical) go first so their foreign keys to
    the parent stats table never dangle; indexes are dropped before each table.
    """
    # Only act on the default (unnamed) Alembic branch.
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank'))
    op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield')
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_update_time'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguehistorical_request_id'))
    op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguehistorical')
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestats', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestats_update_time'))
    op.drop_table('nonebot_plugin_tetris_stats_tetrioleaguestats')
    # ### end Alembic commands ###

View File

@@ -1,64 +0,0 @@
"""Create a new table
迁移 ID: 612d8b00d9ac
父迁移: 5a1b93948494
创建时间: 2025-05-26 04:49:29.664480
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '612d8b00d9ac'
down_revision: str | Sequence[str] | None = '5a1b93948494'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Create the v2 trigger-history table (same shape as v1) plus its indexes."""
    # Only act on the default (unnamed) Alembic branch.
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    op.create_table(
        'nonebot_plugin_tetris_stats_triggerhistoricaldatav2',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('session_persist_id', sa.Integer(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_triggerhistoricaldatav2')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldatav2', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_command_type'),
            ['command_type'],
            unique=False,
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_game_platform'),
            ['game_platform'],
            unique=False,
        )
def downgrade(name: str = '') -> None:
    """Drop the v2 trigger-history table (indexes first)."""
    # Only act on the default (unnamed) Alembic branch.
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldatav2', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_game_platform'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_command_type'))
    op.drop_table('nonebot_plugin_tetris_stats_triggerhistoricaldatav2')

View File

@@ -1,74 +0,0 @@
"""Add redundant platform field
迁移 ID: 6c3206f90cc3
父迁移: 9f6582279ce2
创建时间: 2023-11-26 20:15:56.033892
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '6c3206f90cc3'
down_revision: str | Sequence[str] | None = '9f6582279ce2'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Stamp each row's game_platform into its game_user/processed_data JSON blobs."""
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    from json import dumps, loads  # noqa: PLC0415

    Base = automap_base()  # noqa: N806
    bind = op.get_bind()
    Base.prepare(autoload_with=bind)
    HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata  # noqa: N806
    with Session(bind) as db:
        for record in db.query(HistoricalData):
            # Denormalize the platform into both JSON payloads.
            for attr in ('game_user', 'processed_data'):
                payload = loads(getattr(record, attr))
                payload['platform'] = record.game_platform
                setattr(record, attr, dumps(payload))
            db.add(record)
        db.commit()
def downgrade(name: str = '') -> None:
    """Remove the redundant 'platform' key from each row's JSON blobs."""
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    from json import dumps, loads  # noqa: PLC0415

    Base = automap_base()  # noqa: N806
    bind = op.get_bind()
    Base.prepare(autoload_with=bind)
    HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata  # noqa: N806
    with Session(bind) as db:
        for record in db.query(HistoricalData):
            # Strip the denormalized platform from both JSON payloads.
            for attr in ('game_user', 'processed_data'):
                payload = loads(getattr(record, attr))
                payload.pop('platform', None)
                setattr(record, attr, dumps(payload))
            db.add(record)
        db.commit()

View File

@@ -1,120 +0,0 @@
"""Migrate to uninfo
迁移 ID: 766cc7e75a62
父迁移: 612d8b00d9ac
创建时间: 2025-05-26 04:51:54.665200
"""
from __future__ import annotations
import math
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '766cc7e75a62'
down_revision: str | Sequence[str] | None = '612d8b00d9ac'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy trigger history into the v2 table, remapping session ids to uninfo ids."""
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never created the old table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_triggerhistoricaldata' not in table_names:
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    TriggerHistoricalData = Base.classes.nonebot_plugin_tetris_stats_triggerhistoricaldata  # noqa: N806
    TriggerHistoricalDataV2 = Base.classes.nonebot_plugin_tetris_stats_triggerhistoricaldatav2  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(TriggerHistoricalData).count()
        if count == 0:
            return
        # The session-id -> uninfo-id mapping lives in an optional helper package.
        try:
            from nonebot_session_to_uninfo import (  # type: ignore[import-untyped] # noqa: PLC0415
                check_tables,
                get_id_map,
            )
        except ImportError as err:
            msg = '请安装 `nonebot-session-to-uninfo` 以迁移数据'
            raise ValueError(msg) from err
        check_tables()
        migration_limit = 10000  # number of rows migrated per batch
        last_id = -1
        # Cache of old session_persist_id -> uninfo id, reused across batches.
        id_map: dict[int, int] = {}
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Keyset pagination over the primary key, migration_limit rows at a time.
            for _ in range(math.ceil(count / migration_limit)):
                records = (
                    db_session.query(TriggerHistoricalData)
                    .order_by(TriggerHistoricalData.id)
                    .where(TriggerHistoricalData.id > last_id)
                    .limit(migration_limit)
                    .all()
                )
                last_id = records[-1].id
                # Only resolve session ids we have not seen before.
                session_ids = [
                    record.session_persist_id for record in records if record.session_persist_id not in id_map
                ]
                if session_ids:
                    id_map.update(get_id_map(session_ids))
                db_session.add_all(
                    TriggerHistoricalDataV2(
                        id=record.id,
                        session_persist_id=id_map[record.session_persist_id],
                        trigger_time=record.trigger_time,
                        game_platform=record.game_platform,
                        command_type=record.command_type,
                        command_args=record.command_args,
                        finish_time=record.finish_time,
                    )
                    for record in records
                )
                progress.update(task, advance=len(records))
            # Single commit at the end: the migration is all-or-nothing.
            db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration on the default branch, except on PostgreSQL."""
    # Short-circuit keeps op.get_bind() unevaluated when a branch name is given.
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    data_migrate()
def downgrade(name: str = '') -> None:
    """No-op downgrade: the data migration is not reversed."""
    # Short-circuit keeps op.get_bind() unevaluated when a branch name is given.
    if name or op.get_bind().dialect.name == 'postgresql':
        return

View File

@@ -1,94 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_tetrioleaguestatsfield
迁移 ID: 8459b2a4b7a3
父迁移: 3d900bb0e8d4
创建时间: 2025-07-18 02:24:59.560252
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '8459b2a4b7a3'
down_revision: str | Sequence[str] | None = '3d900bb0e8d4'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy league stats-field rows from the old long-named table into ``nb_t_io_tl_stats_field``.

    Every column is copied verbatim and primary keys are preserved so the
    ``stats_id`` foreign keys stay valid.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never created the old table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetrioleaguestatsfield' not in table_names:
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetrioleaguestatsfield  # noqa: N806
    New = Base.classes.nb_t_io_tl_stats_field  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream old rows one at a time to bound memory use.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        rank=i.rank,
                        tr_line=i.tr_line,
                        player_count=i.player_count,
                        low_pps=i.low_pps,
                        low_apm=i.low_apm,
                        low_vs=i.low_vs,
                        avg_pps=i.avg_pps,
                        avg_apm=i.avg_apm,
                        avg_vs=i.avg_vs,
                        high_pps=i.high_pps,
                        high_apm=i.high_apm,
                        high_vs=i.high_vs,
                        stats_id=i.stats_id,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 migrated rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
            db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) branch."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: this data migration is one-way and is not reversed."""
    if name:
        return

View File

@@ -1,107 +0,0 @@
"""Correct the data in HistoricalData
迁移 ID: 8a91210ce14d
父迁移: 0d50142b780f
创建时间: 2024-05-06 08:16:38.487214
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from sqlalchemy import select
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '8a91210ce14d'
down_revision: str | Sequence[str] | None = '0d50142b780f'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:  # noqa: C901
    """Re-serialize every ``HistoricalData.processed_data`` through its pydantic model.

    Parses each stored JSON blob with the matching ``BaseProcessedData``
    subclass and writes it back normalized (by-alias). Requires plugin
    version 1.0.3, whose schema classes this migration depends on.

    Raises:
        RuntimeError: if the installed plugin version is not 1.0.3.
        ValueError: if a stored blob matches none of the known schemas.
    """
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    from nonebot.compat import PYDANTIC_V2, type_validate_json  # noqa: PLC0415
    from pydantic import BaseModel, ValidationError  # noqa: PLC0415
    from rich.progress import (  # noqa: PLC0415
        BarColumn,
        MofNCompleteColumn,
        Progress,
        TaskProgressColumn,
        TextColumn,
        TimeRemainingColumn,
    )

    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=op.get_bind())
    HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata  # noqa: N806
    # pydantic v1 and v2 expose different JSON-dump APIs; pick once up front.
    if PYDANTIC_V2:

        def model_to_json(value: BaseModel) -> str:
            return value.model_dump_json(by_alias=True)

    else:

        def model_to_json(value: BaseModel) -> str:
            return value.json(by_alias=True)

    with Session(op.get_bind()) as session:
        count = session.query(HistoricalData).count()
        if count == 0:
            logger.info('空表, 跳过')
            return
        from nonebot_plugin_tetris_stats.version import __version__  # noqa: PLC0415

        # The schema classes imported below only exist in exactly this release.
        if __version__ != '1.0.3':
            msg = '本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移'
            logger.critical(msg)
            raise RuntimeError(msg)
        from nonebot_plugin_tetris_stats.game_data_processor.schemas import (  # type: ignore[import-untyped] # noqa: PLC0415
            BaseProcessedData,
        )

        models = BaseProcessedData.__subclasses__()

        def json_to_model(value: str) -> BaseModel:
            # Try every known schema; the first one that validates wins.
            for candidate in models:
                try:
                    return type_validate_json(candidate, value)
                except ValidationError:  # noqa: PERF203
                    ...
            raise ValueError

        with Progress(
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            MofNCompleteColumn(),
            TaskProgressColumn(),
            TimeRemainingColumn(),
        ) as progress:
            task_id = progress.add_task('[cyan]Updating:', total=count)
            # Keyset pagination. The previous `range(0, count, 100)` scheme
            # combined with `id > i` assumed ids are dense and start at 1;
            # with gaps it re-processed rows and skipped every row whose id
            # exceeded `count`. Tracking the last seen id covers all rows
            # exactly once regardless of id distribution.
            last_id = 0
            while True:
                batch = session.scalars(
                    select(HistoricalData)
                    .where(HistoricalData.id > last_id)
                    .order_by(HistoricalData.id)
                    .limit(100)
                ).all()
                if not batch:
                    break
                for row in batch:
                    row.processed_data = model_to_json(json_to_model(row.processed_data))
                    progress.update(task_id, advance=1)
                last_id = batch[-1].id
            session.commit()
    logger.success('Corrected HistoricalData')
def downgrade(name: str = '') -> None:
    """No-op downgrade: the re-serialization cannot be reversed."""
    # Short-circuit keeps op.get_bind() unevaluated when a branch name is given.
    if name or op.get_bind().dialect.name == 'postgresql':
        return

View File

@@ -5,17 +5,13 @@
创建时间: 2023-11-11 16:24:11.826667
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '9866f53ce44f'
down_revision: str | Sequence[str] | None = None
branch_labels: str | Sequence[str] | None = ('nonebot_plugin_tetris_stats',)
@@ -25,8 +21,6 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
if name:
return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
'nonebot_plugin_tetris_stats_bind',
@@ -124,8 +118,6 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None:
if name:
return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_rank'))

View File

@@ -5,12 +5,11 @@
创建时间: 2023-11-11 16:51:30.718277
"""
from __future__ import annotations
from collections.abc import Sequence
from pathlib import Path
from shutil import copyfile
from typing import TYPE_CHECKING
from alembic import op
from nonebot import get_driver
@@ -19,9 +18,6 @@ from sqlalchemy import Connection, create_engine, inspect, text
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '9cd1647db502'
down_revision: str | Sequence[str] | None = '9866f53ce44f'
branch_labels: str | Sequence[str] | None = None
@@ -37,10 +33,14 @@ def migrate_old_data(connection: Connection) -> None:
Bind = Base.classes.nonebot_plugin_tetris_stats_bind # noqa: N806
def non_empty(obj: str) -> bool:
return bool(obj != '' and not obj.isspace())
if obj != '' and not obj.isspace():
return True
return False
def is_int(obj: int | str) -> bool:
return bool(isinstance(obj, int) or obj.isdigit())
if isinstance(obj, int) or obj.isdigit():
return True
return False
bind_list = [
Bind(chat_platform='OneBot V11', chat_account=int(row.QQ), game_platform='IO', game_account=row.USER)
@@ -63,10 +63,8 @@ def migrate_old_data(connection: Connection) -> None:
def upgrade(name: str = '') -> None:
if name:
return
if op.get_bind().dialect.name == 'postgresql':
return
try:
db_path = Path(config.db_url)
db_path = Path(config.db_path)
except AttributeError:
db_path = Path('data/nonebot_plugin_tetris_stats/data.db')
if db_path.exists() is False:
@@ -82,16 +80,13 @@ def upgrade(name: str = '') -> None:
logger.success('nonebot_plugin_tetris_stats: 跳过迁移')
return
if 'IORANK' not in tables:
msg = 'nonebot_plugin_tetris_stats: 请先安装 0.4.4 版本完成迁移之后再升级'
logger.warning(msg)
raise RuntimeError(msg)
logger.warning('nonebot_plugin_tetris_stats: 发现过早版本的数据, 请先更新到 0.4.4 版本')
raise RuntimeError('nonebot_plugin_tetris_stats: 请先安装 0.4.4 版本完成迁移之后再升级')
logger.info('nonebot_plugin_tetris_stats: 发现来自老版本的数据, 正在迁移...')
migrate_old_data(connection)
db_path.unlink()
db_path.unlink()
def downgrade(name: str = '') -> None:
    """No-op downgrade: the one-way data import cannot be reversed."""
    # Skip named (multi-DB) invocations and PostgreSQL connections alike.
    if name or op.get_bind().dialect.name == 'postgresql':
        return

View File

@@ -1,117 +0,0 @@
"""Recreate HistoricalData
迁移 ID: 9f6582279ce2
父迁移: 9cd1647db502
创建时间: 2023-11-21 08:35:50.393246
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '9f6582279ce2'
down_revision: str | Sequence[str] | None = '9cd1647db502'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Recreate the historicaldata table with JSON columns.

    Destructive: the table is dropped and recreated, so any existing rows are
    discarded. Only runs for the default (unnamed) database and skips
    PostgreSQL connections entirely.
    """
    # Multi-database setups call this once per named DB; only act on the default.
    if name:
        return
    # PostgreSQL databases are deliberately skipped by this migration.
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    # Drop the secondary indexes first so the table can be dropped cleanly.
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_command_type')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_account')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_type')
    op.drop_table('nonebot_plugin_tetris_stats_historicaldata')
    # Recreate with game_user/processed_data as JSON (downgrade restores PickleType).
    op.create_table(
        'nonebot_plugin_tetris_stats_historicaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('bot_platform', sa.String(length=32), nullable=True),
        sa.Column('bot_account', sa.String(), nullable=True),
        sa.Column('source_type', sa.String(length=32), nullable=True),
        sa.Column('source_account', sa.String(), nullable=True),
        sa.Column('message', sa.PickleType(), nullable=True),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('game_user', sa.JSON(), nullable=False),
        sa.Column('processed_data', sa.JSON(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_historicaldata')),
    )
    # Rebuild the secondary indexes on the fresh table.
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_command_type'), ['command_type'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform'), ['game_platform'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_account'), ['source_account'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_type'), ['source_type'], unique=False
        )
    # ### end Alembic commands ###
def downgrade(name: str = '') -> None:
    """Recreate the historicaldata table with the old PickleType columns.

    Destructive: drops the JSON-based table and recreates the pre-migration
    schema, discarding any rows. Skipped for named DBs and PostgreSQL.
    """
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_type'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_account'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform'))
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_command_type'))
    op.drop_table('nonebot_plugin_tetris_stats_historicaldata')
    # game_user/processed_data revert to PickleType, as before the upgrade.
    op.create_table(
        'nonebot_plugin_tetris_stats_historicaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('bot_platform', sa.String(length=32), nullable=True),
        sa.Column('bot_account', sa.String(), nullable=True),
        sa.Column('source_type', sa.String(length=32), nullable=True),
        sa.Column('source_account', sa.String(), nullable=True),
        sa.Column('message', sa.PickleType(), nullable=True),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('game_user', sa.PickleType(), nullable=False),
        sa.Column('processed_data', sa.PickleType(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'),
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_source_type', ['source_type'], unique=False
        )
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_source_account', ['source_account'], unique=False
        )
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_game_platform', ['game_platform'], unique=False
        )
        batch_op.create_index(
            'ix_nonebot_plugin_tetris_stats_historicaldata_command_type', ['command_type'], unique=False
        )
    # ### end Alembic commands ###

View File

@@ -1,48 +0,0 @@
"""Add TETRIO user configuration
迁移 ID: a1195e989cc6
父迁移: b15844837693
创建时间: 2024-06-09 04:20:07.819194
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'a1195e989cc6'
down_revision: str | Sequence[str] | None = 'b15844837693'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Create the TETR.IO user-configuration table."""
    # Skip named (multi-DB) invocations and PostgreSQL connections.
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    op.create_table(
        'nonebot_plugin_tetris_stats_tetriouserconfig',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('query_template', sa.String(length=2), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetriouserconfig')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )


def downgrade(name: str = '') -> None:
    """Drop the TETR.IO user-configuration table."""
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    op.drop_table('nonebot_plugin_tetris_stats_tetriouserconfig')

View File

@@ -1,75 +0,0 @@
"""Migrate to nonobot-plugin-user
迁移 ID: b15844837693
父迁移: 3c25a5a8c050
创建时间: 2024-06-08 02:27:35.227596
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'b15844837693'
down_revision: str | Sequence[str] | None = '3c25a5a8c050'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Rebuild the bind table keyed on nonebot-plugin-user's user_id.

    Destructive: drops the old (chat_platform, chat_account) table and
    recreates it with a user_id column, discarding existing rows. Skipped for
    named DBs and PostgreSQL.
    """
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_bind_chat_account')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_bind_chat_platform')
    op.drop_table('nonebot_plugin_tetris_stats_bind')
    # New schema: chat_platform/chat_account replaced by a single user_id.
    op.create_table(
        'nonebot_plugin_tetris_stats_bind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('game_account', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_bind')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nonebot_plugin_tetris_stats_bind_user_id'), ['user_id'], unique=False)
    # ### end Alembic commands ###
def downgrade(name: str = '') -> None:
    """Restore the pre-user-plugin bind schema (chat_platform/chat_account).

    Destructive: drops the user_id-based table and recreates the old layout,
    discarding existing rows. Skipped for named DBs and PostgreSQL.
    """
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_bind_user_id'))
    op.drop_table('nonebot_plugin_tetris_stats_bind')
    op.create_table(
        'nonebot_plugin_tetris_stats_bind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('chat_platform', sa.String(length=32), nullable=False),
        sa.Column('chat_account', sa.String(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('game_account', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_bind'),
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
        batch_op.create_index('ix_nonebot_plugin_tetris_stats_bind_chat_platform', ['chat_platform'], unique=False)
        batch_op.create_index('ix_nonebot_plugin_tetris_stats_bind_chat_account', ['chat_account'], unique=False)
    # ### end Alembic commands ###

View File

@@ -1,215 +0,0 @@
"""create new tables
迁移 ID: b2075a5ce371
父迁移: 766cc7e75a62
创建时间: 2025-07-17 22:57:32.245327
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'b2075a5ce371'
down_revision: str | Sequence[str] | None = '766cc7e75a62'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Create the full set of new short-named (nb_t_*) tables.

    Runs only for the default (unnamed) database; no dialect guard, so it
    applies to every backend. Data is copied over from the legacy long-named
    tables by the follow-up d61e6ae36586/bbbdfd94e6fa/... migrations.
    """
    if name:
        return
    # ### commands auto generated by Alembic - please adjust! ###
    # Account bindings (user_id -> game account).
    op.create_table(
        'nb_t_bind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('game_account', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_bind')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_bind', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_bind_user_id'), ['user_id'], unique=False)
    # TETR.IO per-user historical API snapshots.
    op.create_table(
        'nb_t_io_hist_data',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=32), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_hist_data')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_hist_data', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_hist_data_api_type'), ['api_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_io_hist_data_update_time'), ['update_time'], unique=False)
        batch_op.create_index(
            batch_op.f('ix_nb_t_io_hist_data_user_unique_identifier'), ['user_unique_identifier'], unique=False
        )
    # Tetra League aggregate statistics header (referenced by FK below).
    op.create_table(
        'nb_t_io_tl_stats',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_tl_stats')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_tl_stats', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_stats_update_time'), ['update_time'], unique=False)
    # TETR.IO per-user configuration (query template choice).
    op.create_table(
        'nb_t_io_u_cfg',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('query_template', sa.String(length=2), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_u_cfg')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    # TOP per-user historical API snapshots.
    op.create_table(
        'nb_t_top_hist_data',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_top_hist_data')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_top_hist_data', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_top_hist_data_api_type'), ['api_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_top_hist_data_update_time'), ['update_time'], unique=False)
        batch_op.create_index(
            batch_op.f('ix_nb_t_top_hist_data_user_unique_identifier'), ['user_unique_identifier'], unique=False
        )
    # TOS per-user historical API snapshots.
    op.create_table(
        'nb_t_tos_hist_data',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_tos_hist_data')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_tos_hist_data', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_tos_hist_data_api_type'), ['api_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_tos_hist_data_update_time'), ['update_time'], unique=False)
        batch_op.create_index(
            batch_op.f('ix_nb_t_tos_hist_data_user_unique_identifier'), ['user_unique_identifier'], unique=False
        )
    # Command-trigger audit log (v2).
    op.create_table(
        'nb_t_trigger_hist_v2',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('session_persist_id', sa.Integer(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_trigger_hist_v2')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_trigger_hist_v2', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_trigger_hist_v2_command_type'), ['command_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_trigger_hist_v2_game_platform'), ['game_platform'], unique=False)
    # Tetra League raw history rows; FK to the stats header created above.
    op.create_table(
        'nb_t_io_tl_hist',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('request_id', sa.Uuid(), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('stats_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['stats_id'], ['nb_t_io_tl_stats.id'], name=op.f('fk_nb_t_io_tl_hist_stats_id_nb_t_io_tl_stats')
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_tl_hist')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_tl_hist', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_hist_request_id'), ['request_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_hist_update_time'), ['update_time'], unique=False)
    # Per-rank aggregate fields for a stats snapshot; FK to the stats header.
    op.create_table(
        'nb_t_io_tl_stats_field',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('rank', sa.String(length=2), nullable=False),
        sa.Column('tr_line', sa.Float(), nullable=False),
        sa.Column('player_count', sa.Integer(), nullable=False),
        sa.Column('low_pps', sa.JSON(), nullable=False),
        sa.Column('low_apm', sa.JSON(), nullable=False),
        sa.Column('low_vs', sa.JSON(), nullable=False),
        sa.Column('avg_pps', sa.Float(), nullable=False),
        sa.Column('avg_apm', sa.Float(), nullable=False),
        sa.Column('avg_vs', sa.Float(), nullable=False),
        sa.Column('high_pps', sa.JSON(), nullable=False),
        sa.Column('high_apm', sa.JSON(), nullable=False),
        sa.Column('high_vs', sa.JSON(), nullable=False),
        sa.Column('stats_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['stats_id'], ['nb_t_io_tl_stats.id'], name=op.f('fk_nb_t_io_tl_stats_field_stats_id_nb_t_io_tl_stats')
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_tl_stats_field')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_tl_stats_field', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_stats_field_rank'), ['rank'], unique=False)
    # ### end Alembic commands ###
def downgrade(name: str = '') -> None:
    """Drop all nb_t_* tables created by this migration.

    Tables with foreign keys (nb_t_io_tl_stats_field, nb_t_io_tl_hist) are
    dropped before nb_t_io_tl_stats, the table they reference.
    """
    if name:
        return
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('nb_t_io_tl_stats_field', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_stats_field_rank'))
    op.drop_table('nb_t_io_tl_stats_field')
    with op.batch_alter_table('nb_t_io_tl_hist', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_hist_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_hist_request_id'))
    op.drop_table('nb_t_io_tl_hist')
    with op.batch_alter_table('nb_t_trigger_hist_v2', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_trigger_hist_v2_game_platform'))
        batch_op.drop_index(batch_op.f('ix_nb_t_trigger_hist_v2_command_type'))
    op.drop_table('nb_t_trigger_hist_v2')
    with op.batch_alter_table('nb_t_tos_hist_data', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_tos_hist_data_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nb_t_tos_hist_data_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_tos_hist_data_api_type'))
    op.drop_table('nb_t_tos_hist_data')
    with op.batch_alter_table('nb_t_top_hist_data', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_top_hist_data_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nb_t_top_hist_data_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_top_hist_data_api_type'))
    op.drop_table('nb_t_top_hist_data')
    op.drop_table('nb_t_io_u_cfg')
    with op.batch_alter_table('nb_t_io_tl_stats', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_stats_update_time'))
    op.drop_table('nb_t_io_tl_stats')
    with op.batch_alter_table('nb_t_io_hist_data', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_hist_data_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nb_t_io_hist_data_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_io_hist_data_api_type'))
    op.drop_table('nb_t_io_hist_data')
    with op.batch_alter_table('nb_t_bind', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_bind_user_id'))
    op.drop_table('nb_t_bind')
    # ### end Alembic commands ###

View File

@@ -1,117 +0,0 @@
"""Add user_unique_identifier field to HistoricalData
迁移 ID: b7fbdafc339a
父迁移: 8a91210ce14d
创建时间: 2024-05-07 16:55:29.527215
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
from nonebot.log import logger
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'b7fbdafc339a'
down_revision: str | Sequence[str] | None = '8a91210ce14d'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: # noqa: C901
    """Add the user_unique_identifier column and backfill it from game_user.

    Each stored game_user JSON blob is re-parsed into the matching BaseUser
    subclass to extract its unique identifier; rows whose identifier cannot be
    derived (ValueError from the property) are deleted. Requires plugin
    version 1.0.4 exactly so the schema classes match the stored data.
    Skipped for named DBs and PostgreSQL.
    """
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # Deferred imports keep migration discovery cheap and avoid hard deps at import time.
    from nonebot.compat import type_validate_json # noqa: PLC0415
    from pydantic import ValidationError # noqa: PLC0415
    from rich.progress import ( # noqa: PLC0415
        BarColumn,
        MofNCompleteColumn,
        Progress,
        TaskProgressColumn,
        TextColumn,
        TimeRemainingColumn,
    )
    from sqlalchemy import select # noqa: PLC0415
    from sqlalchemy.ext.automap import automap_base # noqa: PLC0415
    from sqlalchemy.orm import Session # noqa: PLC0415
    # Add the column as nullable first; it is tightened to NOT NULL after backfill.
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.add_column(sa.Column('user_unique_identifier', sa.String(length=32), nullable=True))
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier'),
            ['user_unique_identifier'],
            unique=False,
        )
    Base = automap_base() # noqa: N806
    connection = op.get_bind()
    Base.prepare(autoload_with=connection)
    HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
    with Session(op.get_bind()) as session:
        count = session.query(HistoricalData).count()
        if count == 0:
            logger.info('空表, 跳过')
        else:
            from nonebot_plugin_tetris_stats.version import __version__ # noqa: PLC0415
            if __version__ != '1.0.4':
                msg = '本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移'
                logger.critical(msg)
                raise RuntimeError(msg)
            from nonebot_plugin_tetris_stats.game_data_processor.schemas import ( # type: ignore[import-untyped] # noqa: PLC0415
                BaseUser,
            )
            models: list[type[BaseUser]] = BaseUser.__subclasses__()
            def json_to_model(value: str) -> BaseUser:
                # Try each BaseUser subclass in turn; first successful parse wins.
                # Raises ValueError when no subclass accepts the JSON.
                for i in models:
                    try:
                        return type_validate_json(i, value)
                    except ValidationError: # noqa: PERF203
                        ...
                raise ValueError
            with Progress(
                TextColumn('[progress.description]{task.description}'),
                BarColumn(),
                MofNCompleteColumn(),
                TaskProgressColumn(),
                TimeRemainingColumn(),
            ) as progress:
                task_id = progress.add_task('[cyan]Updateing:', total=count)
                # NOTE(review): pagination via `id > i` in steps of 100 assumes
                # ids are dense and start near 1; sparse ids would be skipped or
                # double-counted — confirm against historical data.
                for i in range(0, count, 100):
                    for j in session.scalars(
                        select(HistoricalData).where(HistoricalData.id > i).order_by(HistoricalData.id).limit(100)
                    ):
                        # NOTE(review): a ValueError raised by json_to_model itself
                        # (no matching model) is NOT caught here and aborts the migration.
                        model = json_to_model(j.game_user)
                        try:
                            j.user_unique_identifier = model.unique_identifier
                        except ValueError:
                            # Identifier not derivable for this row: drop it.
                            session.delete(j)
                        progress.update(task_id, advance=1)
                session.commit()
    # Backfill complete (or table empty): enforce NOT NULL.
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.alter_column('user_unique_identifier', existing_type=sa.VARCHAR(length=32), nullable=False)
    logger.success('database upgrade success')
def downgrade(name: str = '') -> None:
    """Remove the user_unique_identifier column and its index again."""
    # Skip named (multi-DB) invocations and PostgreSQL connections.
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier'))
        batch_op.drop_column('user_unique_identifier')

View File

@@ -1,82 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_tetriouserconfig
迁移 ID: b96c8c18b79a
父迁移: 8459b2a4b7a3
创建时间: 2025-07-18 04:25:44.190319
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'b96c8c18b79a'
down_revision: str | Sequence[str] | None = '8459b2a4b7a3'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows of the legacy tetriouserconfig table into nb_t_io_u_cfg.

    Silently returns when the legacy table is absent (fresh install) or empty.
    Shows a console progress bar and commits in batches of 100 rows.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installations never created the legacy table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetriouserconfig' not in table_names:
        return
    # Reflect the live schema so both tables are usable without ORM models.
    Base = automap_base() # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetriouserconfig # noqa: N806
    New = Base.classes.nb_t_io_u_cfg # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream legacy rows one at a time to bound memory usage.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        query_template=i.query_template,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the forward data copy, for the default (unnamed) database only."""
    if name != '':
        return
    data_migrate()


def downgrade(name: str = '') -> None:
    """No-op: the forward data copy is not reversed."""
    if name != '':
        return

View File

@@ -1,46 +0,0 @@
"""Del old TOS bind data
迁移 ID: b9d65badc713
父迁移: 6c3206f90cc3
创建时间: 2023-12-30 00:27:40.991704
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'b9d65badc713'
down_revision: str | Sequence[str] | None = '6c3206f90cc3'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Delete all rows bound to the TOS platform from the bind table."""
    # Skip named (multi-DB) invocations and PostgreSQL connections.
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    conn = op.get_bind()
    base = automap_base()
    base.prepare(autoload_with=conn)
    bind_table = base.classes.nonebot_plugin_tetris_stats_bind
    with Session(conn) as session:
        session.query(bind_table).filter(bind_table.game_platform == 'TOS').delete()
        session.commit()


def downgrade(name: str = '') -> None:
    """No-op: deleted TOS bind rows cannot be restored."""
    if name or op.get_bind().dialect.name == 'postgresql':
        return

View File

@@ -1,85 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_tetriohistoricaldata
迁移 ID: bbbdfd94e6fa
父迁移: d61e6ae36586
创建时间: 2025-07-18 00:42:33.730885
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'bbbdfd94e6fa'
down_revision: str | Sequence[str] | None = 'd61e6ae36586'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows of the legacy tetriohistoricaldata table into nb_t_io_hist_data.

    Silently returns when the legacy table is absent (fresh install) or empty.
    Shows a console progress bar and commits in batches of 100 rows.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installations never created the legacy table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetriohistoricaldata' not in table_names:
        return
    # Reflect the live schema so both tables are usable without ORM models.
    Base = automap_base() # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetriohistoricaldata # noqa: N806
    New = Base.classes.nb_t_io_hist_data # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream legacy rows one at a time to bound memory usage.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        user_unique_identifier=i.user_unique_identifier,
                        api_type=i.api_type,
                        data=i.data,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the forward data copy, for the default (unnamed) database only."""
    if name != '':
        return
    data_migrate()


def downgrade(name: str = '') -> None:
    """No-op: the forward data copy is not reversed."""
    if name != '':
        return

View File

@@ -1,87 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_triggerhistoricaldatav2
迁移 ID: bc6abd57928f
父迁移: ee76ae37d70a
创建时间: 2025-07-18 04:33:04.222045
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'bc6abd57928f'
down_revision: str | Sequence[str] | None = 'ee76ae37d70a'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows of the legacy triggerhistoricaldatav2 table into nb_t_trigger_hist_v2.

    Silently returns when the legacy table is absent (fresh install) or empty.
    Shows a console progress bar and commits in batches of 100 rows.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installations never created the legacy table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_triggerhistoricaldatav2' not in table_names:
        return
    # Reflect the live schema so both tables are usable without ORM models.
    Base = automap_base() # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_triggerhistoricaldatav2 # noqa: N806
    New = Base.classes.nb_t_trigger_hist_v2 # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream legacy rows one at a time to bound memory usage.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        trigger_time=i.trigger_time,
                        session_persist_id=i.session_persist_id,
                        game_platform=i.game_platform,
                        command_type=i.command_type,
                        command_args=i.command_args,
                        finish_time=i.finish_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the forward data copy, for the default (unnamed) database only."""
    if name != '':
        return
    data_migrate()


def downgrade(name: str = '') -> None:
    """No-op: the forward data copy is not reversed."""
    if name != '':
        return

View File

@@ -1,85 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_tophistoricaldata
迁移 ID: ce073d279d19
父迁移: b96c8c18b79a
创建时间: 2025-07-18 04:28:13.820635
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'ce073d279d19'
down_revision: str | Sequence[str] | None = 'b96c8c18b79a'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows of the legacy tophistoricaldata table into nb_t_top_hist_data.

    Silently returns when the legacy table is absent (fresh install) or empty.
    Shows a console progress bar and commits in batches of 100 rows.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installations never created the legacy table; nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tophistoricaldata' not in table_names:
        return
    # Reflect the live schema so both tables are usable without ORM models.
    Base = automap_base() # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tophistoricaldata # noqa: N806
    New = Base.classes.nb_t_top_hist_data # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream legacy rows one at a time to bound memory usage.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        user_unique_identifier=i.user_unique_identifier,
                        api_type=i.api_type,
                        data=i.data,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the forward data copy, for the default (unnamed) database only."""
    if name != '':
        return
    data_migrate()


def downgrade(name: str = '') -> None:
    """No-op: the forward data copy is not reversed."""
    if name != '':
        return

View File

@@ -1,54 +0,0 @@
"""Extend api_type field length
迁移 ID: cfeab6961dce
父迁移: f5b4a6d1325b
创建时间: 2024-08-09 14:20:59.789030
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
from nonebot.log import logger
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'cfeab6961dce'
down_revision: str | Sequence[str] | None = 'f5b4a6d1325b'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Widen tetriohistoricaldata.api_type from VARCHAR(16) to VARCHAR(32)."""
    # Skip named (multi-DB) invocations and PostgreSQL connections.
    if name or op.get_bind().dialect.name == 'postgresql':
        return
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
        batch_op.alter_column(
            'api_type', existing_type=sa.VARCHAR(length=16), type_=sa.String(length=32), existing_nullable=False
        )
def downgrade(name: str = '') -> None:
    """Shrink api_type back to VARCHAR(16) after interactive confirmation.

    Values written under the 32-character schema may not fit the old column,
    so the operator is warned and must press Enter to proceed. Skipped for
    named DBs and PostgreSQL.
    """
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    # ### commands auto generated by Alembic - please adjust! ###
    logger.warning('新数据可能不支持降级!')
    logger.warning('请确认数据库内数据可以迁移到旧版本!')
    # Blocks until the operator confirms; this migration is interactive by design.
    input('如果确认可以迁移, 请按回车键继续!')
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
        batch_op.alter_column(
            'api_type', existing_type=sa.String(length=32), type_=sa.VARCHAR(length=16), existing_nullable=False
        )
    # ### end Alembic commands ###

View File

@@ -1,84 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_bind
迁移 ID: d61e6ae36586
父迁移: b2075a5ce371
创建时间: 2025-07-17 23:58:13.408384
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'd61e6ae36586'
down_revision: str | Sequence[str] | None = 'b2075a5ce371'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows from the legacy bind table into the new ``nb_t_bind`` table."""
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never had the old table — nothing to migrate.
    if 'nonebot_plugin_tetris_stats_bind' not in table_names:
        return
    # Reflect both tables from the live database instead of importing ORM models,
    # so the migration does not depend on current plugin code.
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_bind  # noqa: N806
    New = Base.classes.nb_t_bind  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # Stream rows in batches of 100 to bound memory usage.
            for i in db_session.query(Old).yield_per(100):
                db_session.add(
                    New(
                        id=i.id,
                        user_id=i.user_id,
                        game_platform=i.game_platform,
                        game_account=i.game_account,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 rows so a crash loses at most one batch.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        # Flush any trailing partial batch.
        db_session.commit()
        logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Alembic entry point: migrate data for the default (unnamed) branch only."""
    if name:
        return
    data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the forward data copy is not reverted."""
    if name:
        return

View File

@@ -1,85 +0,0 @@
"""migrate nonebot_plugin_tetris_stats_toshistoricaldata
迁移 ID: ee76ae37d70a
父迁移: ce073d279d19
创建时间: 2025-07-18 04:29:52.976624
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'ee76ae37d70a'
down_revision: str | Sequence[str] | None = 'ce073d279d19'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows from the legacy TOS historical-data table into ``nb_t_tos_hist_data``."""
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never had the old table — nothing to migrate.
    if 'nonebot_plugin_tetris_stats_toshistoricaldata' not in table_names:
        return
    # Reflect both tables from the live database instead of importing ORM models,
    # so the migration does not depend on current plugin code.
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_toshistoricaldata  # noqa: N806
    New = Base.classes.nb_t_tos_hist_data  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # FIX: yield_per(1) fetched one row per round trip; use 100 to match the
            # sibling migration (d61e6ae36586) and the 100-row commit batching below.
            for i in db_session.query(Old).yield_per(100):
                db_session.add(
                    New(
                        id=i.id,
                        user_unique_identifier=i.user_unique_identifier,
                        api_type=i.api_type,
                        data=i.data,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit every 100 rows so a crash loses at most one batch.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        # Flush any trailing partial batch.
        db_session.commit()
        logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Alembic entry point: migrate data for the default (unnamed) branch only."""
    if name:
        return
    data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the forward data copy is not reverted."""
    if name:
        return

View File

@@ -1,95 +0,0 @@
"""TETR.IO new season
迁移 ID: f5b4a6d1325b
父迁移: a1195e989cc6
创建时间: 2024-08-01 20:44:48.644912
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = 'f5b4a6d1325b'
down_revision: str | Sequence[str] | None = 'a1195e989cc6'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """New-season reset: drop the old rank table and rebuild the historical-data table."""
    if name:
        return
    # PostgreSQL is skipped — presumably handled elsewhere or unaffected; TODO confirm.
    if op.get_bind().dialect.name == 'postgresql':
        return
    # Drop indexes before dropping each table (required for SQLite batch mode).
    with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_file_hash')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_rank')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_update_time')
    op.drop_table('nonebot_plugin_tetris_stats_iorank')
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time')
        batch_op.drop_index('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier')
    op.drop_table('nonebot_plugin_tetris_stats_tetriohistoricaldata')
    # Recreate the historical-data table empty (old-season data is discarded).
    op.create_table(
        'nonebot_plugin_tetris_stats_tetriohistoricaldata',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetriohistoricaldata')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'), ['api_type'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'), ['update_time'], unique=False
        )
        batch_op.create_index(
            batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'),
            ['user_unique_identifier'],
            unique=False,
        )
def downgrade(name: str = '') -> None:
    """Recreate the dropped ``iorank`` table (empty); dropped season data is not restored."""
    if name:
        return
    if op.get_bind().dialect.name == 'postgresql':
        return
    op.create_table(
        'nonebot_plugin_tetris_stats_iorank',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('rank', sa.String(length=2), nullable=False),
        sa.Column('tr_line', sa.Float(), nullable=False),
        sa.Column('player_count', sa.Integer(), nullable=False),
        sa.Column('low_pps', sa.JSON(), nullable=False),
        sa.Column('low_apm', sa.JSON(), nullable=False),
        sa.Column('low_vs', sa.JSON(), nullable=False),
        sa.Column('avg_pps', sa.Float(), nullable=False),
        sa.Column('avg_apm', sa.Float(), nullable=False),
        sa.Column('avg_vs', sa.Float(), nullable=False),
        sa.Column('high_pps', sa.JSON(), nullable=False),
        sa.Column('high_apm', sa.JSON(), nullable=False),
        sa.Column('high_vs', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('file_hash', sa.String(length=128), nullable=True),
        sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_iorank'),
    )
    with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
        batch_op.create_index('ix_nonebot_plugin_tetris_stats_iorank_update_time', ['update_time'], unique=False)
        batch_op.create_index('ix_nonebot_plugin_tetris_stats_iorank_rank', ['rank'], unique=False)
        batch_op.create_index('ix_nonebot_plugin_tetris_stats_iorank_file_hash', ['file_hash'], unique=False)

View File

@@ -1,152 +1,50 @@
from asyncio import Lock
from collections.abc import AsyncGenerator
from contextlib import asynccontextmanager
from datetime import datetime, timezone
from enum import Enum, auto
from typing import TYPE_CHECKING, Literal, TypeVar, overload
from nonebot.exception import FinishedException
from nonebot.log import logger
from nonebot_plugin_orm import AsyncSession, get_session
from nonebot_plugin_user import User
from nonebot_plugin_orm import AsyncSession
from sqlalchemy import select
from ..utils.typedefs import AllCommandType, BaseCommandType, GameType, TETRIOCommandType
from .models import Bind, TriggerHistoricalDataV2
UTC = timezone.utc
if TYPE_CHECKING:
from ..games.tetrio.api.models import TETRIOHistoricalData
from ..games.top.api.models import TOPHistoricalData
from ..games.tos.api.models import TOSHistoricalData
class BindStatus(Enum):
SUCCESS = auto()
UPDATE = auto()
from ..utils.typing import GameType
from .models import Bind
async def query_bind_info(
session: AsyncSession,
user: User,
chat_platform: str,
chat_account: str,
game_platform: GameType,
) -> Bind | None:
return (
await session.scalars(select(Bind).where(Bind.user_id == user.id).where(Bind.game_platform == game_platform))
await session.scalars(
select(Bind)
.where(Bind.chat_platform == chat_platform)
.where(Bind.chat_account == chat_account)
.where(Bind.game_platform == game_platform)
)
).one_or_none()
async def create_or_update_bind(
session: AsyncSession,
user: User,
chat_platform: str,
chat_account: str,
game_platform: GameType,
game_account: str,
) -> BindStatus:
) -> str:
bind = await query_bind_info(
session=session,
user=user,
chat_platform=chat_platform,
chat_account=chat_account,
game_platform=game_platform,
)
if bind is None:
bind = Bind(
user_id=user.id,
chat_platform=chat_platform,
chat_account=chat_account,
game_platform=game_platform,
game_account=game_account,
)
session.add(bind)
status = BindStatus.SUCCESS
message = '绑定成功'
else:
bind.game_account = game_account
status = BindStatus.UPDATE
message = '更新绑定成功'
await session.commit()
return status
async def remove_bind(
session: AsyncSession,
user: User,
game_platform: GameType,
) -> bool:
bind = await query_bind_info(
session=session,
user=user,
game_platform=game_platform,
)
if bind is not None:
await session.delete(bind)
await session.commit()
return True
return False
T = TypeVar('T', 'TETRIOHistoricalData', 'TOPHistoricalData', 'TOSHistoricalData')
lock = Lock()
async def anti_duplicate_add(model: T) -> None:
async with lock, get_session() as session:
result = (
await session.scalars(
select(cls := model.__class__)
.where(cls.update_time == model.update_time)
.where(cls.user_unique_identifier == model.user_unique_identifier)
.where(cls.api_type == model.api_type)
)
).all()
if result:
for i in result:
if i.data == model.data:
logger.debug('Anti duplicate successfully')
return
session.add(model)
await session.commit()
@asynccontextmanager
@overload
async def trigger(
session_persist_id: int,
game_platform: Literal['IO'],
command_type: TETRIOCommandType,
command_args: list[str],
) -> AsyncGenerator:
yield
@asynccontextmanager
@overload
async def trigger(
session_persist_id: int,
game_platform: GameType,
command_type: BaseCommandType,
command_args: list[str],
) -> AsyncGenerator:
yield
@asynccontextmanager
async def trigger(
session_persist_id: int,
game_platform: GameType,
command_type: AllCommandType,
command_args: list[str],
) -> AsyncGenerator:
trigger_time = datetime.now(UTC)
try:
yield
except FinishedException:
async with get_session() as session:
session.add(
TriggerHistoricalDataV2(
trigger_time=trigger_time,
session_persist_id=session_persist_id,
game_platform=game_platform,
command_type=command_type,
command_args=command_args,
finish_time=datetime.now(UTC),
)
)
await session.commit()
raise
return message

View File

@@ -1,85 +1,33 @@
from collections.abc import Callable, Sequence
from datetime import datetime
from typing import Any
from nonebot.compat import PYDANTIC_V2, type_validate_json
from nonebot.adapters import Message
from nonebot_plugin_orm import Model
from pydantic import BaseModel, ValidationError
from sqlalchemy import JSON, DateTime, Dialect, String, TypeDecorator
from sqlalchemy import JSON, DateTime, PickleType, String
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
from typing_extensions import override
from ..utils.typedefs import AllCommandType, GameType
class PydanticType(TypeDecorator):
impl = JSON
@override
def __init__(
self,
get_model: Sequence[Callable[[], Sequence[type[BaseModel]]]],
models: set[type[BaseModel]],
*args: Any,
**kwargs: Any,
):
self.get_model = get_model
self._models = models
super().__init__(*args, **kwargs)
if PYDANTIC_V2:
@override
def process_bind_param(self, value: Any | None, dialect: Dialect) -> str:
# 将 Pydantic 模型实例转换为 JSON
if isinstance(value, tuple(self.models)):
return value.model_dump_json(by_alias=True) # type: ignore[union-attr]
raise TypeError
else:
@override
def process_bind_param(self, value: Any | None, dialect: Dialect) -> str:
# 将 Pydantic 模型实例转换为 JSON
if isinstance(value, tuple(self.models)):
return value.json(by_alias=True) # type: ignore[union-attr]
raise TypeError
@override
def process_result_value(self, value: Any | None, dialect: Dialect) -> BaseModel:
# 将 JSON 转换回 Pydantic 模型实例
if isinstance(value, str | bytes):
for i in self.models:
try:
return type_validate_json(i, value)
except ValidationError: # noqa: PERF203
...
raise ValueError
@property
def models(self) -> tuple[type[BaseModel], ...]:
models: set[type[BaseModel]] = set()
for i in self.get_model:
models.update(i())
models.update(self._models)
return tuple(models)
from ..game_data_processor import ProcessedData, User
from ..utils.typing import CommandType, GameType
class Bind(MappedAsDataclass, Model):
__tablename__ = 'nb_t_bind'
id: Mapped[int] = mapped_column(init=False, primary_key=True)
user_id: Mapped[int] = mapped_column(index=True)
chat_platform: Mapped[str] = mapped_column(String(32), index=True)
chat_account: Mapped[str] = mapped_column(index=True)
game_platform: Mapped[GameType] = mapped_column(String(32))
game_account: Mapped[str]
class TriggerHistoricalDataV2(MappedAsDataclass, Model):
__tablename__ = 'nb_t_trigger_hist_v2'
class HistoricalData(MappedAsDataclass, Model):
id: Mapped[int] = mapped_column(init=False, primary_key=True)
trigger_time: Mapped[datetime] = mapped_column(DateTime)
session_persist_id: Mapped[int]
game_platform: Mapped[GameType] = mapped_column(String(32), index=True)
command_type: Mapped[AllCommandType] = mapped_column(String(16), index=True)
command_args: Mapped[list[str]] = mapped_column(JSON)
finish_time: Mapped[datetime] = mapped_column(DateTime)
bot_platform: Mapped[str | None] = mapped_column(String(32))
bot_account: Mapped[str | None]
source_type: Mapped[str | None] = mapped_column(String(32), index=True)
source_account: Mapped[str | None] = mapped_column(index=True)
message: Mapped[Message | None] = mapped_column(PickleType)
game_platform: Mapped[GameType] = mapped_column(String(32), index=True, init=False)
command_type: Mapped[CommandType] = mapped_column(String(16), index=True, init=False)
command_args: Mapped[list[str]] = mapped_column(JSON, init=False)
game_user: Mapped[User] = mapped_column(PickleType, init=False)
processed_data: Mapped[ProcessedData] = mapped_column(PickleType, init=False)
finish_time: Mapped[datetime] = mapped_column(DateTime, init=False)

View File

@@ -0,0 +1,82 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from datetime import UTC, datetime
from ..utils.typing import CommandType, GameType
@dataclass
class User:
    """Game user; each game's processor subclasses this with its own identity fields."""
@dataclass
class RawResponse:
    """Raw bytes returned by the remote API, kept for recording."""
@dataclass
class ProcessedData:
    """Parsed / validated API data, kept for recording."""
from ..utils.recorder import Recorder # noqa: E402 避免循环导入
class Processor(ABC):
    """Abstract per-request processor; subclasses implement one game platform each."""

    # Identity of the triggering event (id() of the event object — see callers).
    event_id: int
    # Set by subclasses when a command is handled ('bind' / 'query' / ...).
    command_type: CommandType
    command_args: list[str]
    user: User
    raw_response: RawResponse
    processed_data: ProcessedData

    @abstractmethod
    def __init__(
        self,
        event_id: int,
        user: User,
        command_args: list[str],
    ) -> None:
        self.event_id = event_id
        self.user = user
        self.command_args = command_args

    @property
    @abstractmethod
    def game_platform(self) -> GameType:
        """Game platform identifier."""
        raise NotImplementedError

    @abstractmethod
    async def handle_bind(self, platform: str, account: str) -> str:
        """Handle a bind command; returns the reply text."""
        raise NotImplementedError

    @abstractmethod
    async def handle_query(self) -> str:
        """Handle a query command; returns the reply text."""
        raise NotImplementedError

    @abstractmethod
    async def generate_message(self) -> str:
        """Render the reply message."""
        raise NotImplementedError

    def __del__(self) -> None:
        # On garbage collection, flush this request's results into the Recorder's
        # historical-data entry. NOTE(review): relying on __del__ timing is fragile —
        # finalization order/laziness varies by interpreter; confirm this is intended.
        finish_time = datetime.now(tz=UTC)
        historical_data = Recorder.get_historical_data(self.event_id)
        historical_data.game_platform = self.game_platform
        historical_data.command_type = self.command_type
        historical_data.command_args = self.command_args
        historical_data.game_user = self.user
        historical_data.processed_data = self.processed_data
        historical_data.finish_time = finish_time
        Recorder.update_historical_data(self.event_id, historical_data)
from . import ( # noqa: F401, E402
io_data_processor,
top_data_processor,
tos_data_processor,
)

View File

@@ -0,0 +1,2 @@
# Aliases accepted for the bind sub-command; index 0 is the canonical name.
BIND_COMMAND: list[str] = ['绑定', 'bind']
# Aliases for the query sub-command; '' presumably lets a bare mention/arg trigger
# a query (used as the Option name with compact=True) — confirm against Alconna docs.
QUERY_COMMAND: list[str] = ['', '查询', 'query', 'stats']

View File

@@ -0,0 +1,187 @@
from datetime import timedelta
from arclet.alconna import Alconna, Arg, ArgFlag, Args, CommandMeta, Option
from nonebot.adapters import Bot, Event
from nonebot.matcher import Matcher
from nonebot_plugin_alconna import AlcMatches, At, on_alconna
from nonebot_plugin_orm import get_session
from sqlalchemy import select
from ...db import query_bind_info
from ...utils.exception import MessageFormatError, NeedCatchError
from ...utils.metrics import get_metrics
from ...utils.platform import get_platform
from ...utils.typing import Me
from ..constant import BIND_COMMAND, QUERY_COMMAND
from .constant import GAME_TYPE
from .model import IORank
from .processor import Processor, User, check_rank_data, identify_user_info
from .typing import Rank
# Command tree for the "io" entry point: bind / query / rank sub-commands.
alc = on_alconna(
    Alconna(
        'io',
        Option(
            BIND_COMMAND[0],
            Args(
                Arg(
                    'account',
                    identify_user_info,
                    notice='IO 用户名 / ID',
                    flags=[ArgFlag.HIDDEN],
                )
            ),
            alias=BIND_COMMAND[1:],
            compact=True,
            dest='bind',
            help_text='绑定 IO 账号',
        ),
        Option(
            QUERY_COMMAND[0],
            Args(
                Arg(
                    'target',
                    At | Me,
                    notice='@想要查询的人 | 自己',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
                Arg(
                    'account',
                    identify_user_info,
                    notice='IO 用户名 / ID',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
            ),
            alias=QUERY_COMMAND[1:],
            compact=True,
            dest='query',
            help_text='查询 IO 游戏信息',
        ),
        Option(
            'rank',
            Args(Arg('rank', Rank, notice='IO 段位')),
            alias={'Rank', 'RANK', '段位'},
            compact=True,
            dest='rank',
            help_text='查询 IO 段位信息',
        ),
        meta=CommandMeta(
            description='查询 TETR.IO 的信息',
            example='io绑定scdhh\nio查我\niorankx',
            compact=True,
            fuzzy_match=True,
        ),
    ),
    skip_for_unmatch=False,
    auto_send_output=True,
    aliases={'IO'},
)
# Shortcut: "fkosk" expands to a self-query ("io查" with an empty argument).
alc.shortcut('fkosk', {'command': 'io查', 'args': ['']})
@alc.assign('bind')
async def _(bot: Bot, event: Event, matcher: Matcher, account: User):
    """Handle "io bind <account>": resolve the account and persist the binding."""
    proc = Processor(
        event_id=id(event),
        user=account,
        command_args=[],
    )
    try:
        await matcher.finish(await proc.handle_bind(platform=get_platform(bot), account=event.get_user_id()))
    except NeedCatchError as e:
        # Known, user-presentable failures (request errors etc.) become the reply text.
        await matcher.finish(str(e))
@alc.assign('query')
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
    """Handle "io 查 [@target]": look up the bound account and query its stats."""
    async with get_session() as session:
        bind = await query_bind_info(
            session=session,
            chat_platform=get_platform(bot),
            # @mention queries the mentioned user; otherwise the sender themself.
            chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
            game_platform=GAME_TYPE,
        )
    if bind is None:
        await matcher.finish('未查询到绑定信息')
    message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
    proc = Processor(
        event_id=id(event),
        user=User(ID=bind.game_account),
        command_args=[],
    )
    try:
        await matcher.finish(message + await proc.handle_query())
    except NeedCatchError as e:
        await matcher.finish(str(e))
@alc.assign('query')
async def _(event: Event, matcher: Matcher, account: User):
    """Handle "io 查 <account>": query an explicitly given username/ID."""
    proc = Processor(
        event_id=id(event),
        user=account,
        command_args=[],
    )
    try:
        await matcher.finish(await proc.handle_query())
    except NeedCatchError as e:
        await matcher.finish(str(e))
@alc.assign('rank')
async def _(event: Event, matcher: Matcher, rank: Rank):
    """Handle "io rank <rank>": report the rank's TR cutoff, trend and metric extremes."""
    if rank == 'z':
        await matcher.finish('暂不支持查询未知段位')
    try:
        # Refresh the rank snapshot if the cached data is stale.
        await check_rank_data()
    except NeedCatchError as e:
        # FIX: dropped a redundant str() wrapper around the f-string.
        await matcher.finish(f'段位信息获取失败\n{e}')
    async with get_session() as session:
        data = (
            await session.scalars(select(IORank).where(IORank.rank == rank).order_by(IORank.id.desc()).limit(5))
        ).all()
        # FIX: guard against an empty table — previously data[0] raised IndexError
        # when no snapshot rows existed for this rank.
        if not data:
            await matcher.finish('暂无段位数据')
        latest_data = data[0]
        message = f'{rank.upper()} 段 分数线 {latest_data.tr_line:.2f} TR, {latest_data.player_count} 名玩家\n'
        if len(data) > 1:
            hours = (latest_data.create_time - data[-1].create_time).total_seconds() / 3600
            difference = latest_data.tr_line - data[-1].tr_line
            # Magnitude of the change only (no direction), empty when unchanged —
            # preserves the original formatting. TODO: consider adding a ↑/↓ marker.
            trend = f'{abs(difference):.2f}' if difference else ''
            message += f'对比 {hours:.2f} 小时前趋势: {trend}'
        else:
            message += '暂无对比数据'
        avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
        low_pps = get_metrics(pps=latest_data.low_pps[1])
        low_vs = get_metrics(vs=latest_data.low_vs[1])
        max_pps = get_metrics(pps=latest_data.high_pps[1])
        max_vs = get_metrics(vs=latest_data.high_vs[1])
        message += (
            '\n'
            '平均数据:\n'
            f"L'PM: {avg.lpm} ( {avg.pps} pps )\n"
            f'APM: {avg.apm} ( x{avg.apl} )\n'
            f'ADPM: {avg.adpm} ( x{avg.adpl} ) ( {avg.vs}vs )\n'
            '\n'
            '最低数据:\n'
            f"L'PM: {low_pps.lpm} ( {low_pps.pps} pps ) By: {latest_data.low_pps[0]['name'].upper()}\n"
            f'APM: {latest_data.low_apm[1]} By: {latest_data.low_apm[0]["name"].upper()}\n'
            f'ADPM: {low_vs.adpm} ( {low_vs.vs}vs ) By: {latest_data.low_vs[0]["name"].upper()}\n'
            '\n'
            '最高数据:\n'
            f"L'PM: {max_pps.lpm} ( {max_pps.pps} pps ) By: {latest_data.high_pps[0]['name'].upper()}\n"
            f'APM: {latest_data.high_apm[1]} By: {latest_data.high_apm[0]["name"].upper()}\n'
            f'ADPM: {max_vs.adpm} ( {max_vs.vs}vs ) By: {latest_data.high_vs[0]["name"].upper()}\n'
            '\n'
            # +8h: renders the UTC snapshot time in UTC+8 for display — TODO confirm tz intent.
            f'数据更新时间: {(latest_data.create_time+timedelta(hours=8)).strftime("%Y-%m-%d %H:%M:%S")}'
        )
    await matcher.finish(message)
@alc.handle()
async def _(matcher: Matcher, account: MessageFormatError):
    """If account parsing produced a MessageFormatError, reply with its message."""
    await matcher.finish(str(account))
@alc.handle()
async def _(matcher: Matcher, matches: AlcMatches):
    """Fallback for unparsable "io" input: report the parse error and the help hint."""
    if matches.head_matched:
        # FIX: parenthesize the conditional expression. Previously the help hint was
        # concatenated into the else branch only, so whenever error_info was present
        # the reply contained the error but dropped the '输入"io --help"查看帮助' hint.
        await matcher.finish(
            (f'{matches.error_info!r}\n' if matches.error_info is not None else '') + '输入"io --help"查看帮助'
        )

View File

@@ -0,0 +1,24 @@
from ...utils.typing import GameType
from .typing import Rank
# Platform identifier used in the shared Bind table.
GAME_TYPE: GameType = 'IO'
# Root of the public TETR.IO channel API.
BASE_URL = 'https://ch.tetr.io/api/'
# Cumulative ladder percentile at which each rank's TR cutoff sits (top N% of
# ranked players) — presumably mirrors TETR.IO's published rank distribution;
# confirm against upstream documentation.
RANK_PERCENTILE: dict[Rank, float] = {
    'x': 1,
    'u': 5,
    'ss': 11,
    's+': 17,
    's': 23,
    's-': 30,
    'a+': 38,
    'a': 46,
    'a-': 54,
    'b+': 62,
    'b': 70,
    'b-': 78,
    'c+': 84,
    'c': 90,
    'c-': 95,
    'd+': 97.5,
    'd': 100,
}

View File

@@ -0,0 +1,29 @@
from datetime import UTC, datetime
from nonebot_plugin_orm import Model
from sqlalchemy import JSON, DateTime, String
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
from .typing import Rank
class IORank(MappedAsDataclass, Model):
    """One snapshot row of TETR.IO rank statistics (per rank, per scheduled fetch)."""

    id: Mapped[int] = mapped_column(init=False, primary_key=True)
    # Rank letter ('x', 'u', 'ss', ... 'd'); see RANK_PERCENTILE.
    rank: Mapped[Rank] = mapped_column(String(2), index=True)
    # TR cutoff for entering this rank at snapshot time.
    tr_line: Mapped[float]
    player_count: Mapped[int]
    # Extremes are stored as (user-dict, value) pairs serialized to JSON.
    low_pps: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    low_apm: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    low_vs: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    avg_pps: Mapped[float]
    avg_apm: Mapped[float]
    avg_vs: Mapped[float]
    high_pps: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    high_apm: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    high_vs: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
    # Snapshot timestamp (UTC-aware at creation; check how the DB round-trips tzinfo).
    create_time: Mapped[datetime] = mapped_column(
        DateTime,
        default=lambda: datetime.now(tz=UTC),
        index=True,
        init=False,
    )

View File

@@ -0,0 +1,254 @@
from collections import defaultdict
from collections.abc import Callable
from dataclasses import asdict, dataclass
from datetime import UTC, datetime, timedelta
from math import floor
from re import match
from statistics import mean
from nonebot import get_driver
from nonebot_plugin_apscheduler import scheduler # type: ignore[import-untyped]
from nonebot_plugin_orm import get_session
from pydantic import parse_raw_as
from sqlalchemy import select
from ...db import create_or_update_bind
from ...utils.exception import MessageFormatError, RequestError, WhatTheFuckError
from ...utils.request import Request, splice_url
from ...utils.typing import GameType
from .. import ProcessedData as ProcessedDataMeta
from .. import Processor as ProcessorMeta
from .. import RawResponse as RawResponseMeta
from .. import User as UserMeta
from .constant import BASE_URL, GAME_TYPE, RANK_PERCENTILE
from .model import IORank
from .schemas.league_all import FailedModel as LeagueAllFailed
from .schemas.league_all import LeagueAll
from .schemas.league_all import ValidUser as LeagueAllUser
from .schemas.user_info import FailedModel as InfoFailed
from .schemas.user_info import (
NeverPlayedLeague,
NeverRatedLeague,
UserInfo,
)
from .schemas.user_info import SuccessModel as InfoSuccess
from .schemas.user_records import FailedModel as RecordsFailed
from .schemas.user_records import SoloRecord, UserRecords
from .schemas.user_records import SuccessModel as RecordsSuccess
from .typing import Rank
driver = get_driver()
@dataclass
class User(UserMeta):
    """TETR.IO user identity: a 24-hex account ID and/or a lowercase username."""

    ID: str | None = None
    name: str | None = None
@dataclass
class RawResponse(RawResponseMeta):
    """Raw API response bytes, cached per request kind."""

    user_info: bytes | None = None
    user_records: bytes | None = None
@dataclass
class ProcessedData(ProcessedDataMeta):
    """Parsed API responses, cached per request kind."""

    user_info: InfoSuccess | None = None
    user_records: RecordsSuccess | None = None
def identify_user_info(info: str) -> User | MessageFormatError:
    """Classify raw user input as a 24-hex TETR.IO account ID or a 3-16 char username."""
    # A 24-digit lowercase-hex string is an account ID.
    looks_like_id = match(r'^[a-f0-9]{24}$', info) is not None
    if looks_like_id:
        return User(ID=info)
    # Otherwise accept a username: letters, digits, '_' and '-', normalized to lowercase.
    looks_like_name = match(r'^[a-zA-Z0-9_-]{3,16}$', info) is not None
    if looks_like_name:
        return User(name=info.lower())
    return MessageFormatError('用户名/ID不合法')
class Processor(ProcessorMeta):
    """TETR.IO implementation of the game-data processor."""

    user: User
    raw_response: RawResponse
    processed_data: ProcessedData

    def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
        super().__init__(event_id, user, command_args)
        # Fresh caches per request; filled lazily by get_user_info / get_user_records.
        self.raw_response = RawResponse()
        self.processed_data = ProcessedData()

    @property
    def game_platform(self) -> GameType:
        """Game platform identifier."""
        return GAME_TYPE

    async def handle_bind(self, platform: str, account: str) -> str:
        """Resolve the user against the API, then create/update the binding row."""
        self.command_type = 'bind'
        await self.get_user()
        if self.user.ID is None:
            raise  # FIXME: unclear how to narrow this type for the checker here
        async with get_session() as session:
            return await create_or_update_bind(
                session=session,
                chat_platform=platform,
                chat_account=account,
                game_platform=GAME_TYPE,
                game_account=self.user.ID,
            )

    async def handle_query(self) -> str:
        """Resolve the user, then render the stats reply."""
        self.command_type = 'query'
        await self.get_user()
        return await self.generate_message()

    async def get_user(self) -> None:
        """Fill in both ``user.name`` and ``user.ID`` from the user-info endpoint."""
        # get_user_info() caches its result, so at most one HTTP request is made.
        if self.user.name is None:
            self.user.name = (await self.get_user_info()).data.user.username
        if self.user.ID is None:
            self.user.ID = (await self.get_user_info()).data.user.id

    async def get_user_info(self) -> InfoSuccess:
        """Fetch (and cache) the user's profile; raises RequestError on API failure."""
        if self.processed_data.user_info is None:
            self.raw_response.user_info = await Request.request(
                splice_url([BASE_URL, 'users/', f'{self.user.ID or self.user.name}'])
            )
            user_info: UserInfo = parse_raw_as(UserInfo, self.raw_response.user_info)  # type: ignore[arg-type]
            if isinstance(user_info, InfoFailed):
                raise RequestError(f'用户信息请求错误:\n{user_info.error}')
            self.processed_data.user_info = user_info
        return self.processed_data.user_info

    async def get_user_records(self) -> RecordsSuccess:
        """Fetch (and cache) the user's solo records; raises RequestError on API failure."""
        if self.processed_data.user_records is None:
            self.raw_response.user_records = await Request.request(
                splice_url(
                    [
                        BASE_URL,
                        'users/',
                        f'{self.user.ID or self.user.name}/',
                        'records',
                    ]
                )
            )
            user_records: UserRecords = parse_raw_as(
                UserRecords,  # type: ignore[arg-type]
                self.raw_response.user_records,
            )
            if isinstance(user_records, RecordsFailed):
                raise RequestError(f'用户Solo数据请求错误:\n{user_records.error}')
            self.processed_data.user_records = user_records
        return self.processed_data.user_records

    async def generate_message(self) -> str:
        """Render the reply: league summary plus 40L / Blitz personal bests."""
        user_info = await self.get_user_info()
        user_name = user_info.data.user.username.upper()
        league = user_info.data.user.league
        ret_message = ''
        if isinstance(league, NeverPlayedLeague):
            ret_message += f'用户 {user_name} 没有排位统计数据'
        else:
            if isinstance(league, NeverRatedLeague):
                ret_message += f'用户 {user_name} 暂未完成定级赛, 最近十场的数据:'
            elif league.rank == 'z':
                ret_message += f'用户 {user_name} 暂无段位, {round(league.rating,2)} TR'
            else:
                ret_message += (
                    f'{league.rank.upper()} 段用户 {user_name} {round(league.rating,2)} TR (#{league.standing})'
                )
                ret_message += f', 段位分 {round(league.glicko,2)}±{round(league.rd,2)}, 最近十场的数据:'
            # lpm derived as pps * 24 — presumably pieces-per-second to lines-per-minute
            # under TETR.IO's scoring; TODO confirm the factor.
            lpm = league.pps * 24
            ret_message += f"\nL'PM: {round(lpm, 2)} ( {league.pps} pps )"
            ret_message += f'\nAPM: {league.apm} ( x{round(league.apm/(league.pps*24),2)} )'
            if league.vs is not None:
                adpm = league.vs * 0.6
                ret_message += f'\nADPM: {round(adpm,2)} ( x{round(adpm/lpm,2)} ) ( {league.vs}vs )'
        user_records = await self.get_user_records()
        sprint = user_records.data.records.sprint
        if sprint.record is not None:
            if not isinstance(sprint.record, SoloRecord):
                raise WhatTheFuckError('40L记录不是单人记录')
            # final_time is in milliseconds; display seconds.
            ret_message += f'\n40L: {round(sprint.record.endcontext.final_time/1000,2)}s'
            ret_message += f' ( #{sprint.rank} )' if sprint.rank is not None else ''
        blitz = user_records.data.records.blitz
        if blitz.record is not None:
            if not isinstance(blitz.record, SoloRecord):
                raise WhatTheFuckError('Blitz记录不是单人记录')
            ret_message += f'\nBlitz: {blitz.record.endcontext.score}'
            ret_message += f' ( #{blitz.rank} )' if blitz.rank is not None else ''
        return ret_message
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
async def get_io_rank_data() -> None:
    """Fetch the full TETR.IO league ladder and store a per-rank statistics snapshot."""
    league_all: LeagueAll = parse_raw_as(
        LeagueAll,  # type: ignore[arg-type]
        await Request.request(splice_url([BASE_URL, 'users/lists/league/all'])),
    )
    if isinstance(league_all, LeagueAllFailed):
        raise RequestError(f'用户Solo数据请求错误:\n{league_all.error}')

    def pps(user: LeagueAllUser) -> float:
        return user.league.pps

    def apm(user: LeagueAllUser) -> float:
        return user.league.apm

    def vs(user: LeagueAllUser) -> float:
        return user.league.vs

    def _min(users: list[LeagueAllUser], field: Callable[[LeagueAllUser], float]) -> LeagueAllUser:
        return min(users, key=field)

    def _max(users: list[LeagueAllUser], field: Callable[[LeagueAllUser], float]) -> LeagueAllUser:
        return max(users, key=field)

    def build_extremes_data(
        users: list[LeagueAllUser],
        field: Callable[[LeagueAllUser], float],
        sort: Callable[[list[LeagueAllUser], Callable[[LeagueAllUser], float]], LeagueAllUser],
    ) -> tuple[dict[str, str], float]:
        # Record who holds the extreme value together with the value itself.
        user = sort(users, field)
        return asdict(User(ID=user.id, name=user.username)), field(user)

    users = [i for i in league_all.data.users if isinstance(i, LeagueAllUser)]
    rank_to_users: defaultdict[Rank, list[LeagueAllUser]] = defaultdict(list)
    for i in users:
        rank_to_users[i.league.rank].append(i)
    rank_info: list[IORank] = []
    for rank, percentile in RANK_PERCENTILE.items():
        # Cutoff = rating of the player at the rank's cumulative percentile;
        # assumes `users` arrives ordered by rating descending — TODO confirm API order.
        offset = floor((percentile / 100) * len(users)) - 1
        tr_line = users[offset].league.rating
        rank_users = rank_to_users[rank]
        # FIX: skip ranks with no players — min()/max()/mean() raise on empty input.
        if not rank_users:
            continue
        rank_info.append(
            IORank(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_users),
                low_pps=(build_extremes_data(rank_users, pps, _min)),
                low_apm=(build_extremes_data(rank_users, apm, _min)),
                low_vs=(build_extremes_data(rank_users, vs, _min)),
                # FIX: average over lists, not sets — the previous set comprehensions
                # collapsed duplicate values and skewed the means.
                avg_pps=mean([i.league.pps for i in rank_users]),
                avg_apm=mean([i.league.apm for i in rank_users]),
                avg_vs=mean([i.league.vs for i in rank_users]),
                high_pps=(build_extremes_data(rank_users, pps, _max)),
                high_apm=(build_extremes_data(rank_users, apm, _max)),
                high_vs=(build_extremes_data(rank_users, vs, _max)),
            )
        )
    async with get_session() as session:
        session.add_all(rank_info)
        await session.commit()
@driver.on_startup
async def check_rank_data() -> None:
    """On startup, fetch a fresh rank snapshot if none exists or the latest is > 6h old."""
    async with get_session() as session:
        latest_time = await session.scalar(select(IORank.create_time).order_by(IORank.id.desc()).limit(1))
        # Stored datetimes come back naive; reattach UTC before comparing.
        if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
            await get_io_rank_data()

View File

@@ -0,0 +1,20 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel
class Cache(BaseModel):
    """Cache metadata attached to every TETR.IO channel-API response."""

    status: str
    cached_at: datetime
    cached_until: datetime
class SuccessModel(BaseModel):
    """Base shape of a successful API response (``success: true``)."""

    success: Literal[True]
    cache: Cache
class FailedModel(BaseModel):
    """Base shape of a failed API response (``success: false`` plus an error string)."""

    success: Literal[False]
    error: str

View File

@@ -0,0 +1,63 @@
from pydantic import BaseModel, Field
from ..typing import Rank
from .base import FailedModel
from .base import SuccessModel as BaseSuccessModel
class SuccessModel(BaseSuccessModel):
    """Successful response of the TETR.IO "league all users" leaderboard endpoint."""

    class Data(BaseModel):
        class ValidUser(BaseModel):
            # A user whose league stats are fully populated.
            class League(BaseModel):
                gamesplayed: int
                gameswon: int
                rating: float
                glicko: float
                rd: float
                rank: Rank
                bestrank: Rank
                apm: float
                pps: float
                vs: float
                decaying: bool

            id: str = Field(..., alias='_id')
            username: str
            role: str
            xp: float
            league: League
            supporter: bool
            verified: bool
            country: str | None

        class InvalidUser(BaseModel):
            # A user where some league fields may be None (partial/unrated data).
            class League(BaseModel):
                gamesplayed: int
                gameswon: int
                rating: float
                glicko: float | None
                rd: float | None
                rank: Rank
                bestrank: Rank
                apm: float | None
                pps: float | None
                vs: float | None
                decaying: bool

            id: str = Field(..., alias='_id')
            username: str
            role: str
            xp: float
            league: League
            supporter: bool
            verified: bool
            country: str | None

        users: list[ValidUser | InvalidUser]

    data: Data
# A league-all response is either a success or a failure payload.
LeagueAll = SuccessModel | FailedModel
# Convenience re-exports of the nested user models.
ValidUser = SuccessModel.Data.ValidUser
InvalidUser = SuccessModel.Data.InvalidUser

View File

@@ -0,0 +1,125 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
from ..typing import Rank
from .base import FailedModel
from .base import SuccessModel as BaseSuccessModel
class SuccessModel(BaseSuccessModel):
    """Successful response of the TETR.IO user-info endpoint."""

    class Data(BaseModel):
        class User(BaseModel):
            class Badge(BaseModel):
                id: str
                label: str
                ts: datetime | None

            class NeverPlayedLeague(BaseModel):
                # League payload for a user with zero Tetra League games.
                gamesplayed: Literal[0]
                gameswon: Literal[0]
                rating: Literal[-1]
                rank: Literal['z']
                standing: Literal[-1]
                standing_local: Literal[-1]
                next_rank: None
                prev_rank: None
                next_at: Literal[-1]
                prev_at: Literal[-1]
                percentile: Literal[-1]
                percentile_rank: Literal['z']
                apm: None
                pps: None
                vs: None
                decaying: bool

            class NeverRatedLeague(BaseModel):
                # League payload for a user with 1-9 games (not yet rated).
                gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
                gameswon: int
                rating: Literal[-1]
                rank: Literal['z']
                standing: Literal[-1]
                standing_local: Literal[-1]
                next_rank: None
                prev_rank: None
                next_at: Literal[-1]
                prev_at: Literal[-1]
                percentile: Literal[-1]
                percentile_rank: Literal['z']
                apm: float
                pps: float
                vs: float
                decaying: bool

            class RatedLeague(BaseModel):
                # League payload for a fully rated user.
                gamesplayed: int
                gameswon: int
                rating: float
                rank: Rank
                bestrank: Rank
                standing: int
                standing_local: int
                next_rank: Rank | None
                prev_rank: Rank | None
                next_at: int
                prev_at: int
                percentile: float
                percentile_rank: str
                glicko: float
                rd: float
                apm: float
                pps: float
                vs: float | None
                decaying: bool

            class Connections(BaseModel):
                class Discord(BaseModel):
                    id: str
                    username: str

                discord: Discord | None

            class Distinguishment(BaseModel):
                type: str  # noqa: A003

            id: str = Field(..., alias='_id')
            username: str
            role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'banned']
            ts: datetime | None
            botmaster: str | None
            badges: list[Badge]
            xp: float
            gamesplayed: int
            gameswon: int
            gametime: float
            country: str | None
            badstanding: bool | None
            supporter: bool | None  # upstream claims this is always present, but in practice it can be missing
            supporter_tier: int
            verified: bool
            league: NeverPlayedLeague | NeverRatedLeague | RatedLeague
            avatar_revision: int | None
            """This user's avatar ID. Get their avatar at
            https://tetr.io/user-content/avatars/{ USERID }.jpg?rv={ AVATAR_REVISION }"""
            banner_revision: int | None
            """This user's banner ID. Get their banner at
            https://tetr.io/user-content/banners/{ USERID }.jpg?rv={ BANNER_REVISION }
            Ignore this field if the user is not a supporter."""
            bio: str | None
            connections: Connections
            friend_count: int
            distinguishment: Distinguishment | None

        user: User

    data: Data
# Convenience re-exports of the nested league models.
NeverPlayedLeague = SuccessModel.Data.User.NeverPlayedLeague
NeverRatedLeague = SuccessModel.Data.User.NeverRatedLeague
RatedLeague = SuccessModel.Data.User.RatedLeague
# A user-info response is either a success or a failure payload.
UserInfo = SuccessModel | FailedModel

View File

@@ -0,0 +1,125 @@
from datetime import datetime
from pydantic import BaseModel, Field
from .base import FailedModel
from .base import SuccessModel as BaseSuccessModel
class EndContext(BaseModel):
    """End-of-game statistics attached to a record (40L / Blitz)."""

    class Time(BaseModel):
        start: int
        zero: bool
        locked: bool
        prev: int
        frameoffset: int

    class Clears(BaseModel):
        # Line-clear counters broken down by clear type.
        singles: int
        doubles: int
        triples: int
        quads: int
        realtspins: int
        minitspins: int
        minitspinsingles: int
        tspinsingles: int
        minitspindoubles: int
        tspindoubles: int
        tspintriples: int
        tspinquads: int
        allclear: int

    class Garbage(BaseModel):
        sent: int
        received: int
        attack: int
        cleared: int

    class Finesse(BaseModel):
        combo: int
        faults: int
        perfectpieces: int

    seed: int
    lines: int
    level_lines: int
    level_lines_needed: int
    inputs: int
    holds: int
    time: Time
    score: int
    zenlevel: int
    zenprogress: int
    level: int
    combo: int
    currentcombopower: int  # undocumented upstream field
    topcombo: int
    btb: int
    topbtb: int
    currentbtbchainpower: int | None  # present in 40L records but absent in Blitz
    tspins: int
    piecesplaced: int
    clears: Clears
    garbage: Garbage
    kills: int
    finesse: Finesse
    final_time: float = Field(..., alias='finalTime')
    gametype: str
class BaseModeRecord(BaseModel):
    """A per-mode record wrapper; `record` is None when the user has no record for the mode."""

    class SoloRecord(BaseModel):
        class User(BaseModel):
            id: str = Field(..., alias='_id')
            username: str

        id: str = Field(..., alias='_id')
        stream: str
        replayid: str
        user: User
        ts: datetime
        ismulti: bool | None
        endcontext: EndContext

    class MultiRecord(BaseModel):
        class User(BaseModel):
            id: str = Field(..., alias='_id')
            username: str

        id: str = Field(..., alias='_id')
        stream: str
        replayid: str
        user: User
        ts: datetime
        ismulti: bool | None
        # Multiplayer replays carry one end-context per participant.
        endcontext: list[EndContext]

    record: SoloRecord | MultiRecord | None
    rank: int | None
class SuccessModel(BaseSuccessModel):
    """Successful response of the TETR.IO user-records endpoint."""

    class Data(BaseModel):
        class Records(BaseModel):
            class Sprint(BaseModeRecord):
                ...

            class Blitz(BaseModeRecord):
                ...

            # The API names the 40-lines mode "40l"; exposed here as `sprint`.
            sprint: Sprint = Field(..., alias='40l')
            blitz: Blitz

        class Zen(BaseModel):
            level: int
            score: int

        records: Records
        zen: Zen

    data: Data
# Convenience re-exports of the nested record models.
SoloRecord = BaseModeRecord.SoloRecord
MultiRecord = BaseModeRecord.MultiRecord
# A user-records response is either a success or a failure payload.
UserRecords = SuccessModel | FailedModel

View File

@@ -0,0 +1,22 @@
from typing import Literal
# TETR.IO Tetra League ranks, ordered best ('x') to worst ('d'),
# plus 'z' for unranked players.
Rank = Literal[
    'x',
    'u',
    'ss',
    's+',
    's',
    's-',
    'a+',
    'a',
    'a-',
    'b+',
    'b',
    'b-',
    'c+',
    'c',
    'c-',
    'd+',
    'd',
    'z',  # unranked
]

View File

@@ -0,0 +1,126 @@
from arclet.alconna import Alconna, Arg, ArgFlag, Args, CommandMeta, Option
from nonebot.adapters import Bot, Event
from nonebot.matcher import Matcher
from nonebot_plugin_alconna import AlcMatches, At, on_alconna
from nonebot_plugin_orm import get_session
from ...db import query_bind_info
from ...utils.exception import MessageFormatError, NeedCatchError
from ...utils.platform import get_platform
from ...utils.typing import Me
from ..constant import BIND_COMMAND, QUERY_COMMAND
from .constant import GAME_TYPE
from .processor import Processor, User, identify_user_info
# Command definition for the TOP (TetrisOnline Poland) plugin:
# "top绑定<account>" links a TOP account, "top查<target>" queries stats.
alc = on_alconna(
    Alconna(
        'top',
        Option(
            BIND_COMMAND[0],
            Args(
                Arg(
                    'account',
                    identify_user_info,
                    notice='TOP 用户名',
                    flags=[ArgFlag.HIDDEN],
                )
            ),
            alias=BIND_COMMAND[1:],
            compact=True,
            dest='bind',
            help_text='绑定 TOP 账号',
        ),
        Option(
            QUERY_COMMAND[0],
            Args(
                Arg(
                    'target',
                    At | Me,
                    notice='@想要查询的人 | 自己',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
                Arg(
                    'account',
                    identify_user_info | Me | At,
                    notice='TOP 用户名',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
            ),
            alias=QUERY_COMMAND[1:],
            compact=True,
            dest='query',
            help_text='查询 TOP 游戏信息',
        ),
        meta=CommandMeta(
            description='查询 TetrisOnline波兰服 的信息',
            example='top绑定scdhh\ntop查我',
            compact=True,
            fuzzy_match=True,
        ),
    ),
    skip_for_unmatch=False,
    auto_send_output=True,
    aliases={'TOP'},
)
@alc.assign('bind')
async def _(bot: Bot, event: Event, matcher: Matcher, account: User):
    # Bind the validated TOP account to the sender's chat identity.
    proc = Processor(
        event_id=id(event),
        user=account,
        command_args=[],
    )
    try:
        await matcher.finish(await proc.handle_bind(platform=get_platform(bot), account=event.get_user_id()))
    except NeedCatchError as e:
        # Expected domain errors (request failures etc.) are shown to the user.
        await matcher.finish(str(e))
@alc.assign('query')
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
    # Query by @-mention (or the sender itself) through the stored binding.
    async with get_session() as session:
        bind = await query_bind_info(
            session=session,
            chat_platform=get_platform(bot),
            chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
            game_platform=GAME_TYPE,
        )
    if bind is None:
        await matcher.finish('未查询到绑定信息')
    message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
    proc = Processor(
        event_id=id(event),
        user=User(name=bind.game_account),
        command_args=[],
    )
    try:
        await matcher.finish(message + await proc.handle_query())
    except NeedCatchError as e:
        await matcher.finish(str(e))
@alc.assign('query')
async def _(event: Event, matcher: Matcher, account: User):
    """Query TOP stats for an explicitly named account."""
    processor = Processor(event_id=id(event), user=account, command_args=[])
    try:
        reply = await processor.handle_query()
    except NeedCatchError as error:
        reply = str(error)
    await matcher.finish(reply)
@alc.handle()
async def _(matcher: Matcher, account: MessageFormatError):
    # Malformed account argument: echo the validation error back to the user.
    await matcher.finish(str(account))
@alc.handle()
async def _(matcher: Matcher, matches: AlcMatches):
    # Fallback when the command head matched but parsing failed:
    # show the parse error (if any) and always append the help hint.
    if matches.head_matched:
        # BUG FIX: without parentheses, `+` bound only to the `else` branch, so the
        # help hint was silently dropped whenever error_info was present.
        await matcher.finish(
            (f'{matches.error_info!r}\n' if matches.error_info is not None else '') + '输入"top --help"查看帮助'
        )

View File

@@ -0,0 +1,4 @@
from ...utils.typing import GameType
# Platform tag used by the shared bind/query database layer.
GAME_TYPE: GameType = 'TOP'
# Root of the TetrisOnline Poland web site (scraped, no JSON API).
BASE_URL = 'http://tetrisonline.pl/top/'

View File

@@ -0,0 +1,142 @@
from contextlib import suppress
from dataclasses import dataclass
from io import StringIO
from re import match
from typing import NoReturn
from urllib.parse import urlencode
from lxml import etree
from nonebot_plugin_orm import get_session
from pandas import read_html
from ...db import create_or_update_bind
from ...utils.exception import MessageFormatError, RequestError
from ...utils.request import Request, splice_url
from ...utils.typing import GameType
from .. import ProcessedData as ProcessedDataMeta
from .. import Processor as ProcessorMeta
from .. import RawResponse as RawResponseMeta
from .. import User as UserMeta
from .constant import BASE_URL, GAME_TYPE
@dataclass
class User(UserMeta):
    """A TOP user, identified solely by username."""

    name: str


@dataclass
class RawResponse(RawResponseMeta):
    # Raw profile-page bytes, cached per processor instance.
    user_profile: bytes | None = None


@dataclass
class ProcessedData(ProcessedDataMeta):
    # Decoded profile-page HTML, cached per processor instance.
    user_profile: str | None = None


@dataclass
class Data:
    """One lpm/apm statistics pair."""

    lpm: float
    apm: float


@dataclass
class GameData:
    """Scraped stats: last-24h figures and all-time averages (None when absent)."""

    day: Data | None
    total: Data | None
def identify_user_info(info: str) -> User | MessageFormatError:
    """Validate a TOP username (1-16 chars of [A-Za-z0-9_]); return a User or a format error."""
    is_valid = match(r'^[a-zA-Z0-9_]{1,16}$', info) is not None
    return User(name=info) if is_valid else MessageFormatError('用户名不合法')
class Processor(ProcessorMeta):
    """Query processor for TOP (tetrisonline.pl): account binding and profile-page scraping."""

    user: User
    raw_response: RawResponse
    processed_data: ProcessedData

    def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
        super().__init__(event_id, user, command_args)
        # Per-request caches for the fetched profile page.
        self.raw_response = RawResponse()
        self.processed_data = ProcessedData()

    @property
    def game_platform(self) -> GameType:
        # Constant platform tag used by the bind/query database layer.
        return GAME_TYPE

    async def handle_bind(self, platform: str, account: str) -> str:
        """Handle a bind command: verify the user exists, then persist the binding."""
        self.command_type = 'bind'
        await self.check_user()
        async with get_session() as session:
            return await create_or_update_bind(
                session=session,
                chat_platform=platform,
                chat_account=account,
                game_platform=GAME_TYPE,
                game_account=self.user.name,
            )

    async def handle_query(self) -> str:
        """Handle a query command: verify the user exists, then build the reply text."""
        self.command_type = 'query'
        await self.check_user()
        return await self.generate_message()

    async def get_user_profile(self) -> str:
        """Fetch (and cache) the user's profile page as decoded HTML text."""
        if self.processed_data.user_profile is None:
            url = splice_url([BASE_URL, 'profile.php', f'?{urlencode({"user":self.user.name})}'])
            self.raw_response.user_profile = await Request.request(url, is_json=False)
            self.processed_data.user_profile = self.raw_response.user_profile.decode()
        return self.processed_data.user_profile

    async def check_user(self) -> None | NoReturn:
        """Raise RequestError if the profile page reports an unknown user."""
        # The site returns an HTML page containing this literal for unknown users.
        if 'user not found!' in await self.get_user_profile():
            raise RequestError('用户不存在!')
        return None

    async def get_user_name(self) -> str:
        """Extract the display name from the profile page heading."""
        data = etree.HTML(await self.get_user_profile()).xpath('//div[@class="mycontent"]/h1/text()')
        return data[0].replace("'s profile", '')

    async def get_game_data(self) -> GameData:
        """Scrape per-day and all-time LPM/APM statistics from the profile page."""
        html = etree.HTML(await self.get_user_profile())
        day = None
        # The 24h figures may be absent: float() then raises and `day` stays None.
        with suppress(ValueError):
            day = Data(
                lpm=float(str(html.xpath('//div[@class="mycontent"]/text()[3]')[0]).replace('lpm:', '').strip()),
                apm=float(str(html.xpath('//div[@class="mycontent"]/text()[4]')[0]).replace('apm:', '').strip()),
            )
        # Re-serialize the stats <table> element and let pandas parse it.
        table = StringIO(
            etree.tostring(
                html.xpath('//div[@class="mycontent"]/table[@class="mytable"]')[0],
                encoding='utf-8',
            ).decode()
        )
        dataframe = read_html(table, encoding='utf-8', header=0)[0]
        total = Data(lpm=dataframe['lpm'].mean(), apm=dataframe['apm'].mean()) if len(dataframe) != 0 else None
        return GameData(day=day, total=total)

    async def generate_message(self) -> str:
        """Render the reply text from the scraped game data."""
        game_data = await self.get_game_data()
        message = ''
        if game_data.day is not None:
            message += f'用户 {self.user.name} 24小时内统计数据为: '
            message += f"\nL'PM: {round(game_data.day.lpm,2)} ( {round(game_data.day.lpm/24,2)} pps )"
            message += f'\nAPM: {round(game_data.day.apm,2)} ( x{round(game_data.day.apm/game_data.day.lpm,2)} )'
        else:
            message += f'用户 {self.user.name} 暂无24小时内统计数据'
        if game_data.total is not None:
            message += '\n历史统计数据为: '
            message += f"\nL'PM: {round(game_data.total.lpm,2)} ( {round(game_data.total.lpm/24,2)} pps )"
            message += f'\nAPM: {round(game_data.total.apm,2)} ( x{round(game_data.total.apm/game_data.total.lpm,2)} )'
        else:
            message += '\n暂无历史统计数据'
        return message

View File

@@ -0,0 +1,151 @@
from arclet.alconna import Alconna, Arg, ArgFlag, Args, CommandMeta, Option
from nonebot.adapters import Bot, Event
from nonebot.matcher import Matcher
from nonebot_plugin_alconna import AlcMatches, At, on_alconna
from nonebot_plugin_orm import get_session
from ...db import query_bind_info
from ...utils.exception import MessageFormatError, NeedCatchError
from ...utils.platform import get_platform
from ...utils.typing import Me
from ..constant import BIND_COMMAND, QUERY_COMMAND
from .constant import GAME_TYPE
from .processor import Processor, User, identify_user_info
# Command definition for the TOS (TetrisOnline tea server) plugin:
# "茶服绑定<account>" links an account, "茶服查<target>" queries stats.
alc = on_alconna(
    Alconna(
        '茶服',
        Option(
            BIND_COMMAND[0],
            Args(
                Arg(
                    'account',
                    identify_user_info,
                    notice='茶服 用户名 / TeaID',
                    flags=[ArgFlag.HIDDEN],
                )
            ),
            alias=BIND_COMMAND[1:],
            compact=True,
            dest='bind',
            help_text='绑定 茶服 账号',
        ),
        Option(
            QUERY_COMMAND[0],
            Args(
                Arg(
                    'target',
                    At | Me,
                    notice='@想要查询的人 | 自己',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
                Arg(
                    'account',
                    identify_user_info,
                    notice='茶服 用户名 / TeaID',
                    flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
                ),
                # Kept as two separate Args: inside one Union Arg the validation
                # order is not guaranteed, which can misparse the input.
            ),
            alias=QUERY_COMMAND[1:],
            compact=True,
            dest='query',
            help_text='查询 茶服 游戏信息',
        ),
        meta=CommandMeta(
            description='查询 TetrisOnline茶服 的信息',
            example='茶服查我',
            compact=True,
            fuzzy_match=True,
        ),
    ),
    skip_for_unmatch=False,
    auto_send_output=True,
    aliases={'tos', 'TOS'},
)
# OneBot v11 (QQ) specializations: QQ numbers map directly to TOS TeaIDs, so
# binding is unnecessary and @-targets can be queried by their QQ number.
try:
    from nonebot.adapters.onebot.v11 import GROUP, MessageEvent
    from nonebot.adapters.onebot.v11 import Bot as OB11Bot

    @alc.assign('bind')
    async def _(event: MessageEvent, matcher: Matcher):
        await matcher.finish('QQ 平台无需绑定')

    @alc.assign('query')
    async def _(bot: OB11Bot, event: MessageEvent, matcher: Matcher, target: At | Me):
        # Refuse to query the bot itself when @-mentioned in a group.
        if event.is_tome() and await GROUP(bot, event):
            await matcher.finish('不能查询bot的信息')
        proc = Processor(
            event_id=id(event),
            user=User(teaid=target.target if isinstance(target, At) else event.get_user_id()),
            command_args=[],
        )
        try:
            await matcher.finish(await proc.handle_query())
        except NeedCatchError as e:
            await matcher.finish(str(e))
except ImportError:
    # OneBot v11 adapter not installed; the generic handlers below apply.
    pass
@alc.assign('bind')
async def _(bot: Bot, event: Event, matcher: Matcher, account: User):
    # Bind the validated TOS account to the sender's chat identity.
    proc = Processor(
        event_id=id(event),
        user=account,
        command_args=[],
    )
    try:
        await matcher.finish(await proc.handle_bind(platform=get_platform(bot), account=event.get_user_id()))
    except NeedCatchError as e:
        # Expected domain errors (request failures etc.) are shown to the user.
        await matcher.finish(str(e))
@alc.assign('query')
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
    # Query by @-mention (or the sender itself) through the stored binding.
    async with get_session() as session:
        bind = await query_bind_info(
            session=session,
            chat_platform=get_platform(bot),
            chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
            game_platform=GAME_TYPE,
        )
    if bind is None:
        await matcher.finish('未查询到绑定信息')
    message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
    proc = Processor(
        event_id=id(event),
        user=User(name=bind.game_account),
        command_args=[],
    )
    try:
        await matcher.finish(message + await proc.handle_query())
    except NeedCatchError as e:
        await matcher.finish(str(e))
@alc.assign('query')
async def _(event: Event, matcher: Matcher, account: User):
    """Query TOS stats for an explicitly named account / TeaID."""
    processor = Processor(event_id=id(event), user=account, command_args=[])
    try:
        reply = await processor.handle_query()
    except NeedCatchError as error:
        reply = str(error)
    await matcher.finish(reply)
@alc.handle()
async def _(matcher: Matcher, account: MessageFormatError):
    # Malformed account argument: echo the validation error back to the user.
    await matcher.finish(str(account))
@alc.handle()
async def _(matcher: Matcher, matches: AlcMatches):
    # Fallback when the command head matched but parsing failed:
    # show the parse error (if any) and always append the help hint.
    if matches.head_matched:
        # BUG FIX: without parentheses, `+` bound only to the `else` branch, so the
        # help hint was silently dropped whenever error_info was present.
        await matcher.finish(
            (f'{matches.error_info!r}\n' if matches.error_info is not None else '') + '输入"茶服 --help"查看帮助'
        )

View File

@@ -0,0 +1,4 @@
from ...utils.typing import GameType
# Platform tag used by the shared bind/query database layer.
GAME_TYPE: GameType = 'TOS'
# Root of the tea-server JSON API.
BASE_URL = 'https://teatube.cn:8888/'

View File

@@ -0,0 +1,221 @@
from dataclasses import dataclass
from re import match
from typing import Any
from urllib.parse import urlencode
from nonebot_plugin_orm import get_session
from pydantic import parse_raw_as
from ...db import create_or_update_bind
from ...utils.exception import MessageFormatError, RequestError
from ...utils.request import Request, splice_url
from ...utils.typing import GameType
from .. import ProcessedData as ProcessedDataMeta
from .. import Processor as ProcessorMeta
from .. import RawResponse as RawResponseMeta
from .. import User as UserMeta
from .constant import BASE_URL, GAME_TYPE
from .schemas.user_info import SuccessModel as InfoSuccess
from .schemas.user_info import UserInfo
from .schemas.user_profile import UserProfile
@dataclass
class User(UserMeta):
    """A TOS user, identified by TeaID and/or username (at least one is set)."""

    teaid: str | None = None
    name: str | None = None


@dataclass
class RawResponse(RawResponseMeta):
    # Raw profile responses keyed by the extra query parameters used to fetch them.
    user_profile: dict[frozenset[tuple[str, Any]], bytes]
    user_info: bytes | None = None


@dataclass
class ProcessedData(ProcessedDataMeta):
    # Parsed profile responses keyed the same way as RawResponse.user_profile.
    user_profile: dict[frozenset[tuple[str, Any]], UserProfile]
    user_info: InfoSuccess | None = None


@dataclass
class GameData:
    """Aggregated multiplayer statistics over the sampled games."""

    num: int
    pps: float
    lpm: float
    apm: float
    adpm: float
    apl: float
    adpl: float
    vs: float
def identify_user_info(info: str) -> User | MessageFormatError:
    """Classify raw input as a TOS username or a numeric TeaID.

    All-digit input is treated as a TeaID. Otherwise the input must pass the
    username rules (no reserved device names, no forbidden characters,
    length 2-18); anything else yields a MessageFormatError.
    """
    is_numeric = info.isdigit()  # hoisted: previously evaluated in both branches
    if (
        not is_numeric  # idiom fix: was `info.isdigit() is False`
        and match(
            r'^(?!\.)(?!com[0-9]$)(?!con$)(?!lpt[0-9]$)(?!nul$)(?!prn$)[^\-][^\+][^\|\*\?\\\s\!:<>/$"]*[^\.\|\*\?\\\s\!:<>/$"]+$',
            info,
        )
        and 2 <= len(info) <= 18  # noqa: PLR2004
    ):
        return User(name=info)
    if is_numeric:
        return User(teaid=info)
    return MessageFormatError('用户名/QQ号不合法')
class Processor(ProcessorMeta):
    """Query processor for TOS (tea server): account binding and stat queries over the JSON API."""

    user: User
    raw_response: RawResponse
    processed_data: ProcessedData

    def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
        super().__init__(event_id, user, command_args)
        # Per-request caches keyed by extra query parameters (frozenset of items).
        self.raw_response = RawResponse(user_profile={})
        self.processed_data = ProcessedData(user_profile={})

    @property
    def game_platform(self) -> GameType:
        # Constant platform tag used by the bind/query database layer.
        return GAME_TYPE

    async def handle_bind(self, platform: str, account: str) -> str:
        """Handle a bind command: resolve the user, then persist the binding."""
        self.command_type = 'bind'
        await self.get_user()
        if self.user.name is None:
            raise  # FIXME: unsure how to narrow this Optional for the type checker
        async with get_session() as session:
            return await create_or_update_bind(
                session=session,
                chat_platform=platform,
                chat_account=account,
                game_platform=GAME_TYPE,
                game_account=self.user.name,
            )

    async def handle_query(self) -> str:
        """Handle a query command: resolve the user, then build the reply text."""
        self.command_type = 'query'
        await self.get_user()
        return await self.generate_message()

    async def get_user(self) -> None:
        """Fill in whichever of username / TeaID is missing via the user-info endpoint."""
        if self.user.name is None:
            self.user.name = (await self.get_user_info()).data.name
        if self.user.teaid is None:
            self.user.teaid = (await self.get_user_info()).data.teaid

    async def get_user_info(self) -> InfoSuccess:
        """Fetch (and cache) user info, looked up by TeaID when available, else by username."""
        if self.processed_data.user_info is None:
            if self.user.teaid is not None:
                url = splice_url(
                    [
                        BASE_URL,
                        'getTeaIdInfo',
                        f'?{urlencode({"teaId":self.user.teaid})}',
                    ]
                )
            else:
                url = splice_url(
                    [
                        BASE_URL,
                        'getUsernameInfo',
                        f'?{urlencode({"username":self.user.name})}',
                    ]
                )
            self.raw_response.user_info = await Request.request(url)
            user_info: UserInfo = parse_raw_as(UserInfo, self.raw_response.user_info)  # type: ignore[arg-type]
            if not isinstance(user_info, InfoSuccess):
                raise RequestError(f'用户信息请求错误:\n{user_info.error}')
            self.processed_data.user_info = user_info
        return self.processed_data.user_info

    async def get_user_profile(self, other_parameter: dict[str, Any] | None = None) -> UserProfile:
        """Fetch (and cache) the user's game-history profile, keyed by the extra parameters."""
        if other_parameter is None:
            other_parameter = {}
        fset = frozenset(other_parameter.items())
        if self.processed_data.user_profile.get(fset) is None:
            self.raw_response.user_profile[fset] = await Request.request(
                splice_url(
                    [
                        BASE_URL,
                        'getProfile',
                        f'?{urlencode({"id":self.user.teaid or self.user.name},**other_parameter)}',
                    ]
                )
            )
            self.processed_data.user_profile[fset] = UserProfile.parse_raw(self.raw_response.user_profile[fset])
        return self.processed_data.user_profile[fset]

    async def get_game_data(self) -> GameData | None:
        """Aggregate time-weighted multiplayer stats over up to 50 recent valid games."""
        user_profile = await self.get_user_profile()
        if user_profile.data == []:
            return None
        weighted_total_lpm = weighted_total_apm = weighted_total_adpm = 0.0
        total_time = 0.0
        num = 0
        for i in user_profile.data:
            # Skip single-player games and games with zero duration.
            # Server quirk: games without dig data are excluded entirely, even from apm/lpm.
            if i.num_players == 1 or i.time == 0 or i.dig is None:
                continue
            # Time-weighted accumulation (time is reported in milliseconds).
            time = i.time / 1000
            lpm = 24 * (i.pieces / time)
            apm = (i.attack / time) * 60
            adpm = ((i.attack + i.dig) / time) * 60
            weighted_total_lpm += lpm * time
            weighted_total_apm += apm * time
            weighted_total_adpm += adpm * time
            total_time += time
            num += 1
            if num == 50:  # noqa: PLR2004 # TODO: make the sampled game count an optional command argument
                break
        if num == 0:
            return None
        # TODO: if fewer than 50 valid games were found, none lacked dig data, and the
        # response held a full 50 games, keep fetching further back in the history.
        lpm = weighted_total_lpm / total_time
        apm = weighted_total_apm / total_time
        adpm = weighted_total_adpm / total_time
        return GameData(
            num=num,
            pps=round(lpm / 24, 2),
            lpm=round(lpm, 2),
            apm=round(apm, 2),
            adpm=round(adpm, 2),
            apl=round((apm / lpm), 2),
            adpl=round((adpm / lpm), 2),
            vs=round((adpm / 60 * 100), 2),
        )

    async def generate_message(self) -> str:
        """Render the reply text from user info, aggregated game data and personal bests."""
        user_info = (await self.get_user_info()).data
        message = f'用户 {user_info.name} ({user_info.teaid}) '
        if user_info.ranked_games == '0':
            message += '暂无段位统计数据'
        else:
            message += f', 段位分 {round(float(user_info.rating_now),2)}±{round(float(user_info.rd_now),2)} ({round(float(user_info.vol_now),2)}) '
        game_data = await self.get_game_data()
        if game_data is None:
            message += ', 暂无游戏数据'
        else:
            message += f', 最近 {game_data.num} 局数据'
            message += f"\nL'PM: {game_data.lpm} ( {game_data.pps} pps )"
            message += f'\nAPM: {game_data.apm} ( x{game_data.apl} )'
            message += f'\nADPM: {game_data.adpm} ( x{game_data.adpl} ) ( {game_data.vs}vs )'
        # A sprint PB of INT32_MAX means "no record"; other PBs use 0 as the sentinel.
        message += (
            f'\n40L: {float(user_info.pb_sprint)/1000:.2f}s'
            if user_info.pb_sprint != 2147483647  # noqa: PLR2004
            else ''
        )
        message += f'\nMarathon: {user_info.pb_marathon}' if user_info.pb_marathon != 0 else ''
        message += f'\nChallenge: {user_info.pb_challenge}' if user_info.pb_challenge != 0 else ''
        return message

View File

@@ -0,0 +1,86 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
class SuccessModel(BaseModel):
    """Successful response of the TOS user-info endpoints (getTeaIdInfo / getUsernameInfo).

    NOTE(review): most numeric fields arrive as strings from the API and are kept
    as `str` here; callers convert (e.g. float(rating_now)) at the point of use.
    """

    class Data(BaseModel):
        class PeriodMatch(BaseModel):
            # One rated-period match summary.
            name: str
            teaid: str = Field(..., alias='teaId')
            rating: str
            rd: str
            start_time: datetime = Field(..., alias='startTime')
            end_time: datetime = Field(..., alias='endTime')
            win: str
            lose: str
            score: str

        class UserDataTotalItem(BaseModel):
            # Aggregate counters; *_no_map variants exclude map-based games.
            time_map: str = Field(..., alias='timeMap')
            pieces_map: str = Field(..., alias='piecesMap')
            clear_lines_map: str = Field(..., alias='clearLinesMap')
            attacks_map: str = Field(..., alias='attacksMap')
            dig_map: str = Field(..., alias='digMap')
            send_map: str = Field(..., alias='sendMap')
            rise_map: str = Field(..., alias='riseMap')
            offset_map: str = Field(..., alias='offsetMap')
            receive_map: str = Field(..., alias='receiveMap')
            games_map: str = Field(..., alias='gamesMap')
            tetris_map: str = Field(..., alias='tetrisMap')
            combo_map: str = Field(..., alias='comboMap')
            tspin_map: str = Field(..., alias='tspinMap')
            b2b_map: str = Field(..., alias='b2bMap')
            perfect_clear_map: str = Field(..., alias='perfectClearMap')
            time_no_map: str = Field(..., alias='timeNoMap')
            pieces_no_map: str = Field(..., alias='piecesNoMap')
            clear_lines_no_map: str = Field(..., alias='clearLinesNoMap')
            attacks_no_map: str = Field(..., alias='attacksNoMap')
            dig_no_map: str = Field(..., alias='digNoMap')
            send_no_map: str = Field(..., alias='sendNoMap')
            rise_no_map: str = Field(..., alias='riseNoMap')
            offset_no_map: str = Field(..., alias='offsetNoMap')
            receive_no_map: str = Field(..., alias='receiveNoMap')
            games_no_map: str = Field(..., alias='gamesNoMap')
            tetris_no_map: str = Field(..., alias='tetrisNoMap')
            combo_no_map: str = Field(..., alias='comboNoMap')
            tspin_no_map: str = Field(..., alias='tspinNoMap')
            b2b_no_map: str = Field(..., alias='b2bNoMap')
            perfect_clear_no_map: str = Field(..., alias='perfectClearNoMap')

        teaid: str = Field(..., alias='teaId')
        name: str
        total_exp: str = Field(..., alias='totalExp')
        ranking: str
        ranked_games: str = Field(..., alias='rankedGames')
        rating_now: str = Field(..., alias='ratingNow')
        rd_now: str = Field(..., alias='rdNow')
        vol_now: str = Field(..., alias='volNow')
        rating_last: str = Field(..., alias='ratingLast')
        rd_last: str = Field(..., alias='rdLast')
        vol_last: str = Field(..., alias='volLast')
        period_matches: list[PeriodMatch] = Field(..., alias='periodMatches')
        user_data_total: list[UserDataTotalItem] = Field(..., alias='userDataTotal')
        ranking_items: str = Field(..., alias='rankingItems')
        ranking_game_items: str = Field(..., alias='rankingGameItems')
        training_level: str = Field(..., alias='trainingLevel')
        training_wins: str = Field(..., alias='trainingWins')
        pb_sprint: str = Field(..., alias='PBSprint')
        pb_marathon: str = Field(..., alias='PBMarathon')
        pb_challenge: str = Field(..., alias='PBChallenge')
        register_date: datetime = Field(..., alias='registerDate')
        last_login_date: datetime = Field(..., alias='lastLoginDate')

    code: int
    success: Literal[True]
    data: Data
class FailedModel(BaseModel):
    """Failed response of the TOS user-info endpoints."""

    code: int
    success: Literal[False]
    error: str


# A user-info response is either a success or a failure payload.
UserInfo = SuccessModel | FailedModel

View File

@@ -0,0 +1,33 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel
class UserProfile(BaseModel):
    """Response of the TOS getProfile endpoint: one entry per multiplayer game result."""

    class Data(BaseModel):
        idmultiplayergameresult: int
        iduser: str
        teaid: str
        time: int  # game duration; consumed elsewhere as milliseconds -- TODO confirm
        clear_lines: int
        attack: int
        send: int
        offset: int
        receive: int
        rise: int
        dig: int
        pieces: int
        max_combo: int
        pc_count: int
        place: int
        num_players: int
        fumen_code: Literal['0', '1']  # string-encoded flag as delivered by the API
        rule_set: str
        garbage: str
        idmultiplayergame: int
        datetime: datetime

    code: int
    success: bool
    data: list[Data]

View File

@@ -1,56 +0,0 @@
from collections.abc import Callable
from nonebot.adapters import Bot
from nonebot.matcher import Matcher
from nonebot.message import run_postprocessor
from nonebot.typing import T_Handler
from nonebot_plugin_alconna import AlcMatches, Alconna, At, CommandMeta, on_alconna
from .. import ns
from ..i18n.model import Lang
from ..utils.exception import MessageFormatError, NeedCatchError
# Root command shared by all game-specific subcommands (registered elsewhere).
command: Alconna = Alconna(
    ['tetris-stats', 'tstats'],
    namespace=ns,
    meta=CommandMeta(
        description='俄罗斯方块相关游戏数据查询',
        fuzzy_match=True,
    ),
)
alc = on_alconna(
    command=command,
    skip_for_unmatch=False,
    auto_send_output=True,
    use_origin=True,
)
def add_block_handlers(handler: Callable[[T_Handler], T_Handler]) -> None:
    """Register a guard on the given handler decorator that rejects queries targeting the bot itself."""

    @handler
    async def _(bot: Bot, matcher: Matcher, target: At):
        if isinstance(target, At) and target.target == bot.self_id:
            await matcher.finish(Lang.interaction.wrong.query_bot())
from . import tetrio, top, tos # noqa: F401, E402
# Report malformed account arguments back to the user.
@alc.handle()
async def _(matcher: Matcher, account: MessageFormatError):
    await matcher.finish(str(account))


# Fallback: head matched with options, or nothing parsed — show the error and a help hint.
@alc.handle()
async def _(matcher: Matcher, matches: AlcMatches):
    if (matches.head_matched and matches.options != {}) or matches.main_args == {}:
        await matcher.finish(
            (f'{matches.error_info!r}\n' if matches.error_info is not None else '')
            + f'输入"{matches.header_result} --help"查看帮助'
        )


# Any NeedCatchError escaping a handler is reported to the chat.
@run_postprocessor
async def _(matcher: Matcher, exception: NeedCatchError):
    await matcher.send(str(exception))

View File

@@ -1,26 +0,0 @@
from abc import ABC, abstractmethod
from typing import Generic, TypeVar
from pydantic import BaseModel
from ..utils.typedefs import GameType
T = TypeVar('T', bound=GameType)
class BaseUser(BaseModel, ABC, Generic[T]):
    """A game user on platform T, compared by a platform-unique identifier."""

    platform: T

    def __eq__(self, other: object) -> bool:
        # Users are equal iff their platform-unique identifiers match.
        if isinstance(other, BaseUser):
            return self.unique_identifier == other.unique_identifier
        return False

    @property
    @abstractmethod
    def unique_identifier(self) -> str:
        raise NotImplementedError

    # Defining __eq__ would otherwise unset __hash__; keep pydantic's hash.
    __hash__ = BaseModel.__hash__

View File

@@ -1,39 +0,0 @@
from nonebot_plugin_alconna import Subcommand
from ...utils.exception import MessageFormatError
from .. import alc
from .. import command as main_command
from .api import Player
from .constant import USER_ID, USER_NAME
def get_player(user_id_or_name: str) -> Player | MessageFormatError:
    """Resolve raw input into a Player, trying the ID pattern first, then the name pattern."""
    if USER_ID.match(user_id_or_name):
        return Player(user_id=user_id_or_name, trust=True)
    if USER_NAME.match(user_id_or_name):
        return Player(user_name=user_id_or_name, trust=True)
    return MessageFormatError('用户名/ID不合法')
command = Subcommand(
'TETR.IO',
alias=['TETRIO', 'tetr.io', 'tetrio', 'io'],
dest='TETRIO',
help_text='TETR.IO 游戏相关指令',
)
from . import bind, config, list, query, rank, record, unbind # noqa: A004, E402
main_command.add(command)
__all__ = [
'alc',
'bind',
'config',
'list',
'query',
'rank',
'record',
'unbind',
]

View File

@@ -1,5 +0,0 @@
from .player import Player
from .schemas.user import User
from .schemas.user_info import UserInfoSuccess
__all__ = ['Player', 'User', 'UserInfoSuccess']

View File

@@ -1,42 +0,0 @@
from asyncio import Lock
from datetime import datetime, timedelta, timezone
from typing import ClassVar
from weakref import WeakValueDictionary
from aiocache import Cache as ACache # type: ignore[import-untyped]
from nonebot.compat import type_validate_json
from nonebot.log import logger
from yarl import URL
from ....config.config import config
from ....utils.limit import limit
from ....utils.request import Request
from .schemas.base import FailedModel, SuccessModel
UTC = timezone.utc
request = Request(config.tetris.proxy.tetrio or config.tetris.proxy.main)
request.request = limit(timedelta(seconds=1))(request.request) # type: ignore[method-assign]
class Cache:
    """In-memory response cache for the TETR.IO API, honoring each payload's `cached_until`."""

    cache = ACache(ACache.MEMORY)
    # One lock per URL so concurrent requests for the same resource coalesce;
    # WeakValueDictionary drops a lock once no request holds it.
    task: ClassVar[WeakValueDictionary[URL, Lock]] = WeakValueDictionary()

    @classmethod
    async def get(cls, url: URL, extra_headers: dict | None = None) -> bytes:
        """Return the cached response for `url`, fetching and caching it on a miss."""
        lock = cls.task.setdefault(url, Lock())
        async with lock:
            if (cached_data := await cls.cache.get(url)) is not None:
                logger.debug(f'{url}: Cache hit!')
                return cached_data
            response_data = await request.request(url, extra_headers, enable_anti_cloudflare=True)
            parsed_data: SuccessModel | FailedModel = type_validate_json(SuccessModel | FailedModel, response_data)  # type: ignore[arg-type]
            # Only successful payloads are cached; TTL comes from the API's cached_until.
            if isinstance(parsed_data, SuccessModel):
                await cls.cache.add(
                    url,
                    response_data,
                    (parsed_data.cache.cached_until - datetime.now(UTC)).total_seconds(),
                )
            return response_data

View File

@@ -1,96 +0,0 @@
from typing import Literal, overload
from uuid import UUID
from nonebot import __version__ as __nonebot_version__
from nonebot.compat import type_validate_json
from yarl import URL
from ....utils.exception import RequestError
from ....version import __version__
from ..constant import BASE_URL
from .cache import Cache
from .schemas.base import FailedModel
from .schemas.leaderboards import Parameter
from .schemas.leaderboards.by import By, BySuccessModel
from .schemas.leaderboards.solo import Solo, SoloSuccessModel
from .schemas.leaderboards.zenith import Zenith, ZenithSuccessModel
async def by(
    by_type: Literal['league', 'xp', 'ar'], parameter: Parameter, x_session_id: UUID | None = None
) -> BySuccessModel:
    """Fetch one page of the `users/by/{by_type}` leaderboard.

    An optional X-Session-ID header (with a matching User-Agent) is attached
    when `x_session_id` is given. Raises RequestError on a failed response.
    """
    headers: dict | None = None
    if x_session_id is not None:
        headers = {
            'X-Session-ID': str(x_session_id),
            'User-Agent': f'nonebot-plugin-tetris-stats/{__version__} (Windows NT 10.0; Win64; x64) NoneBot2/{__nonebot_version__}',
        }
    raw = await get(BASE_URL / f'users/by/{by_type}', parameter, headers)
    result: By = type_validate_json(By, raw)  # type: ignore[arg-type]
    if isinstance(result, FailedModel):
        msg = f'排行榜信息请求错误:\n{result.error}'
        raise RequestError(msg)
    return result
@overload
async def records(
    records_type: Literal['40l', 'blitz'],
    scope: str = '_global',
    revolution_id: str | None = None,
    *,
    parameter: Parameter,
) -> SoloSuccessModel: ...
@overload
async def records(
    records_type: Literal['zenith', 'zenithex'],
    scope: str = '_global',
    revolution_id: str | None = None,
    *,
    parameter: Parameter,
) -> ZenithSuccessModel: ...
async def records(
    records_type: Literal['40l', 'blitz', 'zenith', 'zenithex'],
    scope: str = '_global',
    revolution_id: str | None = None,
    *,
    parameter: Parameter,
) -> SoloSuccessModel | ZenithSuccessModel:
    """Fetch one page of a records leaderboard.

    The two original `match` branches were byte-identical except for the
    validation model, so the branch only selects the model now.

    Raises ValueError for an unsupported `records_type` (defensive — the
    Literal annotation does not guarantee runtime values) and RequestError
    when the API reports a failure.
    """
    validator: type
    if records_type in ('40l', 'blitz'):
        validator = Solo
    elif records_type in ('zenith', 'zenithex'):
        validator = Zenith
    else:
        msg = f'records_type: {records_type} is not supported'
        raise ValueError(msg)
    model: Solo | Zenith = type_validate_json(
        validator,  # type: ignore[arg-type]
        await get(
            # Scope and optional revolution id are concatenated directly onto
            # the records-type path segment, matching the API's URL scheme.
            BASE_URL / 'records' / f'{records_type}{scope}{revolution_id if revolution_id is not None else ""}',
            parameter,
        ),
    )
    if isinstance(model, FailedModel):
        msg = f'排行榜信息请求错误:\n{model.error}'  # type: ignore[attr-defined]
        raise RequestError(msg)
    return model
async def get(url: URL, parameter: Parameter, extra_headers: dict | None = None) -> bytes:
    """Append the query parameters to `url` and fetch through the shared cache."""
    target = url % parameter.to_params()
    return await Cache.get(target, extra_headers)

View File

@@ -1,20 +0,0 @@
from datetime import datetime
from typing import Literal
from nonebot_plugin_orm import Model
from sqlalchemy import DateTime, String
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
from ....db.models import PydanticType
from .schemas.base import SuccessModel
from .typedefs import Records, Summaries
class TETRIOHistoricalData(MappedAsDataclass, Model):
    """Archived snapshot of a TETR.IO API response for one user."""

    __tablename__ = 'nb_t_io_hist_data'

    id: Mapped[int] = mapped_column(init=False, primary_key=True)
    # 24-character TETR.IO user id.
    user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
    # Which endpoint produced `data`: 'User Info', a summaries key, or a records key.
    api_type: Mapped[Literal['User Info', Records, Summaries]] = mapped_column(String(32), index=True)
    # Full validated payload, serialized via the project's PydanticType.
    data: Mapped[SuccessModel] = mapped_column(PydanticType(get_model=[SuccessModel.__subclasses__], models=set()))
    # Server-reported cache timestamp of the snapshot (callers pass cache.cached_at).
    update_time: Mapped[datetime] = mapped_column(DateTime, index=True)

View File

@@ -1,243 +0,0 @@
from enum import Enum
from types import MappingProxyType
from typing import Literal, NamedTuple, cast, overload
from async_lru import alru_cache
from nonebot.compat import type_validate_json
from ....db import anti_duplicate_add
from ....utils.exception import RequestError
from ..constant import BASE_URL, USER_ID, USER_NAME
from .cache import Cache
from .models import TETRIOHistoricalData
from .schemas.base import FailedModel
from .schemas.labs.leagueflow import LeagueFlow, LeagueFlowSuccess
from .schemas.records.solo import Solo as SoloRecord
from .schemas.records.solo import SoloSuccessModel as RecordsSoloSuccessModel
from .schemas.summaries import (
AchievementsSuccessModel,
SummariesModel,
ZenithSuccessModel,
ZenSuccessModel,
)
from .schemas.summaries import (
SoloSuccessModel as SummariesSoloSuccessModel,
)
from .schemas.summaries.base import User as SummariesUser
from .schemas.summaries.league import LeagueSuccessModel
from .schemas.user import User
from .schemas.user_info import UserInfo, UserInfoSuccess
from .typedefs import Records, Summaries
class RecordModeType(str, Enum):
    # Solo game modes addressable via /users/{user}/records/{mode}.
    Sprint = '40l'
    Blitz = 'blitz'
class RecordType(str, Enum):
    # Record listing variants accepted by the records endpoint.
    Top = 'top'
    Recent = 'recent'
    Progression = 'progression'
class RecordKey(NamedTuple):
    """Composite cache key for a (game mode, record listing) pair."""

    mode_type: RecordModeType
    record_type: RecordType

    def to_records(self) -> Records:
        """Collapse the pair into the `<mode>_<listing>` literal used by the DB layer."""
        joined = '_'.join((self.mode_type.value, self.record_type.value))
        return cast('Records', joined)
class Player:
    """Lazy accessor for one TETR.IO player's API data.

    Constructed from either a user id or a user name. Each endpoint's
    response is fetched on first use, memoized on the instance, and archived
    to the TETRIOHistoricalData table via anti_duplicate_add.
    """

    # Validation model used for each summaries endpoint.
    __SUMMARIES_MAPPING: MappingProxyType[Summaries, type[SummariesModel]] = MappingProxyType(
        {
            '40l': SummariesSoloSuccessModel,
            'blitz': SummariesSoloSuccessModel,
            'zenith': ZenithSuccessModel,
            'zenithex': ZenithSuccessModel,
            'league': LeagueSuccessModel,
            'zen': ZenSuccessModel,
            'achievements': AchievementsSuccessModel,
        }
    )

    @overload
    def __init__(self, *, user_id: str, trust: bool = False): ...
    @overload
    def __init__(self, *, user_name: str, trust: bool = False): ...
    def __init__(self, *, user_id: str | None = None, user_name: str | None = None, trust: bool = False):
        """Create a player handle from a user id OR a user name.

        With trust=False (default) the identifier is validated against the
        USER_ID / USER_NAME regexes; ValueError is raised for a malformed
        value or when neither identifier is provided.
        """
        self.user_id = user_id
        self.user_name = user_name
        if not trust:
            if self.user_id is not None:
                if not USER_ID.match(self.user_id):
                    msg = 'Invalid user id'
                    raise ValueError(msg)
            elif self.user_name is not None:
                if not USER_NAME.match(self.user_name):
                    msg = 'Invalid user name'
                    raise ValueError(msg)
            else:
                msg = 'Invalid user'
                raise ValueError(msg)
        # Per-instance memoization for each endpoint family.
        self.__user: User | None = None
        self._user_info: UserInfoSuccess | None = None
        self._summaries: dict[Summaries, SummariesModel] = {}
        self._records: dict[RecordKey, RecordsSoloSuccessModel] = {}
        self._leagueflow: LeagueFlowSuccess | None = None

    @property
    def _request_user_parameter(self) -> str:
        # The API accepts either the user id or the lowercased user name.
        return self.user_id or cast('str', self.user_name).lower()

    @property
    async def user(self) -> User:
        """Resolve (and memoize) the canonical id/name pair for this player."""
        if self.__user is not None:
            return self.__user
        # Prefer user data embedded in an already-cached summaries record to
        # avoid an extra request to the user-info endpoint.
        if (user := (await self._get_local_summaries_user())) is not None:
            self.__user = User(
                ID=user.id,
                name=user.username,
            )
        else:
            user_info = await self.get_info()
            self.__user = User(
                ID=user_info.data.id,
                name=user_info.data.username,
            )
        self.user_id = self.__user.ID
        self.user_name = self.__user.name
        return self.__user

    async def get_info(self) -> UserInfoSuccess:
        """Get User Info"""
        if self._user_info is None:
            raw_user_info = await Cache.get(BASE_URL / 'users' / self._request_user_parameter)
            user_info: UserInfo = type_validate_json(UserInfo, raw_user_info)  # type: ignore[arg-type]
            if isinstance(user_info, FailedModel):
                msg = f'用户信息请求错误:\n{user_info.error}'
                raise RequestError(msg)
            self._user_info = user_info
            # Archive the snapshot for historical queries.
            await anti_duplicate_add(
                TETRIOHistoricalData(
                    user_unique_identifier=(await self.user).unique_identifier,
                    api_type='User Info',
                    data=user_info,
                    update_time=user_info.cache.cached_at,
                ),
            )
        return self._user_info

    @overload
    async def get_summaries(self, summaries_type: Literal['40l', 'blitz']) -> SummariesSoloSuccessModel: ...
    @overload
    async def get_summaries(self, summaries_type: Literal['zenith', 'zenithex']) -> ZenithSuccessModel: ...
    @overload
    async def get_summaries(self, summaries_type: Literal['zen']) -> ZenSuccessModel: ...
    @overload
    async def get_summaries(self, summaries_type: Literal['league']) -> LeagueSuccessModel: ...
    @overload
    async def get_summaries(self, summaries_type: Literal['achievements']) -> AchievementsSuccessModel: ...
    async def get_summaries(self, summaries_type: Summaries) -> SummariesModel:
        """Fetch (and memoize) one summaries endpoint for this player."""
        if summaries_type not in self._summaries:
            raw_summaries = await Cache.get(
                BASE_URL / 'users' / self._request_user_parameter / 'summaries' / summaries_type
            )
            # Validate against the endpoint's model OR the shared failure shape.
            summaries: SummariesModel | FailedModel = type_validate_json(
                self.__SUMMARIES_MAPPING[summaries_type] | FailedModel,  # type: ignore[arg-type]
                raw_summaries,
            )
            if isinstance(summaries, FailedModel):
                msg = f'用户Summaries数据请求错误:\n{summaries.error}'
                raise RequestError(msg)
            self._summaries[summaries_type] = summaries
            await anti_duplicate_add(
                TETRIOHistoricalData(
                    user_unique_identifier=(await self.user).unique_identifier,
                    api_type=summaries_type,
                    data=summaries,
                    update_time=summaries.cache.cached_at,
                ),
            )
        return self._summaries[summaries_type]

    async def get_leagueflow(self) -> LeagueFlowSuccess:
        """Fetch (and memoize) the player's Tetra League history (labs/leagueflow)."""
        if self._leagueflow is None:
            leagueflow: LeagueFlow = type_validate_json(
                LeagueFlow,  # type: ignore[arg-type]
                await Cache.get(BASE_URL / 'labs/leagueflow' / self._request_user_parameter),
            )
            if isinstance(leagueflow, FailedModel):
                msg = f'League 历史记录请求错误:\n{leagueflow.error}'
                raise RequestError(msg)
            self._leagueflow = leagueflow
        return self._leagueflow

    # Convenience aliases for individual summaries endpoints.
    @property
    async def sprint(self) -> SummariesSoloSuccessModel:
        return await self.get_summaries('40l')

    @property
    async def blitz(self) -> SummariesSoloSuccessModel:
        return await self.get_summaries('blitz')

    @property
    async def zen(self) -> ZenSuccessModel:
        return await self.get_summaries('zen')

    @property
    async def league(self) -> LeagueSuccessModel:
        return await self.get_summaries('league')

    async def _get_local_summaries_user(self) -> SummariesUser | None:
        """Return user details embedded in an already-cached summaries record, if any.

        Only record-bearing summaries types are consulted; returns None when
        none of them is cached or none carries a record.
        """
        allow_summaries: set[Literal['40l', 'blitz', 'zenith', 'zenithex']] = {
            '40l',
            'blitz',
            'zenith',
            'zenithex',
        }
        if has_summaries := (allow_summaries & self._summaries.keys()):
            for i in has_summaries:
                if (record := (await self.get_summaries(i)).data.record) is not None:
                    return record.user
        return None

    @property
    @alru_cache
    async def avatar_revision(self) -> int | None:
        # Lookup order: memoized user info → cached summaries → fresh request.
        # NOTE(review): alru_cache wraps the getter before property applies, so
        # results live in a cache keyed by instance — confirm this is intended.
        if self._user_info is not None:
            return self._user_info.data.avatar_revision
        if (user := (await self._get_local_summaries_user())) is not None:
            return user.avatar_revision
        return (await self.get_info()).data.avatar_revision

    @property
    @alru_cache
    async def banner_revision(self) -> int | None:
        # Same lookup order as avatar_revision above.
        if self._user_info is not None:
            return self._user_info.data.banner_revision
        if (user := (await self._get_local_summaries_user())) is not None:
            return user.banner_revision
        return (await self.get_info()).data.banner_revision

    async def get_records(self, mode_type: RecordModeType, records_type: RecordType) -> RecordsSoloSuccessModel:
        """Fetch (and memoize) one solo records listing for this player."""
        if (record_key := RecordKey(mode_type, records_type)) not in self._records:
            raw_records = await Cache.get(
                BASE_URL / 'users' / self._request_user_parameter / 'records' / mode_type / records_type,
            )
            records: RecordsSoloSuccessModel | FailedModel = type_validate_json(SoloRecord, raw_records)  # type: ignore[arg-type]
            if isinstance(records, FailedModel):
                # NOTE(review): this message says "Summaries" but this is the
                # records endpoint — looks like a copy-paste; confirm before changing.
                msg = f'用户Summaries数据请求错误:\n{records.error}'
                raise RequestError(msg)
            self._records[record_key] = records
            await anti_duplicate_add(
                TETRIOHistoricalData(
                    user_unique_identifier=(await self.user).unique_identifier,
                    api_type=record_key.to_records(),
                    data=records,
                    update_time=records.cache.cached_at,
                ),
            )
        return self._records[record_key]

View File

@@ -1,83 +0,0 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
from ...typedefs import Prisecter
class AggregateStats(BaseModel):
    # Aggregate performance metrics (APM / PPS / VS score) reported by the API.
    apm: float
    pps: float
    vsscore: float
class Finesse(BaseModel):
    # Piece-placement finesse statistics for a run.
    combo: int
    faults: int
    perfectpieces: int
class Clears(BaseModel):
    # Line-clear counts broken down by clear type.
    singles: int
    doubles: int
    triples: int
    quads: int
    realtspins: int
    minitspins: int
    minitspinsingles: int
    tspinsingles: int
    minitspindoubles: int
    tspindoubles: int
    tspintriples: int
    tspinquads: int
    allclear: int
class Garbage(BaseModel):
    # Garbage-line statistics; `attack` may be absent (None) in the payload.
    sent: int
    received: int
    attack: int | None
    cleared: int
class P(BaseModel):
    """Prisecter triple used by the API for cursor-based pagination."""

    pri: float
    sec: float
    ter: float

    def to_prisecter(self) -> Prisecter:
        """Serialize the triple into the `pri:sec:ter` cursor string."""
        return Prisecter(':'.join(str(part) for part in (self.pri, self.sec, self.ter)))
# fmt: off
class ArCounts(BaseModel):
    # Achievement-rating counts, aliased from numeric rank keys ('1'..'5',
    # '100') and top-placement keys ('t3'..'t100'); absent counts are None.
    bronze: int | None = Field(default=None, alias='1') # pyright: ignore [reportGeneralTypeIssues]
    silver: int | None = Field(default=None, alias='2') # pyright: ignore [reportGeneralTypeIssues]
    gold: int | None = Field(default=None, alias='3') # pyright: ignore [reportGeneralTypeIssues]
    platinum: int | None = Field(default=None, alias='4') # pyright: ignore [reportGeneralTypeIssues]
    diamond: int | None = Field(default=None, alias='5') # pyright: ignore [reportGeneralTypeIssues]
    issued: int | None = Field(default=None, alias='100') # pyright: ignore [reportGeneralTypeIssues]
    top3: int | None = Field(default=None, alias='t3')
    top5: int | None = Field(default=None, alias='t5')
    top10: int | None = Field(default=None, alias='t10')
    top25: int | None = Field(default=None, alias='t25')
    top50: int | None = Field(default=None, alias='t50')
    top100: int | None = Field(default=None, alias='t100')
# fmt: on
class Cache(BaseModel):
    # Server cache metadata attached to every API response; cached_until is
    # used by the request layer as the local cache TTL.
    status: str
    cached_at: datetime
    cached_until: datetime
class SuccessModel(BaseModel):
    # Base shape of a successful response: `success` is literally True.
    success: Literal[True]
    cache: Cache
class FailedModel(BaseModel):
    # Error response; `error` carries the server-provided message.
    success: Literal[False]
    error: str

View File

@@ -1,65 +0,0 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
from ..base import P
from . import AggregateStats, Clears, Finesse, Garbage
class Time(BaseModel):
    # Timer sub-object of a run's stats.
    start: int
    zero: bool
    locked: bool
    prev: int
    frameoffset: int | None = None
class Stats(BaseModel):
    # Raw in-game statistics for a finished 40L/Blitz run.
    seed: float | None = None  # ?: unclear whether this field is gone for good or may still appear
    lines: int
    level_lines: int
    level_lines_needed: int
    inputs: int
    holds: int = 0
    time: Time | None = None  # ?: unclear whether this field is gone for good or may still appear
    score: int
    zenlevel: int | None = None
    zenprogress: int | None = None
    level: int
    combo: int
    currentcombopower: int | None = None
    topcombo: int
    btb: int
    topbtb: int
    currentbtbchainpower: int | None = None
    tspins: int
    piecesplaced: int
    clears: Clears
    garbage: Garbage
    kills: int
    finesse: Finesse
    finaltime: float
class Results(BaseModel):
    # Final results block of a record.
    aggregatestats: AggregateStats
    stats: Stats
    gameoverreason: str
class Record(BaseModel):
    # A single 40L/Blitz record as returned by the API.
    id: str = Field(..., alias='_id')
    replayid: str
    stub: bool
    gamemode: Literal['40l', 'blitz']
    pb: bool
    oncepb: bool
    ts: datetime
    revolution: None
    otherusers: list
    leaderboards: list[str]
    results: Results
    extras: dict
    disputed: bool
    p: P

View File

@@ -1,43 +0,0 @@
from datetime import datetime
from enum import IntEnum
from typing import Literal, NamedTuple
from pydantic import BaseModel, Field
from ..base import FailedModel
from ..base import SuccessModel as BaseSuccessModel
class Result(IntEnum):
    # Outcome code of one Tetra League match.
    VICTORY = 1
    DEFEAT = 2
    VICTORY_BY_DISQUALIFICATION = 3
    DEFEAT_BY_DISQUALIFICATION = 4
    TIE = 5
    NO_CONTEST = 6
    MATCH_NULLIFIED = 7
class Point(NamedTuple):
    # One data point in the league history, decoded positionally.
    timestamp_offset: int
    result: Result
    post_match_tr: int
    opponent_pre_match_tr: int
    """If the opponent was unranked, same as post_match_tr."""
class Data(BaseModel):
    # Non-empty history: a start time plus at least one point.
    start_time: datetime = Field(..., alias='startTime')
    points: list[Point] = Field(..., min_length=1)
class Empty(BaseModel):
    # Apparently returned when the user has no league history: startTime is
    # pinned to JS Number.MAX_SAFE_INTEGER and points is empty.
    start_time: Literal[9007199254740991] = Field(..., alias='startTime')
    points: list = Field(..., max_length=0)
class LeagueFlowSuccess(BaseSuccessModel):
    # Successful leagueflow payload: populated history or the Empty sentinel.
    data: Data | Empty
LeagueFlow = LeagueFlowSuccess | FailedModel

View File

@@ -1,18 +0,0 @@
from typing import Any
from nonebot.compat import PYDANTIC_V2
from pydantic import BaseModel, Field
from ...typedefs import Prisecter
class Parameter(BaseModel):
    """Query parameters accepted by the leaderboard endpoints."""

    after: Prisecter | None = None
    before: Prisecter | None = None
    limit: int = Field(default=25, ge=1, le=100)
    country: str | None = None

    def to_params(self) -> dict[str, Any]:
        """Dump only the explicitly-set fields, bridging pydantic v1/v2 APIs."""
        dump = self.model_dump if PYDANTIC_V2 else self.dict
        return dump(exclude_defaults=True)

View File

@@ -1,66 +0,0 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
from ...typedefs import Rank, ValidRank
from ..base import ArCounts, FailedModel, P, SuccessModel
class BaseLeague(BaseModel):
    # League fields shared by valid and invalid snapshots.
    gamesplayed: int
    gameswon: int
    tr: float
    gxe: float
    rank: Rank
    bestrank: ValidRank
    glicko: float
    rd: float
    decaying: bool
class InvalidLeague(BaseLeague):
    # League snapshot with missing stats: apm/vs are null, pps may be too.
    pps: float | None
    apm: None
    vs: None
class League(BaseLeague):
    # Fully populated league snapshot.
    pps: float
    apm: float
    vs: float
class BaseEntry(BaseModel):
    # Leaderboard entry fields shared by valid and invalid entries.
    id: str = Field(..., alias='_id')
    username: str
    role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop']
    ts: datetime | None = None
    xp: float
    country: str | None = None
    supporter: bool | None = None
    gamesplayed: int
    gameswon: int
    gametime: float
    ar: int
    ar_counts: ArCounts
    p: P
class InvalidEntry(BaseEntry):
    # Entry whose league data has null fields.
    league: InvalidLeague
class Entry(BaseEntry):
    # Entry with fully populated league data.
    league: League
class Data(BaseModel):
    # One page of entries; valid and invalid entries may be mixed.
    entries: list[Entry | InvalidEntry]
class BySuccessModel(SuccessModel):
    # Successful users/by/* leaderboard response.
    data: Data
By = BySuccessModel | FailedModel

View File

@@ -1,15 +0,0 @@
from pydantic import BaseModel
from ..base import FailedModel, SuccessModel
from ..summaries.solo import Record
class Data(BaseModel):
    # One page of solo records.
    entries: list[Record]
class SoloSuccessModel(SuccessModel):
    # Successful solo records leaderboard response.
    data: Data
Solo = SoloSuccessModel | FailedModel

View File

@@ -1,15 +0,0 @@
from pydantic import BaseModel
from ..base import FailedModel, SuccessModel
from ..summaries.zenith import Record
class Data(BaseModel):
    # One page of zenith records.
    entries: list[Record]
class ZenithSuccessModel(SuccessModel):
    # Successful zenith records leaderboard response.
    data: Data
Zenith = ZenithSuccessModel | FailedModel

View File

@@ -1,17 +0,0 @@
from typing import TypeAlias
from pydantic import BaseModel
from ..base import FailedModel, SuccessModel
from ..base.solo import Record
class Data(BaseModel):
    # One page of solo records entries.
    entries: list[Record]
class SoloSuccessModel(SuccessModel):
    # Successful solo records response.
    data: Data
Solo: TypeAlias = SoloSuccessModel | FailedModel

View File

@@ -1,21 +0,0 @@
from .achievements import Achievements, AchievementsSuccessModel
from .league import LeagueSuccessModel
from .solo import Solo, SoloSuccessModel
from .zen import Zen, ZenSuccessModel
from .zenith import Zenith, ZenithEx, ZenithSuccessModel
# Union of every summaries endpoint's success model.
SummariesModel = AchievementsSuccessModel | SoloSuccessModel | ZenSuccessModel | LeagueSuccessModel | ZenithSuccessModel
__all__ = [
    'Achievements',
    'AchievementsSuccessModel',
    'LeagueSuccessModel',
    'Solo',
    'SoloSuccessModel',
    'SummariesModel',
    'Zen',
    'ZenSuccessModel',
    'Zenith',
    'ZenithEx',
    'ZenithSuccessModel',
]

View File

@@ -1,97 +0,0 @@
from datetime import datetime
from enum import IntEnum
from typing import Literal, TypeAlias
from pydantic import BaseModel, Field
from ..base import FailedModel, SuccessModel
class RankType(IntEnum):
    # How an achievement's rank is determined.
    PERCENTILE = 1
    ISSUE = 2
    ZENITH = 3
    PERCENTILELAX = 4
    PERCENTILEVLAX = 5
    PERCENTILEMLAX = 6
class ValueType(IntEnum):
    # How an achievement's value should be interpreted/formatted.
    NONE = 0
    NUMBER = 1
    TIME = 2
    TIME_INV = 3
    FLOOR = 4
    ISSUE = 5
    NUMBER_INV = 6
class ArType(IntEnum):
    # Achievement-rating category.
    UNRANKED = 0
    RANKED = 1
    COMPETITIVE = 2
class Rank(IntEnum):
    # Medal rank of an achievement.
    NONE = 0
    BRONZE = 1
    SILVER = 2
    GOLD = 3
    PLATINUM = 4
    DIAMOND = 5
    ISSUED = 100
class Ally(BaseModel):
    # Minimal user info for an achievement co-holder.
    id: str = Field(alias='_id')
    username: str
    role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'hidden', 'banned']
    country: str | None = None
    supporter: bool
    avatar_revision: int | None = None
class X(BaseModel):
    # Extra data attached to an achievement (optional ally).
    ally: Ally | None = None
class Achievement(BaseModel):
    # ?: unclear what all of these mean — names reconstructed from the API's
    # terse single-letter keys via aliases.
    achievement_id: int = Field(alias='k')
    category: str
    primary_name: str = Field(alias='name')
    objective: str = Field(alias='object')
    flavor_text: str = Field(alias='desc')
    order: int = Field(alias='o')
    rank_type: RankType = Field(alias='rt')
    value_type: ValueType = Field(alias='vt')
    ar_type: ArType = Field(alias='art')
    min: int
    deci: int
    hidden: bool
    nolb: bool
    event: str | None = None
    event_past: bool | None = None
    disabled: bool | None = None
    pair: bool | None = None
    achieved_score: float | None = Field(None, alias='v')
    a: float | None = None
    t: datetime | None = None
    pos: int | None = None
    total: int | None = None
    rank: Rank | None = None
    x: X | None = None
    n: str
    tiebreak: int
    notifypb: bool
    id: str | None = Field(None, alias='_id')
    progress: float | None = None
    stub: bool | None = None
class AchievementsSuccessModel(SuccessModel):
    # Successful achievements summaries response.
    data: list[Achievement]
Achievements: TypeAlias = AchievementsSuccessModel | FailedModel

View File

@@ -1,10 +0,0 @@
from pydantic import BaseModel
class User(BaseModel):
    # Minimal user details embedded in summaries/records payloads.
    id: str
    username: str
    avatar_revision: int | None
    banner_revision: int | None
    country: str | None
    # NOTE(review): typed int although it looks boolean — confirm the raw API value.
    supporter: int

View File

@@ -1,130 +0,0 @@
from typing import Literal
from nonebot.compat import PYDANTIC_V2
from pydantic import BaseModel, Field
from ...typedefs import Rank, S1Rank, S1ValidRank
from ..base import SuccessModel
if PYDANTIC_V2:
from pydantic import field_validator
else:
from pydantic import validator
class PastInner(BaseModel):
    # A past season's final league standing.
    season: str
    username: str
    country: str | None = None
    placement: int | None = None
    gamesplayed: int
    gameswon: int
    glicko: float
    gxe: float
    tr: float
    rd: float
    rank: S1Rank
    bestrank: S1ValidRank
    ranked: bool
    apm: float
    pps: float
    vs: float
class Past(BaseModel):
    # Past seasons keyed by season number; only season '1' is modeled here.
    first: PastInner | None = Field(default=None, alias='1') # pyright: ignore [reportGeneralTypeIssues]
class BaseData(BaseModel):
    # Fields common to all league data variants.
    decaying: bool
    past: Past
class NeverPlayedData(BaseData):
    # Account that has never played league: every stat is a sentinel (-1 / 'z').
    gamesplayed: Literal[0]
    gameswon: Literal[0]
    glicko: Literal[-1]
    rd: Literal[-1]
    gxe: Literal[-1]
    tr: Literal[-1]
    rank: Literal['z']
    apm: None = None
    pps: None = None
    vs: None = None
    standing: Literal[-1]
    standing_local: Literal[-1]
    prev_rank: None
    prev_at: Literal[-1]
    next_rank: None
    next_at: Literal[-1]
    percentile: Literal[-1]
    percentile_rank: Literal['z']
class NeverRatedData(BaseData):
    # Account with 1-9 placement games and no rating yet.
    gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
    gameswon: int
    glicko: Literal[-1]
    rd: Literal[-1]
    gxe: Literal[-1]
    tr: Literal[-1]
    apm: float
    pps: float
    vs: float
    rank: Literal['z']
    standing: Literal[-1]
    standing_local: Literal[-1]
    prev_rank: None
    prev_at: Literal[-1]
    next_rank: None
    next_at: Literal[-1]
    percentile: Literal[-1]
    percentile_rank: Literal['z']

    # The raw payload may deliver apm/pps/vs as null during placements;
    # coerce null to 0 so the float fields still validate (v1/v2 variants).
    if PYDANTIC_V2:
        @field_validator('apm', 'pps', 'vs', mode='before')
        @classmethod
        def _(cls, value: float | None) -> float:
            if value is None:
                return 0
            return value
    else:
        @validator('apm', 'pps', 'vs', pre=True, always=True)
        @classmethod
        def _(cls, value: float | None) -> float:
            if value is None:
                return 0
            return value
class RatedData(BaseData):
    # Normal, fully rated league standing.
    gamesplayed: int
    gameswon: int
    glicko: float
    rd: float
    gxe: float
    tr: float
    rank: Rank
    bestrank: Rank
    standing: int
    apm: float
    pps: float
    vs: float
    standing_local: int
    prev_rank: Rank | None = None
    prev_at: int
    next_rank: Rank | None = None
    next_at: int
    percentile: float
    percentile_rank: str
class InvalidData(BaseModel):
    """I don't know what osk is doing, but the return value is an empty dictionary"""
class LeagueSuccessModel(SuccessModel):
    # Successful league summaries response; data shape varies with play history.
    data: NeverPlayedData | NeverRatedData | RatedData | InvalidData

View File

@@ -1,24 +0,0 @@
from typing import TypeAlias
from pydantic import BaseModel
from ..base import FailedModel, SuccessModel
from ..base.solo import Record as BaseRecord
from .base import User
class Record(BaseRecord):
    # Solo record with the owning user attached.
    user: User
class Data(BaseModel):
    # Personal best and global/local placement; record is None when absent.
    record: Record | None
    rank: int
    rank_local: int
class SoloSuccessModel(SuccessModel):
    # Successful 40L/Blitz summaries response.
    data: Data
Solo: TypeAlias = SoloSuccessModel | FailedModel

View File

@@ -1,17 +0,0 @@
from typing import TypeAlias
from pydantic import BaseModel
from ..base import FailedModel, SuccessModel
class Data(BaseModel):
    # Zen mode progress.
    level: int
    score: int
class ZenSuccessModel(SuccessModel):
    # Successful zen summaries response.
    data: Data
Zen: TypeAlias = ZenSuccessModel | FailedModel

View File

@@ -1,116 +0,0 @@
from datetime import datetime
from typing import Literal, TypeAlias
from pydantic import BaseModel, Field
from ..base import AggregateStats, FailedModel, Finesse, P, SuccessModel
from ..base import Clears as BaseClears
from ..base import Garbage as BaseGarbage
from .base import User
class Clears(BaseClears):
    # Zenith-only clear types on top of the base breakdown.
    pentas: int
    minitspintriples: int
    minitspinquads: int
    tspinpentas: int
class Garbage(BaseGarbage):
    # Zenith-only garbage statistics on top of the base fields.
    sent_nomult: int
    maxspike: int
    maxspike_nomult: int
class _Zenith(BaseModel):
    # Zenith-mode climb statistics (altitude, floors, revives, splits).
    altitude: float
    rank: float
    peakrank: float
    avgrankpts: float
    floor: int
    targetingfactor: float
    targetinggrace: float
    totalbonus: float
    revives: int
    revives_total: int = Field(..., alias='revivesTotal')
    speedrun: bool
    speedrun_seen: bool
    splits: list[int]
class Stats(BaseModel):
    # Raw in-game statistics for a zenith run.
    lines: int
    level_lines: int
    level_lines_needed: int
    inputs: int
    holds: int
    score: int
    zenlevel: int
    zenprogress: int
    level: int
    combo: int
    topcombo: int
    combopower: int
    btb: int
    topbtb: int
    btbpower: int
    tspins: int
    piecesplaced: int
    clears: Clears
    garbage: Garbage
    kills: int
    finesse: Finesse
    zenith: _Zenith
    finaltime: float
class Results(BaseModel):
    # Final results block of a zenith record.
    aggregatestats: AggregateStats
    stats: Stats
    gameoverreason: str
class ExtrasZenith(BaseModel):
    # Modifiers active during the run.
    mods: list[str]
class Extras(BaseModel):
    # Extra per-record metadata.
    zenith: ExtrasZenith
class Record(BaseModel):
    # A single zenith/zenithex record as returned by the API.
    id: str = Field(..., alias='_id')
    replayid: str
    stub: bool
    gamemode: Literal['zenith', 'zenithex']
    pb: bool
    oncepb: bool
    ts: datetime
    revolution: str | None
    user: User
    otherusers: list
    leaderboards: list[str]
    results: Results
    extras: Extras
    disputed: bool
    p: P
class Best(BaseModel):
    # All-time best run (None when absent) with its rank.
    record: Record | None
    rank: int
class Data(BaseModel):
    # Current run/placement plus the all-time best.
    record: Record | None
    rank: int
    rank_local: int
    best: Best
class ZenithSuccessModel(SuccessModel):
    # Successful zenith summaries response.
    data: Data
Zenith: TypeAlias = ZenithSuccessModel | FailedModel
ZenithEx: TypeAlias = ZenithSuccessModel | FailedModel

View File

@@ -1,18 +0,0 @@
from typing import Literal
from typing_extensions import override
from ....schemas import BaseUser
from ...constant import GAME_TYPE
class User(BaseUser[Literal['IO']]):
    # Platform-tagged user handle for TETR.IO.
    platform: Literal['IO'] = GAME_TYPE
    ID: str
    name: str

    @property
    @override
    def unique_identifier(self) -> str:
        # The TETR.IO user id doubles as the cross-plugin unique identifier.
        return self.ID

View File

@@ -1,74 +0,0 @@
from datetime import datetime
from typing import Literal
from pydantic import BaseModel, Field
from .base import ArCounts, FailedModel
from .base import SuccessModel as BaseSuccessModel
class Badge(BaseModel):
    # A profile badge; `ts` may be a timestamp, literally False, or absent.
    id: str
    label: str
    group: str | None = None
    ts: datetime | Literal[False] | None = None
class Connection(BaseModel):
    # A linked third-party account.
    id: str
    username: str
    display_username: str
class Connections(BaseModel):
    # Linked third-party accounts; unlinked services are None.
    discord: Connection | None = None
    twitch: Connection | None = None
    twitter: Connection | None = None
    reddit: Connection | None = None
    youtube: Connection | None = None
    steam: Connection | None = None
class Distinguishment(BaseModel):
    # Special distinguishment banner on the profile.
    type: str
class Data(BaseModel):
    # Full user-info payload from /users/{user}.
    id: str = Field(default=..., alias='_id')
    username: str
    role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'hidden', 'banned']
    ts: datetime | None = None
    botmaster: str | None = None
    badges: list[Badge]
    xp: float
    gamesplayed: int
    gameswon: int
    gametime: float
    country: str | None = None
    badstanding: bool | None = None
    supporter: bool | None = None  # osk says this is always present, but in practice it is not
    supporter_tier: int
    avatar_revision: int | None = None
    """This user's avatar ID. Get their avatar at
    https://tetr.io/user-content/avatars/{ USERID }.jpg?rv={ AVATAR_REVISION }"""
    banner_revision: int | None = None
    """This user's banner ID. Get their banner at
    https://tetr.io/user-content/banners/{ USERID }.jpg?rv={ BANNER_REVISION }
    Ignore this field if the user is not a supporter."""
    bio: str | None = None
    connections: Connections
    friend_count: int | None = None
    distinguishment: Distinguishment | None = None
    achievements: list[int]
    ar: int
    ar_counts: ArCounts
class UserInfoSuccess(BaseSuccessModel):
    # Successful user-info response.
    data: Data
UserInfo = UserInfoSuccess | FailedModel

View File

@@ -1,46 +0,0 @@
from typing import Literal, NewType
# Season-1 rank literals.
S1ValidRank = Literal[
    'x',
    'u',
    'ss',
    's+',
    's',
    's-',
    'a+',
    'a',
    'a-',
    'b+',
    'b',
    'b-',
    'c+',
    'c',
    'c-',
    'd+',
    'd',
]
S1Rank = S1ValidRank | Literal['z']
# Current ladder adds 'x+' on top of the season-1 ranks.
ValidRank = Literal['x+'] | S1ValidRank
Rank = ValidRank | Literal['z']  # 'z' = unrated
# Summaries endpoint names under /users/{user}/summaries/.
Summaries = Literal[
    '40l',
    'blitz',
    'zenith',
    'zenithex',
    'league',
    'zen',
    'achievements',
]
# Records listings archived to the history table, as '{mode}_{listing}'.
Records = Literal[
    '40l_top',
    '40l_recent',
    '40l_progression',
    'blitz_top',
    'blitz_recent',
    'blitz_progression',
]
# Pagination cursor string in the 'pri:sec:ter' form.
Prisecter = NewType('Prisecter', str)

View File

@@ -1,97 +0,0 @@
from hashlib import md5
from secrets import choice
from arclet.alconna import Arg, ArgFlag
from nonebot_plugin_alconna import Args, Subcommand
from nonebot_plugin_alconna.uniseg import UniMessage
from nonebot_plugin_orm import get_session
from nonebot_plugin_uninfo import QryItrface, Uninfo
from nonebot_plugin_uninfo import User as UninfoUser
from nonebot_plugin_uninfo.orm import get_session_persist_id
from nonebot_plugin_user import User
from yarl import URL
from ...config.config import global_config
from ...db import BindStatus, create_or_update_bind, trigger
from ...utils.host import HostPage, get_self_netloc
from ...utils.image import get_avatar
from ...utils.lang import get_lang
from ...utils.render import Bind, render
from ...utils.render.schemas.base import Avatar, People
from ...utils.screenshot import screenshot
from . import alc, command, get_player
from .api import Player
from .constant import GAME_TYPE
# Register the `bind` subcommand; the positional account argument (hidden in
# help output) is resolved to a Player by get_player.
command.add(
    Subcommand(
        'bind',
        Args(
            Arg(
                'account',
                get_player,
                notice='TETR.IO 用户名 / ID',
                flags=[ArgFlag.HIDDEN],
            )
        ),
        help_text='绑定 TETR.IO 账号',
    )
)
# Shortcut: messages like "io绑定" / "io bind" map to `tstats TETR.IO bind`.
alc.shortcut(
    '(?i:io)(?i:绑定|绑|bind)',
    command='tstats TETR.IO bind',
    humanized='io绑定',
)
@alc.assign('TETRIO.bind')
async def _(nb_user: User, account: Player, event_session: Uninfo, interface: QryItrface):
    """Handle the bind subcommand: link the invoker to a TETR.IO account and
    reply with a rendered binding card screenshot."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='bind',
        command_args=[],
    ):
        user = await account.user
        async with get_session() as session:
            bind_status = await create_or_update_bind(
                session=session,
                user=nb_user,
                game_platform=GAME_TYPE,
                game_account=user.unique_identifier,
            )
        if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
            netloc = get_self_netloc()
            async with HostPage(
                await render(
                    'v1/binding',
                    Bind(
                        platform='TETR.IO',
                        type='unknown',
                        user=People(
                            # Use the real avatar when a non-zero revision
                            # exists, otherwise fall back to an identicon
                            # derived from the user id.
                            avatar=str(
                                URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}')
                                % {'revision': avatar_revision}
                            )
                            if (avatar_revision := (await account.avatar_revision)) is not None and avatar_revision != 0
                            else Avatar(type='identicon', hash=md5(user.ID.encode()).hexdigest()),  # noqa: S324
                            name=user.name.upper(),
                        ),
                        bot=People(
                            avatar=await get_avatar(
                                (
                                    bot_user := await interface.get_user(event_session.self_id)
                                    or UninfoUser(id=event_session.self_id)
                                ),
                                'Data URI',
                                '../../static/logo/logo.svg',
                            ),
                            name=bot_user.nick or bot_user.name or choice(list(global_config.nickname) or ['bot']),
                        ),
                        prompt='io查我',
                        lang=get_lang(),
                    ),
                )
            ) as page_hash:
                # Screenshot the hosted page and send it back as an image.
                await UniMessage.image(raw=await screenshot(f'http://{netloc}/host/{page_hash}.html')).finish()

View File

@@ -1,51 +0,0 @@
from arclet.alconna import Arg
from nonebot_plugin_alconna import Option, Subcommand
from nonebot_plugin_alconna.uniseg import UniMessage
from nonebot_plugin_orm import async_scoped_session
from nonebot_plugin_uninfo import Uninfo
from nonebot_plugin_uninfo.orm import get_session_persist_id
from nonebot_plugin_user import User
from sqlalchemy import select
from ...db import trigger
from . import alc, command
from .constant import GAME_TYPE
from .models import TETRIOUserConfig
from .typedefs import Template
# Register the `config` subcommand with its --default-template option.
command.add(
    Subcommand(
        'config',
        Option(
            '--default-template',
            Arg('template', Template, notice='模板版本'),
            alias=['-DT', 'DefaultTemplate'],
            help_text='设置默认查询模板',
        ),
        help_text='TETR.IO 查询个性化配置',
    ),
)
# Shortcut: messages like "io配置" / "io config" map to `tstats TETR.IO config`.
alc.shortcut(
    '(?i:io)(?i:配置|配|config)',
    command='tstats TETR.IO config',
    humanized='io配置',
)
@alc.assign('TETRIO.config')
async def _(user: User, session: async_scoped_session, event_session: Uninfo, template: Template):
    """Handle the config subcommand: persist the user's default query template."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='config',
        command_args=[f'--default-template {template}'],
    ):
        # Upsert the per-user config row.
        config = (await session.scalars(select(TETRIOUserConfig).where(TETRIOUserConfig.id == user.id))).one_or_none()
        if config is None:
            config = TETRIOUserConfig(id=user.id, query_template=template)
            session.add(config)
        else:
            config.query_template = template
        await session.commit()
        await UniMessage('配置成功').finish()

View File

@@ -1,37 +0,0 @@
from re import compile # noqa: A004
from typing import Literal
from yarl import URL
from .api.typedefs import ValidRank
GAME_TYPE: Literal['IO'] = 'IO'
# Root of the TETR.IO channel (statistics) API.
BASE_URL = URL('https://ch.tetr.io/api/')
# Percentile value associated with each Tetra League rank (presumably the
# cumulative upper bound of players at or above that rank — confirm at usage sites).
RANK_PERCENTILE: dict[ValidRank, float] = {
    'x+': 0.2,
    'x': 1,
    'u': 5,
    'ss': 11,
    's+': 17,
    's': 23,
    's-': 30,
    'a+': 38,
    'a': 46,
    'a-': 54,
    'b+': 62,
    'b': 70,
    'b-': 78,
    'c+': 84,
    'c': 90,
    'c-': 95,
    'd+': 97.5,
    'd': 100,
}
# Valid TR range.
TR_MIN = 0
TR_MAX = 25000
# User ids are 24 lowercase hex digits; names are 3-16 chars of [a-zA-Z0-9_-].
USER_ID = compile(r'^[a-f0-9]{24}$')
USER_NAME = compile(r'^[a-zA-Z0-9_-]{3,16}$')

View File

@@ -1,96 +0,0 @@
from nonebot_plugin_alconna import Args, Option, Subcommand
from nonebot_plugin_alconna.uniseg import UniMessage
from nonebot_plugin_uninfo import Uninfo
from nonebot_plugin_uninfo.orm import get_session_persist_id
from ...db import trigger
from ...utils.host import HostPage, get_self_netloc
from ...utils.lang import get_lang
from ...utils.metrics import get_metrics
from ...utils.render import render
from ...utils.render.schemas.v2.tetrio.user.list import Data, List, TetraLeague, User
from ...utils.screenshot import screenshot
from .. import alc
from . import command
from .api.leaderboards import by
from .api.schemas.base import P
from .api.schemas.leaderboards import Parameter
from .api.schemas.leaderboards.by import Entry
from .constant import GAME_TYPE
# Register the `list` subcommand: queries a slice of the Tetra League
# leaderboard, optionally bounded by TR range, size and country.
command.add(
    Subcommand(
        'list',
        Option('--max-tr', Args['max_tr', float], help_text='TR的上限'),
        Option('--min-tr', Args['min_tr', float], help_text='TR的下限'),
        Option('--limit', Args['limit', int], help_text='查询数量'),
        Option('--country', Args['country', str], help_text='国家代码'),
        help_text='查询 TETR.IO 段位排行榜',
    )
)
@alc.assign('TETRIO.list')
async def _(
    event_session: Uninfo,
    max_tr: float | None = None,
    min_tr: float | None = None,
    limit: int | None = None,
    country: str | None = None,
):
    """Fetch a Tetra League leaderboard slice, render it to HTML and reply with a screenshot."""
    # API expects upper-cased ISO country codes.
    country = country.upper() if country is not None else None
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='list',
        command_args=[
            f'{key} {value}'
            for key, value in zip(
                ('--max-tr', '--min-tr', '--limit', '--country'), (max_tr, min_tr, limit, country), strict=True
            )
            if value is not None
        ],
    ):
        parameter = Parameter(
            # ?: seemingly only `pri` is needed — at least the league
            # leaderboard only returns `pri` (original note, translated).
            after=P(pri=max_tr, sec=0, ter=0).to_prisecter() if max_tr is not None else None,
            before=P(pri=min_tr, sec=0, ter=0).to_prisecter() if min_tr is not None else None,
            limit=limit or 25,  # API default page size when not specified
            country=country,
        )
        league = await by('league', parameter)
        async with HostPage(
            await render(
                'v2/tetrio/user/list',
                List(
                    show_index=True,
                    data=[
                        Data(
                            user=User(
                                id=i.id,
                                name=i.username.upper(),
                                avatar=f'https://tetr.io/user-content/avatars/{i.id}.jpg',
                                country=i.country,
                                xp=i.xp,
                            ),
                            tetra_league=TetraLeague(
                                rank=i.league.rank,
                                tr=round(i.league.tr, 2),
                                glicko=round(i.league.glicko, 2),
                                rd=round(i.league.rd, 2),
                                decaying=i.league.decaying,
                                pps=(metrics := get_metrics(pps=i.league.pps, apm=i.league.apm, vs=i.league.vs)).pps,
                                apm=metrics.apm,
                                apl=metrics.apl,
                                vs=metrics.vs,
                                adpl=metrics.adpl,
                            ),
                        )
                        # Entries that are not full `Entry` models (e.g. with
                        # missing league fields) are skipped.
                        for i in league.data.entries
                        if isinstance(i, Entry)
                    ],
                    lang=get_lang(),
                ),
            )
        ) as page_hash:
            await UniMessage.image(raw=await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')).finish()

View File

@@ -1,61 +0,0 @@
from datetime import datetime
from uuid import UUID
from nonebot_plugin_orm import Model
from sqlalchemy import DateTime, ForeignKey, String
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column, relationship
from ...db.models import PydanticType
from .api.schemas.leaderboards.by import BySuccessModel, Entry
from .api.typedefs import ValidRank
from .typedefs import Template
class TETRIOUserConfig(MappedAsDataclass, Model):
    """Per-user TETR.IO preferences (currently only the default query template)."""

    __tablename__ = 'nb_t_io_u_cfg'

    # nonebot-plugin-user user id; one row per user.
    id: Mapped[int] = mapped_column(primary_key=True)
    # Default render template for `query` ('v1' / 'v2'); max length 2.
    query_template: Mapped[Template] = mapped_column(String(2))
class TETRIOLeagueStats(MappedAsDataclass, Model):
    """One snapshot of computed Tetra League statistics (one row per scheduled fetch)."""

    __tablename__ = 'nb_t_io_tl_stats'

    id: Mapped[int] = mapped_column(init=False, primary_key=True)
    # Raw paginated API responses backing this snapshot; not loaded by default.
    raw: Mapped[list['TETRIOLeagueHistorical']] = relationship(back_populates='stats', lazy='noload')
    # Per-rank aggregated fields derived from `raw`.
    fields: Mapped[list['TETRIOLeagueStatsField']] = relationship(back_populates='stats')
    update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
class TETRIOLeagueHistorical(MappedAsDataclass, Model):
    """A single raw leaderboard API page, stored verbatim as a pydantic model."""

    __tablename__ = 'nb_t_io_tl_hist'

    id: Mapped[int] = mapped_column(init=False, primary_key=True)
    # Groups all pages fetched in one scheduled run (one UUID per run).
    request_id: Mapped[UUID] = mapped_column(index=True)
    # Full API page, serialized via PydanticType.
    data: Mapped[BySuccessModel] = mapped_column(PydanticType([], {BySuccessModel}))
    update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
    stats_id: Mapped[int] = mapped_column(ForeignKey('nb_t_io_tl_stats.id'), init=False)
    stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='raw')
# Shared SQLAlchemy column type that (de)serializes an `Entry` pydantic model;
# reused by all Entry-valued columns below.
entry_type = PydanticType([], {Entry})
class TETRIOLeagueStatsField(MappedAsDataclass, Model):
    """Aggregated statistics for one rank within one TETRIOLeagueStats snapshot."""

    __tablename__ = 'nb_t_io_tl_stats_field'

    id: Mapped[int] = mapped_column(init=False, primary_key=True)
    rank: Mapped[ValidRank] = mapped_column(String(2), index=True)
    # TR required to enter this rank (percentile cut line).
    tr_line: Mapped[float]
    player_count: Mapped[int]
    # Extreme players of the rank, stored as full leaderboard entries.
    low_pps: Mapped[Entry] = mapped_column(entry_type)
    low_apm: Mapped[Entry] = mapped_column(entry_type)
    low_vs: Mapped[Entry] = mapped_column(entry_type)
    # Rank-wide averages.
    avg_pps: Mapped[float]
    avg_apm: Mapped[float]
    avg_vs: Mapped[float]
    high_pps: Mapped[Entry] = mapped_column(entry_type)
    high_apm: Mapped[Entry] = mapped_column(entry_type)
    high_vs: Mapped[Entry] = mapped_column(entry_type)
    stats_id: Mapped[int] = mapped_column(ForeignKey('nb_t_io_tl_stats.id'), init=False)
    stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='fields')

View File

@@ -1,135 +0,0 @@
from datetime import timezone
from arclet.alconna import Arg, ArgFlag
from nonebot import get_driver
from nonebot.adapters import Event
from nonebot.matcher import Matcher
from nonebot_plugin_alconna import Args, At, Option, Subcommand
from nonebot_plugin_alconna.uniseg import UniMessage
from nonebot_plugin_orm import get_session
from nonebot_plugin_uninfo import Uninfo
from nonebot_plugin_uninfo.orm import get_session_persist_id
from nonebot_plugin_user import User as NBUser
from nonebot_plugin_user import get_user
from sqlalchemy import select
from ....db import query_bind_info, trigger
from ....i18n import Lang
from ....utils.exception import FallbackError
from ....utils.typedefs import Me
from ... import add_block_handlers, alc
from .. import command, get_player
from ..api import Player
from ..constant import GAME_TYPE
from ..models import TETRIOUserConfig
from ..typedefs import Template
from .v1 import make_query_image_v1
from .v2 import make_query_image_v2
UTC = timezone.utc

driver = get_driver()

# Register the `query` subcommand. `target` (an @-mention or "me") and
# `account` (a TETR.IO username/id resolved by `get_player`) are both optional
# and hidden; exactly one of the two handlers below fires depending on which
# argument Alconna matched.
command.add(
    Subcommand(
        'query',
        Args(
            Arg(
                'target',
                At | Me,
                notice='@想要查询的人 / 自己',
                flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
            ),
            Arg(
                'account',
                get_player,
                notice='TETR.IO 用户名 / ID',
                flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
            ),
        ),
        Option(
            '--template',
            Arg('template', Template),
            alias=['-T'],
            help_text='要使用的查询模板',
        ),
        help_text='查询 TETR.IO 游戏信息',
    ),
)

# Natural-language shortcuts expanding to the full command.
alc.shortcut(
    '(?i:io)(?i:查询|查|query|stats)',
    command='tstats TETR.IO query',
    humanized='io查',
)
alc.shortcut(
    'fkosk',
    command='tstats TETR.IO query',
    arguments=[''],
    fuzzy=False,
    humanized='An Easter egg!',
)

# Install shared rate-limit/block handling for this subcommand path.
add_block_handlers(alc.assign('TETRIO.query'))
async def make_query_result(player: Player, template: Template) -> UniMessage:
    """Render the query image for *player* with the requested template.

    A v1 render may raise :class:`FallbackError` when the player's data cannot
    be shown in the v1 layout; in that case we silently fall back to v2.
    """
    selected = template
    if selected == 'v1':
        try:
            return UniMessage.image(raw=await make_query_image_v1(player))
        except FallbackError:
            # v1 layout cannot represent this player — degrade to v2.
            selected = 'v2'
    if selected == 'v2':
        return UniMessage.image(raw=await make_query_image_v2(player))
    # Unreachable while Template is 'v1' | 'v2'; kept to mirror the original contract.
    return None
@alc.assign('TETRIO.query')
async def _(  # noqa: PLR0913
    user: NBUser,
    event: Event,
    matcher: Matcher,
    target: At | Me,
    event_session: Uninfo,
    template: Template | None = None,
):
    """Handle `query` for an @-mentioned user or the sender ("me"): resolve the
    bound TETR.IO account from the DB and reply with the rendered stats image."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            bind = await query_bind_info(
                session=session,
                # An At targets the mentioned user; Me targets the event sender.
                user=await get_user(
                    event_session.scope, target.target if isinstance(target, At) else event.get_user_id()
                ),
                game_platform=GAME_TYPE,
            )
            # No explicit --template: fall back to the caller's saved default.
            if template is None:
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        if bind is None:
            await matcher.finish('未查询到绑定信息')
        # trust=True: the id came from our own bind table, skip re-validation.
        player = Player(user_id=bind.game_account, trust=True)
        await (
            UniMessage.i18n(Lang.interaction.warning.unverified) + await make_query_result(player, template or 'v1')
        ).finish()
@alc.assign('TETRIO.query')
async def _(user: NBUser, account: Player, event_session: Uninfo, template: Template | None = None):
    """Handle `query` when an explicit TETR.IO username/id was given (already
    resolved to a Player by the `get_player` argument parser)."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            # No explicit --template: fall back to the caller's saved default.
            if template is None:
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        await (await make_query_result(account, template or 'v1')).finish()

View File

@@ -1,56 +0,0 @@
from collections.abc import Callable
from datetime import timedelta
from typing import TypeVar, overload
from zoneinfo import ZoneInfo
from ....utils.exception import FallbackError
from ....utils.render.schemas.base import HistoryData
from ..api.schemas.labs.leagueflow import Empty, LeagueFlowSuccess
from ..api.schemas.summaries.league import InvalidData, LeagueSuccessModel, NeverPlayedData, NeverRatedData, RatedData
def flow_to_history(
    leagueflow: LeagueFlowSuccess,
    handle: Callable[[list[HistoryData]], list[HistoryData]] | None = None,
) -> list[HistoryData]:
    """Convert a league-flow API response into TR history points for charting.

    Each point's timestamp is the session start time (converted to
    Asia/Shanghai) plus the point's millisecond offset.

    :param leagueflow: league-flow response; an ``Empty`` payload means the
        player has no data, which triggers the template fallback.
    :param handle: optional post-processing hook (e.g. truncation/smoothing)
        applied to the assembled list.
    :raises FallbackError: when the payload is ``Empty``.
    """
    if isinstance(leagueflow.data, Empty):
        raise FallbackError
    start_time = leagueflow.data.start_time.astimezone(ZoneInfo('Asia/Shanghai'))
    ret = [
        HistoryData(
            record_at=start_time + timedelta(milliseconds=i.timestamp_offset),
            score=i.post_match_tr,
        )
        for i in leagueflow.data.points
        # NOTE(review): the original comprehension had
        # `if start_time + timedelta(milliseconds=i.timestamp_offset)` — a
        # datetime, which is always truthy, so the filter was dead code and
        # has been removed. If a real time-window filter was intended, it
        # needs an actual predicate.
    ]
    return ret if handle is None else handle(ret)
N = TypeVar('N', int, float)
def handling_special_value(value: N) -> N | None:
return value if value != -1 else None
L = TypeVar('L', NeverPlayedData, NeverRatedData, RatedData)


@overload
def get_league_data(user_info: LeagueSuccessModel, league_type: type[L]) -> L: ...
@overload
def get_league_data(
    user_info: LeagueSuccessModel, league_type: None = None
) -> NeverPlayedData | NeverRatedData | RatedData: ...
def get_league_data(
    user_info: LeagueSuccessModel, league_type: type[L] | None = None
) -> L | NeverPlayedData | NeverRatedData | RatedData:
    """Extract the league payload, optionally narrowed to a specific data type.

    :param user_info: successful league summary response.
    :param league_type: when given, the payload must be an instance of this
        type; otherwise any non-invalid payload is returned as-is.
    :raises FallbackError: payload is ``InvalidData``, or does not match the
        requested ``league_type`` — both signal "use another template".
    """
    data = user_info.data
    if isinstance(data, InvalidData):
        raise FallbackError
    if league_type is not None and not isinstance(data, league_type):
        raise FallbackError
    return data

View File

@@ -1,100 +0,0 @@
from asyncio import gather
from datetime import timedelta
from hashlib import md5
from yarl import URL
from ....utils.chart import get_split, get_value_bounds, handle_history_data
from ....utils.exception import FallbackError
from ....utils.host import HostPage, get_self_netloc
from ....utils.lang import get_lang
from ....utils.metrics import get_metrics
from ....utils.render import render
from ....utils.render.schemas.base import Avatar, Trending
from ....utils.render.schemas.v1.base import History
from ....utils.render.schemas.v1.tetrio.user.info import Info, Multiplayer, Singleplayer, User
from ....utils.screenshot import screenshot
from ..api import Player
from ..api.schemas.summaries.league import RatedData
from ..constant import TR_MAX, TR_MIN
from .tools import flow_to_history, get_league_data
async def make_query_image_v1(player: Player) -> bytes:
    """Render the v1 stats card for *player* and return it as PNG bytes.

    Raises FallbackError (directly or via get_league_data/flow_to_history)
    when the player's data cannot be shown in the v1 layout, letting the
    caller fall back to v2.
    """
    # Fetch user profile, league/sprint/blitz summaries, TR history and the
    # avatar revision concurrently.
    (
        (user, user_info, league, sprint, blitz, leagueflow),
        (avatar_revision,),
    ) = await gather(
        gather(player.user, player.get_info(), player.league, player.sprint, player.blitz, player.get_leagueflow()),
        gather(player.avatar_revision),
    )
    # v1 requires a fully rated league profile with a VS value.
    league_data = get_league_data(league, RatedData)
    if league_data.vs is None:
        raise FallbackError
    histories = flow_to_history(leagueflow, handle_history_data)
    values = get_value_bounds([i.score for i in histories])
    split_value, offset = get_split(values, TR_MAX, TR_MIN)
    # 40L sprint: format finaltime (ms) as "S.mmm s" or "M m S.mmm s".
    if sprint.data.record is not None:
        duration = timedelta(milliseconds=sprint.data.record.results.stats.finaltime).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'
    blitz_value = f'{blitz.data.record.results.stats.score:,}' if blitz.data.record is not None else 'N/A'
    netloc = get_self_netloc()
    dsps: float
    dspp: float
    # make mypy happy
    async with HostPage(
        page=await render(
            'v1/tetrio/info',
            Info(
                user=User(
                    # revision 0 / None means no custom avatar: use an
                    # identicon derived from the md5 of the user id instead.
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if avatar_revision is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                    name=user.name.upper(),
                    bio=user_info.data.bio,
                ),
                multiplayer=Multiplayer(
                    glicko=f'{round(league_data.glicko, 2):,}',
                    rd=round(league_data.rd, 2),
                    rank=league_data.rank,
                    tr=f'{round(league_data.tr, 2):,}',
                    global_rank=league_data.standing,
                    history=History(
                        data=histories,
                        split_interval=split_value,
                        min_value=values.value_min,
                        max_value=values.value_max,
                        offset=offset,
                    ),
                    lpm=(metrics := get_metrics(pps=league_data.pps, apm=league_data.apm, vs=league_data.vs)).lpm,
                    pps=metrics.pps,
                    lpm_trending=Trending.KEEP,
                    apm=metrics.apm,
                    apl=metrics.apl,
                    apm_trending=Trending.KEEP,
                    adpm=metrics.adpm,
                    vs=metrics.vs,
                    adpl=metrics.adpl,
                    adpm_trending=Trending.KEEP,
                    # Derived efficiency metrics: app = attacks per piece,
                    # dsps = downstack per second, dspp = downstack per piece;
                    # ci / ge are composite indices computed from them.
                    app=(app := (league_data.apm / (60 * league_data.pps))),
                    dsps=(dsps := ((league_data.vs / 100) - (league_data.apm / 60))),
                    dspp=(dspp := (dsps / league_data.pps)),
                    ci=150 * dspp - 125 * app + 50 * (league_data.vs / league_data.apm) - 25,
                    ge=2 * ((app * dsps) / league_data.pps),
                ),
                singleplayer=Singleplayer(
                    sprint=sprint_value,
                    blitz=blitz_value,
                ),
                lang=get_lang(),
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')

View File

@@ -1,210 +0,0 @@
from asyncio import gather
from datetime import datetime, timedelta
from hashlib import md5
from yarl import URL
from ....utils.exception import FallbackError
from ....utils.host import HostPage, get_self_netloc
from ....utils.lang import get_lang
from ....utils.metrics import get_metrics
from ....utils.render import render
from ....utils.render.schemas.base import Avatar
from ....utils.render.schemas.v2.tetrio.user.info import (
Achievement,
Badge,
Best,
Blitz,
Info,
Sprint,
Statistic,
TetraLeague,
TetraLeagueStatistic,
User,
Week,
Zen,
Zenith,
)
from ....utils.screenshot import screenshot
from ..api import Player
from ..api.schemas.summaries.league import InvalidData, NeverPlayedData, NeverRatedData
from .tools import flow_to_history, handling_special_value
async def make_query_image_v2(player: Player) -> bytes:
    """Render the v2 stats card for *player* and return it as PNG bytes.

    Unlike v1, every optional section (league, zenith, sprint, blitz, …)
    degrades to None instead of raising, so v2 works for any player.
    """
    # Fetch everything concurrently: core summaries in the first gather,
    # media revisions plus extra summaries in the second.
    (
        (user, user_info, league, sprint, blitz, zen),
        (avatar_revision, banner_revision, leagueflow, zenith, zenithex, achievements),
    ) = await gather(
        gather(
            player.user,
            player.get_info(),
            player.league,
            player.sprint,
            player.blitz,
            player.zen,
        ),
        gather(
            player.avatar_revision,
            player.banner_revision,
            player.get_leagueflow(),
            player.get_summaries('zenith'),
            player.get_summaries('zenithex'),
            player.get_summaries('achievements'),
        ),
    )
    # 40L sprint: format finaltime (ms) as "S.mmm s" or "M m S.mmm s".
    if sprint.data.record is not None:
        duration = timedelta(milliseconds=sprint.data.record.results.stats.finaltime).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'
    # Total play time: gametime is seconds with -1 as "hidden/unknown"
    # (mapped to None by handling_special_value); format as h/m/s.
    play_time: str | None
    if (game_time := handling_special_value(user_info.data.gametime)) is not None:
        if game_time // 3600 > 0:
            play_time = f'{game_time // 3600:.0f}h {game_time % 3600 // 60:.0f}m {game_time % 60:.0f}s'
        elif game_time // 60 > 0:
            play_time = f'{game_time // 60:.0f}m {game_time % 60:.0f}s'
        else:
            play_time = f'{game_time:.0f}s'
    else:
        play_time = game_time
    # TR history chart, limited to the last 100 points; no data → no chart.
    try:
        history = flow_to_history(leagueflow, lambda x: x[-100:])
    except FallbackError:
        history = None
    netloc = get_self_netloc()
    async with HostPage(
        await render(
            'v2/tetrio/user/info',
            Info(
                user=User(
                    id=user.ID,
                    name=user.name.upper(),
                    country=user_info.data.country,
                    role=user_info.data.role,
                    botmaster=user_info.data.botmaster,
                    # revision 0 / None means no custom avatar: fall back to an
                    # identicon derived from the md5 of the user id.
                    avatar=str(
                        URL(f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}') % {'revision': avatar_revision}
                    )
                    if avatar_revision is not None and avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user.ID.encode()).hexdigest(),  # noqa: S324
                    ),
                    banner=str(
                        URL(f'http://{netloc}/host/resource/tetrio/banners/{user.ID}') % {'revision': banner_revision}
                    )
                    if banner_revision is not None and banner_revision != 0
                    else None,
                    bio=user_info.data.bio,
                    friend_count=user_info.data.friend_count,
                    supporter_tier=user_info.data.supporter_tier,
                    bad_standing=user_info.data.badstanding or False,
                    badges=[
                        Badge(
                            id=i.id,
                            description=i.label,
                            group=i.group,
                            # ts may be a non-datetime sentinel; only real
                            # datetimes are shown as the receive date.
                            receive_at=i.ts if isinstance(i.ts, datetime) else None,
                        )
                        for i in user_info.data.badges
                    ],
                    xp=user_info.data.xp,
                    ar=user_info.data.ar,
                    achievements=[
                        Achievement(
                            key=i.achievement_id,
                            rank_type=i.rank_type,
                            ar_type=i.ar_type,
                            stub=i.stub,
                            rank=i.rank,
                            achieved_score=i.achieved_score,
                            pos=i.pos,
                            progress=i.progress,
                            total=i.total,
                        )
                        for i in achievements.data
                    ],
                    playtime=play_time,
                    join_at=user_info.data.ts,
                ),
                # League section only when the player has played ranked games
                # and the payload is valid.
                tetra_league=TetraLeague(
                    rank=league.data.rank,
                    # Never-rated players have no bestrank; render 'z' instead.
                    highest_rank='z' if isinstance(league.data, NeverRatedData) else league.data.bestrank,
                    tr=round(league.data.tr, 2),
                    glicko=round(league.data.glicko, 2),
                    rd=round(league.data.rd, 2),
                    global_rank=league.data.standing,
                    country_rank=league.data.standing_local,
                    pps=(metrics := get_metrics(pps=league.data.pps, apm=league.data.apm, vs=league.data.vs)).pps,
                    apm=metrics.apm,
                    apl=metrics.apl,
                    vs=metrics.vs,
                    adpl=metrics.adpl,
                    statistic=TetraLeagueStatistic(total=league.data.gamesplayed, wins=league.data.gameswon),
                    decaying=league.data.decaying,
                    history=history,
                )
                if not isinstance(league.data, NeverPlayedData | InvalidData)
                else None,
                # Quick Play (zenith): weekly record and personal best.
                zenith=Zenith(
                    week=Week(
                        altitude=zenith.data.record.results.stats.zenith.altitude,
                        global_rank=zenith.data.rank,
                        country_rank=zenith.data.rank_local,
                        play_at=zenith.data.record.ts,
                    )
                    if zenith.data.record is not None
                    else None,
                    best=Best(
                        altitude=zenith.data.best.record.results.stats.zenith.altitude,
                        global_rank=zenith.data.best.rank,
                        play_at=zenith.data.best.record.ts,
                    )
                    if zenith.data.best.record is not None
                    else None,
                ),
                # Expert Quick Play (zenithex): same layout as zenith.
                zenithex=Zenith(
                    week=Week(
                        altitude=zenithex.data.record.results.stats.zenith.altitude,
                        global_rank=zenithex.data.rank,
                        country_rank=zenithex.data.rank_local,
                        play_at=zenithex.data.record.ts,
                    )
                    if zenithex.data.record is not None
                    else None,
                    best=Best(
                        altitude=zenithex.data.best.record.results.stats.zenith.altitude,
                        global_rank=zenithex.data.best.rank,
                        play_at=zenithex.data.best.record.ts,
                    )
                    if zenithex.data.best.record is not None
                    else None,
                ),
                statistic=Statistic(
                    total=handling_special_value(user_info.data.gamesplayed),
                    wins=handling_special_value(user_info.data.gameswon),
                ),
                sprint=Sprint(
                    time=sprint_value,
                    global_rank=sprint.data.rank,
                    country_rank=sprint.data.rank_local,
                    play_at=sprint.data.record.ts,
                )
                if sprint.data.record is not None
                else None,
                blitz=Blitz(
                    score=blitz.data.record.results.stats.score,
                    global_rank=blitz.data.rank,
                    country_rank=blitz.data.rank_local,
                    play_at=blitz.data.record.ts,
                )
                if blitz.data.record is not None
                else None,
                zen=Zen(level=zen.data.level, score=zen.data.score),
                lang=get_lang(),
            ),
        ),
    ) as page_hash:
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')

View File

@@ -1,156 +0,0 @@
from collections import defaultdict
from collections.abc import Callable, Sequence
from datetime import datetime, timedelta, timezone
from math import floor
from statistics import mean
from typing import TYPE_CHECKING
from uuid import uuid4
from nonebot import get_driver
from nonebot_plugin_alconna import Subcommand
from nonebot_plugin_apscheduler import scheduler
from nonebot_plugin_orm import get_session
from sqlalchemy import select
from ....config.config import config
from ....utils.exception import RequestError
from ....utils.retry import retry
from .. import alc
from .. import command as base_command
from ..api.leaderboards import by
from ..api.schemas.base import P
from ..api.schemas.leaderboards import Parameter
from ..api.schemas.leaderboards.by import Entry
from ..constant import RANK_PERCENTILE
from ..models import TETRIOLeagueHistorical, TETRIOLeagueStats, TETRIOLeagueStatsField
if TYPE_CHECKING:
from ..api.schemas.leaderboards.by import BySuccessModel
from ..api.typedefs import Rank
UTC = timezone.utc

driver = get_driver()

# `rank` subcommand; `--all` / `--detail` options are added by the submodules.
command = Subcommand('rank', help_text='查询 TETR.IO 段位信息')
def wrapper(slot: int | str, content: str | None) -> str | None:
if slot == 'rank' and not content:
return '--all'
if content is not None:
return f'--detail {content.lower()}'
return content
# "iorank" / "io段位 s+" style shortcut; the optional trailing rank is captured
# and normalized into --all / --detail by `wrapper`.
alc.shortcut(
    r'(?i:io)(?i:段位|段|rank)\s*(?P<rank>[a-zA-Z+-]{0,2})',
    command='tstats TETR.IO rank {rank}',
    humanized='iorank',
    fuzzy=False,
    wrapper=wrapper,
)
# Field accessors used as sort keys over leaderboard entries.
def _pps(user: Entry) -> float:
    return user.league.pps


def _apm(user: Entry) -> float:
    return user.league.apm


def _vs(user: Entry) -> float:
    return user.league.vs


# Selectors: pick the entry with the smallest/largest value of `field`.
def _min(users: Sequence[Entry], field: Callable[[Entry], float]) -> Entry:
    return min(users, key=field)


def _max(users: Sequence[Entry], field: Callable[[Entry], float]) -> Entry:
    return max(users, key=field)


def find_special_player(
    users: Sequence[Entry],
    field: Callable[[Entry], float],
    sort: Callable[[Sequence[Entry], Callable[[Entry], float]], Entry],
) -> Entry:
    """Return the entry selected by `sort` (e.g. _min/_max) over `field` (e.g. _pps)."""
    return sort(users, field)
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
async def get_tetra_league_data() -> None:
    """Scheduled job (every 6h): fetch the full Tetra League leaderboard,
    compute per-rank statistics and persist snapshot + raw pages."""
    # One session id groups all pages of this run.
    x_session_id = uuid4()
    retry_by = retry(max_attempts=10, exception_type=RequestError)(by)
    # Start paging from the maximum prisecter (Number.MAX_SAFE_INTEGER),
    # i.e. from the very top of the leaderboard. (* from ch.tetr.io)
    prisecter = P(pri=9007199254740991, sec=9007199254740991, ter=9007199254740991)
    results: list[BySuccessModel] = []
    while True:
        model = await retry_by('league', Parameter(after=prisecter.to_prisecter(), limit=100), x_session_id)
        # Continue after the last entry of this page.
        prisecter = model.data.entries[-1].p
        results.append(model)
        if len(model.data.entries) < 100:  # page size — short page ends pagination  # noqa: PLR2004
            break
    # Flatten pages, keeping only fully-populated entries, sorted by TR desc.
    players: list[Entry] = []
    for result in results:
        players.extend([i for i in result.data.entries if isinstance(i, Entry)])
    players.sort(key=lambda x: x.league.tr, reverse=True)
    rank_player_mapping: defaultdict[Rank, list[Entry]] = defaultdict(list)
    for player in players:
        rank_player_mapping[player.league.rank].append(player)
    stats = TETRIOLeagueStats(raw=[], fields=[], update_time=datetime.now(UTC))
    fields: list[TETRIOLeagueStatsField] = []
    for rank, percentile in RANK_PERCENTILE.items():
        # TR cut line = TR of the player at the rank's cumulative percentile.
        offset = floor((percentile / 100) * len(players)) - 1
        tr_line = players[offset].league.tr
        rank_players = rank_player_mapping[rank]
        fields.append(
            TETRIOLeagueStatsField(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_players),
                low_pps=find_special_player(rank_players, _pps, _min),
                low_apm=find_special_player(rank_players, _apm, _min),
                low_vs=find_special_player(rank_players, _vs, _min),
                avg_pps=mean(_pps(i) for i in rank_players),
                avg_apm=mean(_apm(i) for i in rank_players),
                avg_vs=mean(_vs(i) for i in rank_players),
                high_pps=find_special_player(rank_players, _pps, _max),
                high_apm=find_special_player(rank_players, _apm, _max),
                high_vs=find_special_player(rank_players, _vs, _max),
                stats=stats,
            )
        )
    # Keep the raw API pages for reproducibility/debugging.
    historicals = [
        TETRIOLeagueHistorical(request_id=x_session_id, data=model, update_time=model.cache.cached_at, stats=stats)
        for model in results
    ]
    stats.raw = historicals
    stats.fields = fields
    async with get_session() as session:
        session.add(stats)
        await session.commit()
# In production, backfill the snapshot at startup if the newest one is stale
# (older than the 6h cron interval) or the table is empty.
if not config.tetris.development:

    @driver.on_startup
    async def _() -> None:
        async with get_session() as session:
            latest_time = await session.scalar(
                select(TETRIOLeagueStats.update_time).order_by(TETRIOLeagueStats.id.desc()).limit(1)
            )
        # Stored datetimes are naive UTC — reattach tzinfo before comparing.
        if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
            await get_tetra_league_data()


# Submodules register the --all / --detail handlers on `command`; imported
# late so `command` exists first.
from . import all, detail  # noqa: A004, E402

base_command.add(command)

__all__ = ['all', 'detail']

View File

@@ -1,118 +0,0 @@
from datetime import timedelta
from arclet.alconna import Arg
from nonebot_plugin_alconna import Option, Subcommand, UniMessage
from nonebot_plugin_orm import get_session
from nonebot_plugin_uninfo import Uninfo
from nonebot_plugin_uninfo.orm import get_session_persist_id
from sqlalchemy import func, select
from sqlalchemy.orm import selectinload
from ....db import trigger
from ....utils.host import HostPage, get_self_netloc
from ....utils.lang import get_lang
from ....utils.metrics import get_metrics
from ....utils.render import render
from ....utils.render.schemas.v1.tetrio.rank import Data as DataV1
from ....utils.render.schemas.v1.tetrio.rank import ItemData as ItemDataV1
from ....utils.render.schemas.v2.tetrio.rank import AverageData as AverageDataV2
from ....utils.render.schemas.v2.tetrio.rank import Data as DataV2
from ....utils.render.schemas.v2.tetrio.rank import ItemData as ItemDataV2
from ....utils.screenshot import screenshot
from .. import alc
from ..constant import GAME_TYPE
from ..models import TETRIOLeagueStats
from ..typedefs import Template
from . import command
# Register the `--all` form of `rank`: render the whole rank table.
command.add(
    Subcommand(
        '--all', Option('--template', Arg('template', Template), alias=['-T'], help_text='要使用的查询模板'), dest='all'
    )
)
@alc.assign('TETRIO.rank.all')
async def _(event_session: Uninfo, template: Template | None = None):
    """Render the full rank table from the latest snapshot, with trending
    computed against the snapshot closest to 24h before it."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=['--all'] + ([f'--template {template}'] if template is not None else []),
    ):
        async with get_session() as session:
            # Newest snapshot (highest id), with per-rank fields eagerly loaded.
            latest_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(TETRIOLeagueStats.id.desc())
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
            # Snapshot whose update_time is closest to latest - 24h
            # (julianday difference minimized) — the trending baseline.
            compare_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(
                        func.abs(
                            func.julianday(TETRIOLeagueStats.update_time)
                            - func.julianday(latest_data.update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
        match template:
            case 'v1' | None:
                await UniMessage.image(raw=await make_image_v1(latest_data, compare_data)).finish()
            case 'v2':
                await UniMessage.image(raw=await make_image_v2(latest_data, compare_data)).finish()
async def make_image_v1(latest_data: TETRIOLeagueStats, compare_data: TETRIOLeagueStats) -> bytes:
    """Render the v1 rank table (TR lines, 24h trending, player counts) as PNG bytes."""
    async with HostPage(
        await render(
            'v1/tetrio/rank',
            DataV1(
                items={
                    # Fields of both snapshots are paired positionally;
                    # strict=True guards against mismatched rank lists.
                    i[0].rank: ItemDataV1(
                        trending=round(i[0].tr_line - i[1].tr_line, 2),
                        require_tr=round(i[0].tr_line, 2),
                        players=i[0].player_count,
                    )
                    for i in zip(latest_data.fields, compare_data.fields, strict=True)
                },
                updated_at=latest_data.update_time,
                lang=get_lang(),
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
async def make_image_v2(latest_data: TETRIOLeagueStats, compare_data: TETRIOLeagueStats) -> bytes:
    """Render the v2 rank table (adds per-rank average metrics) as PNG bytes."""
    async with HostPage(
        await render(
            'v2/tetrio/rank',
            DataV2(
                items={
                    # Fields of both snapshots are paired positionally;
                    # strict=True guards against mismatched rank lists.
                    i[0].rank: ItemDataV2(
                        require_tr=round(i[0].tr_line, 2),
                        trending=round(i[0].tr_line - i[1].tr_line, 2),
                        average_data=AverageDataV2(
                            pps=(metrics := get_metrics(pps=i[0].avg_pps, apm=i[0].avg_apm, vs=i[0].avg_vs)).pps,
                            apm=metrics.apm,
                            apl=metrics.apl,
                            vs=metrics.vs,
                            adpl=metrics.adpl,
                        ),
                        players=i[0].player_count,
                    )
                    for i in zip(latest_data.fields, compare_data.fields, strict=True)
                },
                updated_at=latest_data.update_time,
                lang=get_lang(),
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')

View File

@@ -1,130 +0,0 @@
from datetime import timedelta, timezone
from zoneinfo import ZoneInfo
from arclet.alconna import Arg
from nonebot import get_driver
from nonebot_plugin_alconna import Option, UniMessage
from nonebot_plugin_orm import get_session
from nonebot_plugin_uninfo import Uninfo
from nonebot_plugin_uninfo.orm import get_session_persist_id
from sqlalchemy import func, select
from sqlalchemy.orm import selectinload
from ....db import trigger
from ....utils.host import HostPage, get_self_netloc
from ....utils.lang import get_lang
from ....utils.metrics import get_metrics
from ....utils.render import render
from ....utils.render.schemas.v2.tetrio.rank.detail import Data, SpecialData
from ....utils.screenshot import screenshot
from .. import alc
from ..api.typedefs import ValidRank
from ..constant import GAME_TYPE
from ..models import TETRIOLeagueStats
from . import command
UTC = timezone.utc

driver = get_driver()

# `--detail <rank>` option on the `rank` subcommand.
command.add(Option('--detail', Arg('rank', ValidRank), alias=['-D']))
@alc.assign('TETRIO.rank')
async def _(rank: ValidRank, event_session: Uninfo):
    """Render detailed statistics for one rank, trending against the snapshot
    closest to 24h before the latest one."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=[f'--detail {rank}'],
    ):
        async with get_session() as session:
            # Newest snapshot (highest id), fields eagerly loaded.
            latest_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(TETRIOLeagueStats.id.desc())
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
            # Snapshot whose update_time is closest to latest - 24h.
            compare_data = (
                await session.scalars(
                    select(TETRIOLeagueStats)
                    .order_by(
                        func.abs(
                            func.julianday(TETRIOLeagueStats.update_time)
                            - func.julianday(latest_data.update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                    .options(selectinload(TETRIOLeagueStats.fields))
                )
            ).one()
        await UniMessage.image(
            raw=await make_image(
                rank,
                latest_data,
                compare_data,
            )
        ).finish()
async def make_image(rank: ValidRank, latest: TETRIOLeagueStats, compare: TETRIOLeagueStats) -> bytes:
    """Render one rank's detail card (min/avg/max pps-apm-vs holders, TR line,
    24h trending) as PNG bytes."""
    # Pick this rank's field row from each snapshot.
    latest_data = next(filter(lambda x: x.rank == rank, latest.fields))
    compare_data = next(filter(lambda x: x.rank == rank, compare.fields))
    # Derived metrics for the averages and for each extreme holder.
    avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
    low_pps = get_metrics(
        pps=latest_data.low_pps.league.pps, apm=latest_data.low_pps.league.apm, vs=latest_data.low_pps.league.vs
    )
    low_apm = get_metrics(
        pps=latest_data.low_apm.league.pps, apm=latest_data.low_apm.league.apm, vs=latest_data.low_apm.league.vs
    )
    low_vs = get_metrics(
        pps=latest_data.low_vs.league.pps, apm=latest_data.low_vs.league.apm, vs=latest_data.low_vs.league.vs
    )
    max_pps = get_metrics(
        pps=latest_data.high_pps.league.pps, apm=latest_data.high_pps.league.apm, vs=latest_data.high_pps.league.vs
    )
    max_apm = get_metrics(
        pps=latest_data.high_apm.league.pps, apm=latest_data.high_apm.league.apm, vs=latest_data.high_apm.league.vs
    )
    max_vs = get_metrics(
        pps=latest_data.high_vs.league.pps, apm=latest_data.high_vs.league.apm, vs=latest_data.high_vs.league.vs
    )
    async with HostPage(
        await render(
            'v2/tetrio/rank/detail',
            Data(
                name=latest_data.rank,
                trending=round(latest_data.tr_line - compare_data.tr_line, 2),
                require_tr=round(latest_data.tr_line, 2),
                players=latest_data.player_count,
                # Each extreme cell reports the metric of its own holder.
                minimum_data=SpecialData(
                    apm=low_apm.apm,
                    pps=low_pps.pps,
                    lpm=low_pps.lpm,
                    vs=low_vs.vs,
                    adpm=low_vs.adpm,
                    apm_holder=latest_data.low_apm.username.upper(),
                    pps_holder=latest_data.low_pps.username.upper(),
                    vs_holder=latest_data.low_vs.username.upper(),
                ),
                average_data=SpecialData(
                    apm=avg.apm, pps=avg.pps, lpm=avg.lpm, vs=avg.vs, adpm=avg.adpm, apl=avg.apl, adpl=avg.adpl
                ),
                maximum_data=SpecialData(
                    apm=max_apm.apm,
                    pps=max_pps.pps,
                    lpm=max_pps.lpm,
                    vs=max_vs.vs,
                    adpm=max_vs.adpm,
                    apm_holder=latest_data.high_apm.username.upper(),
                    pps_holder=latest_data.high_pps.username.upper(),
                    vs_holder=latest_data.high_vs.username.upper(),
                ),
                # DB stores naive UTC; display in Asia/Shanghai.
                updated_at=latest.update_time.replace(tzinfo=UTC).astimezone(ZoneInfo('Asia/Shanghai')),
                lang=get_lang(),
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')

View File

@@ -1,33 +0,0 @@
from arclet.alconna import Arg, ArgFlag
from nonebot_plugin_alconna import Args, At, Subcommand
from ....utils.typedefs import Me
from .. import command as base_command
from .. import get_player
# `record` subcommand shared by the blitz/sprint submodules; `target`
# (@-mention or "me") and `account` (TETR.IO username/id) are optional
# hidden arguments, mirroring the `query` subcommand.
command = Subcommand(
    'record',
    Args(
        Arg(
            'target',
            At | Me,
            notice='@想要查询的人 / 自己',
            flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
        ),
        Arg(
            'account',
            get_player,
            notice='TETR.IO 用户名 / ID',
            flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
        ),
    ),
)
# Submodules attach their handlers to `command`; imported late so `command`
# exists first.
from . import blitz, sprint  # noqa: E402

base_command.add(command)

__all__ = [
    'blitz',
    'sprint',
]

Some files were not shown because too many files have changed in this diff Show More