mirror of
https://github.com/A-Minos/nonebot-plugin-tetris-stats.git
synced 2026-03-05 05:36:54 +08:00
Compare commits
194 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 7fe9a6fd3d | |||
| 6dbfd31eab | |||
| 1788d40ed2 | |||
| 18d8e0cdcc | |||
| b37f927be6 | |||
| 314bf4c2f0 | |||
| c9f6817c6a | |||
| 4c7cd00a76 | |||
| b8cf10b45d | |||
| 4ec5c3bde1 | |||
| 270b953bc9 | |||
| 13bd0da592 | |||
| 9545f0b5d0 | |||
| 12f320cbb4 | |||
| 7ff59cfc01 | |||
| 498781f376 | |||
| a3c00dbd93 | |||
| 069d5953f9 | |||
| 3721d92f52 | |||
| 98b58866e1 | |||
|
|
189c3999f7 | ||
|
|
a2622d5102 | ||
|
|
c8832bd1c9 | ||
| e6c3a32532 | |||
| b3015aaa91 | |||
|
|
abc1038082 | ||
|
|
dd91455890 | ||
|
|
4b17b0b907 | ||
| ac4631d1f3 | |||
| b0ee7fe6c7 | |||
| 5bcecc0623 | |||
| 9cf048fce4 | |||
| aff2fa120a | |||
| 1c057661c2 | |||
| 83bcd14012 | |||
| 70f53a2c76 | |||
| 6df70f621e | |||
| 8ba3f3c3f4 | |||
| a5c4e7df5c | |||
| 66db7a8a28 | |||
|
|
716e392a3a | ||
|
|
e47f1bb6f9 | ||
| 03d34c5572 | |||
| 04b480ef52 | |||
| 5563b01937 | |||
| 504edb08de | |||
| c283f1ca49 | |||
| 0171953264 | |||
| 7515daccc7 | |||
| 17690e673f | |||
| e9b3c30a13 | |||
| 42484b9c2c | |||
| 42828f23f6 | |||
| d0af2e83c4 | |||
| 5534456b22 | |||
|
|
1928506021 | ||
|
|
67da935849 | ||
|
|
e1e8743c48 | ||
| e5556bad1d | |||
| 889405ea6b | |||
| 66e1850297 | |||
| f39faced7e | |||
| fffa07dc03 | |||
| 0467b3e5df | |||
| f6cc0229ba | |||
| e2708b661d | |||
| 65d019a6d3 | |||
| be1b07d5dc | |||
| c92bc3aaad | |||
| d4b887ef83 | |||
|
|
695ff13aa2 | ||
| ec1001b3bb | |||
| b545b12255 | |||
| b2505e0979 | |||
| 38defe37cd | |||
| 7a3d7c908c | |||
| bc37a015d6 | |||
|
|
fd85140c99 | ||
|
|
80f4316564 | ||
|
|
3b9c0c89b1 | ||
| c02fdfc47f | |||
| 93b169fa40 | |||
| 5cb428ed71 | |||
|
|
ec392ee384 | ||
|
|
d037cf6d44 | ||
|
|
6964e9b655 | ||
|
|
7a032bf947 | ||
|
|
9a91e5ef5b | ||
|
|
5b58697fce | ||
|
|
b14cebe832 | ||
|
|
4306195ee5 | ||
|
|
ac9c6e79d9 | ||
|
|
ed035c65c1 | ||
| dc8bc9b306 | |||
| 454dd57007 | |||
| b396a6d450 | |||
| 7f584a46eb | |||
| 27518c0408 | |||
|
|
d2a3801dac | ||
| 563564ac8d | |||
| 87c87ad231 | |||
| 30515d1907 | |||
|
|
bd0a8ea447 | ||
|
|
1db1e6dbba | ||
|
|
9040aa9fba | ||
|
|
3a5f1eb266 | ||
|
|
43e927430a | ||
| e1b0918a52 | |||
| c86b2eb31b | |||
|
|
47b3f3e881 | ||
|
|
7caee587b4 | ||
|
|
28ae564e59 | ||
|
|
90dee8402d | ||
|
|
8b560e55cb | ||
|
|
3080531503 | ||
|
|
fae0088533 | ||
|
|
db9286a369 | ||
|
|
420fb29318 | ||
|
|
433a6edd3b | ||
|
|
fa81231f78 | ||
|
|
c474cf0af2 | ||
|
|
e38eb5cdff | ||
|
|
7bacf89840 | ||
|
|
4622e90995 | ||
|
|
fa8c2b11e6 | ||
|
|
2123b747af | ||
|
|
e65233d09f | ||
|
|
7e81bf6b8b | ||
|
|
c4614aa006 | ||
|
|
79a657b9f5 | ||
|
|
0164f29c1e | ||
|
|
8db56366df | ||
|
|
de0a1e4c73 | ||
|
|
3670ce7221 | ||
|
|
101ed737ab | ||
|
|
1611bf47fa | ||
|
|
e084cdb145 | ||
|
|
27258ab744 | ||
|
|
07324825e6 | ||
|
|
472becdfe0 | ||
|
|
bc87e4b16d | ||
|
|
28e2a46303 | ||
| 1324015d58 | |||
| e6eae023e7 | |||
| 67cfb07246 | |||
| 12145a614f | |||
| 0b07882a16 | |||
|
|
9073bf5d0b | ||
|
|
f4dd5fe76f | ||
|
|
1f44fc9884 | ||
|
|
44dee7f200 | ||
|
|
dc5ade6ffc | ||
|
|
05ce329976 | ||
|
|
43cabf2135 | ||
|
|
6767136850 | ||
|
|
65999b4625 | ||
|
|
9fde62ac9e | ||
|
|
c74d8b70aa | ||
|
|
0e29b38f9d | ||
|
|
d040c7dca2 | ||
|
|
68ace3a715 | ||
|
|
e63ac69e0f | ||
| 4afda62782 | |||
|
|
abf4410a00 | ||
| 88c2915251 | |||
| 546369241a | |||
| d59bccbd4d | |||
| 75a6989a7f | |||
| ad635bd37d | |||
|
|
b6d63c9e7f | ||
| 805da8cd36 | |||
| 4a13d7807a | |||
| 7bbdeacc5e | |||
|
|
782792e455 | ||
|
|
bd10549b4c | ||
|
|
035e6d4782 | ||
| 003e6619d8 | |||
| c0fa92df30 | |||
| 7cdb0f3547 | |||
| b773fb44a1 | |||
| c75c6b73bd | |||
| 67782c3156 | |||
| 1e02858913 | |||
| 60605d0dca | |||
| 0d589450bd | |||
|
|
2f144acf0c | ||
| 87e6a544a2 | |||
| 74db1931fd | |||
| 1ca6d1f86a | |||
|
|
7361789245 | ||
| fe69d8d2fe | |||
| 2737119865 | |||
| 34a654b5df | |||
| f9f39618a1 |
2
.github/workflows/Release.yml
vendored
2
.github/workflows/Release.yml
vendored
@@ -44,6 +44,6 @@ jobs:
|
|||||||
uses: pypa/gh-action-pypi-publish@release/v1
|
uses: pypa/gh-action-pypi-publish@release/v1
|
||||||
|
|
||||||
- name: Publish Package to GitHub Release
|
- name: Publish Package to GitHub Release
|
||||||
run: gh release upload --clobber ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl
|
run: gh release create ${{ steps.version.outputs.TAG_NAME }} dist/*.tar.gz dist/*.whl -t "🔖 ${{ steps.version.outputs.TAG_NAME }}" --generate-notes
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|||||||
13
.gitignore
vendored
13
.gitignore
vendored
@@ -5,6 +5,17 @@ Untitled*
|
|||||||
*copy*
|
*copy*
|
||||||
.vscode
|
.vscode
|
||||||
*dev*
|
*dev*
|
||||||
*cache*
|
*_cache*
|
||||||
*backup*
|
*backup*
|
||||||
*.pyc
|
*.pyc
|
||||||
|
node_modules
|
||||||
|
.prettier*
|
||||||
|
package.json
|
||||||
|
pnpm-lock.yaml
|
||||||
|
*.drawio.svg
|
||||||
|
package-lock.json
|
||||||
|
*Zone.Identifier
|
||||||
|
.env*
|
||||||
|
bot.py
|
||||||
|
TODO
|
||||||
|
*.fish
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
from nonebot import require
|
from nonebot import require
|
||||||
from nonebot.plugin import PluginMetadata
|
from nonebot.plugin import PluginMetadata
|
||||||
|
|
||||||
require('nonebot_plugin_localstore')
|
|
||||||
require('nonebot_plugin_orm')
|
|
||||||
require('nonebot_plugin_alconna')
|
require('nonebot_plugin_alconna')
|
||||||
require('nonebot_plugin_apscheduler')
|
require('nonebot_plugin_apscheduler')
|
||||||
|
require('nonebot_plugin_localstore')
|
||||||
|
require('nonebot_plugin_orm')
|
||||||
|
require('nonebot_plugin_session')
|
||||||
|
require('nonebot_plugin_session_orm')
|
||||||
|
|
||||||
from .config.config import migrations # noqa: E402
|
from .config.config import migrations # noqa: E402
|
||||||
|
|
||||||
@@ -13,10 +15,11 @@ __plugin_meta__ = PluginMetadata(
|
|||||||
description='一个用于查询 Tetris 相关游戏玩家数据的插件',
|
description='一个用于查询 Tetris 相关游戏玩家数据的插件',
|
||||||
usage='发送 {游戏名} --help 查询使用方法',
|
usage='发送 {游戏名} --help 查询使用方法',
|
||||||
type='application',
|
type='application',
|
||||||
homepage='https://github.com/shoucandanghehe/nonebot-plugin-tetris-stats',
|
homepage='https://github.com/A-minos/nonebot-plugin-tetris-stats',
|
||||||
extra={
|
extra={
|
||||||
'orm_version_location': migrations,
|
'orm_version_location': migrations,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
from . import game_data_processor # noqa: F401, E402
|
from . import game_data_processor # noqa: F401, E402
|
||||||
|
from .utils import host # noqa: F401, E402
|
||||||
|
|||||||
@@ -0,0 +1,54 @@
|
|||||||
|
"""Rename field
|
||||||
|
|
||||||
|
迁移 ID: 09d4bb60160d
|
||||||
|
父迁移: b9d65badc713
|
||||||
|
创建时间: 2024-04-23 23:42:04.541672
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = '09d4bb60160d'
|
||||||
|
down_revision: str | Sequence[str] | None = 'b9d65badc713'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
|
||||||
|
batch_op.alter_column('create_time', new_column_name='update_time', existing_type=sa.DateTime())
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_create_time')
|
||||||
|
op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_update_time'),
|
||||||
|
'nonebot_plugin_tetris_stats_iorank',
|
||||||
|
['update_time'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
|
||||||
|
batch_op.alter_column('update_time', new_column_name='create_time')
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_update_time'))
|
||||||
|
op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_iorank_create_time',
|
||||||
|
'nonebot_plugin_tetris_stats_iorank',
|
||||||
|
['create_time'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
"""add field
|
||||||
|
|
||||||
|
迁移 ID: 0d50142b780f
|
||||||
|
父迁移: 09d4bb60160d
|
||||||
|
创建时间: 2024-04-24 14:55:08.064098
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import op
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = '0d50142b780f'
|
||||||
|
down_revision: str | Sequence[str] | None = '09d4bb60160d'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
|
||||||
|
batch_op.add_column(sa.Column('file_hash', sa.String(length=128), nullable=True))
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_file_hash'), ['file_hash'], unique=False
|
||||||
|
)
|
||||||
|
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_file_hash'))
|
||||||
|
batch_op.drop_column('file_hash')
|
||||||
|
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,279 @@
|
|||||||
|
"""Refactor Historical
|
||||||
|
|
||||||
|
迁移 ID: 3c25a5a8c050
|
||||||
|
父迁移: b7fbdafc339a
|
||||||
|
创建时间: 2024-05-14 09:16:35.193001
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import TYPE_CHECKING, Any
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import op
|
||||||
|
from nonebot.log import logger
|
||||||
|
from rich.progress import BarColumn, MofNCompleteColumn, Progress, TaskProgressColumn, TextColumn, TimeRemainingColumn
|
||||||
|
from sqlalchemy import desc, select
|
||||||
|
from sqlalchemy.dialects import sqlite
|
||||||
|
from sqlalchemy.ext.automap import automap_base
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from ujson import dumps, loads
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = '3c25a5a8c050'
|
||||||
|
down_revision: str | Sequence[str] | None = 'b7fbdafc339a'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def migrate_old_data() -> None:
|
||||||
|
Base = automap_base() # noqa: N806
|
||||||
|
Base.prepare(autoload_with=op.get_bind())
|
||||||
|
OldHistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
|
TETRIOHistoricalData = Base.classes.nonebot_plugin_tetris_stats_tetriohistoricaldata # noqa: N806
|
||||||
|
TOSHistoricalData = Base.classes.nonebot_plugin_tetris_stats_toshistoricaldata # noqa: N806
|
||||||
|
with (
|
||||||
|
Session(op.get_bind()) as session,
|
||||||
|
Progress(
|
||||||
|
TextColumn('[progress.description]{task.description}'),
|
||||||
|
BarColumn(),
|
||||||
|
MofNCompleteColumn(),
|
||||||
|
TaskProgressColumn(),
|
||||||
|
TimeRemainingColumn(),
|
||||||
|
) as progress,
|
||||||
|
):
|
||||||
|
task_id = progress.add_task('[cyan]Migrating:', total=session.query(OldHistoricalData).count())
|
||||||
|
pointer = 0
|
||||||
|
while pointer < session.query(OldHistoricalData).order_by(desc(OldHistoricalData.id)).limit(1).one().id:
|
||||||
|
result = session.scalars(
|
||||||
|
select(OldHistoricalData)
|
||||||
|
.where(OldHistoricalData.id > pointer)
|
||||||
|
.order_by(OldHistoricalData.id)
|
||||||
|
.limit(100)
|
||||||
|
).all()
|
||||||
|
for j in result:
|
||||||
|
processed_data: dict[str, Any] = loads(j.processed_data)
|
||||||
|
if j.game_platform == 'IO':
|
||||||
|
if (data := processed_data.get('user_info')) is not None:
|
||||||
|
session.add(
|
||||||
|
TETRIOHistoricalData(
|
||||||
|
user_unique_identifier=j.user_unique_identifier,
|
||||||
|
api_type='User Info',
|
||||||
|
data=dumps(data),
|
||||||
|
update_time=datetime.fromisoformat(data['cache']['cached_at']),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if (data := processed_data.get('user_records')) is not None:
|
||||||
|
session.add(
|
||||||
|
TETRIOHistoricalData(
|
||||||
|
user_unique_identifier=j.user_unique_identifier,
|
||||||
|
api_type='User Records',
|
||||||
|
data=dumps(data),
|
||||||
|
update_time=datetime.fromisoformat(data['cache']['cached_at']),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if j.game_platform == 'TOS' and not j.user_unique_identifier.isdigit():
|
||||||
|
if (data := processed_data.get('user_info')) is not None:
|
||||||
|
session.add(
|
||||||
|
TOSHistoricalData(
|
||||||
|
user_unique_identifier=j.user_unique_identifier,
|
||||||
|
api_type='User Info',
|
||||||
|
data=dumps(data),
|
||||||
|
update_time=j.finish_time,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
if (data := processed_data.get('user_profile')) is not None:
|
||||||
|
for v in data.values():
|
||||||
|
session.add(
|
||||||
|
TOSHistoricalData(
|
||||||
|
user_unique_identifier=j.user_unique_identifier,
|
||||||
|
api_type='User Profile',
|
||||||
|
data=dumps(v),
|
||||||
|
update_time=j.finish_time,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
progress.update(task_id, advance=1)
|
||||||
|
session.commit()
|
||||||
|
pointer = result[-1].id
|
||||||
|
logger.success('Migrate successfully')
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_tetriohistoricaldata',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
|
||||||
|
sa.Column('api_type', sa.String(length=16), nullable=False),
|
||||||
|
sa.Column('data', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tetriohistoricaldata')),
|
||||||
|
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'), ['api_type'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'), ['update_time'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'),
|
||||||
|
['user_unique_identifier'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_tophistoricaldata',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
|
||||||
|
sa.Column('api_type', sa.String(length=16), nullable=False),
|
||||||
|
sa.Column('data', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_tophistoricaldata')),
|
||||||
|
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tophistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_api_type'), ['api_type'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_update_time'), ['update_time'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_user_unique_identifier'),
|
||||||
|
['user_unique_identifier'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_toshistoricaldata',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
|
||||||
|
sa.Column('api_type', sa.String(length=16), nullable=False),
|
||||||
|
sa.Column('data', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('update_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_toshistoricaldata')),
|
||||||
|
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_toshistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_api_type'), ['api_type'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_update_time'), ['update_time'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_user_unique_identifier'),
|
||||||
|
['user_unique_identifier'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_triggerhistoricaldata',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('trigger_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('session_persist_id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('game_platform', sa.String(length=32), nullable=False),
|
||||||
|
sa.Column('command_type', sa.String(length=16), nullable=False),
|
||||||
|
sa.Column('command_args', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('finish_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_triggerhistoricaldata')),
|
||||||
|
info={'bind_key': 'nonebot_plugin_tetris_stats'},
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_command_type'),
|
||||||
|
['command_type'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_game_platform'),
|
||||||
|
['game_platform'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
migrate_old_data()
|
||||||
|
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_command_type')
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform')
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_account')
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_type')
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier')
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_historicaldata')
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_historicaldata',
|
||||||
|
sa.Column('id', sa.INTEGER(), nullable=False),
|
||||||
|
sa.Column('trigger_time', sa.DATETIME(), nullable=False),
|
||||||
|
sa.Column('bot_platform', sa.VARCHAR(length=32), nullable=True),
|
||||||
|
sa.Column('bot_account', sa.VARCHAR(), nullable=True),
|
||||||
|
sa.Column('source_type', sa.VARCHAR(length=32), nullable=True),
|
||||||
|
sa.Column('source_account', sa.VARCHAR(), nullable=True),
|
||||||
|
sa.Column('message', sa.BLOB(), nullable=True),
|
||||||
|
sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False),
|
||||||
|
sa.Column('command_type', sa.VARCHAR(length=16), nullable=False),
|
||||||
|
sa.Column('command_args', sqlite.JSON(), nullable=False),
|
||||||
|
sa.Column('game_user', sqlite.JSON(), nullable=False),
|
||||||
|
sa.Column('processed_data', sqlite.JSON(), nullable=False),
|
||||||
|
sa.Column('finish_time', sa.DATETIME(), nullable=False),
|
||||||
|
sa.Column('user_unique_identifier', sa.VARCHAR(length=32), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'),
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier',
|
||||||
|
['user_unique_identifier'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_source_type', ['source_type'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_source_account', ['source_account'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_game_platform', ['game_platform'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_command_type', ['command_type'], unique=False
|
||||||
|
)
|
||||||
|
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_game_platform'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldata_command_type'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_triggerhistoricaldata')
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_toshistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_user_unique_identifier'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_update_time'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_toshistoricaldata_api_type'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_toshistoricaldata')
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tophistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_user_unique_identifier'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_update_time'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tophistoricaldata_api_type'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_tophistoricaldata')
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_user_unique_identifier'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_update_time'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetriohistoricaldata_api_type'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_tetriohistoricaldata')
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
"""Add redundant platform field
|
||||||
|
|
||||||
|
迁移 ID: 6c3206f90cc3
|
||||||
|
父迁移: 9f6582279ce2
|
||||||
|
创建时间: 2023-11-26 20:15:56.033892
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from sqlalchemy.ext.automap import automap_base
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from ujson import dumps, loads
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = '6c3206f90cc3'
|
||||||
|
down_revision: str | Sequence[str] | None = '9f6582279ce2'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
|
||||||
|
Base = automap_base() # noqa: N806
|
||||||
|
connection = op.get_bind()
|
||||||
|
Base.prepare(autoload_with=connection)
|
||||||
|
|
||||||
|
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
|
|
||||||
|
with Session(connection) as session:
|
||||||
|
for row in session.query(HistoricalData):
|
||||||
|
platform = row.game_platform
|
||||||
|
game_user = loads(row.game_user)
|
||||||
|
processed_data = loads(row.processed_data)
|
||||||
|
game_user['platform'] = platform
|
||||||
|
processed_data['platform'] = platform
|
||||||
|
row.game_user = dumps(game_user)
|
||||||
|
row.processed_data = dumps(processed_data)
|
||||||
|
session.add(row)
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
|
||||||
|
Base = automap_base() # noqa: N806
|
||||||
|
connection = op.get_bind()
|
||||||
|
Base.prepare(autoload_with=connection)
|
||||||
|
|
||||||
|
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
|
|
||||||
|
with Session(connection) as session:
|
||||||
|
for row in session.query(HistoricalData):
|
||||||
|
game_user = loads(row.game_user)
|
||||||
|
processed_data = loads(row.processed_data)
|
||||||
|
game_user.pop('platform', None)
|
||||||
|
processed_data.pop('platform', None)
|
||||||
|
row.game_user = dumps(game_user)
|
||||||
|
row.processed_data = dumps(processed_data)
|
||||||
|
session.add(row)
|
||||||
|
session.commit()
|
||||||
@@ -0,0 +1,96 @@
|
|||||||
|
"""Correct the data in HistoricalData
|
||||||
|
|
||||||
|
迁移 ID: 8a91210ce14d
|
||||||
|
父迁移: 0d50142b780f
|
||||||
|
创建时间: 2024-05-06 08:16:38.487214
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from nonebot.log import logger
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.ext.automap import automap_base
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = '8a91210ce14d'
|
||||||
|
down_revision: str | Sequence[str] | None = '0d50142b780f'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None: # noqa: C901
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
from nonebot_plugin_tetris_stats.version import __version__
|
||||||
|
|
||||||
|
if __version__ != '1.0.3':
|
||||||
|
msg = '本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移'
|
||||||
|
logger.critical(msg)
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
|
from nonebot.compat import PYDANTIC_V2, type_validate_json
|
||||||
|
from pydantic import BaseModel, ValidationError
|
||||||
|
from rich.progress import (
|
||||||
|
BarColumn,
|
||||||
|
MofNCompleteColumn,
|
||||||
|
Progress,
|
||||||
|
TaskProgressColumn,
|
||||||
|
TextColumn,
|
||||||
|
TimeRemainingColumn,
|
||||||
|
)
|
||||||
|
|
||||||
|
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseProcessedData # type: ignore[attr-defined]
|
||||||
|
|
||||||
|
Base = automap_base() # noqa: N806
|
||||||
|
Base.prepare(autoload_with=op.get_bind())
|
||||||
|
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
|
||||||
|
def model_to_json(value: BaseModel) -> str:
|
||||||
|
return value.model_dump_json(by_alias=True)
|
||||||
|
else:
|
||||||
|
|
||||||
|
def model_to_json(value: BaseModel) -> str:
|
||||||
|
return value.json(by_alias=True)
|
||||||
|
|
||||||
|
models = BaseProcessedData.__subclasses__()
|
||||||
|
|
||||||
|
def json_to_model(value: str) -> BaseModel:
|
||||||
|
for i in models:
|
||||||
|
try:
|
||||||
|
return type_validate_json(i, value)
|
||||||
|
except ValidationError: # noqa: PERF203
|
||||||
|
...
|
||||||
|
raise ValueError
|
||||||
|
|
||||||
|
with Session(op.get_bind()) as session:
|
||||||
|
count = session.query(HistoricalData).count()
|
||||||
|
with Progress(
|
||||||
|
TextColumn('[progress.description]{task.description}'),
|
||||||
|
BarColumn(),
|
||||||
|
MofNCompleteColumn(),
|
||||||
|
TaskProgressColumn(),
|
||||||
|
TimeRemainingColumn(),
|
||||||
|
) as progress:
|
||||||
|
task_id = progress.add_task('[cyan]Updateing:', total=count)
|
||||||
|
for i in range(0, count, 100):
|
||||||
|
for j in session.scalars(
|
||||||
|
select(HistoricalData).where(HistoricalData.id > i).order_by(HistoricalData.id).limit(100)
|
||||||
|
):
|
||||||
|
model = json_to_model(j.processed_data)
|
||||||
|
j.processed_data = model_to_json(model)
|
||||||
|
progress.update(task_id, advance=1)
|
||||||
|
session.commit()
|
||||||
|
logger.success('Corrected HistoricalData')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
@@ -5,13 +5,17 @@
|
|||||||
创建时间: 2023-11-11 16:24:11.826667
|
创建时间: 2023-11-11 16:24:11.826667
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from collections.abc import Sequence
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
import sqlalchemy as sa
|
import sqlalchemy as sa
|
||||||
from alembic import op
|
from alembic import op
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
revision: str = '9866f53ce44f'
|
revision: str = '9866f53ce44f'
|
||||||
down_revision: str | Sequence[str] | None = None
|
down_revision: str | Sequence[str] | None = None
|
||||||
branch_labels: str | Sequence[str] | None = ('nonebot_plugin_tetris_stats',)
|
branch_labels: str | Sequence[str] | None = ('nonebot_plugin_tetris_stats',)
|
||||||
|
|||||||
@@ -5,11 +5,12 @@
|
|||||||
创建时间: 2023-11-11 16:51:30.718277
|
创建时间: 2023-11-11 16:51:30.718277
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
|
||||||
from collections.abc import Sequence
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from shutil import copyfile
|
from shutil import copyfile
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from alembic import op
|
from alembic import op
|
||||||
from nonebot import get_driver
|
from nonebot import get_driver
|
||||||
@@ -18,6 +19,9 @@ from sqlalchemy import Connection, create_engine, inspect, text
|
|||||||
from sqlalchemy.ext.automap import automap_base
|
from sqlalchemy.ext.automap import automap_base
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
revision: str = '9cd1647db502'
|
revision: str = '9cd1647db502'
|
||||||
down_revision: str | Sequence[str] | None = '9866f53ce44f'
|
down_revision: str | Sequence[str] | None = '9866f53ce44f'
|
||||||
branch_labels: str | Sequence[str] | None = None
|
branch_labels: str | Sequence[str] | None = None
|
||||||
@@ -80,8 +84,9 @@ def upgrade(name: str = '') -> None:
|
|||||||
logger.success('nonebot_plugin_tetris_stats: 跳过迁移')
|
logger.success('nonebot_plugin_tetris_stats: 跳过迁移')
|
||||||
return
|
return
|
||||||
if 'IORANK' not in tables:
|
if 'IORANK' not in tables:
|
||||||
logger.warning('nonebot_plugin_tetris_stats: 发现过早版本的数据, 请先更新到 0.4.4 版本')
|
msg = 'nonebot_plugin_tetris_stats: 请先安装 0.4.4 版本完成迁移之后再升级'
|
||||||
raise RuntimeError('nonebot_plugin_tetris_stats: 请先安装 0.4.4 版本完成迁移之后再升级')
|
logger.warning(msg)
|
||||||
|
raise RuntimeError(msg)
|
||||||
logger.info('nonebot_plugin_tetris_stats: 发现来自老版本的数据, 正在迁移...')
|
logger.info('nonebot_plugin_tetris_stats: 发现来自老版本的数据, 正在迁移...')
|
||||||
migrate_old_data(connection)
|
migrate_old_data(connection)
|
||||||
db_path.unlink()
|
db_path.unlink()
|
||||||
|
|||||||
@@ -0,0 +1,114 @@
|
|||||||
|
"""Recreate HistoricalData
|
||||||
|
|
||||||
|
迁移 ID: 9f6582279ce2
|
||||||
|
父迁移: 9cd1647db502
|
||||||
|
创建时间: 2023-11-21 08:35:50.393246
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import op
|
||||||
|
from sqlalchemy.dialects import sqlite
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = '9f6582279ce2'
|
||||||
|
down_revision: str | Sequence[str] | None = '9cd1647db502'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_command_type')
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform')
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_account')
|
||||||
|
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_source_type')
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_historicaldata')
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_historicaldata',
|
||||||
|
sa.Column('id', sa.Integer(), nullable=False),
|
||||||
|
sa.Column('trigger_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.Column('bot_platform', sa.String(length=32), nullable=True),
|
||||||
|
sa.Column('bot_account', sa.String(), nullable=True),
|
||||||
|
sa.Column('source_type', sa.String(length=32), nullable=True),
|
||||||
|
sa.Column('source_account', sa.String(), nullable=True),
|
||||||
|
sa.Column('message', sa.PickleType(), nullable=True),
|
||||||
|
sa.Column('game_platform', sa.String(length=32), nullable=False),
|
||||||
|
sa.Column('command_type', sa.String(length=16), nullable=False),
|
||||||
|
sa.Column('command_args', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('game_user', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('processed_data', sa.JSON(), nullable=False),
|
||||||
|
sa.Column('finish_time', sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name=op.f('pk_nonebot_plugin_tetris_stats_historicaldata')),
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_command_type'), ['command_type'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform'), ['game_platform'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_account'), ['source_account'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_type'), ['source_type'], unique=False
|
||||||
|
)
|
||||||
|
# ### end Alembic commands ###
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_type'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_account'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_game_platform'))
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_command_type'))
|
||||||
|
|
||||||
|
op.drop_table('nonebot_plugin_tetris_stats_historicaldata')
|
||||||
|
|
||||||
|
op.create_table(
|
||||||
|
'nonebot_plugin_tetris_stats_historicaldata',
|
||||||
|
sa.Column('id', sa.INTEGER(), nullable=False),
|
||||||
|
sa.Column('trigger_time', sa.DATETIME(), nullable=False),
|
||||||
|
sa.Column('bot_platform', sa.VARCHAR(length=32), nullable=True),
|
||||||
|
sa.Column('bot_account', sa.VARCHAR(), nullable=True),
|
||||||
|
sa.Column('source_type', sa.VARCHAR(length=32), nullable=True),
|
||||||
|
sa.Column('source_account', sa.VARCHAR(), nullable=True),
|
||||||
|
sa.Column('message', sa.BLOB(), nullable=True),
|
||||||
|
sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False),
|
||||||
|
sa.Column('command_type', sa.VARCHAR(length=16), nullable=False),
|
||||||
|
sa.Column('command_args', sqlite.JSON(), nullable=False),
|
||||||
|
sa.Column('game_user', sa.BLOB(), nullable=False),
|
||||||
|
sa.Column('processed_data', sa.BLOB(), nullable=False),
|
||||||
|
sa.Column('finish_time', sa.DATETIME(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'),
|
||||||
|
)
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_source_type', ['source_type'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_source_account', ['source_account'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_game_platform', ['game_platform'], unique=False
|
||||||
|
)
|
||||||
|
batch_op.create_index(
|
||||||
|
'ix_nonebot_plugin_tetris_stats_historicaldata_command_type', ['command_type'], unique=False
|
||||||
|
)
|
||||||
|
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,107 @@
|
|||||||
|
"""Add user_unique_identifier field to HistoricalData
|
||||||
|
|
||||||
|
迁移 ID: b7fbdafc339a
|
||||||
|
父迁移: 8a91210ce14d
|
||||||
|
创建时间: 2024-05-07 16:55:29.527215
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
import sqlalchemy as sa
|
||||||
|
from alembic import op
|
||||||
|
from nonebot.log import logger
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = 'b7fbdafc339a'
|
||||||
|
down_revision: str | Sequence[str] | None = '8a91210ce14d'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
from nonebot_plugin_tetris_stats.version import __version__
|
||||||
|
|
||||||
|
if __version__ != '1.0.4':
|
||||||
|
msg = '本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移'
|
||||||
|
logger.critical(msg)
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
from nonebot.compat import type_validate_json
|
||||||
|
from pydantic import ValidationError
|
||||||
|
from rich.progress import (
|
||||||
|
BarColumn,
|
||||||
|
MofNCompleteColumn,
|
||||||
|
Progress,
|
||||||
|
TaskProgressColumn,
|
||||||
|
TextColumn,
|
||||||
|
TimeRemainingColumn,
|
||||||
|
)
|
||||||
|
from sqlalchemy import select
|
||||||
|
from sqlalchemy.ext.automap import automap_base
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseUser
|
||||||
|
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.add_column(sa.Column('user_unique_identifier', sa.String(length=32), nullable=True))
|
||||||
|
batch_op.create_index(
|
||||||
|
batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier'),
|
||||||
|
['user_unique_identifier'],
|
||||||
|
unique=False,
|
||||||
|
)
|
||||||
|
Base = automap_base() # noqa: N806
|
||||||
|
connection = op.get_bind()
|
||||||
|
Base.prepare(autoload_with=connection)
|
||||||
|
HistoricalData = Base.classes.nonebot_plugin_tetris_stats_historicaldata # noqa: N806
|
||||||
|
|
||||||
|
models: list[type[BaseUser]] = BaseUser.__subclasses__()
|
||||||
|
|
||||||
|
def json_to_model(value: str) -> BaseUser:
|
||||||
|
for i in models:
|
||||||
|
try:
|
||||||
|
return type_validate_json(i, value)
|
||||||
|
except ValidationError: # noqa: PERF203
|
||||||
|
...
|
||||||
|
raise ValueError
|
||||||
|
|
||||||
|
with Session(op.get_bind()) as session:
|
||||||
|
count = session.query(HistoricalData).count()
|
||||||
|
with Progress(
|
||||||
|
TextColumn('[progress.description]{task.description}'),
|
||||||
|
BarColumn(),
|
||||||
|
MofNCompleteColumn(),
|
||||||
|
TaskProgressColumn(),
|
||||||
|
TimeRemainingColumn(),
|
||||||
|
) as progress:
|
||||||
|
task_id = progress.add_task('[cyan]Updateing:', total=count)
|
||||||
|
for i in range(0, count, 100):
|
||||||
|
for j in session.scalars(
|
||||||
|
select(HistoricalData).where(HistoricalData.id > i).order_by(HistoricalData.id).limit(100)
|
||||||
|
):
|
||||||
|
model = json_to_model(j.game_user)
|
||||||
|
try:
|
||||||
|
j.user_unique_identifier = model.unique_identifier
|
||||||
|
except ValueError:
|
||||||
|
session.delete(j)
|
||||||
|
progress.update(task_id, advance=1)
|
||||||
|
session.commit()
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.alter_column('user_unique_identifier', existing_type=sa.VARCHAR(length=32), nullable=False)
|
||||||
|
logger.success('database upgrade success')
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
# ### commands auto generated by Alembic - please adjust! ###
|
||||||
|
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
|
||||||
|
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier'))
|
||||||
|
batch_op.drop_column('user_unique_identifier')
|
||||||
|
|
||||||
|
# ### end Alembic commands ###
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
"""Del old TOS bind data
|
||||||
|
|
||||||
|
迁移 ID: b9d65badc713
|
||||||
|
父迁移: 6c3206f90cc3
|
||||||
|
创建时间: 2023-12-30 00:27:40.991704
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from sqlalchemy.ext.automap import automap_base
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from collections.abc import Sequence
|
||||||
|
|
||||||
|
revision: str = 'b9d65badc713'
|
||||||
|
down_revision: str | Sequence[str] | None = '6c3206f90cc3'
|
||||||
|
branch_labels: str | Sequence[str] | None = None
|
||||||
|
depends_on: str | Sequence[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
|
|
||||||
|
Base = automap_base() # noqa: N806
|
||||||
|
connection = op.get_bind()
|
||||||
|
Base.prepare(autoload_with=connection)
|
||||||
|
|
||||||
|
Bind = Base.classes.nonebot_plugin_tetris_stats_bind # noqa: N806
|
||||||
|
with Session(connection) as session:
|
||||||
|
session.query(Bind).filter(Bind.game_platform == 'TOS').delete()
|
||||||
|
session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade(name: str = '') -> None:
|
||||||
|
if name:
|
||||||
|
return
|
||||||
@@ -1,8 +1,29 @@
|
|||||||
from nonebot_plugin_orm import AsyncSession
|
from asyncio import Lock
|
||||||
|
from collections.abc import AsyncGenerator
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from enum import Enum, auto
|
||||||
|
from typing import TYPE_CHECKING, Literal, TypeVar, overload
|
||||||
|
|
||||||
|
from nonebot.exception import FinishedException
|
||||||
|
from nonebot.log import logger
|
||||||
|
from nonebot_plugin_orm import AsyncSession, get_session
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
|
|
||||||
from ..utils.typing import GameType
|
from ..utils.typing import CommandType, GameType
|
||||||
from .models import Bind
|
from .models import Bind, TriggerHistoricalData
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ..game_data_processor.io_data_processor.api.models import TETRIOHistoricalData
|
||||||
|
from ..game_data_processor.top_data_processor.api.models import TOPHistoricalData
|
||||||
|
from ..game_data_processor.tos_data_processor.api.models import TOSHistoricalData
|
||||||
|
|
||||||
|
|
||||||
|
class BindStatus(Enum):
|
||||||
|
SUCCESS = auto()
|
||||||
|
UPDATE = auto()
|
||||||
|
|
||||||
|
|
||||||
async def query_bind_info(
|
async def query_bind_info(
|
||||||
@@ -27,7 +48,7 @@ async def create_or_update_bind(
|
|||||||
chat_account: str,
|
chat_account: str,
|
||||||
game_platform: GameType,
|
game_platform: GameType,
|
||||||
game_account: str,
|
game_account: str,
|
||||||
) -> str:
|
) -> BindStatus:
|
||||||
bind = await query_bind_info(
|
bind = await query_bind_info(
|
||||||
session=session,
|
session=session,
|
||||||
chat_platform=chat_platform,
|
chat_platform=chat_platform,
|
||||||
@@ -42,9 +63,81 @@ async def create_or_update_bind(
|
|||||||
game_account=game_account,
|
game_account=game_account,
|
||||||
)
|
)
|
||||||
session.add(bind)
|
session.add(bind)
|
||||||
message = '绑定成功'
|
message = BindStatus.SUCCESS
|
||||||
else:
|
else:
|
||||||
bind.game_account = game_account
|
bind.game_account = game_account
|
||||||
message = '更新绑定成功'
|
message = BindStatus.UPDATE
|
||||||
await session.commit()
|
await session.commit()
|
||||||
return message
|
return message
|
||||||
|
|
||||||
|
|
||||||
|
T = TypeVar('T', 'TETRIOHistoricalData', 'TOPHistoricalData', 'TOSHistoricalData')
|
||||||
|
|
||||||
|
lock = Lock()
|
||||||
|
|
||||||
|
|
||||||
|
async def anti_duplicate_add(cls: type[T], model: T) -> None:
|
||||||
|
async with lock, get_session() as session:
|
||||||
|
result = (
|
||||||
|
await session.scalars(
|
||||||
|
select(cls)
|
||||||
|
.where(cls.update_time == model.update_time)
|
||||||
|
.where(cls.user_unique_identifier == model.user_unique_identifier)
|
||||||
|
.where(cls.api_type == model.api_type)
|
||||||
|
)
|
||||||
|
).all()
|
||||||
|
if result:
|
||||||
|
for i in result:
|
||||||
|
if i.data == model.data:
|
||||||
|
logger.debug('Anti duplicate successfully')
|
||||||
|
return
|
||||||
|
session.add(model)
|
||||||
|
await session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
@overload
|
||||||
|
async def trigger(
|
||||||
|
session_persist_id: int,
|
||||||
|
game_platform: Literal['IO'],
|
||||||
|
command_type: CommandType | Literal['rank'],
|
||||||
|
command_args: list[str],
|
||||||
|
) -> AsyncGenerator:
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
@overload
|
||||||
|
async def trigger(
|
||||||
|
session_persist_id: int,
|
||||||
|
game_platform: GameType,
|
||||||
|
command_type: CommandType,
|
||||||
|
command_args: list[str],
|
||||||
|
) -> AsyncGenerator:
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def trigger(
|
||||||
|
session_persist_id: int,
|
||||||
|
game_platform: GameType,
|
||||||
|
command_type: CommandType | Literal['rank'],
|
||||||
|
command_args: list[str],
|
||||||
|
) -> AsyncGenerator:
|
||||||
|
trigger_time = datetime.now(UTC)
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
except FinishedException:
|
||||||
|
async with get_session() as session:
|
||||||
|
session.add(
|
||||||
|
TriggerHistoricalData(
|
||||||
|
trigger_time=trigger_time,
|
||||||
|
session_persist_id=session_persist_id,
|
||||||
|
game_platform=game_platform,
|
||||||
|
command_type=command_type,
|
||||||
|
command_args=command_args,
|
||||||
|
finish_time=datetime.now(UTC),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
await session.commit()
|
||||||
|
raise
|
||||||
|
|||||||
@@ -1,14 +1,69 @@
|
|||||||
|
from collections.abc import Callable, Sequence
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
from typing import Any, Literal
|
||||||
|
|
||||||
from nonebot.adapters import Message
|
from nonebot.compat import PYDANTIC_V2, type_validate_json
|
||||||
from nonebot_plugin_orm import Model
|
from nonebot_plugin_orm import Model
|
||||||
from sqlalchemy import JSON, DateTime, PickleType, String
|
from pydantic import BaseModel, ValidationError
|
||||||
|
from sqlalchemy import JSON, DateTime, Dialect, String, TypeDecorator
|
||||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
from ..game_data_processor import ProcessedData, User
|
|
||||||
from ..utils.typing import CommandType, GameType
|
from ..utils.typing import CommandType, GameType
|
||||||
|
|
||||||
|
|
||||||
|
class PydanticType(TypeDecorator):
|
||||||
|
impl = JSON
|
||||||
|
|
||||||
|
@override
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
get_model: Sequence[Callable[[], Sequence[type[BaseModel]]]],
|
||||||
|
models: set[type[BaseModel]],
|
||||||
|
*args: Any,
|
||||||
|
**kwargs: Any,
|
||||||
|
):
|
||||||
|
self.get_model = get_model
|
||||||
|
self._models = models
|
||||||
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
|
||||||
|
@override
|
||||||
|
def process_bind_param(self, value: Any | None, dialect: Dialect) -> str:
|
||||||
|
# 将 Pydantic 模型实例转换为 JSON
|
||||||
|
if isinstance(value, tuple(self.models)):
|
||||||
|
return value.model_dump_json(by_alias=True) # type: ignore[union-attr]
|
||||||
|
raise TypeError
|
||||||
|
else:
|
||||||
|
|
||||||
|
@override
|
||||||
|
def process_bind_param(self, value: Any | None, dialect: Dialect) -> str:
|
||||||
|
# 将 Pydantic 模型实例转换为 JSON
|
||||||
|
if isinstance(value, tuple(self.models)):
|
||||||
|
return value.json(by_alias=True) # type: ignore[union-attr]
|
||||||
|
raise TypeError
|
||||||
|
|
||||||
|
@override
|
||||||
|
def process_result_value(self, value: Any | None, dialect: Dialect) -> BaseModel:
|
||||||
|
# 将 JSON 转换回 Pydantic 模型实例
|
||||||
|
if isinstance(value, str | bytes):
|
||||||
|
for i in self.models:
|
||||||
|
try:
|
||||||
|
return type_validate_json(i, value)
|
||||||
|
except ValidationError: # noqa: PERF203
|
||||||
|
...
|
||||||
|
raise ValueError
|
||||||
|
|
||||||
|
@property
|
||||||
|
def models(self) -> tuple[type[BaseModel], ...]:
|
||||||
|
models: set[type[BaseModel]] = set()
|
||||||
|
for i in self.get_model:
|
||||||
|
models.update(i())
|
||||||
|
models.update(self._models)
|
||||||
|
return tuple(models)
|
||||||
|
|
||||||
|
|
||||||
class Bind(MappedAsDataclass, Model):
|
class Bind(MappedAsDataclass, Model):
|
||||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
chat_platform: Mapped[str] = mapped_column(String(32), index=True)
|
chat_platform: Mapped[str] = mapped_column(String(32), index=True)
|
||||||
@@ -17,17 +72,11 @@ class Bind(MappedAsDataclass, Model):
|
|||||||
game_account: Mapped[str]
|
game_account: Mapped[str]
|
||||||
|
|
||||||
|
|
||||||
class HistoricalData(MappedAsDataclass, Model):
|
class TriggerHistoricalData(MappedAsDataclass, Model):
|
||||||
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
trigger_time: Mapped[datetime] = mapped_column(DateTime)
|
trigger_time: Mapped[datetime] = mapped_column(DateTime)
|
||||||
bot_platform: Mapped[str | None] = mapped_column(String(32))
|
session_persist_id: Mapped[int]
|
||||||
bot_account: Mapped[str | None]
|
game_platform: Mapped[GameType] = mapped_column(String(32), index=True)
|
||||||
source_type: Mapped[str | None] = mapped_column(String(32), index=True)
|
command_type: Mapped[CommandType | Literal['rank']] = mapped_column(String(16), index=True)
|
||||||
source_account: Mapped[str | None] = mapped_column(index=True)
|
command_args: Mapped[list[str]] = mapped_column(JSON)
|
||||||
message: Mapped[Message | None] = mapped_column(PickleType)
|
finish_time: Mapped[datetime] = mapped_column(DateTime)
|
||||||
game_platform: Mapped[GameType] = mapped_column(String(32), index=True, init=False)
|
|
||||||
command_type: Mapped[CommandType] = mapped_column(String(16), index=True, init=False)
|
|
||||||
command_args: Mapped[list[str]] = mapped_column(JSON, init=False)
|
|
||||||
game_user: Mapped[User] = mapped_column(PickleType, init=False)
|
|
||||||
processed_data: Mapped[ProcessedData] = mapped_column(PickleType, init=False)
|
|
||||||
finish_time: Mapped[datetime] = mapped_column(DateTime, init=False)
|
|
||||||
|
|||||||
@@ -1,104 +1,35 @@
|
|||||||
from abc import ABC, abstractmethod
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from datetime import UTC, datetime
|
|
||||||
from typing import Any
|
from typing import Any
|
||||||
|
|
||||||
|
from nonebot.adapters import Bot
|
||||||
|
from nonebot.exception import FinishedException
|
||||||
from nonebot.matcher import Matcher
|
from nonebot.matcher import Matcher
|
||||||
from nonebot_plugin_alconna import AlcMatches, AlconnaMatcher
|
from nonebot.message import run_postprocessor
|
||||||
|
from nonebot_plugin_alconna import AlcMatches, AlconnaMatcher, At
|
||||||
|
|
||||||
from ..utils.exception import MessageFormatError
|
from ..utils.exception import MessageFormatError, NeedCatchError
|
||||||
from ..utils.typing import CommandType, GameType
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class User:
|
|
||||||
"""游戏用户"""
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class RawResponse:
|
|
||||||
"""原始请求数据"""
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ProcessedData:
|
|
||||||
"""处理/验证后的数据"""
|
|
||||||
|
|
||||||
|
|
||||||
from ..utils.recorder import Recorder # noqa: E402 避免循环导入
|
|
||||||
|
|
||||||
|
|
||||||
class Processor(ABC):
|
|
||||||
event_id: int
|
|
||||||
command_type: CommandType
|
|
||||||
command_args: list[str]
|
|
||||||
user: User
|
|
||||||
raw_response: RawResponse
|
|
||||||
processed_data: ProcessedData
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
def __init__(
|
|
||||||
self,
|
|
||||||
event_id: int,
|
|
||||||
user: User,
|
|
||||||
command_args: list[str],
|
|
||||||
) -> None:
|
|
||||||
self.event_id = event_id
|
|
||||||
self.user = user
|
|
||||||
self.command_args = command_args
|
|
||||||
|
|
||||||
@property
|
|
||||||
@abstractmethod
|
|
||||||
def game_platform(self) -> GameType:
|
|
||||||
"""游戏平台"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def handle_bind(self, platform: str, account: str) -> str:
|
|
||||||
"""处理绑定消息"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def handle_query(self) -> str:
|
|
||||||
"""处理查询消息"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
@abstractmethod
|
|
||||||
async def generate_message(self) -> str:
|
|
||||||
"""生成消息"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def __del__(self) -> None:
|
|
||||||
finish_time = datetime.now(tz=UTC)
|
|
||||||
if Recorder.is_error_event(self.event_id):
|
|
||||||
Recorder.del_error_event(self.event_id)
|
|
||||||
return
|
|
||||||
historical_data = Recorder.get_historical_data(self.event_id)
|
|
||||||
historical_data.game_platform = self.game_platform
|
|
||||||
historical_data.command_type = self.command_type
|
|
||||||
historical_data.command_args = self.command_args
|
|
||||||
historical_data.game_user = self.user
|
|
||||||
historical_data.processed_data = self.processed_data
|
|
||||||
historical_data.finish_time = finish_time
|
|
||||||
Recorder.update_historical_data(self.event_id, historical_data)
|
|
||||||
|
|
||||||
|
|
||||||
def add_default_handlers(matcher: type[AlconnaMatcher]) -> None:
|
def add_default_handlers(matcher: type[AlconnaMatcher]) -> None:
|
||||||
|
@matcher.assign('query')
|
||||||
|
async def _(bot: Bot, matcher: Matcher, target: At):
|
||||||
|
if isinstance(target, At) and target.target == bot.self_id:
|
||||||
|
await matcher.finish('不能查询bot的信息')
|
||||||
|
|
||||||
@matcher.handle()
|
@matcher.handle()
|
||||||
async def _(matcher: Matcher, account: MessageFormatError):
|
async def _(matcher: Matcher, account: MessageFormatError):
|
||||||
await matcher.finish(str(account))
|
await matcher.finish(str(account))
|
||||||
|
|
||||||
@matcher.handle()
|
@matcher.handle()
|
||||||
async def _(matcher: Matcher, matches: AlcMatches):
|
async def _(matcher: Matcher, matches: AlcMatches):
|
||||||
if matches.head_matched and matches.options != {}:
|
if matches.head_matched and matches.options != {} or matches.main_args == {}:
|
||||||
await matcher.finish(
|
await matcher.finish(
|
||||||
(f'{matches.error_info!r}\n' if matches.error_info is not None else '')
|
(f'{matches.error_info!r}\n' if matches.error_info is not None else '')
|
||||||
+ f'输入"{matches.header_result} --help"查看帮助'
|
+ f'输入"{matches.header_result} --help"查看帮助'
|
||||||
)
|
)
|
||||||
|
|
||||||
@matcher.handle()
|
@matcher.handle()
|
||||||
async def _(matcher: Matcher, other: Any): # noqa: ANN401
|
def _(other: Any): # noqa: ANN401, ARG001
|
||||||
await matcher.finish()
|
raise FinishedException
|
||||||
|
|
||||||
|
|
||||||
from . import ( # noqa: F401, E402
|
from . import ( # noqa: F401, E402
|
||||||
@@ -106,3 +37,8 @@ from . import ( # noqa: F401, E402
|
|||||||
top_data_processor,
|
top_data_processor,
|
||||||
tos_data_processor,
|
tos_data_processor,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@run_postprocessor
|
||||||
|
async def _(matcher: Matcher, exception: NeedCatchError):
|
||||||
|
await matcher.send(str(exception))
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
BIND_COMMAND: list[str] = ['绑定', 'bind']
|
BIND_COMMAND: list[str] = ['绑定', 'bind']
|
||||||
QUERY_COMMAND: list[str] = ['查', '查询', 'query', 'stats']
|
QUERY_COMMAND: list[str] = ['查', '查询', 'query', 'stats']
|
||||||
|
CANT_VERIFY_MESSAGE = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
|
||||||
|
|||||||
@@ -1,24 +1,22 @@
|
|||||||
from datetime import timedelta
|
from arclet.alconna import Alconna, AllParam, Arg, ArgFlag, Args, CommandMeta, Option
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from arclet.alconna import Alconna, Arg, ArgFlag, Args, CommandMeta, Option
|
|
||||||
from nonebot.adapters import Bot, Event
|
|
||||||
from nonebot.matcher import Matcher
|
|
||||||
from nonebot_plugin_alconna import At, on_alconna
|
from nonebot_plugin_alconna import At, on_alconna
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
from sqlalchemy import select
|
|
||||||
|
|
||||||
from ...db import query_bind_info
|
from ...utils.exception import MessageFormatError
|
||||||
from ...utils.exception import NeedCatchError
|
|
||||||
from ...utils.metrics import get_metrics
|
|
||||||
from ...utils.platform import get_platform
|
|
||||||
from ...utils.typing import Me
|
from ...utils.typing import Me
|
||||||
from .. import add_default_handlers
|
from .. import add_default_handlers
|
||||||
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
||||||
from .constant import GAME_TYPE
|
from .api import Player
|
||||||
from .model import IORank
|
from .api.typing import Rank
|
||||||
from .processor import Processor, User, check_rank_data, identify_user_info
|
from .constant import USER_ID, USER_NAME
|
||||||
from .typing import Rank
|
|
||||||
|
|
||||||
|
def get_player(user_id_or_name: str) -> Player | MessageFormatError:
|
||||||
|
if USER_ID.match(user_id_or_name):
|
||||||
|
return Player(user_id=user_id_or_name, trust=True)
|
||||||
|
if USER_NAME.match(user_id_or_name):
|
||||||
|
return Player(user_name=user_id_or_name, trust=True)
|
||||||
|
return MessageFormatError('用户名/ID不合法')
|
||||||
|
|
||||||
|
|
||||||
alc = on_alconna(
|
alc = on_alconna(
|
||||||
Alconna(
|
Alconna(
|
||||||
@@ -28,7 +26,7 @@ alc = on_alconna(
|
|||||||
Args(
|
Args(
|
||||||
Arg(
|
Arg(
|
||||||
'account',
|
'account',
|
||||||
identify_user_info,
|
get_player,
|
||||||
notice='IO 用户名 / ID',
|
notice='IO 用户名 / ID',
|
||||||
flags=[ArgFlag.HIDDEN],
|
flags=[ArgFlag.HIDDEN],
|
||||||
)
|
)
|
||||||
@@ -49,7 +47,7 @@ alc = on_alconna(
|
|||||||
),
|
),
|
||||||
Arg(
|
Arg(
|
||||||
'account',
|
'account',
|
||||||
identify_user_info,
|
get_player,
|
||||||
notice='IO 用户名 / ID',
|
notice='IO 用户名 / ID',
|
||||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||||
),
|
),
|
||||||
@@ -67,7 +65,7 @@ alc = on_alconna(
|
|||||||
dest='rank',
|
dest='rank',
|
||||||
help_text='查询 IO 段位信息',
|
help_text='查询 IO 段位信息',
|
||||||
),
|
),
|
||||||
Arg('other', Any, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
|
Arg('other', AllParam, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
|
||||||
meta=CommandMeta(
|
meta=CommandMeta(
|
||||||
description='查询 TETR.IO 的信息',
|
description='查询 TETR.IO 的信息',
|
||||||
example='io绑定scdhh\nio查我\niorankx',
|
example='io绑定scdhh\nio查我\niorankx',
|
||||||
@@ -80,101 +78,8 @@ alc = on_alconna(
|
|||||||
aliases={'IO'},
|
aliases={'IO'},
|
||||||
)
|
)
|
||||||
|
|
||||||
alc.shortcut('fkosk', {'command': 'io查', 'args': ['我']})
|
alc.shortcut('fkosk', {'command': 'io查', 'args': ['我'], 'fuzzy': False, 'humanized': 'An Easter egg!'})
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('bind')
|
|
||||||
async def _(bot: Bot, event: Event, matcher: Matcher, account: User):
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=account,
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(await proc.handle_bind(platform=get_platform(bot), account=event.get_user_id()))
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('query')
|
|
||||||
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
|
|
||||||
async with get_session() as session:
|
|
||||||
bind = await query_bind_info(
|
|
||||||
session=session,
|
|
||||||
chat_platform=get_platform(bot),
|
|
||||||
chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
)
|
|
||||||
if bind is None:
|
|
||||||
await matcher.finish('未查询到绑定信息')
|
|
||||||
message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=User(ID=bind.game_account),
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(message + await proc.handle_query())
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('query')
|
|
||||||
async def _(event: Event, matcher: Matcher, account: User):
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=account,
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(await proc.handle_query())
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('rank')
|
|
||||||
async def _(event: Event, matcher: Matcher, rank: Rank):
|
|
||||||
if rank == 'z':
|
|
||||||
await matcher.finish('暂不支持查询未知段位')
|
|
||||||
try:
|
|
||||||
await check_rank_data()
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(f'段位信息获取失败\n{e}'))
|
|
||||||
async with get_session() as session:
|
|
||||||
data = (
|
|
||||||
await session.scalars(select(IORank).where(IORank.rank == rank).order_by(IORank.id.desc()).limit(5))
|
|
||||||
).all()
|
|
||||||
latest_data = data[0]
|
|
||||||
message = f'{rank.upper()} 段 分数线 {latest_data.tr_line:.2f} TR, {latest_data.player_count} 名玩家\n'
|
|
||||||
if len(data) > 1:
|
|
||||||
message += f'对比 {(latest_data.create_time-data[-1].create_time).total_seconds()/3600:.2f} 小时前趋势: {f"↑{difference:.2f}" if (difference:=latest_data.tr_line-data[-1].tr_line) > 0 else f"↓{-difference:.2f}" if difference < 0 else "→"}'
|
|
||||||
else:
|
|
||||||
message += '暂无对比数据'
|
|
||||||
avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
|
|
||||||
low_pps = get_metrics(pps=latest_data.low_pps[1])
|
|
||||||
low_vs = get_metrics(vs=latest_data.low_vs[1])
|
|
||||||
max_pps = get_metrics(pps=latest_data.high_pps[1])
|
|
||||||
max_vs = get_metrics(vs=latest_data.high_vs[1])
|
|
||||||
message += (
|
|
||||||
'\n'
|
|
||||||
'平均数据:\n'
|
|
||||||
f"L'PM: {avg.lpm} ( {avg.pps} pps )\n"
|
|
||||||
f'APM: {avg.apm} ( x{avg.apl} )\n'
|
|
||||||
f'ADPM: {avg.adpm} ( x{avg.adpl} ) ( {avg.vs}vs )\n'
|
|
||||||
'\n'
|
|
||||||
'最低数据:\n'
|
|
||||||
f"L'PM: {low_pps.lpm} ( {low_pps.pps} pps ) By: {latest_data.low_pps[0]['name'].upper()}\n"
|
|
||||||
f'APM: {latest_data.low_apm[1]} By: {latest_data.low_apm[0]["name"].upper()}\n'
|
|
||||||
f'ADPM: {low_vs.adpm} ( {low_vs.vs}vs ) By: {latest_data.low_vs[0]["name"].upper()}\n'
|
|
||||||
'\n'
|
|
||||||
'最高数据:\n'
|
|
||||||
f"L'PM: {max_pps.lpm} ( {max_pps.pps} pps ) By: {latest_data.high_pps[0]['name'].upper()}\n"
|
|
||||||
f'APM: {latest_data.high_apm[1]} By: {latest_data.high_apm[0]["name"].upper()}\n'
|
|
||||||
f'ADPM: {max_vs.adpm} ( {max_vs.vs}vs ) By: {latest_data.high_vs[0]["name"].upper()}\n'
|
|
||||||
'\n'
|
|
||||||
f'数据更新时间: {(latest_data.create_time+timedelta(hours=8)).strftime("%Y-%m-%d %H:%M:%S")}'
|
|
||||||
)
|
|
||||||
await matcher.finish(message)
|
|
||||||
|
|
||||||
|
from . import bind, query, rank # noqa: F401, E402
|
||||||
|
|
||||||
add_default_handlers(alc)
|
add_default_handlers(alc)
|
||||||
|
|||||||
@@ -0,0 +1,7 @@
|
|||||||
|
from .player import Player
|
||||||
|
from .schemas.user import User
|
||||||
|
from .schemas.user_info import UserInfoSuccess
|
||||||
|
from .schemas.user_records import UserRecordsSuccess
|
||||||
|
from .tetra_league import full_export as tetra_league_full_export
|
||||||
|
|
||||||
|
__all__ = ['Player', 'User', 'UserInfoSuccess', 'UserRecordsSuccess', 'tetra_league_full_export']
|
||||||
@@ -0,0 +1,35 @@
|
|||||||
|
from asyncio import Lock
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import ClassVar
|
||||||
|
from weakref import WeakValueDictionary
|
||||||
|
|
||||||
|
from aiocache import Cache as ACache # type: ignore[import-untyped]
|
||||||
|
from nonebot.compat import type_validate_json
|
||||||
|
from nonebot.log import logger
|
||||||
|
|
||||||
|
from ....utils.request import Request
|
||||||
|
from .schemas.base import FailedModel, SuccessModel
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
|
class Cache:
|
||||||
|
cache = ACache(ACache.MEMORY)
|
||||||
|
task: ClassVar[WeakValueDictionary[str, Lock]] = WeakValueDictionary()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def get(cls, url: str) -> bytes:
|
||||||
|
lock = cls.task.setdefault(url, Lock())
|
||||||
|
async with lock:
|
||||||
|
if (cached_data := await cls.cache.get(url)) is not None:
|
||||||
|
logger.debug(f'{url}: Cache hit!')
|
||||||
|
return cached_data
|
||||||
|
response_data = await Request.request(url)
|
||||||
|
parsed_data: SuccessModel | FailedModel = type_validate_json(SuccessModel | FailedModel, response_data) # type: ignore[arg-type]
|
||||||
|
if isinstance(parsed_data, SuccessModel):
|
||||||
|
await cls.cache.add(
|
||||||
|
url,
|
||||||
|
response_data,
|
||||||
|
(parsed_data.cache.cached_until - datetime.now(UTC)).total_seconds(),
|
||||||
|
)
|
||||||
|
return response_data
|
||||||
@@ -0,0 +1,17 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from nonebot_plugin_orm import Model
|
||||||
|
from sqlalchemy import DateTime, String
|
||||||
|
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||||
|
|
||||||
|
from ....db.models import PydanticType
|
||||||
|
from .schemas.base import SuccessModel
|
||||||
|
|
||||||
|
|
||||||
|
class TETRIOHistoricalData(MappedAsDataclass, Model):
|
||||||
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
|
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
|
||||||
|
api_type: Mapped[Literal['User Info', 'User Records']] = mapped_column(String(16), index=True)
|
||||||
|
data: Mapped[SuccessModel] = mapped_column(PydanticType(get_model=[SuccessModel.__subclasses__], models=set()))
|
||||||
|
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||||
@@ -0,0 +1,102 @@
|
|||||||
|
from typing import overload
|
||||||
|
|
||||||
|
from nonebot.compat import type_validate_json
|
||||||
|
|
||||||
|
from ....db import anti_duplicate_add
|
||||||
|
from ....utils.exception import RequestError
|
||||||
|
from ....utils.request import splice_url
|
||||||
|
from ..constant import BASE_URL, USER_ID, USER_NAME
|
||||||
|
from .cache import Cache
|
||||||
|
from .models import TETRIOHistoricalData
|
||||||
|
from .schemas.base import FailedModel
|
||||||
|
from .schemas.user import User
|
||||||
|
from .schemas.user_info import UserInfo, UserInfoSuccess
|
||||||
|
from .schemas.user_records import UserRecords, UserRecordsSuccess
|
||||||
|
|
||||||
|
|
||||||
|
class Player:
|
||||||
|
@overload
|
||||||
|
def __init__(self, *, user_id: str, trust: bool = False): ...
|
||||||
|
@overload
|
||||||
|
def __init__(self, *, user_name: str, trust: bool = False): ...
|
||||||
|
def __init__(self, *, user_id: str | None = None, user_name: str | None = None, trust: bool = False):
|
||||||
|
self.user_id = user_id
|
||||||
|
self.user_name = user_name
|
||||||
|
if not trust:
|
||||||
|
if self.user_id is not None:
|
||||||
|
if not USER_ID.match(self.user_id):
|
||||||
|
msg = 'Invalid user id'
|
||||||
|
raise ValueError(msg)
|
||||||
|
elif self.user_name is not None:
|
||||||
|
if not USER_NAME.match(self.user_name):
|
||||||
|
msg = 'Invalid user name'
|
||||||
|
raise ValueError(msg)
|
||||||
|
else:
|
||||||
|
msg = 'Invalid user'
|
||||||
|
raise ValueError(msg)
|
||||||
|
self.__user: User | None = None
|
||||||
|
self._user_info: UserInfoSuccess | None = None
|
||||||
|
self._user_records: UserRecordsSuccess | None = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _request_user_parameter(self) -> str:
|
||||||
|
if self.user_id is not None:
|
||||||
|
return self.user_id
|
||||||
|
if self.user_name is not None:
|
||||||
|
return self.user_name.lower()
|
||||||
|
msg = 'Invalid user'
|
||||||
|
raise ValueError(msg)
|
||||||
|
|
||||||
|
@property
|
||||||
|
async def user(self) -> User:
|
||||||
|
if self.__user is None:
|
||||||
|
user_info = await self.get_info()
|
||||||
|
self.__user = User(
|
||||||
|
ID=user_info.data.user.id,
|
||||||
|
name=user_info.data.user.username,
|
||||||
|
)
|
||||||
|
self.user_id = user_info.data.user.id
|
||||||
|
self.user_name = user_info.data.user.username
|
||||||
|
return self.__user
|
||||||
|
|
||||||
|
async def get_info(self) -> UserInfoSuccess:
|
||||||
|
"""Get User Info"""
|
||||||
|
if self._user_info is None:
|
||||||
|
raw_user_info = await Cache.get(splice_url([BASE_URL, 'users/', f'{self._request_user_parameter}']))
|
||||||
|
user_info: UserInfo = type_validate_json(UserInfo, raw_user_info) # type: ignore[arg-type]
|
||||||
|
if isinstance(user_info, FailedModel):
|
||||||
|
msg = f'用户信息请求错误:\n{user_info.error}'
|
||||||
|
raise RequestError(msg)
|
||||||
|
self._user_info = user_info
|
||||||
|
await anti_duplicate_add(
|
||||||
|
TETRIOHistoricalData,
|
||||||
|
TETRIOHistoricalData(
|
||||||
|
user_unique_identifier=(await self.user).unique_identifier,
|
||||||
|
api_type='User Info',
|
||||||
|
data=user_info,
|
||||||
|
update_time=user_info.cache.cached_at,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return self._user_info
|
||||||
|
|
||||||
|
async def get_records(self) -> UserRecordsSuccess:
|
||||||
|
"""Get User Records"""
|
||||||
|
if self._user_records is None:
|
||||||
|
raw_user_records = await Cache.get(
|
||||||
|
splice_url([BASE_URL, 'users/', f'{self._request_user_parameter}/', 'records'])
|
||||||
|
)
|
||||||
|
user_records: UserRecords = type_validate_json(UserRecords, raw_user_records) # type: ignore[arg-type]
|
||||||
|
if isinstance(user_records, FailedModel):
|
||||||
|
msg = f'用户Solo数据请求错误:\n{user_records.error}'
|
||||||
|
raise RequestError(msg)
|
||||||
|
self._user_records = user_records
|
||||||
|
await anti_duplicate_add(
|
||||||
|
TETRIOHistoricalData,
|
||||||
|
TETRIOHistoricalData(
|
||||||
|
user_unique_identifier=(await self.user).unique_identifier,
|
||||||
|
api_type='User Records',
|
||||||
|
data=user_records,
|
||||||
|
update_time=user_records.cache.cached_at,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return self._user_records
|
||||||
@@ -0,0 +1,59 @@
|
|||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from ..typing import Rank
|
||||||
|
from .base import FailedModel
|
||||||
|
from .base import SuccessModel as BaseSuccessModel
|
||||||
|
|
||||||
|
|
||||||
|
class _User(BaseModel):
|
||||||
|
id: str = Field(..., alias='_id')
|
||||||
|
username: str
|
||||||
|
role: str
|
||||||
|
xp: float
|
||||||
|
supporter: bool
|
||||||
|
verified: bool
|
||||||
|
country: str | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class _League(BaseModel):
|
||||||
|
gamesplayed: int
|
||||||
|
gameswon: int
|
||||||
|
rating: float
|
||||||
|
rank: Rank
|
||||||
|
bestrank: Rank
|
||||||
|
decaying: bool
|
||||||
|
|
||||||
|
|
||||||
|
class ValidLeague(_League):
|
||||||
|
glicko: float
|
||||||
|
rd: float
|
||||||
|
apm: float
|
||||||
|
pps: float
|
||||||
|
vs: float
|
||||||
|
|
||||||
|
|
||||||
|
class ValidUser(_User):
|
||||||
|
league: ValidLeague
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidLeague(_League):
|
||||||
|
glicko: float | None = None
|
||||||
|
rd: float | None = None
|
||||||
|
apm: float | None = None
|
||||||
|
pps: float | None = None
|
||||||
|
vs: float | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidUser(_User):
|
||||||
|
league: InvalidLeague
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
users: list[ValidUser | InvalidUser]
|
||||||
|
|
||||||
|
|
||||||
|
class TetraLeagueSuccess(BaseSuccessModel):
|
||||||
|
data: Data
|
||||||
|
|
||||||
|
|
||||||
|
TetraLeague = TetraLeagueSuccess | FailedModel
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
|
from ....schemas import BaseUser
|
||||||
|
from ...constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
class User(BaseUser):
|
||||||
|
platform: Literal['IO'] = GAME_TYPE
|
||||||
|
|
||||||
|
ID: str
|
||||||
|
name: str
|
||||||
|
|
||||||
|
@property
|
||||||
|
@override
|
||||||
|
def unique_identifier(self) -> str:
|
||||||
|
return self.ID
|
||||||
@@ -0,0 +1,133 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from ..typing import Rank
|
||||||
|
from .base import FailedModel
|
||||||
|
from .base import SuccessModel as BaseSuccessModel
|
||||||
|
|
||||||
|
|
||||||
|
class Badge(BaseModel):
|
||||||
|
id: str
|
||||||
|
label: str
|
||||||
|
group: str | None = None
|
||||||
|
ts: datetime | Literal[False] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class MetaLeague(BaseModel):
|
||||||
|
decaying: bool
|
||||||
|
|
||||||
|
|
||||||
|
class NeverPlayedLeague(MetaLeague):
|
||||||
|
gamesplayed: Literal[0]
|
||||||
|
gameswon: Literal[0]
|
||||||
|
rating: Literal[-1]
|
||||||
|
rank: Literal['z']
|
||||||
|
standing: Literal[-1]
|
||||||
|
standing_local: Literal[-1]
|
||||||
|
next_rank: None
|
||||||
|
prev_rank: None
|
||||||
|
next_at: Literal[-1]
|
||||||
|
prev_at: Literal[-1]
|
||||||
|
percentile: Literal[-1]
|
||||||
|
percentile_rank: Literal['z']
|
||||||
|
apm: None = None
|
||||||
|
pps: None = None
|
||||||
|
vs: None = None
|
||||||
|
|
||||||
|
|
||||||
|
class NeverRatedLeague(MetaLeague):
|
||||||
|
gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
|
||||||
|
gameswon: int
|
||||||
|
rating: Literal[-1]
|
||||||
|
rank: Literal['z']
|
||||||
|
standing: Literal[-1]
|
||||||
|
standing_local: Literal[-1]
|
||||||
|
next_rank: None
|
||||||
|
prev_rank: None
|
||||||
|
next_at: Literal[-1]
|
||||||
|
prev_at: Literal[-1]
|
||||||
|
percentile: Literal[-1]
|
||||||
|
percentile_rank: Literal['z']
|
||||||
|
apm: float
|
||||||
|
pps: float
|
||||||
|
vs: float | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class RatedLeague(MetaLeague):
|
||||||
|
gamesplayed: int
|
||||||
|
gameswon: int
|
||||||
|
rating: float
|
||||||
|
rank: Rank
|
||||||
|
bestrank: Rank
|
||||||
|
standing: int
|
||||||
|
standing_local: int
|
||||||
|
next_rank: Rank | None = None
|
||||||
|
prev_rank: Rank | None = None
|
||||||
|
next_at: int
|
||||||
|
prev_at: int
|
||||||
|
percentile: float
|
||||||
|
percentile_rank: str
|
||||||
|
glicko: float
|
||||||
|
rd: float
|
||||||
|
apm: float
|
||||||
|
pps: float
|
||||||
|
vs: float | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class Discord(BaseModel):
|
||||||
|
id: str
|
||||||
|
username: str
|
||||||
|
|
||||||
|
|
||||||
|
class Connections(BaseModel):
|
||||||
|
discord: Discord | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class Distinguishment(BaseModel):
|
||||||
|
type: str
|
||||||
|
|
||||||
|
|
||||||
|
class User(BaseModel):
|
||||||
|
id: str = Field(..., alias='_id')
|
||||||
|
username: str
|
||||||
|
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'banned']
|
||||||
|
ts: datetime | None = None
|
||||||
|
botmaster: str | None = None
|
||||||
|
badges: list[Badge]
|
||||||
|
xp: float
|
||||||
|
gamesplayed: int
|
||||||
|
gameswon: int
|
||||||
|
gametime: float
|
||||||
|
country: str | None = None
|
||||||
|
badstanding: bool | None = None
|
||||||
|
supporter: bool | None = None # osk说是必有, 但实际上不是 fkosk
|
||||||
|
supporter_tier: int
|
||||||
|
verified: bool
|
||||||
|
league: NeverPlayedLeague | NeverRatedLeague | RatedLeague
|
||||||
|
avatar_revision: int | None = None
|
||||||
|
"""This user's avatar ID. Get their avatar at
|
||||||
|
|
||||||
|
https://tetr.io/user-content/avatars/{ USERID }.jpg?rv={ AVATAR_REVISION }"""
|
||||||
|
banner_revision: int | None = None
|
||||||
|
"""This user's banner ID. Get their banner at
|
||||||
|
|
||||||
|
https://tetr.io/user-content/banners/{ USERID }.jpg?rv={ BANNER_REVISION }
|
||||||
|
|
||||||
|
Ignore this field if the user is not a supporter."""
|
||||||
|
bio: str | None = None
|
||||||
|
connections: Connections
|
||||||
|
friend_count: int | None = None
|
||||||
|
distinguishment: Distinguishment | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
user: User
|
||||||
|
|
||||||
|
|
||||||
|
class UserInfoSuccess(BaseSuccessModel):
|
||||||
|
data: Data
|
||||||
|
|
||||||
|
|
||||||
|
UserInfo = UserInfoSuccess | FailedModel
|
||||||
@@ -0,0 +1,122 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from .....utils.typing import Number
|
||||||
|
from .base import FailedModel
|
||||||
|
from .base import SuccessModel as BaseSuccessModel
|
||||||
|
|
||||||
|
|
||||||
|
class Time(BaseModel):
|
||||||
|
start: int
|
||||||
|
zero: bool
|
||||||
|
locked: bool
|
||||||
|
prev: int
|
||||||
|
frameoffset: int | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class Clears(BaseModel):
|
||||||
|
singles: int
|
||||||
|
doubles: int
|
||||||
|
triples: int
|
||||||
|
quads: int
|
||||||
|
pentas: int | None = None
|
||||||
|
realtspins: int
|
||||||
|
minitspins: int
|
||||||
|
minitspinsingles: int
|
||||||
|
tspinsingles: int
|
||||||
|
minitspindoubles: int
|
||||||
|
tspindoubles: int
|
||||||
|
tspintriples: int
|
||||||
|
tspinquads: int
|
||||||
|
allclear: int
|
||||||
|
|
||||||
|
|
||||||
|
class Garbage(BaseModel):
|
||||||
|
sent: int
|
||||||
|
received: int
|
||||||
|
attack: int | None = None
|
||||||
|
cleared: int | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class Finesse(BaseModel):
|
||||||
|
combo: int
|
||||||
|
faults: int
|
||||||
|
perfectpieces: int
|
||||||
|
|
||||||
|
|
||||||
|
class EndContext(BaseModel):
|
||||||
|
seed: Number
|
||||||
|
lines: int
|
||||||
|
level_lines: int
|
||||||
|
level_lines_needed: int
|
||||||
|
inputs: int
|
||||||
|
holds: int | None = None
|
||||||
|
time: Time
|
||||||
|
score: int
|
||||||
|
zenlevel: int | None = None
|
||||||
|
zenprogress: int | None = None
|
||||||
|
level: int
|
||||||
|
combo: int
|
||||||
|
currentcombopower: int | None = None # WTF
|
||||||
|
topcombo: int
|
||||||
|
btb: int
|
||||||
|
topbtb: int
|
||||||
|
currentbtbchainpower: int | None = None # WTF * 2
|
||||||
|
tspins: int
|
||||||
|
piecesplaced: int
|
||||||
|
clears: Clears
|
||||||
|
garbage: Garbage
|
||||||
|
kills: int
|
||||||
|
finesse: Finesse
|
||||||
|
final_time: float = Field(..., alias='finalTime')
|
||||||
|
gametype: str
|
||||||
|
|
||||||
|
|
||||||
|
class _User(BaseModel):
|
||||||
|
id: str = Field(..., alias='_id')
|
||||||
|
username: str
|
||||||
|
|
||||||
|
|
||||||
|
class _Record(BaseModel):
|
||||||
|
id: str = Field(..., alias='_id')
|
||||||
|
stream: str
|
||||||
|
replayid: str
|
||||||
|
user: _User
|
||||||
|
ts: datetime
|
||||||
|
ismulti: bool | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class SoloRecord(_Record):
|
||||||
|
endcontext: EndContext
|
||||||
|
|
||||||
|
|
||||||
|
class MultiRecord(_Record):
|
||||||
|
endcontext: list[EndContext]
|
||||||
|
|
||||||
|
|
||||||
|
class SoloModeRecord(BaseModel):
|
||||||
|
record: SoloRecord
|
||||||
|
rank: int | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class Records(BaseModel):
|
||||||
|
sprint: SoloModeRecord = Field(..., alias='40l')
|
||||||
|
blitz: SoloModeRecord
|
||||||
|
|
||||||
|
|
||||||
|
class Zen(BaseModel):
|
||||||
|
level: int
|
||||||
|
score: int
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
records: Records
|
||||||
|
zen: Zen
|
||||||
|
|
||||||
|
|
||||||
|
class UserRecordsSuccess(BaseSuccessModel):
|
||||||
|
data: Data
|
||||||
|
|
||||||
|
|
||||||
|
UserRecords = UserRecordsSuccess | FailedModel
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
from typing import Literal, NamedTuple, overload
|
||||||
|
|
||||||
|
from nonebot.compat import type_validate_json
|
||||||
|
|
||||||
|
from ....utils.exception import RequestError
|
||||||
|
from ....utils.request import splice_url
|
||||||
|
from ..constant import BASE_URL
|
||||||
|
from .cache import Cache
|
||||||
|
from .schemas.base import FailedModel
|
||||||
|
from .schemas.tetra_league import TetraLeague, TetraLeagueSuccess
|
||||||
|
|
||||||
|
|
||||||
|
class FullExport(NamedTuple):
|
||||||
|
model: TetraLeagueSuccess
|
||||||
|
original: bytes
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def full_export(*, with_original: Literal[False]) -> TetraLeagueSuccess: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def full_export(*, with_original: Literal[True]) -> FullExport: ...
|
||||||
|
|
||||||
|
|
||||||
|
async def full_export(*, with_original: bool) -> TetraLeagueSuccess | FullExport:
|
||||||
|
full: TetraLeague = type_validate_json(
|
||||||
|
TetraLeague, # type: ignore[arg-type]
|
||||||
|
(data := await Cache.get(splice_url([BASE_URL, 'users/lists/league/all']))),
|
||||||
|
)
|
||||||
|
if isinstance(full, FailedModel):
|
||||||
|
msg = f'排行榜数据请求错误:\n{full.error}'
|
||||||
|
raise RequestError(msg)
|
||||||
|
if with_original:
|
||||||
|
return FullExport(full, data)
|
||||||
|
return full
|
||||||
@@ -0,0 +1,64 @@
|
|||||||
|
from hashlib import md5
|
||||||
|
from urllib.parse import urlunparse
|
||||||
|
|
||||||
|
from nonebot.adapters import Bot, Event
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_userinfo import BotUserInfo, UserInfo # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from ...db import BindStatus, create_or_update_bind, trigger
|
||||||
|
from ...utils.avatar import get_avatar
|
||||||
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
|
from ...utils.platform import get_platform
|
||||||
|
from ...utils.render import Bind, render
|
||||||
|
from ...utils.render.schemas.base import Avatar, People
|
||||||
|
from ...utils.screenshot import screenshot
|
||||||
|
from . import alc
|
||||||
|
from .api import Player
|
||||||
|
from .constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('bind')
|
||||||
|
async def _(bot: Bot, event: Event, account: Player, event_session: EventSession, bot_info: UserInfo = BotUserInfo()): # noqa: B008
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='bind',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
user = await account.user
|
||||||
|
async with get_session() as session:
|
||||||
|
bind_status = await create_or_update_bind(
|
||||||
|
session=session,
|
||||||
|
chat_platform=get_platform(bot),
|
||||||
|
chat_account=event.get_user_id(),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
game_account=user.unique_identifier,
|
||||||
|
)
|
||||||
|
user_info = await account.get_info()
|
||||||
|
if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
|
||||||
|
async with HostPage(
|
||||||
|
await render(
|
||||||
|
'binding',
|
||||||
|
Bind(
|
||||||
|
platform='TETR.IO',
|
||||||
|
status='unknown',
|
||||||
|
user=People(
|
||||||
|
avatar=f'https://tetr.io/user-content/avatars/{user_info.data.user.id}.jpg?rv={user_info.data.user.avatar_revision}'
|
||||||
|
if user_info.data.user.avatar_revision is not None
|
||||||
|
else Avatar(type='identicon', hash=md5(user_info.data.user.id.encode()).hexdigest()), # noqa: S324
|
||||||
|
name=user_info.data.user.username.upper(),
|
||||||
|
),
|
||||||
|
bot=People(
|
||||||
|
avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),
|
||||||
|
name=bot_info.user_name,
|
||||||
|
),
|
||||||
|
command='io查我',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
) as page_hash:
|
||||||
|
await UniMessage.image(
|
||||||
|
raw=await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))
|
||||||
|
).finish()
|
||||||
@@ -1,8 +1,12 @@
|
|||||||
from ...utils.typing import GameType
|
from re import compile
|
||||||
from .typing import Rank
|
from typing import Literal
|
||||||
|
|
||||||
|
from .api.typing import Rank
|
||||||
|
|
||||||
|
GAME_TYPE: Literal['IO'] = 'IO'
|
||||||
|
|
||||||
GAME_TYPE: GameType = 'IO'
|
|
||||||
BASE_URL = 'https://ch.tetr.io/api/'
|
BASE_URL = 'https://ch.tetr.io/api/'
|
||||||
|
|
||||||
RANK_PERCENTILE: dict[Rank, float] = {
|
RANK_PERCENTILE: dict[Rank, float] = {
|
||||||
'x': 1,
|
'x': 1,
|
||||||
'u': 5,
|
'u': 5,
|
||||||
@@ -22,3 +26,9 @@ RANK_PERCENTILE: dict[Rank, float] = {
|
|||||||
'd+': 97.5,
|
'd+': 97.5,
|
||||||
'd': 100,
|
'd': 100,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
TR_MIN = 0
|
||||||
|
TR_MAX = 25000
|
||||||
|
|
||||||
|
USER_ID = compile(r'^[a-f0-9]{24}$')
|
||||||
|
USER_NAME = compile(r'^[a-zA-Z0-9_-]{3,16}$')
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
from datetime import UTC, datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from nonebot_plugin_orm import Model
|
from nonebot_plugin_orm import Model
|
||||||
from sqlalchemy import JSON, DateTime, String
|
from sqlalchemy import JSON, DateTime, String
|
||||||
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||||
|
|
||||||
from .typing import Rank
|
from .api.typing import Rank
|
||||||
|
|
||||||
|
|
||||||
class IORank(MappedAsDataclass, Model):
|
class IORank(MappedAsDataclass, Model):
|
||||||
@@ -21,9 +21,8 @@ class IORank(MappedAsDataclass, Model):
|
|||||||
high_pps: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
|
high_pps: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
|
||||||
high_apm: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
|
high_apm: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
|
||||||
high_vs: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
|
high_vs: Mapped[tuple[dict[str, str], float]] = mapped_column(JSON)
|
||||||
create_time: Mapped[datetime] = mapped_column(
|
update_time: Mapped[datetime] = mapped_column(
|
||||||
DateTime,
|
DateTime,
|
||||||
default=lambda: datetime.now(tz=UTC),
|
|
||||||
index=True,
|
index=True,
|
||||||
init=False,
|
|
||||||
)
|
)
|
||||||
|
file_hash: Mapped[str | None] = mapped_column(String(128), index=True)
|
||||||
|
|||||||
@@ -1,254 +0,0 @@
|
|||||||
from collections import defaultdict
|
|
||||||
from collections.abc import Callable
|
|
||||||
from dataclasses import asdict, dataclass
|
|
||||||
from datetime import UTC, datetime, timedelta
|
|
||||||
from math import floor
|
|
||||||
from re import match
|
|
||||||
from statistics import mean
|
|
||||||
|
|
||||||
from nonebot import get_driver
|
|
||||||
from nonebot_plugin_apscheduler import scheduler # type: ignore[import-untyped]
|
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
from pydantic import parse_raw_as
|
|
||||||
from sqlalchemy import select
|
|
||||||
|
|
||||||
from ...db import create_or_update_bind
|
|
||||||
from ...utils.exception import MessageFormatError, RequestError, WhatTheFuckError
|
|
||||||
from ...utils.request import Request, splice_url
|
|
||||||
from ...utils.typing import GameType
|
|
||||||
from .. import ProcessedData as ProcessedDataMeta
|
|
||||||
from .. import Processor as ProcessorMeta
|
|
||||||
from .. import RawResponse as RawResponseMeta
|
|
||||||
from .. import User as UserMeta
|
|
||||||
from .constant import BASE_URL, GAME_TYPE, RANK_PERCENTILE
|
|
||||||
from .model import IORank
|
|
||||||
from .schemas.league_all import FailedModel as LeagueAllFailed
|
|
||||||
from .schemas.league_all import LeagueAll
|
|
||||||
from .schemas.league_all import ValidUser as LeagueAllUser
|
|
||||||
from .schemas.user_info import FailedModel as InfoFailed
|
|
||||||
from .schemas.user_info import (
|
|
||||||
NeverPlayedLeague,
|
|
||||||
NeverRatedLeague,
|
|
||||||
UserInfo,
|
|
||||||
)
|
|
||||||
from .schemas.user_info import SuccessModel as InfoSuccess
|
|
||||||
from .schemas.user_records import FailedModel as RecordsFailed
|
|
||||||
from .schemas.user_records import SoloRecord, UserRecords
|
|
||||||
from .schemas.user_records import SuccessModel as RecordsSuccess
|
|
||||||
from .typing import Rank
|
|
||||||
|
|
||||||
driver = get_driver()
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class User(UserMeta):
|
|
||||||
ID: str | None = None
|
|
||||||
name: str | None = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class RawResponse(RawResponseMeta):
|
|
||||||
user_info: bytes | None = None
|
|
||||||
user_records: bytes | None = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ProcessedData(ProcessedDataMeta):
|
|
||||||
user_info: InfoSuccess | None = None
|
|
||||||
user_records: RecordsSuccess | None = None
|
|
||||||
|
|
||||||
|
|
||||||
def identify_user_info(info: str) -> User | MessageFormatError:
|
|
||||||
if match(r'^[a-f0-9]{24}$', info):
|
|
||||||
return User(ID=info)
|
|
||||||
if match(r'^[a-zA-Z0-9_-]{3,16}$', info):
|
|
||||||
return User(name=info.lower())
|
|
||||||
return MessageFormatError('用户名/ID不合法')
|
|
||||||
|
|
||||||
|
|
||||||
class Processor(ProcessorMeta):
    """TETR.IO command processor.

    Fetches user info and solo records from the TETR.IO API and renders
    the reply text for the ``bind`` and ``query`` commands.
    """

    user: User  # target user; name / ID are resolved lazily by get_user()
    raw_response: RawResponse  # raw API payloads for this event
    processed_data: ProcessedData  # parsed and cached API responses

    def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
        super().__init__(event_id, user, command_args)
        self.raw_response = RawResponse()
        self.processed_data = ProcessedData()

    @property
    def game_platform(self) -> GameType:
        """Game platform identifier handled by this processor."""
        return GAME_TYPE

    async def handle_bind(self, platform: str, account: str) -> str:
        """Handle a bind command: link a chat account to a TETR.IO account."""
        self.command_type = 'bind'
        await self.get_user()
        if self.user.ID is None:
            # NOTE(review): bare `raise` outside an except block raises
            # RuntimeError; original FIXME said the author could not find a
            # way to narrow the type here (get_user() should have set ID).
            raise  # FIXME: 不知道怎么才能把这类型给变过来了
        async with get_session() as session:
            return await create_or_update_bind(
                session=session,
                chat_platform=platform,
                chat_account=account,
                game_platform=GAME_TYPE,
                game_account=self.user.ID,
            )

    async def handle_query(self) -> str:
        """Handle a query command: build the stats summary message."""
        self.command_type = 'query'
        await self.get_user()
        return await self.generate_message()

    async def get_user(self) -> None:
        """Fill in the missing ``name`` / ``ID`` fields of ``self.user``.

        The user-info response is cached by ``get_user_info``, so at most
        one API request is made even when both fields are missing.
        """
        if self.user.name is None:
            self.user.name = (await self.get_user_info()).data.user.username
        if self.user.ID is None:
            self.user.ID = (await self.get_user_info()).data.user.id

    async def get_user_info(self) -> InfoSuccess:
        """Fetch (and cache) the user-info endpoint.

        Raises:
            RequestError: if the API reports a failure.
        """
        if self.processed_data.user_info is None:
            self.raw_response.user_info = await Request.request(
                splice_url([BASE_URL, 'users/', f'{self.user.ID or self.user.name}'])
            )
            user_info: UserInfo = parse_raw_as(UserInfo, self.raw_response.user_info)  # type: ignore[arg-type]
            if isinstance(user_info, InfoFailed):
                raise RequestError(f'用户信息请求错误:\n{user_info.error}')
            self.processed_data.user_info = user_info
        return self.processed_data.user_info

    async def get_user_records(self) -> RecordsSuccess:
        """Fetch (and cache) the solo records (40L / Blitz) endpoint.

        Raises:
            RequestError: if the API reports a failure.
        """
        if self.processed_data.user_records is None:
            self.raw_response.user_records = await Request.request(
                splice_url(
                    [
                        BASE_URL,
                        'users/',
                        f'{self.user.ID or self.user.name}/',
                        'records',
                    ]
                )
            )
            user_records: UserRecords = parse_raw_as(
                UserRecords,  # type: ignore[arg-type]
                self.raw_response.user_records,
            )
            if isinstance(user_records, RecordsFailed):
                raise RequestError(f'用户Solo数据请求错误:\n{user_records.error}')
            self.processed_data.user_records = user_records
        return self.processed_data.user_records

    async def generate_message(self) -> str:
        """Render the full stats reply (league stats, then 40L and Blitz)."""
        user_info = await self.get_user_info()
        user_name = user_info.data.user.username.upper()
        league = user_info.data.user.league
        ret_message = ''
        if isinstance(league, NeverPlayedLeague):
            ret_message += f'用户 {user_name} 没有排位统计数据'
        else:
            if isinstance(league, NeverRatedLeague):
                ret_message += f'用户 {user_name} 暂未完成定级赛, 最近十场的数据:'
            elif league.rank == 'z':
                # rank 'z' is the "no rank" marker, but a TR is still shown
                ret_message += f'用户 {user_name} 暂无段位, {round(league.rating,2)} TR'
            else:
                ret_message += (
                    f'{league.rank.upper()} 段用户 {user_name} {round(league.rating,2)} TR (#{league.standing})'
                )
                ret_message += f', 段位分 {round(league.glicko,2)}±{round(league.rd,2)}, 最近十场的数据:'
            # lpm = lines per minute, derived from pieces per second
            lpm = league.pps * 24
            ret_message += f"\nL'PM: {round(lpm, 2)} ( {league.pps} pps )"
            ret_message += f'\nAPM: {league.apm} ( x{round(league.apm/(league.pps*24),2)} )'
            if league.vs is not None:
                # ADPM approximated as 0.6 × VS
                adpm = league.vs * 0.6
                ret_message += f'\nADPM: {round(adpm,2)} ( x{round(adpm/lpm,2)} ) ( {league.vs}vs )'
        user_records = await self.get_user_records()
        sprint = user_records.data.records.sprint
        if sprint.record is not None:
            if not isinstance(sprint.record, SoloRecord):
                raise WhatTheFuckError('40L记录不是单人记录')
            # final_time is in milliseconds
            ret_message += f'\n40L: {round(sprint.record.endcontext.final_time/1000,2)}s'
            ret_message += f' ( #{sprint.rank} )' if sprint.rank is not None else ''
        blitz = user_records.data.records.blitz
        if blitz.record is not None:
            if not isinstance(blitz.record, SoloRecord):
                raise WhatTheFuckError('Blitz记录不是单人记录')
            ret_message += f'\nBlitz: {blitz.record.endcontext.score}'
            ret_message += f' ( #{blitz.rank} )' if blitz.rank is not None else ''
        return ret_message
|
|
||||||
|
|
||||||
|
|
||||||
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
async def get_io_rank_data() -> None:
    """Pull the full TETR.IO league export and store per-rank statistics.

    Runs four times a day; each run appends one ``IORank`` row per rank
    with the rank's TR cut-off line, player count, and the
    min / average / max of pps, apm and vs.

    Raises:
        RequestError: if the league export request fails.
    """
    league_all: LeagueAll = parse_raw_as(
        LeagueAll,  # type: ignore[arg-type]
        await Request.request(splice_url([BASE_URL, 'users/lists/league/all'])),
    )
    if isinstance(league_all, LeagueAllFailed):
        raise RequestError(f'用户Solo数据请求错误:\n{league_all.error}')

    # key extractors for the extreme-value helpers below
    def pps(user: LeagueAllUser) -> float:
        return user.league.pps

    def apm(user: LeagueAllUser) -> float:
        return user.league.apm

    def vs(user: LeagueAllUser) -> float:
        return user.league.vs

    def _min(users: list[LeagueAllUser], field: Callable[[LeagueAllUser], float]) -> LeagueAllUser:
        return min(users, key=field)

    def _max(users: list[LeagueAllUser], field: Callable[[LeagueAllUser], float]) -> LeagueAllUser:
        return max(users, key=field)

    def build_extremes_data(
        users: list[LeagueAllUser],
        field: Callable[[LeagueAllUser], float],
        sort: Callable[[list[LeagueAllUser], Callable[[LeagueAllUser], float]], LeagueAllUser],
    ) -> tuple[dict[str, str], float]:
        # Pick the extreme user according to `sort` and return (user-as-dict, value).
        user = sort(users, field)
        return asdict(User(ID=user.id, name=user.username)), field(user)

    users = [i for i in league_all.data.users if isinstance(i, LeagueAllUser)]
    # group players by rank
    rank_to_users: defaultdict[Rank, list[LeagueAllUser]] = defaultdict(list)
    for i in users:
        rank_to_users[i.league.rank].append(i)
    rank_info: list[IORank] = []
    for rank, percentile in RANK_PERCENTILE.items():
        # the export appears to be sorted by TR, so the rank's TR line sits
        # at its percentile position — TODO confirm ordering guarantee
        offset = floor((percentile / 100) * len(users)) - 1
        tr_line = users[offset].league.rating
        rank_users = rank_to_users[rank]
        rank_info.append(
            IORank(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_users),
                low_pps=(build_extremes_data(rank_users, pps, _min)),
                low_apm=(build_extremes_data(rank_users, apm, _min)),
                low_vs=(build_extremes_data(rank_users, vs, _min)),
                # NOTE(review): means are taken over a *set*, so duplicate
                # values are collapsed before averaging — confirm intended
                avg_pps=mean({i.league.pps for i in rank_users}),
                avg_apm=mean({i.league.apm for i in rank_users}),
                avg_vs=mean({i.league.vs for i in rank_users}),
                high_pps=(build_extremes_data(rank_users, pps, _max)),
                high_apm=(build_extremes_data(rank_users, apm, _max)),
                high_vs=(build_extremes_data(rank_users, vs, _max)),
            )
        )
    async with get_session() as session:
        session.add_all(rank_info)
        await session.commit()
|
|
||||||
|
|
||||||
|
|
||||||
@driver.on_startup
async def check_rank_data() -> None:
    """On startup, refresh the rank table if it is missing or older than 6 hours."""
    async with get_session() as session:
        query = select(IORank.create_time).order_by(IORank.id.desc()).limit(1)
        newest = await session.scalar(query)
    # DB timestamps are naive UTC, hence the tzinfo replace before comparing.
    is_stale = newest is None or datetime.now(tz=UTC) - newest.replace(tzinfo=UTC) > timedelta(hours=6)
    if is_stale:
        await get_io_rank_data()
|
|
||||||
@@ -0,0 +1,388 @@
|
|||||||
|
import contextlib
|
||||||
|
from asyncio import gather
|
||||||
|
from collections import defaultdict
|
||||||
|
from datetime import date, datetime, timedelta, timezone
|
||||||
|
from hashlib import md5
|
||||||
|
from math import ceil, floor
|
||||||
|
from typing import ClassVar
|
||||||
|
from urllib.parse import urlunparse
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
from aiofiles import open
|
||||||
|
from nonebot import get_driver
|
||||||
|
from nonebot.adapters import Bot, Event
|
||||||
|
from nonebot.compat import type_validate_json
|
||||||
|
from nonebot.matcher import Matcher
|
||||||
|
from nonebot_plugin_alconna import At
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_apscheduler import scheduler # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_localstore import get_data_file # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from sqlalchemy import select
|
||||||
|
from zstandard import ZstdDecompressor
|
||||||
|
|
||||||
|
from ...db import query_bind_info, trigger
|
||||||
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
|
from ...utils.platform import get_platform
|
||||||
|
from ...utils.render import TETRIOInfo, render
|
||||||
|
from ...utils.render.schemas.base import Avatar
|
||||||
|
from ...utils.render.schemas.tetrio_info import Data, Radar, Ranking, TetraLeague, TetraLeagueHistory
|
||||||
|
from ...utils.render.schemas.tetrio_info import User as TemplateUser
|
||||||
|
from ...utils.screenshot import screenshot
|
||||||
|
from ...utils.typing import Me, Number
|
||||||
|
from ..constant import CANT_VERIFY_MESSAGE
|
||||||
|
from . import alc
|
||||||
|
from .api import Player, User, UserInfoSuccess
|
||||||
|
from .api.models import TETRIOHistoricalData
|
||||||
|
from .api.schemas.tetra_league import TetraLeagueSuccess
|
||||||
|
from .api.schemas.user_info import NeverPlayedLeague, NeverRatedLeague, RatedLeague
|
||||||
|
from .api.schemas.user_records import SoloModeRecord, SoloRecord
|
||||||
|
from .constant import GAME_TYPE, TR_MAX, TR_MIN
|
||||||
|
from .model import IORank
|
||||||
|
|
||||||
|
# Alias for the UTC timezone used throughout this module.
UTC = timezone.utc

# NoneBot driver instance, used to register startup hooks.
driver = get_driver()
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('query')
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me, event_session: EventSession):
    """Handle a ``query`` for a bound account (self or an @-mentioned user).

    Looks up the chat→game binding, then replies with the rendered stats
    image, falling back to the plain-text summary for users whose league
    data cannot be rendered.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[],
    ):
        async with get_session() as session:
            bind = await query_bind_info(
                session=session,
                chat_platform=get_platform(bot),
                chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
                game_platform=GAME_TYPE,
            )
        if bind is None:
            await matcher.finish('未查询到绑定信息')
        message = UniMessage(CANT_VERIFY_MESSAGE)
        player = Player(user_id=bind.game_account, trust=True)
        user, user_info, user_records = await gather(player.user, player.get_info(), player.get_records())
        sprint = user_records.data.records.sprint
        blitz = user_records.data.records.blitz
        # make_query_image raises TypeError for users without a fully rated
        # league; suppress it so the plain-text fallback below is reachable
        # (same pattern as the explicit-account handler).  Previously the
        # suppress was commented out, which made the fallback dead code and
        # crashed the handler for unrated users.
        with contextlib.suppress(TypeError):
            message += UniMessage.image(raw=await make_query_image(user, user_info, sprint.record, blitz.record))
            await message.finish()
        message += make_query_text(user_info, sprint, blitz)
        await message.finish()
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('query')
async def _(account: Player, event_session: EventSession):
    """Handle a ``query`` that names a TETR.IO account explicitly."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[],
    ):
        user, info, records = await gather(account.user, account.get_info(), account.get_records())
        sprint_mode = records.data.records.sprint
        blitz_mode = records.data.records.blitz
        # Image rendering raises TypeError for users without a rated league;
        # in that case fall through to the plain-text reply.
        with contextlib.suppress(TypeError):
            image = await make_query_image(user, info, sprint_mode.record, blitz_mode.record)
            await UniMessage.image(raw=image).finish()
        await make_query_text(info, sprint_mode, blitz_mode).finish()
|
||||||
|
|
||||||
|
|
||||||
|
def get_value_bounds(values: list[int | float]) -> tuple[int, int]:
|
||||||
|
value_max = 10 * ceil(max(values) / 10)
|
||||||
|
value_min = 10 * floor(min(values) / 10)
|
||||||
|
return value_max, value_min
|
||||||
|
|
||||||
|
|
||||||
|
def get_split(value_max: int, value_min: int) -> tuple[int, int]:
    """Find an axis tick interval for the TR history chart.

    Widens the ``[value_min, value_max]`` window symmetrically by
    ``offset`` until the span divides evenly into four ticks that are a
    multiple of 10, shifting the whole window by ``overflow`` whenever
    it would leave the valid TR range ``[TR_MIN, TR_MAX]``.

    Returns:
        ``(split_value, shift)`` — the tick size and the net shift
        (``offset + overflow``) that was applied to the window.
    """
    offset = 0
    overflow = 0

    while True:
        # window poked above the TR ceiling: shift the whole window down
        if (new_max_value := value_max + offset + overflow) > TR_MAX:
            overflow -= 1
            continue
        # window dropped below the TR floor: shift the whole window up
        if (new_min_value := value_min - offset + overflow) < TR_MIN:
            overflow += 1
            continue
        # accept once the widened span splits into 4 ticks of a multiple of 10
        # (the overflow shift cancels out of the max-min difference)
        if ((new_max_value - new_min_value) / 40).is_integer():
            split_value = int((value_max + offset - (value_min - offset)) / 4)
            break
        offset += 1
    return split_value, offset + overflow
|
||||||
|
|
||||||
|
|
||||||
|
def get_specified_point(
    previous_point: Data,
    behind_point: Data,
    point_time: datetime,
) -> Data:
    """Linearly interpolate the TR value at *point_time*.

    Draws a straight line through *previous_point* and *behind_point*
    and reads off the TR at *point_time*.

    Args:
        previous_point: The earlier data point.
        behind_point: The later data point.
        point_time: The instant to evaluate.

    Returns:
        A new ``Data`` at *point_time* with the interpolated TR.
    """
    t0 = datetime.timestamp(previous_point.record_at)
    t1 = datetime.timestamp(behind_point.record_at)
    # slope of TR over time between the two known points
    slope = (behind_point.tr - previous_point.tr) / (t1 - t0)
    interpolated_tr = previous_point.tr + slope * (datetime.timestamp(point_time) - t0)
    return Data(record_at=point_time, tr=interpolated_tr)
|
||||||
|
|
||||||
|
|
||||||
|
async def query_historical_data(user: User, user_info: UserInfoSuccess) -> list[Data]:
    """Collect roughly the last 9 days of TR history for *user*.

    Merges locally stored 'User Info' snapshots with the cached
    full-export data, then normalizes the end points so the series spans
    exactly ``[today - 9 days, today]``, interpolating where necessary.
    """
    today = datetime.now(ZoneInfo('Asia/Shanghai')).replace(hour=0, minute=0, second=0, microsecond=0)
    forward = timedelta(days=9)
    start_time = (today - forward).astimezone(UTC)
    async with get_session() as session:
        historical_data = (
            await session.scalars(
                select(TETRIOHistoricalData)
                .where(TETRIOHistoricalData.update_time >= start_time)
                .where(TETRIOHistoricalData.user_unique_identifier == user.unique_identifier)
                .where(TETRIOHistoricalData.api_type == 'User Info')
            )
        ).all()
        if historical_data:
            # also grab the single snapshot just before the window, so the
            # series can later be interpolated back to the window start
            extra = (
                await session.scalars(
                    select(TETRIOHistoricalData)
                    .where(TETRIOHistoricalData.user_unique_identifier == user.unique_identifier)
                    .where(TETRIOHistoricalData.api_type == 'User Info')
                    .order_by(TETRIOHistoricalData.id.desc())
                    .where(TETRIOHistoricalData.id < min([i.id for i in historical_data]))
                    .limit(1)
                )
            ).one_or_none()
            if extra is not None:
                historical_data = list(historical_data)
                historical_data.append(extra)
    full_export_data = FullExport.get_data(user.unique_identifier)
    if not historical_data and not full_export_data:
        # no history at all: draw a flat line at the current rating
        return [
            Data(record_at=today - forward, tr=user_info.data.user.league.rating),
            Data(record_at=today.replace(microsecond=1000), tr=user_info.data.user.league.rating),
        ]
    # keep only snapshots that parsed successfully and carry a rated league
    histories = [
        Data(
            record_at=i.update_time.astimezone(ZoneInfo('Asia/Shanghai')),
            tr=i.data.data.user.league.rating,
        )
        for i in historical_data
        if isinstance(i.data, UserInfoSuccess) and isinstance(i.data.data.user.league, RatedLeague)
    ] + full_export_data

    # sort by record time
    histories = sorted(histories, key=lambda x: x.record_at)
    for index, value in enumerate(histories):
        # look for a point recorded after today 00:00; need at least two
        # points so there is a neighbour to interpolate with
        if value.record_at > today and len(histories) >= 2:  # noqa: PLR2004
            histories = histories[:index] + [
                get_specified_point(histories[index - 1], histories[index], today.replace(microsecond=1000))
            ]
            break
    else:
        # nothing recorded after today: extrapolate up to "today" using the
        # API cache timestamp as the trailing anchor
        histories.append(
            get_specified_point(
                histories[-1],
                Data(record_at=user_info.cache.cached_at, tr=user_info.data.user.league.rating),
                today.replace(microsecond=1000),
            )
        )
    if histories[0].record_at < (today - forward):
        # clamp the leading point onto the window start
        histories[0] = get_specified_point(
            histories[0],
            histories[1],
            today - forward,
        )
    else:
        # pad backwards with a flat segment so the series covers the window
        histories.insert(0, Data(record_at=today - forward, tr=histories[0].tr))
    return histories
|
||||||
|
|
||||||
|
|
||||||
|
async def make_query_image(
    user: User, user_info: UserInfoSuccess, sprint: SoloRecord | None, blitz: SoloRecord | None
) -> bytes:
    """Render the stats card for *user* and return the screenshot bytes.

    Raises:
        TypeError: if the user has no fully rated league (callers catch
            this and fall back to the plain-text reply).
    """
    league = user_info.data.user.league
    if not isinstance(league, RatedLeague) or league.vs is None:
        raise TypeError
    user_name = user_info.data.user.username.upper()
    histories = await query_historical_data(user, user_info)
    value_max, value_min = get_value_bounds([i.tr for i in histories])
    split_value, offset = get_split(value_max, value_min)
    if sprint is not None:
        # final_time is in milliseconds; show "Xm Ys" for runs over a minute
        duration = timedelta(milliseconds=sprint.endcontext.final_time).total_seconds()
        sprint_value = f'{duration:.1f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.1f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'
    blitz_value = f'{blitz.endcontext.score:,}' if blitz is not None else 'N/A'
    async with HostPage(
        await render(
            'tetrio/info',
            TETRIOInfo(
                user=TemplateUser(
                    # prefer the uploaded avatar; otherwise fall back to an
                    # identicon generated from the md5 of the user id
                    avatar=f'https://tetr.io/user-content/avatars/{user_info.data.user.id}.jpg?rv={user_info.data.user.avatar_revision}'
                    if user_info.data.user.avatar_revision is not None
                    else Avatar(
                        type='identicon',
                        hash=md5(user_info.data.user.id.encode()).hexdigest(),  # noqa: S324
                    ),
                    name=user_name,
                    bio=user_info.data.user.bio,
                ),
                ranking=Ranking(
                    rating=round(league.glicko, 2),
                    rd=round(league.rd, 2),
                ),
                tetra_league=TetraLeague(
                    rank=league.rank,
                    tr=round(league.rating, 2),
                    global_rank=league.standing,
                    pps=league.pps,
                    lpm=round(lpm := (league.pps * 24), 2),  # lines per minute
                    apm=league.apm,
                    apl=round(league.apm / lpm, 2),  # attack per line
                    vs=league.vs,
                    adpm=round(adpm := (league.vs * 0.6), 2),  # ADPM = 0.6 × VS
                    adpl=round(adpm / lpm, 2),
                ),
                tetra_league_history=TetraLeagueHistory(
                    data=histories,
                    split_interval=split_value,
                    min_tr=value_min,
                    max_tr=value_max,
                    offset=offset,
                ),
                radar=Radar(
                    # derived per-piece metrics; ci / ge are composite indices
                    # computed from app (attack per piece) and dsps/dspp
                    app=(app := (league.apm / (60 * league.pps))),
                    dsps=(dsps := ((league.vs / 100) - (league.apm / 60))),
                    dspp=(dspp := (dsps / league.pps)),
                    ci=150 * dspp - 125 * app + 50 * (league.vs / league.apm) - 25,
                    ge=2 * ((app * dsps) / league.pps),
                ),
                sprint=sprint_value,
                blitz=blitz_value,
            ),
        )
    ) as page_hash:
        # serve the rendered page from the local host and screenshot it
        return await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))
|
||||||
|
|
||||||
|
|
||||||
|
def make_query_text(user_info: UserInfoSuccess, sprint: SoloModeRecord, blitz: SoloModeRecord) -> UniMessage:
    """Build the plain-text stats reply (fallback when no image is rendered)."""
    league = user_info.data.user.league
    user_name = user_info.data.user.username.upper()
    message = ''
    if isinstance(league, NeverPlayedLeague):
        message += f'用户 {user_name} 没有排位统计数据'
    else:
        if isinstance(league, NeverRatedLeague):
            message += f'用户 {user_name} 暂未完成定级赛, 最近十场的数据:'
        else:
            if league.rank == 'z':
                # rank 'z' is the "no rank" marker, but a TR is still shown
                message += f'用户 {user_name} 暂无段位, {round(league.rating,2)} TR'
            else:
                message += f'{league.rank.upper()} 段用户 {user_name} {round(league.rating,2)} TR (#{league.standing})'
            message += f', 段位分 {round(league.glicko,2)}±{round(league.rd,2)}, 最近十场的数据:'
        lpm = league.pps * 24  # lines per minute
        message += f"\nL'PM: {round(lpm, 2)} ( {league.pps} pps )"
        message += f'\nAPM: {league.apm} ( x{round(league.apm/lpm,2)} )'
        if league.vs is not None:
            adpm = league.vs * 0.6  # ADPM = 0.6 × VS
            message += f'\nADPM: {round(adpm,2)} ( x{round(adpm/lpm,2)} ) ( {league.vs}vs )'
    if sprint.record is not None:
        # final_time is in milliseconds
        message += f'\n40L: {round(sprint.record.endcontext.final_time/1000,2)}s'
        message += f' ( #{sprint.rank} )' if sprint.rank is not None else ''
    if blitz.record is not None:
        message += f'\nBlitz: {blitz.record.endcontext.score}'
        message += f' ( #{blitz.rank} )' if blitz.rank is not None else ''
    return UniMessage(message)
|
||||||
|
|
||||||
|
|
||||||
|
class FullExport:
    """In-memory cache of per-user ``(record time, TR)`` points extracted
    from the zstd-compressed Tetra League full-export snapshots on disk.
    """

    # user unique identifier -> set of (record time, TR) tuples inside the window
    cache: ClassVar[defaultdict[str, set[tuple[datetime, Number]]]] = defaultdict(set)
    # Asia/Shanghai calendar date of the last completed update(), for throttling
    latest_update: ClassVar[date | None] = None

    @classmethod
    async def init(cls) -> None:
        """Load every stored snapshot inside the history window into the cache."""
        async with get_session() as session:
            full_exports = (await session.scalars(select(IORank).where(IORank.update_time >= cls.start_time()))).all()
        await gather(
            *[
                cls._load(update_time, file_hash)
                for file_hash, update_time in {
                    i.file_hash: i.update_time for i in full_exports if i.file_hash is not None
                }.items()
            ]
        )

    @classmethod
    async def update(cls) -> None:
        """Evict points that fell out of the window, then load newer snapshots.

        Runs at most once per (Asia/Shanghai) calendar day.
        """
        if cls.latest_update == datetime.now(tz=ZoneInfo('Asia/Shanghai')).date():
            return
        start_time = cls.start_time()
        for i in cls.cache:
            cls.cache[i] = {j for j in cls.cache[i] if j[0] >= start_time}
        # FIX: find the newest *record time* already cached.  The previous
        # code used ``max(cls.cache)``, which is the maximum over the dict
        # keys — user-id strings, not datetimes — and also raised ValueError
        # on an empty cache.  Fall back to the window start when empty.
        latest_time = max(
            (record_at for records in cls.cache.values() for record_at, _ in records),
            default=start_time,
        )
        # DB timestamps appear to be stored as naive UTC (other code does
        # ``.replace(tzinfo=UTC)`` on them) — strip tzinfo for the SQL
        # comparison; TODO confirm against the IORank model definition.
        query_time = latest_time.astimezone(UTC).replace(tzinfo=None)
        async with get_session() as session:
            full_exports = (await session.scalars(select(IORank).where(IORank.update_time > query_time))).all()
        await gather(
            *[
                cls._load(update_time, file_hash)
                for file_hash, update_time in {
                    i.file_hash: i.update_time for i in full_exports if i.file_hash is not None
                }.items()
            ]
        )
        cls.latest_update = datetime.now(tz=ZoneInfo('Asia/Shanghai')).date()

    @classmethod
    def get_data(cls, unique_identifier: str) -> list[Data]:
        """Return the cached history points for one user as ``Data`` objects."""
        return [Data(record_at=i[0], tr=i[1]) for i in cls.cache[unique_identifier]]

    @classmethod
    def start_time(cls) -> datetime:
        """Start of the history window: Asia/Shanghai midnight 9 days ago, as UTC."""
        return (
            datetime.now(ZoneInfo('Asia/Shanghai')).replace(hour=0, minute=0, second=0, microsecond=0)
            - timedelta(days=9)
        ).astimezone(UTC)

    @classmethod
    async def _load(cls, update_time: datetime, file_hash: str) -> None:
        """Decompress one snapshot file and merge its users into the cache.

        A missing file invalidates the hash in the database instead of failing.
        """
        try:
            users = type_validate_json(TetraLeagueSuccess, await cls.decompress(file_hash)).data.users
        except FileNotFoundError:
            await cls.clear_invalid(file_hash)
            return
        update_time = update_time.astimezone(ZoneInfo('Asia/Shanghai'))
        for i in users:
            cls.cache[i.id].add((update_time, i.league.rating))

    @classmethod
    async def decompress(cls, file_hash: str) -> bytes:
        """Read and decompress the stored ``<hash>.json.zst`` snapshot."""
        async with open(get_data_file('nonebot_plugin_tetris_stats', f'{file_hash}.json.zst'), mode='rb') as file:
            return ZstdDecompressor().decompress(await file.read())

    @classmethod
    async def clear_invalid(cls, file_hash: str) -> None:
        """Null out *file_hash* on every row referencing it (file is gone)."""
        async with get_session() as session:
            full_exports = (await session.scalars(select(IORank).where(IORank.file_hash == file_hash))).all()
            for i in full_exports:
                i.file_hash = None
            await session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@driver.on_startup
async def _():
    """Warm the FullExport cache on startup and keep it refreshed hourly."""
    await FullExport.init()
    scheduler.add_job(FullExport.update, 'interval', hours=1)
|
||||||
@@ -0,0 +1,174 @@
|
|||||||
|
from collections import defaultdict
|
||||||
|
from collections.abc import Callable
|
||||||
|
from datetime import datetime, timedelta, timezone
|
||||||
|
from hashlib import sha512
|
||||||
|
from math import floor
|
||||||
|
from statistics import mean
|
||||||
|
from zoneinfo import ZoneInfo
|
||||||
|
|
||||||
|
from aiofiles import open
|
||||||
|
from nonebot import get_driver
|
||||||
|
from nonebot.compat import model_dump
|
||||||
|
from nonebot.matcher import Matcher
|
||||||
|
from nonebot.utils import run_sync
|
||||||
|
from nonebot_plugin_apscheduler import scheduler # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_localstore import get_data_file # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from sqlalchemy import func, select
|
||||||
|
from zstandard import ZstdCompressor
|
||||||
|
|
||||||
|
from ...db import trigger
|
||||||
|
from ...utils.exception import RequestError
|
||||||
|
from ...utils.metrics import get_metrics
|
||||||
|
from ...utils.retry import retry
|
||||||
|
from . import alc
|
||||||
|
from .api.schemas.base import FailedModel
|
||||||
|
from .api.schemas.tetra_league import ValidUser
|
||||||
|
from .api.schemas.user import User
|
||||||
|
from .api.tetra_league import full_export
|
||||||
|
from .api.typing import Rank
|
||||||
|
from .constant import GAME_TYPE, RANK_PERCENTILE
|
||||||
|
from .model import IORank
|
||||||
|
|
||||||
|
# Alias for the UTC timezone used throughout this module.
UTC = timezone.utc

# NoneBot driver instance, used to register startup hooks.
driver = get_driver()
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('rank')
async def _(matcher: Matcher, rank: Rank, event_session: EventSession):
    """Reply with the statistics of one Tetra League rank.

    Compares the latest snapshot with the one closest to 24 hours
    earlier and formats the min / avg / max metrics for the rank.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=[],
    ):
        if rank == 'z':
            await matcher.finish('暂不支持查询未知段位')
        async with get_session() as session:
            latest_data = (
                await session.scalars(select(IORank).where(IORank.rank == rank).order_by(IORank.id.desc()).limit(1))
            ).one()
            # the snapshot whose update_time is nearest to (latest - 24h)
            compare_data = (
                await session.scalars(
                    select(IORank)
                    .where(IORank.rank == rank)
                    .order_by(
                        func.abs(
                            func.julianday(IORank.update_time)
                            - func.julianday(latest_data.update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                )
            ).one()
        message = ''
        # DB timestamps are naive UTC, hence the tzinfo replace before comparing
        if (datetime.now(UTC) - latest_data.update_time.replace(tzinfo=UTC)) > timedelta(hours=7):
            message += 'Warning: 数据超过7小时未更新, 请联系Bot主人查看后台\n'
        message += f'{rank.upper()} 段 分数线 {latest_data.tr_line:.2f} TR, {latest_data.player_count} 名玩家\n'
        if compare_data.id != latest_data.id:
            message += f'对比 {(latest_data.update_time-compare_data.update_time).total_seconds()/3600:.2f} 小时前趋势: {f"↑{difference:.2f}" if (difference:=latest_data.tr_line-compare_data.tr_line) > 0 else f"↓{-difference:.2f}" if difference < 0 else "→"}'
        else:
            message += '暂无对比数据'
        # derive L'PM / ADPM / ratios from the stored raw metrics
        avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
        low_pps = get_metrics(pps=latest_data.low_pps[1])
        low_vs = get_metrics(vs=latest_data.low_vs[1])
        max_pps = get_metrics(pps=latest_data.high_pps[1])
        max_vs = get_metrics(vs=latest_data.high_vs[1])
        message += (
            '\n'
            '平均数据:\n'
            f"L'PM: {avg.lpm} ( {avg.pps} pps )\n"
            f'APM: {avg.apm} ( x{avg.apl} )\n'
            f'ADPM: {avg.adpm} ( x{avg.adpl} ) ( {avg.vs}vs )\n'
            '\n'
            '最低数据:\n'
            f"L'PM: {low_pps.lpm} ( {low_pps.pps} pps ) By: {latest_data.low_pps[0]['name'].upper()}\n"
            f'APM: {latest_data.low_apm[1]} By: {latest_data.low_apm[0]["name"].upper()}\n'
            f'ADPM: {low_vs.adpm} ( {low_vs.vs}vs ) By: {latest_data.low_vs[0]["name"].upper()}\n'
            '\n'
            '最高数据:\n'
            f"L'PM: {max_pps.lpm} ( {max_pps.pps} pps ) By: {latest_data.high_pps[0]['name'].upper()}\n"
            f'APM: {latest_data.high_apm[1]} By: {latest_data.high_apm[0]["name"].upper()}\n'
            f'ADPM: {max_vs.adpm} ( {max_vs.vs}vs ) By: {latest_data.high_vs[0]["name"].upper()}\n'
            '\n'
            f'数据更新时间: {latest_data.update_time.replace(tzinfo=UTC).astimezone(ZoneInfo("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")}'
        )
        await matcher.finish(message)
|
||||||
|
|
||||||
|
|
||||||
|
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
@retry(exception_type=RequestError, delay=timedelta(minutes=15))
async def get_tetra_league_data() -> None:
    """Snapshot the Tetra League full export into the database.

    Stores one ``IORank`` row per rank (TR line, player count,
    min/avg/max pps/apm/vs) plus a zstd-compressed copy of the raw
    export keyed by its SHA-512 hash.

    Raises:
        RequestError: if the export request fails (retried after 15 min).
    """
    league, original = await full_export(with_original=True)
    if isinstance(league, FailedModel):
        msg = f'排行榜数据请求错误:\n{league.error}'
        raise RequestError(msg)

    # key extractors for the extreme-value helpers below
    def pps(user: ValidUser) -> float:
        return user.league.pps

    def apm(user: ValidUser) -> float:
        return user.league.apm

    def vs(user: ValidUser) -> float:
        return user.league.vs

    def _min(users: list[ValidUser], field: Callable[[ValidUser], float]) -> ValidUser:
        return min(users, key=field)

    def _max(users: list[ValidUser], field: Callable[[ValidUser], float]) -> ValidUser:
        return max(users, key=field)

    def build_extremes_data(
        users: list[ValidUser],
        field: Callable[[ValidUser], float],
        sort: Callable[[list[ValidUser], Callable[[ValidUser], float]], ValidUser],
    ) -> tuple[dict[str, str], float]:
        # Pick the extreme user according to `sort` and return (user-as-dict, value).
        user = sort(users, field)
        return model_dump(User(ID=user.id, name=user.username)), field(user)

    # hash and compress off the event loop — the raw export can be large
    data_hash: str | None = await run_sync((await run_sync(sha512)(original)).hexdigest)()
    async with open(get_data_file('nonebot_plugin_tetris_stats', f'{data_hash}.json.zst'), mode='wb') as file:
        await file.write(await run_sync(ZstdCompressor(level=12, threads=-1).compress)(original))

    users = [i for i in league.data.users if isinstance(i, ValidUser)]
    # group players by rank
    rank_to_users: defaultdict[Rank, list[ValidUser]] = defaultdict(list)
    for i in users:
        rank_to_users[i.league.rank].append(i)
    rank_info: list[IORank] = []
    for rank, percentile in RANK_PERCENTILE.items():
        # the export appears to be sorted by TR, so the rank's TR line sits
        # at its percentile position — TODO confirm ordering guarantee
        offset = floor((percentile / 100) * len(users)) - 1
        tr_line = users[offset].league.rating
        rank_users = rank_to_users[rank]
        rank_info.append(
            IORank(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_users),
                low_pps=(build_extremes_data(rank_users, pps, _min)),
                low_apm=(build_extremes_data(rank_users, apm, _min)),
                low_vs=(build_extremes_data(rank_users, vs, _min)),
                # NOTE(review): means are taken over a *set*, so duplicate
                # values are collapsed before averaging — confirm intended
                avg_pps=mean({i.league.pps for i in rank_users}),
                avg_apm=mean({i.league.apm for i in rank_users}),
                avg_vs=mean({i.league.vs for i in rank_users}),
                high_pps=(build_extremes_data(rank_users, pps, _max)),
                high_apm=(build_extremes_data(rank_users, apm, _max)),
                high_vs=(build_extremes_data(rank_users, vs, _max)),
                update_time=league.cache.cached_at,
                file_hash=data_hash,
            )
        )
    async with get_session() as session:
        session.add_all(rank_info)
        await session.commit()
|
||||||
|
|
||||||
|
|
||||||
|
@driver.on_startup
|
||||||
|
async def _() -> None:
|
||||||
|
async with get_session() as session:
|
||||||
|
latest_time = await session.scalar(select(IORank.update_time).order_by(IORank.id.desc()).limit(1))
|
||||||
|
if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
|
||||||
|
await get_tetra_league_data()
|
||||||
@@ -1,63 +0,0 @@
|
|||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
from ..typing import Rank
|
|
||||||
from .base import FailedModel
|
|
||||||
from .base import SuccessModel as BaseSuccessModel
|
|
||||||
|
|
||||||
|
|
||||||
class SuccessModel(BaseSuccessModel):
|
|
||||||
class Data(BaseModel):
|
|
||||||
class ValidUser(BaseModel):
|
|
||||||
class League(BaseModel):
|
|
||||||
gamesplayed: int
|
|
||||||
gameswon: int
|
|
||||||
rating: float
|
|
||||||
glicko: float
|
|
||||||
rd: float
|
|
||||||
rank: Rank
|
|
||||||
bestrank: Rank
|
|
||||||
apm: float
|
|
||||||
pps: float
|
|
||||||
vs: float
|
|
||||||
decaying: bool
|
|
||||||
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
username: str
|
|
||||||
role: str
|
|
||||||
xp: float
|
|
||||||
league: League
|
|
||||||
supporter: bool
|
|
||||||
verified: bool
|
|
||||||
country: str | None
|
|
||||||
|
|
||||||
class InvalidUser(BaseModel):
|
|
||||||
class League(BaseModel):
|
|
||||||
gamesplayed: int
|
|
||||||
gameswon: int
|
|
||||||
rating: float
|
|
||||||
glicko: float | None
|
|
||||||
rd: float | None
|
|
||||||
rank: Rank
|
|
||||||
bestrank: Rank
|
|
||||||
apm: float | None
|
|
||||||
pps: float | None
|
|
||||||
vs: float | None
|
|
||||||
decaying: bool
|
|
||||||
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
username: str
|
|
||||||
role: str
|
|
||||||
xp: float
|
|
||||||
league: League
|
|
||||||
supporter: bool
|
|
||||||
verified: bool
|
|
||||||
country: str | None
|
|
||||||
|
|
||||||
users: list[ValidUser | InvalidUser]
|
|
||||||
|
|
||||||
data: Data
|
|
||||||
|
|
||||||
|
|
||||||
LeagueAll = SuccessModel | FailedModel
|
|
||||||
ValidUser = SuccessModel.Data.ValidUser
|
|
||||||
InvalidUser = SuccessModel.Data.InvalidUser
|
|
||||||
@@ -1,125 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
from ..typing import Rank
|
|
||||||
from .base import FailedModel
|
|
||||||
from .base import SuccessModel as BaseSuccessModel
|
|
||||||
|
|
||||||
|
|
||||||
class SuccessModel(BaseSuccessModel):
|
|
||||||
class Data(BaseModel):
|
|
||||||
class User(BaseModel):
|
|
||||||
class Badge(BaseModel):
|
|
||||||
id: str
|
|
||||||
label: str
|
|
||||||
ts: datetime | None
|
|
||||||
|
|
||||||
class NeverPlayedLeague(BaseModel):
|
|
||||||
gamesplayed: Literal[0]
|
|
||||||
gameswon: Literal[0]
|
|
||||||
rating: Literal[-1]
|
|
||||||
rank: Literal['z']
|
|
||||||
standing: Literal[-1]
|
|
||||||
standing_local: Literal[-1]
|
|
||||||
next_rank: None
|
|
||||||
prev_rank: None
|
|
||||||
next_at: Literal[-1]
|
|
||||||
prev_at: Literal[-1]
|
|
||||||
percentile: Literal[-1]
|
|
||||||
percentile_rank: Literal['z']
|
|
||||||
apm: None = Field(None)
|
|
||||||
pps: None = Field(None)
|
|
||||||
vs: None = Field(None)
|
|
||||||
decaying: bool
|
|
||||||
|
|
||||||
class NeverRatedLeague(BaseModel):
|
|
||||||
gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
|
|
||||||
gameswon: int
|
|
||||||
rating: Literal[-1]
|
|
||||||
rank: Literal['z']
|
|
||||||
standing: Literal[-1]
|
|
||||||
standing_local: Literal[-1]
|
|
||||||
next_rank: None
|
|
||||||
prev_rank: None
|
|
||||||
next_at: Literal[-1]
|
|
||||||
prev_at: Literal[-1]
|
|
||||||
percentile: Literal[-1]
|
|
||||||
percentile_rank: Literal['z']
|
|
||||||
apm: float
|
|
||||||
pps: float
|
|
||||||
vs: float
|
|
||||||
decaying: bool
|
|
||||||
|
|
||||||
class RatedLeague(BaseModel):
|
|
||||||
gamesplayed: int
|
|
||||||
gameswon: int
|
|
||||||
rating: float
|
|
||||||
rank: Rank
|
|
||||||
bestrank: Rank
|
|
||||||
standing: int
|
|
||||||
standing_local: int
|
|
||||||
next_rank: Rank | None
|
|
||||||
prev_rank: Rank | None
|
|
||||||
next_at: int
|
|
||||||
prev_at: int
|
|
||||||
percentile: float
|
|
||||||
percentile_rank: str
|
|
||||||
glicko: float
|
|
||||||
rd: float
|
|
||||||
apm: float
|
|
||||||
pps: float
|
|
||||||
vs: float | None
|
|
||||||
decaying: bool
|
|
||||||
|
|
||||||
class Connections(BaseModel):
|
|
||||||
class Discord(BaseModel):
|
|
||||||
id: str
|
|
||||||
username: str
|
|
||||||
|
|
||||||
discord: Discord | None
|
|
||||||
|
|
||||||
class Distinguishment(BaseModel):
|
|
||||||
type: str # noqa: A003
|
|
||||||
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
username: str
|
|
||||||
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'banned']
|
|
||||||
ts: datetime | None
|
|
||||||
botmaster: str | None
|
|
||||||
badges: list[Badge]
|
|
||||||
xp: float
|
|
||||||
gamesplayed: int
|
|
||||||
gameswon: int
|
|
||||||
gametime: float
|
|
||||||
country: str | None
|
|
||||||
badstanding: bool | None
|
|
||||||
supporter: bool | None # osk说是必有, 但实际上不是 fk osk
|
|
||||||
supporter_tier: int
|
|
||||||
verified: bool
|
|
||||||
league: NeverPlayedLeague | NeverRatedLeague | RatedLeague
|
|
||||||
avatar_revision: int | None
|
|
||||||
"""This user's avatar ID. Get their avatar at
|
|
||||||
|
|
||||||
https://tetr.io/user-content/avatars/{ USERID }.jpg?rv={ AVATAR_REVISION }"""
|
|
||||||
banner_revision: int | None
|
|
||||||
"""This user's banner ID. Get their banner at
|
|
||||||
|
|
||||||
https://tetr.io/user-content/banners/{ USERID }.jpg?rv={ BANNER_REVISION }
|
|
||||||
|
|
||||||
Ignore this field if the user is not a supporter."""
|
|
||||||
bio: str | None
|
|
||||||
connections: Connections
|
|
||||||
friend_count: int | None
|
|
||||||
distinguishment: Distinguishment | None
|
|
||||||
|
|
||||||
user: User
|
|
||||||
|
|
||||||
data: Data
|
|
||||||
|
|
||||||
|
|
||||||
NeverPlayedLeague = SuccessModel.Data.User.NeverPlayedLeague
|
|
||||||
NeverRatedLeague = SuccessModel.Data.User.NeverRatedLeague
|
|
||||||
RatedLeague = SuccessModel.Data.User.RatedLeague
|
|
||||||
UserInfo = SuccessModel | FailedModel
|
|
||||||
@@ -1,126 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
from .base import FailedModel
|
|
||||||
from .base import SuccessModel as BaseSuccessModel
|
|
||||||
|
|
||||||
|
|
||||||
class EndContext(BaseModel):
|
|
||||||
class Time(BaseModel):
|
|
||||||
start: int
|
|
||||||
zero: bool
|
|
||||||
locked: bool
|
|
||||||
prev: int
|
|
||||||
frameoffset: int
|
|
||||||
|
|
||||||
class Clears(BaseModel):
|
|
||||||
singles: int
|
|
||||||
doubles: int
|
|
||||||
triples: int
|
|
||||||
quads: int
|
|
||||||
pentas: int | None
|
|
||||||
realtspins: int
|
|
||||||
minitspins: int
|
|
||||||
minitspinsingles: int
|
|
||||||
tspinsingles: int
|
|
||||||
minitspindoubles: int
|
|
||||||
tspindoubles: int
|
|
||||||
tspintriples: int
|
|
||||||
tspinquads: int
|
|
||||||
allclear: int
|
|
||||||
|
|
||||||
class Garbage(BaseModel):
|
|
||||||
sent: int
|
|
||||||
received: int
|
|
||||||
attack: int | None
|
|
||||||
cleared: int
|
|
||||||
|
|
||||||
class Finesse(BaseModel):
|
|
||||||
combo: int
|
|
||||||
faults: int
|
|
||||||
perfectpieces: int
|
|
||||||
|
|
||||||
seed: int
|
|
||||||
lines: int
|
|
||||||
level_lines: int
|
|
||||||
level_lines_needed: int
|
|
||||||
inputs: int
|
|
||||||
holds: int | None
|
|
||||||
time: Time
|
|
||||||
score: int
|
|
||||||
zenlevel: int
|
|
||||||
zenprogress: int
|
|
||||||
level: int
|
|
||||||
combo: int
|
|
||||||
currentcombopower: int | None # WTF
|
|
||||||
topcombo: int
|
|
||||||
btb: int
|
|
||||||
topbtb: int
|
|
||||||
currentbtbchainpower: int | None # WTF * 2
|
|
||||||
tspins: int
|
|
||||||
piecesplaced: int
|
|
||||||
clears: Clears
|
|
||||||
garbage: Garbage
|
|
||||||
kills: int
|
|
||||||
finesse: Finesse
|
|
||||||
final_time: float = Field(..., alias='finalTime')
|
|
||||||
gametype: str
|
|
||||||
|
|
||||||
|
|
||||||
class BaseModeRecord(BaseModel):
|
|
||||||
class SoloRecord(BaseModel):
|
|
||||||
class User(BaseModel):
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
username: str
|
|
||||||
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
stream: str
|
|
||||||
replayid: str
|
|
||||||
user: User
|
|
||||||
ts: datetime
|
|
||||||
ismulti: bool | None
|
|
||||||
endcontext: EndContext
|
|
||||||
|
|
||||||
class MultiRecord(BaseModel):
|
|
||||||
class User(BaseModel):
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
username: str
|
|
||||||
|
|
||||||
id: str = Field(..., alias='_id')
|
|
||||||
stream: str
|
|
||||||
replayid: str
|
|
||||||
user: User
|
|
||||||
ts: datetime
|
|
||||||
ismulti: bool | None
|
|
||||||
endcontext: list[EndContext]
|
|
||||||
|
|
||||||
record: SoloRecord | MultiRecord | None
|
|
||||||
rank: int | None
|
|
||||||
|
|
||||||
|
|
||||||
class SuccessModel(BaseSuccessModel):
|
|
||||||
class Data(BaseModel):
|
|
||||||
class Records(BaseModel):
|
|
||||||
class Sprint(BaseModeRecord):
|
|
||||||
...
|
|
||||||
|
|
||||||
class Blitz(BaseModeRecord):
|
|
||||||
...
|
|
||||||
|
|
||||||
sprint: Sprint = Field(..., alias='40l')
|
|
||||||
blitz: Blitz
|
|
||||||
|
|
||||||
class Zen(BaseModel):
|
|
||||||
level: int
|
|
||||||
score: int
|
|
||||||
|
|
||||||
records: Records
|
|
||||||
zen: Zen
|
|
||||||
|
|
||||||
data: Data
|
|
||||||
|
|
||||||
|
|
||||||
SoloRecord = BaseModeRecord.SoloRecord
|
|
||||||
MultiRecord = BaseModeRecord.MultiRecord
|
|
||||||
UserRecords = SuccessModel | FailedModel
|
|
||||||
23
nonebot_plugin_tetris_stats/game_data_processor/schemas.py
Normal file
23
nonebot_plugin_tetris_stats/game_data_processor/schemas.py
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from ..utils.typing import GameType
|
||||||
|
|
||||||
|
|
||||||
|
class Base(BaseModel):
|
||||||
|
platform: GameType
|
||||||
|
|
||||||
|
|
||||||
|
class BaseUser(ABC, Base):
|
||||||
|
"""游戏用户"""
|
||||||
|
|
||||||
|
def __eq__(self, __value: object) -> bool:
|
||||||
|
if isinstance(__value, BaseUser):
|
||||||
|
return self.unique_identifier == __value.unique_identifier
|
||||||
|
return False
|
||||||
|
|
||||||
|
@property
|
||||||
|
@abstractmethod
|
||||||
|
def unique_identifier(self) -> str:
|
||||||
|
raise NotImplementedError
|
||||||
@@ -1,19 +1,19 @@
|
|||||||
from typing import Any
|
from arclet.alconna import Alconna, AllParam, Arg, ArgFlag, Args, CommandMeta, Option
|
||||||
|
|
||||||
from arclet.alconna import Alconna, Arg, ArgFlag, Args, CommandMeta, Option
|
|
||||||
from nonebot.adapters import Bot, Event
|
|
||||||
from nonebot.matcher import Matcher
|
|
||||||
from nonebot_plugin_alconna import At, on_alconna
|
from nonebot_plugin_alconna import At, on_alconna
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
|
|
||||||
from ...db import query_bind_info
|
from ...utils.exception import MessageFormatError
|
||||||
from ...utils.exception import NeedCatchError
|
|
||||||
from ...utils.platform import get_platform
|
|
||||||
from ...utils.typing import Me
|
from ...utils.typing import Me
|
||||||
from .. import add_default_handlers
|
from .. import add_default_handlers
|
||||||
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
||||||
from .constant import GAME_TYPE
|
from .api import Player
|
||||||
from .processor import Processor, User, identify_user_info
|
from .constant import USER_NAME
|
||||||
|
|
||||||
|
|
||||||
|
def get_player(name: str) -> Player | MessageFormatError:
|
||||||
|
if USER_NAME.match(name):
|
||||||
|
return Player(user_name=name, trust=True)
|
||||||
|
return MessageFormatError('用户名/ID不合法')
|
||||||
|
|
||||||
|
|
||||||
alc = on_alconna(
|
alc = on_alconna(
|
||||||
Alconna(
|
Alconna(
|
||||||
@@ -23,7 +23,7 @@ alc = on_alconna(
|
|||||||
Args(
|
Args(
|
||||||
Arg(
|
Arg(
|
||||||
'account',
|
'account',
|
||||||
identify_user_info,
|
get_player,
|
||||||
notice='TOP 用户名',
|
notice='TOP 用户名',
|
||||||
flags=[ArgFlag.HIDDEN],
|
flags=[ArgFlag.HIDDEN],
|
||||||
)
|
)
|
||||||
@@ -44,7 +44,7 @@ alc = on_alconna(
|
|||||||
),
|
),
|
||||||
Arg(
|
Arg(
|
||||||
'account',
|
'account',
|
||||||
identify_user_info | Me | At,
|
get_player,
|
||||||
notice='TOP 用户名',
|
notice='TOP 用户名',
|
||||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||||
),
|
),
|
||||||
@@ -54,7 +54,7 @@ alc = on_alconna(
|
|||||||
dest='query',
|
dest='query',
|
||||||
help_text='查询 TOP 游戏信息',
|
help_text='查询 TOP 游戏信息',
|
||||||
),
|
),
|
||||||
Arg('other', Any, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
|
Arg('other', AllParam, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
|
||||||
meta=CommandMeta(
|
meta=CommandMeta(
|
||||||
description='查询 TetrisOnline波兰服 的信息',
|
description='查询 TetrisOnline波兰服 的信息',
|
||||||
example='top绑定scdhh\ntop查我',
|
example='top绑定scdhh\ntop查我',
|
||||||
@@ -67,54 +67,6 @@ alc = on_alconna(
|
|||||||
aliases={'TOP'},
|
aliases={'TOP'},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
from . import bind, query # noqa: E402, F401
|
||||||
@alc.assign('bind')
|
|
||||||
async def _(bot: Bot, event: Event, matcher: Matcher, account: User):
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=account,
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(await proc.handle_bind(platform=get_platform(bot), account=event.get_user_id()))
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('query')
|
|
||||||
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
|
|
||||||
async with get_session() as session:
|
|
||||||
bind = await query_bind_info(
|
|
||||||
session=session,
|
|
||||||
chat_platform=get_platform(bot),
|
|
||||||
chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
)
|
|
||||||
if bind is None:
|
|
||||||
await matcher.finish('未查询到绑定信息')
|
|
||||||
message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=User(name=bind.game_account),
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(message + await proc.handle_query())
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('query')
|
|
||||||
async def _(event: Event, matcher: Matcher, account: User):
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=account,
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(await proc.handle_query())
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
add_default_handlers(alc)
|
add_default_handlers(alc)
|
||||||
|
|||||||
@@ -0,0 +1,3 @@
|
|||||||
|
from .player import Player
|
||||||
|
|
||||||
|
__all__ = ['Player']
|
||||||
@@ -0,0 +1,17 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from nonebot_plugin_orm import Model
|
||||||
|
from sqlalchemy import DateTime, String
|
||||||
|
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||||
|
|
||||||
|
from ....db.models import PydanticType
|
||||||
|
from .schemas.user_profile import UserProfile
|
||||||
|
|
||||||
|
|
||||||
|
class TOPHistoricalData(MappedAsDataclass, Model):
|
||||||
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
|
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
|
||||||
|
api_type: Mapped[Literal['User Profile']] = mapped_column(String(16), index=True)
|
||||||
|
data: Mapped[UserProfile] = mapped_column(PydanticType(get_model=[], models={UserProfile}))
|
||||||
|
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
from contextlib import suppress
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from io import StringIO
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
|
from lxml import etree
|
||||||
|
from pandas import read_html
|
||||||
|
|
||||||
|
from ....db import anti_duplicate_add
|
||||||
|
from ....utils.request import Request, splice_url
|
||||||
|
from ..constant import BASE_URL, USER_NAME
|
||||||
|
from .models import TOPHistoricalData
|
||||||
|
from .schemas.user import User
|
||||||
|
from .schemas.user_profile import Data, UserProfile
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
|
class Player:
|
||||||
|
def __init__(self, *, user_name: str, trust: bool = False) -> None:
|
||||||
|
self.user_name = user_name
|
||||||
|
if not trust and not USER_NAME.match(self.user_name):
|
||||||
|
msg = 'Invalid user name'
|
||||||
|
raise ValueError(msg)
|
||||||
|
self.__user: User | None = None
|
||||||
|
self._user_profile: UserProfile | None = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
async def user(self) -> User:
|
||||||
|
if self.__user is None:
|
||||||
|
profile = await self.get_profile()
|
||||||
|
self.__user = User(user_name=profile.user_name)
|
||||||
|
return self.__user
|
||||||
|
|
||||||
|
async def get_profile(self) -> UserProfile:
|
||||||
|
"""获取用户信息"""
|
||||||
|
if self._user_profile is None:
|
||||||
|
url = splice_url([BASE_URL, 'profile.php', f'?{urlencode({"user":self.user_name})}'])
|
||||||
|
raw_user_profile = await Request.request(url, is_json=False)
|
||||||
|
self._user_profile = self._parse_profile(raw_user_profile)
|
||||||
|
await anti_duplicate_add(
|
||||||
|
TOPHistoricalData,
|
||||||
|
TOPHistoricalData(
|
||||||
|
user_unique_identifier=(await self.user).unique_identifier,
|
||||||
|
api_type='User Profile',
|
||||||
|
data=self._user_profile,
|
||||||
|
update_time=datetime.now(tz=UTC),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return self._user_profile
|
||||||
|
|
||||||
|
def _parse_profile(self, original_user_profile: bytes) -> UserProfile:
|
||||||
|
html = etree.HTML(original_user_profile)
|
||||||
|
user_name = html.xpath('//div[@class="mycontent"]/h1/text()')[0].replace("'s profile", '')
|
||||||
|
today = None
|
||||||
|
with suppress(ValueError):
|
||||||
|
today = Data(
|
||||||
|
lpm=float(str(html.xpath('//div[@class="mycontent"]/text()[3]')[0]).replace('lpm:', '').strip()),
|
||||||
|
apm=float(str(html.xpath('//div[@class="mycontent"]/text()[4]')[0]).replace('apm:', '').strip()),
|
||||||
|
)
|
||||||
|
table = StringIO(
|
||||||
|
etree.tostring(
|
||||||
|
html.xpath('//div[@class="mycontent"]/table[@class="mytable"]')[0],
|
||||||
|
encoding='utf-8',
|
||||||
|
).decode()
|
||||||
|
)
|
||||||
|
dataframe = read_html(table, encoding='utf-8', header=0)[0]
|
||||||
|
total: list[Data] = []
|
||||||
|
for _, value in dataframe.iterrows():
|
||||||
|
total.append(Data(lpm=value['lpm'], apm=value['apm']))
|
||||||
|
return UserProfile(user_name=user_name, today=today, total=total)
|
||||||
@@ -0,0 +1,17 @@
|
|||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
|
from ....schemas import BaseUser
|
||||||
|
from ...constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
class User(BaseUser):
|
||||||
|
platform: Literal['TOP'] = GAME_TYPE
|
||||||
|
|
||||||
|
user_name: str
|
||||||
|
|
||||||
|
@property
|
||||||
|
@override
|
||||||
|
def unique_identifier(self) -> str:
|
||||||
|
return self.user_name
|
||||||
@@ -0,0 +1,12 @@
|
|||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
lpm: float
|
||||||
|
apm: float
|
||||||
|
|
||||||
|
|
||||||
|
class UserProfile(BaseModel):
|
||||||
|
user_name: str
|
||||||
|
today: Data | None
|
||||||
|
total: list[Data] | None
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
from urllib.parse import urlunparse
|
||||||
|
|
||||||
|
from nonebot.adapters import Bot
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_userinfo import BotUserInfo, EventUserInfo, UserInfo # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from ...db import BindStatus, create_or_update_bind, trigger
|
||||||
|
from ...utils.avatar import get_avatar
|
||||||
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
|
from ...utils.platform import get_platform
|
||||||
|
from ...utils.render import Bind, render
|
||||||
|
from ...utils.render.schemas.base import People
|
||||||
|
from ...utils.screenshot import screenshot
|
||||||
|
from . import alc
|
||||||
|
from .api import Player
|
||||||
|
from .constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('bind')
|
||||||
|
async def _(
|
||||||
|
bot: Bot,
|
||||||
|
account: Player,
|
||||||
|
event_session: EventSession,
|
||||||
|
bot_info: UserInfo = BotUserInfo(), # noqa: B008
|
||||||
|
event_user_info: UserInfo = EventUserInfo(), # noqa: B008
|
||||||
|
):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='bind',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
user = await account.user
|
||||||
|
async with get_session() as session:
|
||||||
|
bind_status = await create_or_update_bind(
|
||||||
|
session=session,
|
||||||
|
chat_platform=get_platform(bot),
|
||||||
|
chat_account=event_user_info.user_id,
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
game_account=user.unique_identifier,
|
||||||
|
)
|
||||||
|
if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
|
||||||
|
async with HostPage(
|
||||||
|
await render(
|
||||||
|
'binding',
|
||||||
|
Bind(
|
||||||
|
platform=GAME_TYPE,
|
||||||
|
status='unknown',
|
||||||
|
user=People(
|
||||||
|
avatar=await get_avatar(event_user_info, 'Data URI', None),
|
||||||
|
name=user.user_name,
|
||||||
|
),
|
||||||
|
bot=People(
|
||||||
|
avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),
|
||||||
|
name=bot_info.user_name,
|
||||||
|
),
|
||||||
|
command='top查我',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
) as page_hash:
|
||||||
|
await UniMessage.image(
|
||||||
|
raw=await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))
|
||||||
|
).finish()
|
||||||
@@ -1,4 +1,8 @@
|
|||||||
from ...utils.typing import GameType
|
from re import compile
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
GAME_TYPE: Literal['TOP'] = 'TOP'
|
||||||
|
|
||||||
GAME_TYPE: GameType = 'TOP'
|
|
||||||
BASE_URL = 'http://tetrisonline.pl/top/'
|
BASE_URL = 'http://tetrisonline.pl/top/'
|
||||||
|
|
||||||
|
USER_NAME = compile(r'^[a-zA-Z0-9_]{1,16}$')
|
||||||
|
|||||||
@@ -1,142 +0,0 @@
|
|||||||
from contextlib import suppress
|
|
||||||
from dataclasses import dataclass
|
|
||||||
from io import StringIO
|
|
||||||
from re import match
|
|
||||||
from typing import NoReturn
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from lxml import etree
|
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
from pandas import read_html
|
|
||||||
|
|
||||||
from ...db import create_or_update_bind
|
|
||||||
from ...utils.exception import MessageFormatError, RequestError
|
|
||||||
from ...utils.request import Request, splice_url
|
|
||||||
from ...utils.typing import GameType
|
|
||||||
from .. import ProcessedData as ProcessedDataMeta
|
|
||||||
from .. import Processor as ProcessorMeta
|
|
||||||
from .. import RawResponse as RawResponseMeta
|
|
||||||
from .. import User as UserMeta
|
|
||||||
from .constant import BASE_URL, GAME_TYPE
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class User(UserMeta):
|
|
||||||
name: str
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class RawResponse(RawResponseMeta):
|
|
||||||
user_profile: bytes | None = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ProcessedData(ProcessedDataMeta):
|
|
||||||
user_profile: str | None = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class Data:
|
|
||||||
lpm: float
|
|
||||||
apm: float
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class GameData:
|
|
||||||
day: Data | None
|
|
||||||
total: Data | None
|
|
||||||
|
|
||||||
|
|
||||||
def identify_user_info(info: str) -> User | MessageFormatError:
|
|
||||||
if match(r'^[a-zA-Z0-9_]{1,16}$', info):
|
|
||||||
return User(name=info)
|
|
||||||
return MessageFormatError('用户名不合法')
|
|
||||||
|
|
||||||
|
|
||||||
class Processor(ProcessorMeta):
|
|
||||||
user: User
|
|
||||||
raw_response: RawResponse
|
|
||||||
processed_data: ProcessedData
|
|
||||||
|
|
||||||
def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
|
|
||||||
super().__init__(event_id, user, command_args)
|
|
||||||
self.raw_response = RawResponse()
|
|
||||||
self.processed_data = ProcessedData()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def game_platform(self) -> GameType:
|
|
||||||
return GAME_TYPE
|
|
||||||
|
|
||||||
async def handle_bind(self, platform: str, account: str) -> str:
|
|
||||||
"""处理绑定消息"""
|
|
||||||
self.command_type = 'bind'
|
|
||||||
await self.check_user()
|
|
||||||
async with get_session() as session:
|
|
||||||
return await create_or_update_bind(
|
|
||||||
session=session,
|
|
||||||
chat_platform=platform,
|
|
||||||
chat_account=account,
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
game_account=self.user.name,
|
|
||||||
)
|
|
||||||
|
|
||||||
async def handle_query(self) -> str:
|
|
||||||
"""处理查询消息"""
|
|
||||||
self.command_type = 'query'
|
|
||||||
await self.check_user()
|
|
||||||
return await self.generate_message()
|
|
||||||
|
|
||||||
async def get_user_profile(self) -> str:
|
|
||||||
"""获取用户信息"""
|
|
||||||
if self.processed_data.user_profile is None:
|
|
||||||
url = splice_url([BASE_URL, 'profile.php', f'?{urlencode({"user":self.user.name})}'])
|
|
||||||
self.raw_response.user_profile = await Request.request(url, is_json=False)
|
|
||||||
self.processed_data.user_profile = self.raw_response.user_profile.decode()
|
|
||||||
return self.processed_data.user_profile
|
|
||||||
|
|
||||||
async def check_user(self) -> None | NoReturn:
|
|
||||||
if 'user not found!' in await self.get_user_profile():
|
|
||||||
raise RequestError('用户不存在!')
|
|
||||||
return None
|
|
||||||
|
|
||||||
async def get_user_name(self) -> str:
|
|
||||||
"""获取用户名"""
|
|
||||||
data = etree.HTML(await self.get_user_profile()).xpath('//div[@class="mycontent"]/h1/text()')
|
|
||||||
return data[0].replace("'s profile", '')
|
|
||||||
|
|
||||||
async def get_game_data(self) -> GameData:
|
|
||||||
"""获取游戏统计数据"""
|
|
||||||
html = etree.HTML(await self.get_user_profile())
|
|
||||||
day = None
|
|
||||||
with suppress(ValueError):
|
|
||||||
day = Data(
|
|
||||||
lpm=float(str(html.xpath('//div[@class="mycontent"]/text()[3]')[0]).replace('lpm:', '').strip()),
|
|
||||||
apm=float(str(html.xpath('//div[@class="mycontent"]/text()[4]')[0]).replace('apm:', '').strip()),
|
|
||||||
)
|
|
||||||
table = StringIO(
|
|
||||||
etree.tostring(
|
|
||||||
html.xpath('//div[@class="mycontent"]/table[@class="mytable"]')[0],
|
|
||||||
encoding='utf-8',
|
|
||||||
).decode()
|
|
||||||
)
|
|
||||||
dataframe = read_html(table, encoding='utf-8', header=0)[0]
|
|
||||||
total = Data(lpm=dataframe['lpm'].mean(), apm=dataframe['apm'].mean()) if len(dataframe) != 0 else None
|
|
||||||
return GameData(day=day, total=total)
|
|
||||||
|
|
||||||
async def generate_message(self) -> str:
|
|
||||||
"""生成消息"""
|
|
||||||
game_data = await self.get_game_data()
|
|
||||||
message = ''
|
|
||||||
if game_data.day is not None:
|
|
||||||
message += f'用户 {self.user.name} 24小时内统计数据为: '
|
|
||||||
message += f"\nL'PM: {round(game_data.day.lpm,2)} ( {round(game_data.day.lpm/24,2)} pps )"
|
|
||||||
message += f'\nAPM: {round(game_data.day.apm,2)} ( x{round(game_data.day.apm/game_data.day.lpm,2)} )'
|
|
||||||
else:
|
|
||||||
message += f'用户 {self.user.name} 暂无24小时内统计数据'
|
|
||||||
if game_data.total is not None:
|
|
||||||
message += '\n历史统计数据为: '
|
|
||||||
message += f"\nL'PM: {round(game_data.total.lpm,2)} ( {round(game_data.total.lpm/24,2)} pps )"
|
|
||||||
message += f'\nAPM: {round(game_data.total.apm,2)} ( x{round(game_data.total.apm/game_data.total.lpm,2)} )'
|
|
||||||
else:
|
|
||||||
message += '\n暂无历史统计数据'
|
|
||||||
return message
|
|
||||||
@@ -0,0 +1,73 @@
|
|||||||
|
from nonebot.adapters import Bot, Event
|
||||||
|
from nonebot.matcher import Matcher
|
||||||
|
from nonebot_plugin_alconna import At
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from ...db import query_bind_info, trigger
|
||||||
|
from ...utils.metrics import get_metrics
|
||||||
|
from ...utils.platform import get_platform
|
||||||
|
from ...utils.typing import Me
|
||||||
|
from ..constant import CANT_VERIFY_MESSAGE
|
||||||
|
from . import alc
|
||||||
|
from .api import Player
|
||||||
|
from .api.schemas.user_profile import UserProfile
|
||||||
|
from .constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('query')
|
||||||
|
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me, event_session: EventSession):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='query',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
async with get_session() as session:
|
||||||
|
bind = await query_bind_info(
|
||||||
|
session=session,
|
||||||
|
chat_platform=get_platform(bot),
|
||||||
|
chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
)
|
||||||
|
if bind is None:
|
||||||
|
await matcher.finish('未查询到绑定信息')
|
||||||
|
message = CANT_VERIFY_MESSAGE
|
||||||
|
await (message + make_query_text(await Player(user_name=bind.game_account, trust=True).get_profile())).finish()
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('query')
|
||||||
|
async def _(account: Player, event_session: EventSession):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='query',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
await (make_query_text(await account.get_profile())).finish()
|
||||||
|
|
||||||
|
|
||||||
|
def make_query_text(profile: UserProfile) -> UniMessage:
|
||||||
|
message = ''
|
||||||
|
if profile.today is not None:
|
||||||
|
today = get_metrics(lpm=profile.today.lpm, apm=profile.today.apm)
|
||||||
|
message += f'用户 {profile.user_name} 24小时内统计数据为: '
|
||||||
|
message += f"\nL'PM: {today.lpm} ( {today.pps} pps )"
|
||||||
|
message += f'\nAPM: {today.apm} ( x{today.apl} )'
|
||||||
|
else:
|
||||||
|
message += f'用户 {profile.user_name} 暂无24小时内统计数据'
|
||||||
|
if profile.total is not None:
|
||||||
|
total_lpm = total_apm = 0.0
|
||||||
|
for value in profile.total:
|
||||||
|
total_lpm += value.lpm
|
||||||
|
total_apm += value.apm
|
||||||
|
num = len(profile.total)
|
||||||
|
total = get_metrics(lpm=total_lpm / num, apm=total_apm / num)
|
||||||
|
message += '\n历史统计数据为: '
|
||||||
|
message += f"\nL'PM: {total.lpm} ( {total.pps} pps )"
|
||||||
|
message += f'\nAPM: {total.apm} ( x{total.apl} )'
|
||||||
|
else:
|
||||||
|
message += '\n暂无历史统计数据'
|
||||||
|
return UniMessage(message)
|
||||||
@@ -1,19 +1,24 @@
|
|||||||
from typing import Any
|
from arclet.alconna import Alconna, AllParam, Arg, ArgFlag, Args, CommandMeta, Option
|
||||||
|
|
||||||
from arclet.alconna import Alconna, Arg, ArgFlag, Args, CommandMeta, Option
|
|
||||||
from nonebot.adapters import Bot, Event
|
|
||||||
from nonebot.matcher import Matcher
|
|
||||||
from nonebot_plugin_alconna import At, on_alconna
|
from nonebot_plugin_alconna import At, on_alconna
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
|
|
||||||
from ...db import query_bind_info
|
from ...utils.exception import MessageFormatError
|
||||||
from ...utils.exception import NeedCatchError
|
|
||||||
from ...utils.platform import get_platform
|
|
||||||
from ...utils.typing import Me
|
from ...utils.typing import Me
|
||||||
from .. import add_default_handlers
|
from .. import add_default_handlers
|
||||||
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
from ..constant import BIND_COMMAND, QUERY_COMMAND
|
||||||
from .constant import GAME_TYPE
|
from .api import Player
|
||||||
from .processor import Processor, User, identify_user_info
|
from .constant import USER_NAME
|
||||||
|
|
||||||
|
|
||||||
|
def get_player(teaid_or_name: str) -> Player | MessageFormatError:
|
||||||
|
if (
|
||||||
|
teaid_or_name.startswith(('onebot-', 'qqguild-', 'kook-', 'discord-'))
|
||||||
|
and teaid_or_name.split('-', maxsplit=1)[1].isdigit()
|
||||||
|
):
|
||||||
|
return Player(teaid=teaid_or_name, trust=True)
|
||||||
|
if USER_NAME.match(teaid_or_name) and not teaid_or_name.isdigit() and 2 <= len(teaid_or_name) <= 18: # noqa: PLR2004
|
||||||
|
return Player(user_name=teaid_or_name, trust=True)
|
||||||
|
return MessageFormatError('用户名/ID不合法')
|
||||||
|
|
||||||
|
|
||||||
alc = on_alconna(
|
alc = on_alconna(
|
||||||
Alconna(
|
Alconna(
|
||||||
@@ -23,7 +28,7 @@ alc = on_alconna(
|
|||||||
Args(
|
Args(
|
||||||
Arg(
|
Arg(
|
||||||
'account',
|
'account',
|
||||||
identify_user_info,
|
get_player,
|
||||||
notice='茶服 用户名 / TeaID',
|
notice='茶服 用户名 / TeaID',
|
||||||
flags=[ArgFlag.HIDDEN],
|
flags=[ArgFlag.HIDDEN],
|
||||||
)
|
)
|
||||||
@@ -44,7 +49,7 @@ alc = on_alconna(
|
|||||||
),
|
),
|
||||||
Arg(
|
Arg(
|
||||||
'account',
|
'account',
|
||||||
identify_user_info,
|
get_player,
|
||||||
notice='茶服 用户名 / TeaID',
|
notice='茶服 用户名 / TeaID',
|
||||||
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
|
||||||
),
|
),
|
||||||
@@ -55,7 +60,7 @@ alc = on_alconna(
|
|||||||
dest='query',
|
dest='query',
|
||||||
help_text='查询 茶服 游戏信息',
|
help_text='查询 茶服 游戏信息',
|
||||||
),
|
),
|
||||||
Arg('other', Any),
|
Arg('other', AllParam, flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL]),
|
||||||
meta=CommandMeta(
|
meta=CommandMeta(
|
||||||
description='查询 TetrisOnline茶服 的信息',
|
description='查询 TetrisOnline茶服 的信息',
|
||||||
example='茶服查我',
|
example='茶服查我',
|
||||||
@@ -68,78 +73,7 @@ alc = on_alconna(
|
|||||||
aliases={'tos', 'TOS'},
|
aliases={'tos', 'TOS'},
|
||||||
)
|
)
|
||||||
|
|
||||||
try:
|
|
||||||
from nonebot.adapters.onebot.v11 import GROUP, MessageEvent
|
|
||||||
from nonebot.adapters.onebot.v11 import Bot as OB11Bot
|
|
||||||
|
|
||||||
@alc.assign('bind')
|
|
||||||
async def _(event: MessageEvent, matcher: Matcher):
|
|
||||||
await matcher.finish('QQ 平台无需绑定')
|
|
||||||
|
|
||||||
@alc.assign('query')
|
|
||||||
async def _(bot: OB11Bot, event: MessageEvent, matcher: Matcher, target: At | Me):
|
|
||||||
if event.is_tome() and await GROUP(bot, event):
|
|
||||||
await matcher.finish('不能查询bot的信息')
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=User(teaid=target.target if isinstance(target, At) else event.get_user_id()),
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(await proc.handle_query())
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('bind')
|
|
||||||
async def _(bot: Bot, event: Event, matcher: Matcher, account: User):
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=account,
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(await proc.handle_bind(platform=get_platform(bot), account=event.get_user_id()))
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('query')
|
|
||||||
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me):
|
|
||||||
async with get_session() as session:
|
|
||||||
bind = await query_bind_info(
|
|
||||||
session=session,
|
|
||||||
chat_platform=get_platform(bot),
|
|
||||||
chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
)
|
|
||||||
if bind is None:
|
|
||||||
await matcher.finish('未查询到绑定信息')
|
|
||||||
message = '* 由于无法验证绑定信息, 不能保证查询到的用户为本人\n'
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=User(name=bind.game_account),
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(message + await proc.handle_query())
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
|
||||||
@alc.assign('query')
|
|
||||||
async def _(event: Event, matcher: Matcher, account: User):
|
|
||||||
proc = Processor(
|
|
||||||
event_id=id(event),
|
|
||||||
user=account,
|
|
||||||
command_args=[],
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
await matcher.finish(await proc.handle_query())
|
|
||||||
except NeedCatchError as e:
|
|
||||||
await matcher.finish(str(e))
|
|
||||||
|
|
||||||
|
from . import bind, query # noqa: E402, F401
|
||||||
|
|
||||||
add_default_handlers(alc)
|
add_default_handlers(alc)
|
||||||
|
|||||||
@@ -0,0 +1,3 @@
|
|||||||
|
from .player import Player
|
||||||
|
|
||||||
|
__all__ = ['Player']
|
||||||
@@ -0,0 +1,20 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from nonebot_plugin_orm import Model
|
||||||
|
from sqlalchemy import DateTime, String
|
||||||
|
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
|
||||||
|
|
||||||
|
from ....db.models import PydanticType
|
||||||
|
from .schemas.user_info import UserInfoSuccess
|
||||||
|
from .schemas.user_profile import UserProfile
|
||||||
|
|
||||||
|
|
||||||
|
class TOSHistoricalData(MappedAsDataclass, Model):
|
||||||
|
id: Mapped[int] = mapped_column(init=False, primary_key=True)
|
||||||
|
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
|
||||||
|
api_type: Mapped[Literal['User Info', 'User Profile']] = mapped_column(String(16), index=True)
|
||||||
|
data: Mapped[UserInfoSuccess | UserProfile] = mapped_column(
|
||||||
|
PydanticType(get_model=[], models={UserInfoSuccess, UserProfile})
|
||||||
|
)
|
||||||
|
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
|
||||||
@@ -0,0 +1,128 @@
|
|||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import overload
|
||||||
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
|
from httpx import TimeoutException
|
||||||
|
from nonebot.compat import type_validate_json
|
||||||
|
|
||||||
|
from ....db import anti_duplicate_add
|
||||||
|
from ....utils.exception import RequestError
|
||||||
|
from ....utils.request import Request, splice_url
|
||||||
|
from ..constant import BASE_URL, USER_NAME
|
||||||
|
from .models import TOSHistoricalData
|
||||||
|
from .schemas.user import User
|
||||||
|
from .schemas.user_info import UserInfo, UserInfoSuccess
|
||||||
|
from .schemas.user_profile import UserProfile
|
||||||
|
|
||||||
|
UTC = timezone.utc
|
||||||
|
|
||||||
|
|
||||||
|
class Player:
|
||||||
|
@overload
|
||||||
|
def __init__(self, *, teaid: str, trust: bool = False): ...
|
||||||
|
@overload
|
||||||
|
def __init__(self, *, user_name: str, trust: bool = False): ...
|
||||||
|
def __init__(self, *, teaid: str | None = None, user_name: str | None = None, trust: bool = False):
|
||||||
|
self.teaid = teaid
|
||||||
|
self.user_name = user_name
|
||||||
|
if not trust:
|
||||||
|
if self.teaid is not None:
|
||||||
|
if (
|
||||||
|
not self.teaid.startswith(('onebot-', 'qqguild-', 'kook-', 'discord-'))
|
||||||
|
or not self.teaid.split('-', maxsplit=1)[1].isdigit()
|
||||||
|
):
|
||||||
|
msg = 'Invalid teaid'
|
||||||
|
raise ValueError(msg)
|
||||||
|
elif self.user_name is not None:
|
||||||
|
if not USER_NAME.match(self.user_name) or self.user_name.isdigit() or 2 > len(self.user_name) > 18: # noqa: PLR2004
|
||||||
|
msg = 'Invalid user name'
|
||||||
|
raise ValueError(msg)
|
||||||
|
else:
|
||||||
|
msg = 'Invalid user'
|
||||||
|
raise ValueError(msg)
|
||||||
|
self.__user: User | None = None
|
||||||
|
self._user_info: UserInfoSuccess | None = None
|
||||||
|
self._user_profile: dict[str, UserProfile] = {}
|
||||||
|
|
||||||
|
@property
|
||||||
|
async def user(self) -> User:
|
||||||
|
if self.__user is None:
|
||||||
|
user_info = await self.get_info()
|
||||||
|
self.__user = User(teaid=user_info.data.teaid, name=user_info.data.name)
|
||||||
|
self.teaid = user_info.data.teaid
|
||||||
|
self.user_name = user_info.data.name
|
||||||
|
return self.__user
|
||||||
|
|
||||||
|
async def get_info(self) -> UserInfoSuccess:
|
||||||
|
"""获取用户信息"""
|
||||||
|
if self._user_info is None:
|
||||||
|
if self.teaid is not None:
|
||||||
|
url = [
|
||||||
|
splice_url(
|
||||||
|
[
|
||||||
|
i,
|
||||||
|
'getTeaIdInfo',
|
||||||
|
f'?{urlencode({"teaId":self.teaid})}',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
for i in BASE_URL
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
url = [
|
||||||
|
splice_url(
|
||||||
|
[
|
||||||
|
i,
|
||||||
|
'getUsernameInfo',
|
||||||
|
f'?{urlencode({"username":self.user_name})}',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
for i in BASE_URL
|
||||||
|
]
|
||||||
|
raw_user_info = await Request.failover_request(url, failover_code=[502], failover_exc=(TimeoutException,))
|
||||||
|
user_info: UserInfo = type_validate_json(UserInfo, raw_user_info) # type: ignore[arg-type]
|
||||||
|
if not isinstance(user_info, UserInfoSuccess):
|
||||||
|
msg = f'用户信息请求错误:\n{user_info.error}'
|
||||||
|
raise RequestError(msg)
|
||||||
|
self._user_info = user_info
|
||||||
|
await anti_duplicate_add(
|
||||||
|
TOSHistoricalData,
|
||||||
|
TOSHistoricalData(
|
||||||
|
user_unique_identifier=(await self.user).unique_identifier,
|
||||||
|
api_type='User Info',
|
||||||
|
data=user_info,
|
||||||
|
update_time=datetime.now(UTC),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return self._user_info
|
||||||
|
|
||||||
|
async def get_profile(self, other_parameter: dict[str, str | bytes] | None = None) -> UserProfile:
|
||||||
|
"""获取用户数据"""
|
||||||
|
if other_parameter is None:
|
||||||
|
other_parameter = {}
|
||||||
|
params = urlencode(dict(sorted(other_parameter.items())))
|
||||||
|
if self._user_profile.get(params) is None:
|
||||||
|
raw_user_profile = await Request.failover_request(
|
||||||
|
[
|
||||||
|
splice_url(
|
||||||
|
[
|
||||||
|
i,
|
||||||
|
'getProfile',
|
||||||
|
f'?{urlencode({"id":self.teaid or self.user_name,**other_parameter})}',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
for i in BASE_URL
|
||||||
|
],
|
||||||
|
failover_code=[502],
|
||||||
|
failover_exc=(TimeoutException,),
|
||||||
|
)
|
||||||
|
self._user_profile[params] = type_validate_json(UserProfile, raw_user_profile)
|
||||||
|
await anti_duplicate_add(
|
||||||
|
TOSHistoricalData,
|
||||||
|
TOSHistoricalData(
|
||||||
|
user_unique_identifier=(await self.user).unique_identifier,
|
||||||
|
api_type='User Profile',
|
||||||
|
data=self._user_profile[params],
|
||||||
|
update_time=datetime.now(UTC),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
return self._user_profile[params]
|
||||||
@@ -0,0 +1,18 @@
|
|||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from typing_extensions import override
|
||||||
|
|
||||||
|
from ....schemas import BaseUser
|
||||||
|
from ...constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
class User(BaseUser):
|
||||||
|
platform: Literal['TOS'] = GAME_TYPE
|
||||||
|
|
||||||
|
teaid: str
|
||||||
|
name: str
|
||||||
|
|
||||||
|
@property
|
||||||
|
@override
|
||||||
|
def unique_identifier(self) -> str:
|
||||||
|
return self.teaid
|
||||||
@@ -0,0 +1,89 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
|
||||||
|
class PeriodMatch(BaseModel):
|
||||||
|
name: str
|
||||||
|
teaid: str = Field(..., alias='teaId')
|
||||||
|
rating: str
|
||||||
|
rd: str
|
||||||
|
start_time: datetime = Field(..., alias='startTime')
|
||||||
|
end_time: datetime = Field(..., alias='endTime')
|
||||||
|
win: str
|
||||||
|
lose: str
|
||||||
|
score: str
|
||||||
|
|
||||||
|
|
||||||
|
class UserDataTotalItem(BaseModel):
|
||||||
|
time_map: str = Field(..., alias='timeMap')
|
||||||
|
pieces_map: str = Field(..., alias='piecesMap')
|
||||||
|
clear_lines_map: str = Field(..., alias='clearLinesMap')
|
||||||
|
attacks_map: str = Field(..., alias='attacksMap')
|
||||||
|
dig_map: str = Field(..., alias='digMap')
|
||||||
|
send_map: str = Field(..., alias='sendMap')
|
||||||
|
rise_map: str = Field(..., alias='riseMap')
|
||||||
|
offset_map: str = Field(..., alias='offsetMap')
|
||||||
|
receive_map: str = Field(..., alias='receiveMap')
|
||||||
|
games_map: str = Field(..., alias='gamesMap')
|
||||||
|
tetris_map: str = Field(..., alias='tetrisMap')
|
||||||
|
combo_map: str = Field(..., alias='comboMap')
|
||||||
|
tspin_map: str = Field(..., alias='tspinMap')
|
||||||
|
b2b_map: str = Field(..., alias='b2bMap')
|
||||||
|
perfect_clear_map: str = Field(..., alias='perfectClearMap')
|
||||||
|
time_no_map: str = Field(..., alias='timeNoMap')
|
||||||
|
pieces_no_map: str = Field(..., alias='piecesNoMap')
|
||||||
|
clear_lines_no_map: str = Field(..., alias='clearLinesNoMap')
|
||||||
|
attacks_no_map: str = Field(..., alias='attacksNoMap')
|
||||||
|
dig_no_map: str = Field(..., alias='digNoMap')
|
||||||
|
send_no_map: str = Field(..., alias='sendNoMap')
|
||||||
|
rise_no_map: str = Field(..., alias='riseNoMap')
|
||||||
|
offset_no_map: str = Field(..., alias='offsetNoMap')
|
||||||
|
receive_no_map: str = Field(..., alias='receiveNoMap')
|
||||||
|
games_no_map: str = Field(..., alias='gamesNoMap')
|
||||||
|
tetris_no_map: str = Field(..., alias='tetrisNoMap')
|
||||||
|
combo_no_map: str = Field(..., alias='comboNoMap')
|
||||||
|
tspin_no_map: str = Field(..., alias='tspinNoMap')
|
||||||
|
b2b_no_map: str = Field(..., alias='b2bNoMap')
|
||||||
|
perfect_clear_no_map: str = Field(..., alias='perfectClearNoMap')
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
teaid: str = Field(..., alias='teaId')
|
||||||
|
name: str
|
||||||
|
total_exp: str = Field(..., alias='totalExp')
|
||||||
|
ranking: str
|
||||||
|
ranked_games: str = Field(..., alias='rankedGames')
|
||||||
|
rating_now: str = Field(..., alias='ratingNow')
|
||||||
|
rd_now: str = Field(..., alias='rdNow')
|
||||||
|
vol_now: str = Field(..., alias='volNow')
|
||||||
|
rating_last: str = Field(..., alias='ratingLast')
|
||||||
|
rd_last: str = Field(..., alias='rdLast')
|
||||||
|
vol_last: str = Field(..., alias='volLast')
|
||||||
|
period_matches: list[PeriodMatch] = Field(..., alias='periodMatches')
|
||||||
|
user_data_total: list[UserDataTotalItem] = Field(..., alias='userDataTotal')
|
||||||
|
ranking_items: str = Field(..., alias='rankingItems')
|
||||||
|
ranking_game_items: str = Field(..., alias='rankingGameItems')
|
||||||
|
training_level: str = Field(..., alias='trainingLevel')
|
||||||
|
training_wins: str = Field(..., alias='trainingWins')
|
||||||
|
pb_sprint: str = Field(..., alias='PBSprint')
|
||||||
|
pb_marathon: str = Field(..., alias='PBMarathon')
|
||||||
|
pb_challenge: str = Field(..., alias='PBChallenge')
|
||||||
|
register_date: datetime = Field(..., alias='registerDate')
|
||||||
|
last_login_date: datetime = Field(..., alias='lastLoginDate')
|
||||||
|
|
||||||
|
|
||||||
|
class UserInfoSuccess(BaseModel):
|
||||||
|
code: int
|
||||||
|
success: Literal[True]
|
||||||
|
data: Data
|
||||||
|
|
||||||
|
|
||||||
|
class FailedModel(BaseModel):
|
||||||
|
code: int
|
||||||
|
success: Literal[False]
|
||||||
|
error: str
|
||||||
|
|
||||||
|
|
||||||
|
UserInfo = UserInfoSuccess | FailedModel
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
idmultiplayergameresult: int
|
||||||
|
iduser: str
|
||||||
|
teaid: str
|
||||||
|
time: int
|
||||||
|
clear_lines: int
|
||||||
|
attack: int
|
||||||
|
send: int
|
||||||
|
offset: int
|
||||||
|
receive: int
|
||||||
|
rise: int
|
||||||
|
dig: int
|
||||||
|
pieces: int
|
||||||
|
max_combo: int
|
||||||
|
pc_count: int
|
||||||
|
place: int
|
||||||
|
num_players: int
|
||||||
|
fumen_code: Literal['0', '1'] # wtf
|
||||||
|
rule_set: str
|
||||||
|
garbage: str
|
||||||
|
idmultiplayergame: int
|
||||||
|
datetime: datetime
|
||||||
|
|
||||||
|
|
||||||
|
class UserProfile(BaseModel):
|
||||||
|
code: int
|
||||||
|
success: bool
|
||||||
|
data: list[Data]
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
from urllib.parse import urlunparse
|
||||||
|
|
||||||
|
from nonebot.adapters import Bot
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_userinfo import BotUserInfo, EventUserInfo, UserInfo # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from ...db import BindStatus, create_or_update_bind, trigger
|
||||||
|
from ...utils.avatar import get_avatar
|
||||||
|
from ...utils.host import HostPage, get_self_netloc
|
||||||
|
from ...utils.platform import get_platform
|
||||||
|
from ...utils.render import Bind, render
|
||||||
|
from ...utils.render.schemas.base import People
|
||||||
|
from ...utils.screenshot import screenshot
|
||||||
|
from . import alc
|
||||||
|
from .api import Player
|
||||||
|
from .constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('bind')
|
||||||
|
async def _(
|
||||||
|
bot: Bot,
|
||||||
|
account: Player,
|
||||||
|
event_session: EventSession,
|
||||||
|
bot_info: UserInfo = BotUserInfo(), # noqa: B008
|
||||||
|
event_user_info: UserInfo = EventUserInfo(), # noqa: B008
|
||||||
|
):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='bind',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
user = await account.user
|
||||||
|
async with get_session() as session:
|
||||||
|
bind_status = await create_or_update_bind(
|
||||||
|
session=session,
|
||||||
|
chat_platform=get_platform(bot),
|
||||||
|
chat_account=event_user_info.user_id,
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
game_account=user.unique_identifier,
|
||||||
|
)
|
||||||
|
user_info = await account.get_info()
|
||||||
|
if bind_status in (BindStatus.SUCCESS, BindStatus.UPDATE):
|
||||||
|
async with HostPage(
|
||||||
|
await render(
|
||||||
|
'binding',
|
||||||
|
Bind(
|
||||||
|
platform=GAME_TYPE,
|
||||||
|
status='unknown',
|
||||||
|
user=People(
|
||||||
|
avatar=await get_avatar(event_user_info, 'Data URI', None), name=user_info.data.name
|
||||||
|
),
|
||||||
|
bot=People(
|
||||||
|
avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),
|
||||||
|
name=bot_info.user_remark or bot_info.user_displayname or bot_info.user_name,
|
||||||
|
),
|
||||||
|
command='茶服查我',
|
||||||
|
),
|
||||||
|
)
|
||||||
|
) as page_hash:
|
||||||
|
await UniMessage.image(
|
||||||
|
raw=await screenshot(urlunparse(('http', get_self_netloc(), f'/host/{page_hash}.html', '', '', '')))
|
||||||
|
).finish()
|
||||||
@@ -1,4 +1,16 @@
|
|||||||
from ...utils.typing import GameType
|
from re import compile
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
GAME_TYPE: GameType = 'TOS'
|
GAME_TYPE: Literal['TOS'] = 'TOS'
|
||||||
BASE_URL = 'https://teatube.cn:8888/'
|
|
||||||
|
BASE_URL = {
|
||||||
|
'https://teatube.cn:8888/',
|
||||||
|
'http://cafuuchino1.studio26f.org:19970',
|
||||||
|
'http://cafuuchino2.studio26f.org:19970',
|
||||||
|
'http://cafuuchino3.studio26f.org:19970',
|
||||||
|
'http://cafuuchino4.studio26f.org:19970',
|
||||||
|
}
|
||||||
|
|
||||||
|
USER_NAME = compile(
|
||||||
|
r'^(?!\.)(?!com[0-9]$)(?!con$)(?!lpt[0-9]$)(?!nul$)(?!prn$)[^\-][^\+][^\|\*\?\\\s\!:<>/$"]*[^\.\|\*\?\\\s\!:<>/$"]+$'
|
||||||
|
)
|
||||||
|
|||||||
@@ -1,221 +0,0 @@
|
|||||||
from dataclasses import dataclass
|
|
||||||
from re import match
|
|
||||||
from typing import Any
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
|
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
from pydantic import parse_raw_as
|
|
||||||
|
|
||||||
from ...db import create_or_update_bind
|
|
||||||
from ...utils.exception import MessageFormatError, RequestError
|
|
||||||
from ...utils.request import Request, splice_url
|
|
||||||
from ...utils.typing import GameType
|
|
||||||
from .. import ProcessedData as ProcessedDataMeta
|
|
||||||
from .. import Processor as ProcessorMeta
|
|
||||||
from .. import RawResponse as RawResponseMeta
|
|
||||||
from .. import User as UserMeta
|
|
||||||
from .constant import BASE_URL, GAME_TYPE
|
|
||||||
from .schemas.user_info import SuccessModel as InfoSuccess
|
|
||||||
from .schemas.user_info import UserInfo
|
|
||||||
from .schemas.user_profile import UserProfile
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class User(UserMeta):
|
|
||||||
teaid: str | None = None
|
|
||||||
name: str | None = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class RawResponse(RawResponseMeta):
|
|
||||||
user_profile: dict[frozenset[tuple[str, Any]], bytes]
|
|
||||||
user_info: bytes | None = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class ProcessedData(ProcessedDataMeta):
|
|
||||||
user_profile: dict[frozenset[tuple[str, Any]], UserProfile]
|
|
||||||
user_info: InfoSuccess | None = None
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
|
||||||
class GameData:
|
|
||||||
num: int
|
|
||||||
pps: float
|
|
||||||
lpm: float
|
|
||||||
apm: float
|
|
||||||
adpm: float
|
|
||||||
apl: float
|
|
||||||
adpl: float
|
|
||||||
vs: float
|
|
||||||
|
|
||||||
|
|
||||||
def identify_user_info(info: str) -> User | MessageFormatError:
|
|
||||||
if (
|
|
||||||
match(
|
|
||||||
r'^(?!\.)(?!com[0-9]$)(?!con$)(?!lpt[0-9]$)(?!nul$)(?!prn$)[^\-][^\+][^\|\*\?\\\s\!:<>/$"]*[^\.\|\*\?\\\s\!:<>/$"]+$',
|
|
||||||
info,
|
|
||||||
)
|
|
||||||
and info.isdigit() is False
|
|
||||||
and 2 <= len(info) <= 18 # noqa: PLR2004
|
|
||||||
):
|
|
||||||
return User(name=info)
|
|
||||||
if info.isdigit():
|
|
||||||
return User(teaid=info)
|
|
||||||
return MessageFormatError('用户名/QQ号不合法')
|
|
||||||
|
|
||||||
|
|
||||||
class Processor(ProcessorMeta):
|
|
||||||
user: User
|
|
||||||
raw_response: RawResponse
|
|
||||||
processed_data: ProcessedData
|
|
||||||
|
|
||||||
def __init__(self, event_id: int, user: User, command_args: list[str]) -> None:
|
|
||||||
super().__init__(event_id, user, command_args)
|
|
||||||
self.raw_response = RawResponse(user_profile={})
|
|
||||||
self.processed_data = ProcessedData(user_profile={})
|
|
||||||
|
|
||||||
@property
|
|
||||||
def game_platform(self) -> GameType:
|
|
||||||
return GAME_TYPE
|
|
||||||
|
|
||||||
async def handle_bind(self, platform: str, account: str) -> str:
|
|
||||||
"""处理绑定消息"""
|
|
||||||
self.command_type = 'bind'
|
|
||||||
await self.get_user()
|
|
||||||
if self.user.name is None:
|
|
||||||
raise # FIXME: 不知道怎么才能把这类型给变过来了
|
|
||||||
async with get_session() as session:
|
|
||||||
return await create_or_update_bind(
|
|
||||||
session=session,
|
|
||||||
chat_platform=platform,
|
|
||||||
chat_account=account,
|
|
||||||
game_platform=GAME_TYPE,
|
|
||||||
game_account=self.user.name,
|
|
||||||
)
|
|
||||||
|
|
||||||
async def handle_query(self) -> str:
|
|
||||||
"""处理查询消息"""
|
|
||||||
self.command_type = 'query'
|
|
||||||
await self.get_user()
|
|
||||||
return await self.generate_message()
|
|
||||||
|
|
||||||
async def get_user(self) -> None:
|
|
||||||
"""
|
|
||||||
用于获取 UserName 和 UserID 的函数
|
|
||||||
"""
|
|
||||||
if self.user.name is None:
|
|
||||||
self.user.name = (await self.get_user_info()).data.name
|
|
||||||
if self.user.teaid is None:
|
|
||||||
self.user.teaid = (await self.get_user_info()).data.teaid
|
|
||||||
|
|
||||||
async def get_user_info(self) -> InfoSuccess:
|
|
||||||
"""获取用户信息"""
|
|
||||||
if self.processed_data.user_info is None:
|
|
||||||
if self.user.teaid is not None:
|
|
||||||
url = splice_url(
|
|
||||||
[
|
|
||||||
BASE_URL,
|
|
||||||
'getTeaIdInfo',
|
|
||||||
f'?{urlencode({"teaId":self.user.teaid})}',
|
|
||||||
]
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
url = splice_url(
|
|
||||||
[
|
|
||||||
BASE_URL,
|
|
||||||
'getUsernameInfo',
|
|
||||||
f'?{urlencode({"username":self.user.name})}',
|
|
||||||
]
|
|
||||||
)
|
|
||||||
self.raw_response.user_info = await Request.request(url)
|
|
||||||
user_info: UserInfo = parse_raw_as(UserInfo, self.raw_response.user_info) # type: ignore[arg-type]
|
|
||||||
if not isinstance(user_info, InfoSuccess):
|
|
||||||
raise RequestError(f'用户信息请求错误:\n{user_info.error}')
|
|
||||||
self.processed_data.user_info = user_info
|
|
||||||
return self.processed_data.user_info
|
|
||||||
|
|
||||||
async def get_user_profile(self, other_parameter: dict[str, Any] | None = None) -> UserProfile:
|
|
||||||
"""获取用户数据"""
|
|
||||||
if other_parameter is None:
|
|
||||||
other_parameter = {}
|
|
||||||
fset = frozenset(other_parameter.items())
|
|
||||||
if self.processed_data.user_profile.get(fset) is None:
|
|
||||||
self.raw_response.user_profile[fset] = await Request.request(
|
|
||||||
splice_url(
|
|
||||||
[
|
|
||||||
BASE_URL,
|
|
||||||
'getProfile',
|
|
||||||
f'?{urlencode({"id":self.user.teaid or self.user.name},**other_parameter)}',
|
|
||||||
]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
self.processed_data.user_profile[fset] = UserProfile.parse_raw(self.raw_response.user_profile[fset])
|
|
||||||
return self.processed_data.user_profile[fset]
|
|
||||||
|
|
||||||
async def get_game_data(self) -> GameData | None:
|
|
||||||
"""获取游戏数据"""
|
|
||||||
user_profile = await self.get_user_profile()
|
|
||||||
if user_profile.data == []:
|
|
||||||
return None
|
|
||||||
weighted_total_lpm = weighted_total_apm = weighted_total_adpm = 0.0
|
|
||||||
total_time = 0.0
|
|
||||||
num = 0
|
|
||||||
for i in user_profile.data:
|
|
||||||
# 排除单人局和时间为0的游戏
|
|
||||||
# 茶: 不计算没挖掘的局, 即使apm和lpm也如此
|
|
||||||
if i.num_players == 1 or i.time == 0 or i.dig is None:
|
|
||||||
continue
|
|
||||||
# 加权计算
|
|
||||||
time = i.time / 1000
|
|
||||||
lpm = 24 * (i.pieces / time)
|
|
||||||
apm = (i.attack / time) * 60
|
|
||||||
adpm = ((i.attack + i.dig) / time) * 60
|
|
||||||
weighted_total_lpm += lpm * time
|
|
||||||
weighted_total_apm += apm * time
|
|
||||||
weighted_total_adpm += adpm * time
|
|
||||||
total_time += time
|
|
||||||
num += 1
|
|
||||||
if num == 50: # noqa: PLR2004 # TODO: 将查询局数作为可选命令参数
|
|
||||||
break
|
|
||||||
if num == 0:
|
|
||||||
return None
|
|
||||||
# TODO: 如果有效局数不满50, 没有无dig信息的局, 且userData['data']内有50个局, 则继续往前获取信息
|
|
||||||
lpm = weighted_total_lpm / total_time
|
|
||||||
apm = weighted_total_apm / total_time
|
|
||||||
adpm = weighted_total_adpm / total_time
|
|
||||||
return GameData(
|
|
||||||
num=num,
|
|
||||||
pps=round(lpm / 24, 2),
|
|
||||||
lpm=round(lpm, 2),
|
|
||||||
apm=round(apm, 2),
|
|
||||||
adpm=round(adpm, 2),
|
|
||||||
apl=round((apm / lpm), 2),
|
|
||||||
adpl=round((adpm / lpm), 2),
|
|
||||||
vs=round((adpm / 60 * 100), 2),
|
|
||||||
)
|
|
||||||
|
|
||||||
async def generate_message(self) -> str:
|
|
||||||
"""生成消息"""
|
|
||||||
user_info = (await self.get_user_info()).data
|
|
||||||
message = f'用户 {user_info.name} ({user_info.teaid}) '
|
|
||||||
if user_info.ranked_games == '0':
|
|
||||||
message += '暂无段位统计数据'
|
|
||||||
else:
|
|
||||||
message += f', 段位分 {round(float(user_info.rating_now),2)}±{round(float(user_info.rd_now),2)} ({round(float(user_info.vol_now),2)}) '
|
|
||||||
game_data = await self.get_game_data()
|
|
||||||
if game_data is None:
|
|
||||||
message += ', 暂无游戏数据'
|
|
||||||
else:
|
|
||||||
message += f', 最近 {game_data.num} 局数据'
|
|
||||||
message += f"\nL'PM: {game_data.lpm} ( {game_data.pps} pps )"
|
|
||||||
message += f'\nAPM: {game_data.apm} ( x{game_data.apl} )'
|
|
||||||
message += f'\nADPM: {game_data.adpm} ( x{game_data.adpl} ) ( {game_data.vs}vs )'
|
|
||||||
message += (
|
|
||||||
f'\n40L: {float(user_info.pb_sprint)/1000:.2f}s'
|
|
||||||
if user_info.pb_sprint != 2147483647 # noqa: PLR2004
|
|
||||||
else ''
|
|
||||||
)
|
|
||||||
message += f'\nMarathon: {user_info.pb_marathon}' if user_info.pb_marathon != 0 else ''
|
|
||||||
message += f'\nChallenge: {user_info.pb_challenge}' if user_info.pb_challenge != 0 else ''
|
|
||||||
return message
|
|
||||||
@@ -0,0 +1,170 @@
|
|||||||
|
from asyncio import gather
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from nonebot.adapters import Bot, Event
|
||||||
|
from nonebot.matcher import Matcher
|
||||||
|
from nonebot_plugin_alconna import At
|
||||||
|
from nonebot_plugin_alconna.uniseg import UniMessage
|
||||||
|
from nonebot_plugin_orm import get_session
|
||||||
|
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
|
||||||
|
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from ...db import query_bind_info, trigger
|
||||||
|
from ...utils.metrics import TetrisMetricsProWithLPMADPM, get_metrics
|
||||||
|
from ...utils.platform import get_platform
|
||||||
|
from ...utils.typing import Me
|
||||||
|
from ..constant import CANT_VERIFY_MESSAGE
|
||||||
|
from . import alc
|
||||||
|
from .api import Player
|
||||||
|
from .constant import GAME_TYPE
|
||||||
|
|
||||||
|
|
||||||
|
def add_special_handlers(
|
||||||
|
teaid_prefix: Literal['onebot-', 'kook-', 'discord-', 'qqguild-'], match_event: type[Event]
|
||||||
|
) -> None:
|
||||||
|
@alc.assign('query')
|
||||||
|
async def _(event: Event, target: At | Me, event_session: EventSession):
|
||||||
|
if isinstance(event, match_event):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='query',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
await (
|
||||||
|
await make_query_text(
|
||||||
|
Player(
|
||||||
|
teaid=f'{teaid_prefix}{target.target}'
|
||||||
|
if isinstance(target, At)
|
||||||
|
else f'{teaid_prefix}{event.get_user_id()}',
|
||||||
|
trust=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
).finish()
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
from nonebot.adapters.onebot.v11 import MessageEvent as OB11MessageEvent
|
||||||
|
|
||||||
|
add_special_handlers('onebot-', OB11MessageEvent)
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
from nonebot.adapters.qq.event import GuildMessageEvent as QQGuildMessageEvent
|
||||||
|
from nonebot.adapters.qq.event import QQMessageEvent
|
||||||
|
|
||||||
|
add_special_handlers('qqguild-', QQGuildMessageEvent)
|
||||||
|
add_special_handlers('onebot-', QQMessageEvent)
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
from nonebot.adapters.kaiheila.event import MessageEvent as KookMessageEvent
|
||||||
|
|
||||||
|
add_special_handlers('kook-', KookMessageEvent)
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
from nonebot.adapters.discord import MessageEvent as DiscordMessageEvent
|
||||||
|
|
||||||
|
add_special_handlers('discord-', DiscordMessageEvent)
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('query')
|
||||||
|
async def _(bot: Bot, event: Event, matcher: Matcher, target: At | Me, event_session: EventSession):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='query',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
async with get_session() as session:
|
||||||
|
bind = await query_bind_info(
|
||||||
|
session=session,
|
||||||
|
chat_platform=get_platform(bot),
|
||||||
|
chat_account=(target.target if isinstance(target, At) else event.get_user_id()),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
)
|
||||||
|
if bind is None:
|
||||||
|
await matcher.finish('未查询到绑定信息')
|
||||||
|
message = CANT_VERIFY_MESSAGE
|
||||||
|
await (message + await make_query_text(Player(teaid=bind.game_account, trust=True))).finish()
|
||||||
|
|
||||||
|
|
||||||
|
@alc.assign('query')
|
||||||
|
async def _(account: Player, event_session: EventSession):
|
||||||
|
async with trigger(
|
||||||
|
session_persist_id=await get_session_persist_id(event_session),
|
||||||
|
game_platform=GAME_TYPE,
|
||||||
|
command_type='query',
|
||||||
|
command_args=[],
|
||||||
|
):
|
||||||
|
await (await make_query_text(account)).finish()
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class GameData:
|
||||||
|
game_num: int
|
||||||
|
metrics: TetrisMetricsProWithLPMADPM
|
||||||
|
|
||||||
|
|
||||||
|
async def get_game_data(player: Player, query_num: int = 50) -> GameData | None:
|
||||||
|
"""获取游戏数据"""
|
||||||
|
user_profile = await player.get_profile()
|
||||||
|
if user_profile.data == []:
|
||||||
|
return None
|
||||||
|
weighted_total_lpm = weighted_total_apm = weighted_total_adpm = total_time = 0.0
|
||||||
|
num = 0
|
||||||
|
for i in user_profile.data:
|
||||||
|
# 排除单人局和时间为0的游戏
|
||||||
|
# 茶: 不计算没挖掘的局, 即使apm和lpm也如此
|
||||||
|
if i.num_players == 1 or i.time == 0 or i.dig is None:
|
||||||
|
continue
|
||||||
|
# 加权计算
|
||||||
|
time = i.time / 1000
|
||||||
|
lpm = 24 * (i.pieces / time)
|
||||||
|
apm = (i.attack / time) * 60
|
||||||
|
adpm = ((i.attack + i.dig) / time) * 60
|
||||||
|
weighted_total_lpm += lpm * time
|
||||||
|
weighted_total_apm += apm * time
|
||||||
|
weighted_total_adpm += adpm * time
|
||||||
|
total_time += time
|
||||||
|
num += 1
|
||||||
|
if num >= query_num:
|
||||||
|
break
|
||||||
|
if num == 0:
|
||||||
|
return None
|
||||||
|
# TODO: 如果有效局数小于 {查询数} , 并且没有无dig信息的局, 且 user_profile.data 内有{请求数}个局, 则继续往前获取信息
|
||||||
|
metrics = get_metrics(
|
||||||
|
lpm=weighted_total_lpm / total_time, apm=weighted_total_apm / total_time, adpm=weighted_total_adpm / total_time
|
||||||
|
)
|
||||||
|
lpm = weighted_total_lpm / total_time
|
||||||
|
apm = weighted_total_apm / total_time
|
||||||
|
adpm = weighted_total_adpm / total_time
|
||||||
|
return GameData(game_num=num, metrics=metrics)
|
||||||
|
|
||||||
|
|
||||||
|
async def make_query_text(player: Player) -> UniMessage:
|
||||||
|
user_info, game_data = await gather(player.get_info(), get_game_data(player))
|
||||||
|
user_data = user_info.data
|
||||||
|
message = f'用户 {user_data.name} ({user_data.teaid}) '
|
||||||
|
if user_data.ranked_games == '0':
|
||||||
|
message += '暂无段位统计数据'
|
||||||
|
else:
|
||||||
|
message += f', 段位分 {round(float(user_data.rating_now),2)}±{round(float(user_data.rd_now),2)} ({round(float(user_data.vol_now),2)}) '
|
||||||
|
if game_data is None:
|
||||||
|
message += ', 暂无游戏数据'
|
||||||
|
else:
|
||||||
|
message += f', 最近 {game_data.game_num} 局数据'
|
||||||
|
message += f"\nL'PM: {game_data.metrics.lpm} ( {game_data.metrics.pps} pps )"
|
||||||
|
message += f'\nAPM: {game_data.metrics.apm} ( x{game_data.metrics.apl} )'
|
||||||
|
message += f'\nADPM: {game_data.metrics.adpm} ( x{game_data.metrics.adpl} ) ( {game_data.metrics.vs}vs )'
|
||||||
|
message += f'\n40L: {float(user_data.pb_sprint)/1000:.2f}s' if user_data.pb_sprint != '2147483647' else ''
|
||||||
|
message += f'\nMarathon: {user_data.pb_marathon}' if user_data.pb_marathon != '0' else ''
|
||||||
|
message += f'\nChallenge: {user_data.pb_challenge}' if user_data.pb_challenge != '0' else ''
|
||||||
|
return UniMessage(message)
|
||||||
@@ -1,86 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
|
|
||||||
class SuccessModel(BaseModel):
|
|
||||||
class Data(BaseModel):
|
|
||||||
class PeriodMatch(BaseModel):
|
|
||||||
name: str
|
|
||||||
teaid: str = Field(..., alias='teaId')
|
|
||||||
rating: str
|
|
||||||
rd: str
|
|
||||||
start_time: datetime = Field(..., alias='startTime')
|
|
||||||
end_time: datetime = Field(..., alias='endTime')
|
|
||||||
win: str
|
|
||||||
lose: str
|
|
||||||
score: str
|
|
||||||
|
|
||||||
class UserDataTotalItem(BaseModel):
|
|
||||||
time_map: str = Field(..., alias='timeMap')
|
|
||||||
pieces_map: str = Field(..., alias='piecesMap')
|
|
||||||
clear_lines_map: str = Field(..., alias='clearLinesMap')
|
|
||||||
attacks_map: str = Field(..., alias='attacksMap')
|
|
||||||
dig_map: str = Field(..., alias='digMap')
|
|
||||||
send_map: str = Field(..., alias='sendMap')
|
|
||||||
rise_map: str = Field(..., alias='riseMap')
|
|
||||||
offset_map: str = Field(..., alias='offsetMap')
|
|
||||||
receive_map: str = Field(..., alias='receiveMap')
|
|
||||||
games_map: str = Field(..., alias='gamesMap')
|
|
||||||
tetris_map: str = Field(..., alias='tetrisMap')
|
|
||||||
combo_map: str = Field(..., alias='comboMap')
|
|
||||||
tspin_map: str = Field(..., alias='tspinMap')
|
|
||||||
b2b_map: str = Field(..., alias='b2bMap')
|
|
||||||
perfect_clear_map: str = Field(..., alias='perfectClearMap')
|
|
||||||
time_no_map: str = Field(..., alias='timeNoMap')
|
|
||||||
pieces_no_map: str = Field(..., alias='piecesNoMap')
|
|
||||||
clear_lines_no_map: str = Field(..., alias='clearLinesNoMap')
|
|
||||||
attacks_no_map: str = Field(..., alias='attacksNoMap')
|
|
||||||
dig_no_map: str = Field(..., alias='digNoMap')
|
|
||||||
send_no_map: str = Field(..., alias='sendNoMap')
|
|
||||||
rise_no_map: str = Field(..., alias='riseNoMap')
|
|
||||||
offset_no_map: str = Field(..., alias='offsetNoMap')
|
|
||||||
receive_no_map: str = Field(..., alias='receiveNoMap')
|
|
||||||
games_no_map: str = Field(..., alias='gamesNoMap')
|
|
||||||
tetris_no_map: str = Field(..., alias='tetrisNoMap')
|
|
||||||
combo_no_map: str = Field(..., alias='comboNoMap')
|
|
||||||
tspin_no_map: str = Field(..., alias='tspinNoMap')
|
|
||||||
b2b_no_map: str = Field(..., alias='b2bNoMap')
|
|
||||||
perfect_clear_no_map: str = Field(..., alias='perfectClearNoMap')
|
|
||||||
|
|
||||||
teaid: str = Field(..., alias='teaId')
|
|
||||||
name: str
|
|
||||||
total_exp: str = Field(..., alias='totalExp')
|
|
||||||
ranking: str
|
|
||||||
ranked_games: str = Field(..., alias='rankedGames')
|
|
||||||
rating_now: str = Field(..., alias='ratingNow')
|
|
||||||
rd_now: str = Field(..., alias='rdNow')
|
|
||||||
vol_now: str = Field(..., alias='volNow')
|
|
||||||
rating_last: str = Field(..., alias='ratingLast')
|
|
||||||
rd_last: str = Field(..., alias='rdLast')
|
|
||||||
vol_last: str = Field(..., alias='volLast')
|
|
||||||
period_matches: list[PeriodMatch] = Field(..., alias='periodMatches')
|
|
||||||
user_data_total: list[UserDataTotalItem] = Field(..., alias='userDataTotal')
|
|
||||||
ranking_items: str = Field(..., alias='rankingItems')
|
|
||||||
ranking_game_items: str = Field(..., alias='rankingGameItems')
|
|
||||||
training_level: str = Field(..., alias='trainingLevel')
|
|
||||||
training_wins: str = Field(..., alias='trainingWins')
|
|
||||||
pb_sprint: str = Field(..., alias='PBSprint')
|
|
||||||
pb_marathon: str = Field(..., alias='PBMarathon')
|
|
||||||
pb_challenge: str = Field(..., alias='PBChallenge')
|
|
||||||
register_date: datetime = Field(..., alias='registerDate')
|
|
||||||
last_login_date: datetime = Field(..., alias='lastLoginDate')
|
|
||||||
|
|
||||||
code: int
|
|
||||||
success: Literal[True]
|
|
||||||
data: Data
|
|
||||||
|
|
||||||
|
|
||||||
class FailedModel(BaseModel):
|
|
||||||
code: int
|
|
||||||
success: Literal[False]
|
|
||||||
error: str
|
|
||||||
|
|
||||||
|
|
||||||
UserInfo = SuccessModel | FailedModel
|
|
||||||
@@ -1,33 +0,0 @@
|
|||||||
from datetime import datetime
|
|
||||||
from typing import Literal
|
|
||||||
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
|
|
||||||
class UserProfile(BaseModel):
|
|
||||||
class Data(BaseModel):
|
|
||||||
idmultiplayergameresult: int
|
|
||||||
iduser: str
|
|
||||||
teaid: str
|
|
||||||
time: int
|
|
||||||
clear_lines: int
|
|
||||||
attack: int
|
|
||||||
send: int
|
|
||||||
offset: int
|
|
||||||
receive: int
|
|
||||||
rise: int
|
|
||||||
dig: int
|
|
||||||
pieces: int
|
|
||||||
max_combo: int
|
|
||||||
pc_count: int
|
|
||||||
place: int
|
|
||||||
num_players: int
|
|
||||||
fumen_code: Literal['0', '1'] # wtf
|
|
||||||
rule_set: str
|
|
||||||
garbage: str
|
|
||||||
idmultiplayergame: int
|
|
||||||
datetime: datetime
|
|
||||||
|
|
||||||
code: int
|
|
||||||
success: bool
|
|
||||||
data: list[Data]
|
|
||||||
54
nonebot_plugin_tetris_stats/utils/avatar.py
Normal file
54
nonebot_plugin_tetris_stats/utils/avatar.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
from base64 import b64encode
|
||||||
|
from io import BytesIO
|
||||||
|
from typing import Literal, overload
|
||||||
|
|
||||||
|
from nonebot_plugin_userinfo import UserInfo # type: ignore[import-untyped]
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def get_avatar(user: UserInfo, scheme: Literal['Data URI'], default: str | None) -> str:
|
||||||
|
"""获取用户头像的指定格式
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user (UserInfo): 要获取的用户
|
||||||
|
scheme (Literal['Data URI']): 格式
|
||||||
|
default (str | None): 获取不到时的默认值
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
TypeError: Can't get avatar: 当获取不到头像并且没有设置默认值时抛出
|
||||||
|
TypeError: Can't get avatar format: 当获取到的头像无法识别格式时抛出
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Data URI 格式的头像
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def get_avatar(user: UserInfo, scheme: Literal['bytes'], default: str | None) -> bytes:
|
||||||
|
"""获取用户头像的指定格式
|
||||||
|
|
||||||
|
Args:
|
||||||
|
user (UserInfo): 要获取的用户
|
||||||
|
scheme (Literal['bytes']): 格式
|
||||||
|
default (str | None): 获取不到时的默认值
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bytes: bytes 格式的头像
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
async def get_avatar(user: UserInfo, scheme: Literal['Data URI', 'bytes'], default: str | None) -> str | bytes:
|
||||||
|
if user.user_avatar is None:
|
||||||
|
if default is None:
|
||||||
|
msg = "Can't get avatar"
|
||||||
|
raise TypeError(msg)
|
||||||
|
return default
|
||||||
|
bot_avatar = await user.user_avatar.get_image()
|
||||||
|
if scheme == 'Data URI':
|
||||||
|
avatar_format = Image.open(BytesIO(bot_avatar)).format
|
||||||
|
if avatar_format is None:
|
||||||
|
msg = "Can't get avatar format"
|
||||||
|
raise TypeError(msg)
|
||||||
|
return f'data:{Image.MIME[avatar_format]};base64,{b64encode(bot_avatar).decode()}'
|
||||||
|
return bot_avatar
|
||||||
@@ -15,12 +15,12 @@ global_config = driver.config
|
|||||||
|
|
||||||
@driver.on_startup
|
@driver.on_startup
|
||||||
async def _():
|
async def _():
|
||||||
await BrowserManager._init_playwright()
|
await BrowserManager.init_playwright()
|
||||||
|
|
||||||
|
|
||||||
@driver.on_shutdown
|
@driver.on_shutdown
|
||||||
async def _():
|
async def _():
|
||||||
await BrowserManager._close_browser()
|
await BrowserManager.close_browser()
|
||||||
|
|
||||||
|
|
||||||
class BrowserManager:
|
class BrowserManager:
|
||||||
@@ -29,9 +29,10 @@ class BrowserManager:
|
|||||||
_browser: Browser | None = None
|
_browser: Browser | None = None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def _init_playwright(cls) -> None:
|
async def init_playwright(cls) -> None:
|
||||||
if system() == 'Windows' and getattr(global_config, 'fastapi_reload', False):
|
if system() == 'Windows' and getattr(global_config, 'fastapi_reload', False):
|
||||||
raise ImportError('加载失败, Windows 必须设置 FASTAPI_RELOAD=false 才能正常运行 playwright')
|
msg = '加载失败, Windows 必须设置 FASTAPI_RELOAD=false 才能正常运行 playwright'
|
||||||
|
raise ImportError(msg)
|
||||||
logger.info('开始 安装/更新 playwright 浏览器')
|
logger.info('开始 安装/更新 playwright 浏览器')
|
||||||
environ['PLAYWRIGHT_DOWNLOAD_HOST'] = 'https://npmmirror.com/mirrors/playwright/'
|
environ['PLAYWRIGHT_DOWNLOAD_HOST'] = 'https://npmmirror.com/mirrors/playwright/'
|
||||||
if cls._call_playwright(['', 'install', 'firefox']):
|
if cls._call_playwright(['', 'install', 'firefox']):
|
||||||
@@ -45,10 +46,9 @@ class BrowserManager:
|
|||||||
logger.error('安装/更新 playwright 浏览器失败')
|
logger.error('安装/更新 playwright 浏览器失败')
|
||||||
try:
|
try:
|
||||||
await cls._start_browser()
|
await cls._start_browser()
|
||||||
except BaseException as e: # noqa: BLE001 不知道会有什么异常, 交给用户解决
|
except BaseException as e: # 不知道会有什么异常, 交给用户解决
|
||||||
raise ImportError(
|
msg = 'playwright 启动失败, 请尝试在命令行运行 playwright install-deps firefox, 如果仍然启动失败, 请参考上面的报错👆'
|
||||||
'playwright 启动失败, 请尝试在命令行运行 playwright install-deps firefox, 如果仍然启动失败, 请参考上面的报错👆'
|
raise ImportError(msg) from e
|
||||||
) from e
|
|
||||||
else:
|
else:
|
||||||
logger.success('playwright 启动成功')
|
logger.success('playwright 启动成功')
|
||||||
|
|
||||||
@@ -81,7 +81,7 @@ class BrowserManager:
|
|||||||
return cls._browser or await cls._start_browser()
|
return cls._browser or await cls._start_browser()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def _close_browser(cls) -> None:
|
async def close_browser(cls) -> None:
|
||||||
"""关闭浏览器实例"""
|
"""关闭浏览器实例"""
|
||||||
if isinstance(cls._browser, Browser):
|
if isinstance(cls._browser, Browser):
|
||||||
await cls._browser.close()
|
await cls._browser.close()
|
||||||
|
|||||||
@@ -15,21 +15,25 @@ class NeedCatchError(TetrisStatsError):
|
|||||||
"""需要被捕获的异常基类"""
|
"""需要被捕获的异常基类"""
|
||||||
|
|
||||||
|
|
||||||
class DoNotCatchError(TetrisStatsError):
|
|
||||||
"""不应该被捕获的异常基类"""
|
|
||||||
|
|
||||||
|
|
||||||
class RequestError(NeedCatchError):
|
class RequestError(NeedCatchError):
|
||||||
"""请求错误"""
|
"""请求错误"""
|
||||||
|
|
||||||
|
def __init__(self, message: str = '', *, status_code: int | None = None):
|
||||||
|
super().__init__(message)
|
||||||
|
self.status_code = status_code
|
||||||
|
|
||||||
|
|
||||||
class MessageFormatError(NeedCatchError):
|
class MessageFormatError(NeedCatchError):
|
||||||
"""用户发送的消息格式不正确"""
|
"""用户发送的消息格式不正确"""
|
||||||
|
|
||||||
|
|
||||||
class DatabaseVersionError(DoNotCatchError):
|
class DoNotCatchError(TetrisStatsError):
|
||||||
"""数据库版本错误"""
|
"""不应该被捕获的异常基类"""
|
||||||
|
|
||||||
|
|
||||||
class WhatTheFuckError(DoNotCatchError):
|
class WhatTheFuckError(DoNotCatchError):
|
||||||
"""用于表示不应该出现的情况 ("""
|
"""用于表示不应该出现的情况 ("""
|
||||||
|
|
||||||
|
|
||||||
|
class HandleNotFinishedError(DoNotCatchError):
|
||||||
|
"""任务没有正常完成处理的错误"""
|
||||||
|
|||||||
73
nonebot_plugin_tetris_stats/utils/host.py
Normal file
73
nonebot_plugin_tetris_stats/utils/host.py
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
from hashlib import sha256
|
||||||
|
from ipaddress import IPv4Address, IPv6Address
|
||||||
|
from typing import TYPE_CHECKING, ClassVar
|
||||||
|
|
||||||
|
from fastapi import FastAPI, status
|
||||||
|
from fastapi.responses import HTMLResponse
|
||||||
|
from fastapi.staticfiles import StaticFiles
|
||||||
|
from nonebot import get_app, get_driver
|
||||||
|
from nonebot.log import logger
|
||||||
|
from nonebot_plugin_localstore import get_cache_dir # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
from .templates import templates_dir
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from pydantic import IPvAnyAddress
|
||||||
|
|
||||||
|
app = get_app()
|
||||||
|
|
||||||
|
driver = get_driver()
|
||||||
|
|
||||||
|
global_config = driver.config
|
||||||
|
|
||||||
|
cache_dir = get_cache_dir('nonebot_plugin_tetris_stats')
|
||||||
|
|
||||||
|
if not isinstance(app, FastAPI):
|
||||||
|
msg = '本插件需要 FastAPI 驱动器才能运行'
|
||||||
|
raise RuntimeError(msg) # noqa: TRY004
|
||||||
|
|
||||||
|
NOT_FOUND = HTMLResponse('404 Not Found', status_code=status.HTTP_404_NOT_FOUND)
|
||||||
|
|
||||||
|
|
||||||
|
class HostPage:
|
||||||
|
pages: ClassVar[dict[str, str]] = {}
|
||||||
|
|
||||||
|
def __init__(self, page: str) -> None:
|
||||||
|
self.page_hash = sha256(page.encode()).hexdigest()
|
||||||
|
self.pages[self.page_hash] = page
|
||||||
|
|
||||||
|
async def __aenter__(self) -> str:
|
||||||
|
return self.page_hash
|
||||||
|
|
||||||
|
async def __aexit__(self, exc_type, exc, tb) -> None: # noqa: ANN001
|
||||||
|
self.pages.pop(self.page_hash, None)
|
||||||
|
|
||||||
|
|
||||||
|
@driver.on_startup
|
||||||
|
def _():
|
||||||
|
app.mount(
|
||||||
|
'/host/assets',
|
||||||
|
StaticFiles(directory=templates_dir / 'assets'),
|
||||||
|
name='assets',
|
||||||
|
)
|
||||||
|
logger.success('assets mounted')
|
||||||
|
|
||||||
|
|
||||||
|
@app.get('/host/{page_hash}.html', status_code=status.HTTP_200_OK)
|
||||||
|
async def _(page_hash: str) -> HTMLResponse:
|
||||||
|
if page_hash in HostPage.pages:
|
||||||
|
return HTMLResponse(HostPage.pages[page_hash])
|
||||||
|
return NOT_FOUND
|
||||||
|
|
||||||
|
|
||||||
|
def get_self_netloc() -> str:
|
||||||
|
host: IPv4Address | IPv6Address | IPvAnyAddress = global_config.host
|
||||||
|
if isinstance(host, IPv4Address):
|
||||||
|
if host == IPv4Address('0.0.0.0'): # noqa: S104
|
||||||
|
host = IPv4Address('127.0.0.1')
|
||||||
|
netloc = f'{host}:{global_config.port}'
|
||||||
|
else:
|
||||||
|
if host == IPv6Address('::'):
|
||||||
|
host = IPv6Address('::1')
|
||||||
|
netloc = f'[{host}]:{global_config.port}'
|
||||||
|
return netloc
|
||||||
@@ -144,136 +144,96 @@ class TetrisMetricsProWithLPMADPM(TetrisMetricsBasicWithLPM, TetrisMetricsBaseWi
|
|||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: Number,
|
pps: Number,
|
||||||
lpm: None = None,
|
|
||||||
apm: None = None,
|
|
||||||
vs: None = None,
|
|
||||||
adpm: None = None,
|
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsBaseWithPPS:
|
) -> TetrisMetricsBaseWithPPS: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: None = None,
|
|
||||||
lpm: Number,
|
lpm: Number,
|
||||||
apm: None = None,
|
|
||||||
vs: None = None,
|
|
||||||
adpm: None = None,
|
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsBaseWithLPM:
|
) -> TetrisMetricsBaseWithLPM: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: None = None,
|
|
||||||
lpm: None = None,
|
|
||||||
apm: None = None,
|
|
||||||
vs: Number,
|
vs: Number,
|
||||||
adpm: None = None,
|
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsBaseWithVS:
|
) -> TetrisMetricsBaseWithVS: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: None = None,
|
|
||||||
lpm: None = None,
|
|
||||||
apm: None = None,
|
|
||||||
vs: None = None,
|
|
||||||
adpm: Number,
|
adpm: Number,
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsBaseWithADPM:
|
) -> TetrisMetricsBaseWithADPM: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: Number,
|
pps: Number,
|
||||||
lpm: None = None,
|
|
||||||
apm: Number,
|
apm: Number,
|
||||||
vs: None = None,
|
|
||||||
adpm: None = None,
|
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsBasicWithPPS:
|
) -> TetrisMetricsBasicWithPPS: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: None = None,
|
|
||||||
lpm: Number,
|
lpm: Number,
|
||||||
apm: Number,
|
apm: Number,
|
||||||
vs: None = None,
|
|
||||||
adpm: None = None,
|
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsBasicWithLPM:
|
) -> TetrisMetricsBasicWithLPM: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: Number,
|
pps: Number,
|
||||||
lpm: None = None,
|
|
||||||
apm: Number,
|
apm: Number,
|
||||||
vs: Number,
|
vs: Number,
|
||||||
adpm: None = None,
|
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsProWithPPSVS:
|
) -> TetrisMetricsProWithPPSVS: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: Number,
|
pps: Number,
|
||||||
lpm: None = None,
|
|
||||||
apm: Number,
|
apm: Number,
|
||||||
vs: None = None,
|
|
||||||
adpm: Number,
|
adpm: Number,
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsProWithPPSADPM:
|
) -> TetrisMetricsProWithPPSADPM: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: None = None,
|
|
||||||
lpm: Number,
|
lpm: Number,
|
||||||
apm: Number,
|
apm: Number,
|
||||||
vs: Number,
|
vs: Number,
|
||||||
adpm: None = None,
|
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsProWithLPMVS:
|
) -> TetrisMetricsProWithLPMVS: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
@overload
|
@overload
|
||||||
def get_metrics( # noqa: PLR0913
|
def get_metrics(
|
||||||
*,
|
*,
|
||||||
pps: None = None,
|
|
||||||
lpm: Number,
|
lpm: Number,
|
||||||
apm: Number,
|
apm: Number,
|
||||||
vs: None = None,
|
|
||||||
adpm: Number,
|
adpm: Number,
|
||||||
precision: int = 2,
|
precision: int = 2,
|
||||||
) -> TetrisMetricsProWithLPMADPM:
|
) -> TetrisMetricsProWithLPMADPM: ...
|
||||||
...
|
|
||||||
|
|
||||||
|
|
||||||
def get_metrics( # noqa: PLR0911, PLR0912, PLR0913
|
def get_metrics( # noqa: PLR0911, PLR0912, PLR0913, C901
|
||||||
*,
|
*,
|
||||||
pps: Number | None = None,
|
pps: Number | None = None,
|
||||||
lpm: Number | None = None,
|
lpm: Number | None = None,
|
||||||
|
|||||||
@@ -1,92 +0,0 @@
|
|||||||
from datetime import UTC, datetime
|
|
||||||
from typing import ClassVar
|
|
||||||
|
|
||||||
from nonebot import get_driver, get_plugin
|
|
||||||
from nonebot.adapters import Bot, Event
|
|
||||||
from nonebot.matcher import Matcher
|
|
||||||
from nonebot.message import run_postprocessor, run_preprocessor
|
|
||||||
from nonebot_plugin_orm import get_session
|
|
||||||
|
|
||||||
from ..db.models import HistoricalData
|
|
||||||
|
|
||||||
driver = get_driver()
|
|
||||||
|
|
||||||
|
|
||||||
class Recorder:
|
|
||||||
matchers: ClassVar[set[type[Matcher]]] = set()
|
|
||||||
historical_data: ClassVar[dict[int, tuple[HistoricalData, bool]]] = {}
|
|
||||||
error_event: ClassVar[set[int]] = set()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def create_historical_data(cls, event_id: int, historical_data: HistoricalData) -> None:
|
|
||||||
cls.historical_data[event_id] = (historical_data, False)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def update_historical_data(cls, event_id: int, historical_data: HistoricalData) -> None:
|
|
||||||
if event_id not in cls.historical_data:
|
|
||||||
raise KeyError
|
|
||||||
cls.historical_data[event_id] = (historical_data, True)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get_historical_data(cls, event_id: int) -> HistoricalData:
|
|
||||||
return cls.historical_data[event_id][0]
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
async def save_historical_data(cls, event_id: int) -> None:
|
|
||||||
historical_data, completed = cls.del_historical_data(event_id)
|
|
||||||
if completed:
|
|
||||||
async with get_session() as session:
|
|
||||||
session.add(historical_data)
|
|
||||||
await session.commit()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def del_historical_data(cls, event_id: int) -> tuple[HistoricalData, bool]:
|
|
||||||
return cls.historical_data.pop(event_id)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def add_error_event(cls, event_id: int) -> None:
|
|
||||||
cls.error_event.add(event_id)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def del_error_event(cls, event_id: int) -> None:
|
|
||||||
cls.error_event.remove(event_id)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def is_error_event(cls, event_id: int) -> bool:
|
|
||||||
return event_id in cls.error_event
|
|
||||||
|
|
||||||
|
|
||||||
@driver.on_startup
|
|
||||||
def _():
|
|
||||||
plugin = get_plugin('nonebot_plugin_tetris_stats')
|
|
||||||
if plugin is not None:
|
|
||||||
Recorder.matchers = plugin.matcher
|
|
||||||
else:
|
|
||||||
raise RuntimeError('获取不到自身插件对象')
|
|
||||||
|
|
||||||
|
|
||||||
@run_preprocessor
|
|
||||||
def _(bot: Bot, event: Event, matcher: Matcher):
|
|
||||||
if isinstance(matcher, tuple(Recorder.matchers)):
|
|
||||||
Recorder.create_historical_data(
|
|
||||||
event_id=id(event),
|
|
||||||
historical_data=HistoricalData(
|
|
||||||
trigger_time=datetime.now(tz=UTC),
|
|
||||||
bot_platform=bot.type,
|
|
||||||
bot_account=bot.self_id,
|
|
||||||
source_type=event.get_type(),
|
|
||||||
source_account=event.get_session_id(),
|
|
||||||
message=event.get_message(),
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@run_postprocessor
|
|
||||||
async def _(event: Event, matcher: Matcher, exception: Exception | None):
|
|
||||||
if isinstance(matcher, tuple(Recorder.matchers)):
|
|
||||||
event_id = id(event)
|
|
||||||
if exception is not None:
|
|
||||||
Recorder.add_error_event(event_id)
|
|
||||||
Recorder.del_historical_data(event_id)
|
|
||||||
else:
|
|
||||||
await Recorder.save_historical_data(event_id)
|
|
||||||
29
nonebot_plugin_tetris_stats/utils/render/__init__.py
Normal file
29
nonebot_plugin_tetris_stats/utils/render/__init__.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
from typing import Literal, overload
|
||||||
|
|
||||||
|
from jinja2 import Environment, FileSystemLoader
|
||||||
|
from nonebot.compat import PYDANTIC_V2
|
||||||
|
|
||||||
|
from ..templates import templates_dir
|
||||||
|
from .schemas.bind import Bind
|
||||||
|
from .schemas.tetrio_info import TETRIOInfo
|
||||||
|
|
||||||
|
env = Environment(
|
||||||
|
loader=FileSystemLoader(templates_dir), autoescape=True, trim_blocks=True, lstrip_blocks=True, enable_async=True
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def render(render_type: Literal['binding'], data: Bind) -> str: ...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
async def render(render_type: Literal['tetrio/info'], data: TETRIOInfo) -> str: ...
|
||||||
|
|
||||||
|
|
||||||
|
async def render(render_type: Literal['binding', 'tetrio/info'], data: Bind | TETRIOInfo) -> str:
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
return await env.get_template('index.html').render_async(path=render_type, data=data.model_dump_json())
|
||||||
|
return await env.get_template('index.html').render_async(path=render_type, data=data.json())
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ['render', 'Bind', 'TETRIOInfo']
|
||||||
13
nonebot_plugin_tetris_stats/utils/render/schemas/base.py
Normal file
13
nonebot_plugin_tetris_stats/utils/render/schemas/base.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class Avatar(BaseModel):
|
||||||
|
type: Literal['identicon']
|
||||||
|
hash: str
|
||||||
|
|
||||||
|
|
||||||
|
class People(BaseModel):
|
||||||
|
avatar: str | Avatar
|
||||||
|
name: str
|
||||||
13
nonebot_plugin_tetris_stats/utils/render/schemas/bind.py
Normal file
13
nonebot_plugin_tetris_stats/utils/render/schemas/bind.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from .base import People
|
||||||
|
|
||||||
|
|
||||||
|
class Bind(BaseModel):
|
||||||
|
platform: Literal['TETR.IO', 'TOP', 'TOS']
|
||||||
|
status: Literal['error', 'success', 'unknown', 'unlink', 'unverified']
|
||||||
|
user: People
|
||||||
|
bot: People
|
||||||
|
command: str
|
||||||
@@ -0,0 +1,77 @@
|
|||||||
|
from datetime import datetime
|
||||||
|
from typing import Annotated, ClassVar
|
||||||
|
|
||||||
|
from nonebot.compat import PYDANTIC_V2
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from ....game_data_processor.io_data_processor.api.typing import Rank
|
||||||
|
from ...typing import Number
|
||||||
|
from .base import People
|
||||||
|
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
from pydantic import PlainSerializer
|
||||||
|
|
||||||
|
|
||||||
|
def format_datetime_to_timestamp(dt: datetime) -> int:
|
||||||
|
return int(dt.timestamp() * 1000)
|
||||||
|
|
||||||
|
|
||||||
|
class User(People):
|
||||||
|
bio: str | None
|
||||||
|
|
||||||
|
|
||||||
|
class Ranking(BaseModel):
|
||||||
|
rating: Number
|
||||||
|
rd: Number
|
||||||
|
|
||||||
|
|
||||||
|
class TetraLeague(BaseModel):
|
||||||
|
rank: Rank
|
||||||
|
tr: Number
|
||||||
|
global_rank: Number
|
||||||
|
pps: Number
|
||||||
|
lpm: Number
|
||||||
|
apm: Number
|
||||||
|
apl: Number
|
||||||
|
vs: Number
|
||||||
|
adpm: Number
|
||||||
|
adpl: Number
|
||||||
|
|
||||||
|
|
||||||
|
class Data(BaseModel):
|
||||||
|
if PYDANTIC_V2:
|
||||||
|
record_at: Annotated[datetime, PlainSerializer(format_datetime_to_timestamp, return_type=int)]
|
||||||
|
else:
|
||||||
|
record_at: datetime # type: ignore[no-redef]
|
||||||
|
tr: Number
|
||||||
|
|
||||||
|
|
||||||
|
class TetraLeagueHistory(BaseModel):
|
||||||
|
data: list[Data]
|
||||||
|
split_interval: Number
|
||||||
|
min_tr: Number
|
||||||
|
max_tr: Number
|
||||||
|
offset: Number
|
||||||
|
|
||||||
|
|
||||||
|
class Radar(BaseModel):
|
||||||
|
app: Number
|
||||||
|
dsps: Number
|
||||||
|
dspp: Number
|
||||||
|
ci: Number
|
||||||
|
ge: Number
|
||||||
|
|
||||||
|
|
||||||
|
class TETRIOInfo(BaseModel):
|
||||||
|
user: User
|
||||||
|
ranking: Ranking
|
||||||
|
tetra_league: TetraLeague
|
||||||
|
tetra_league_history: TetraLeagueHistory
|
||||||
|
radar: Radar
|
||||||
|
sprint: str
|
||||||
|
blitz: str
|
||||||
|
|
||||||
|
if not PYDANTIC_V2:
|
||||||
|
|
||||||
|
class Config:
|
||||||
|
json_encoders: ClassVar[dict] = {datetime: format_datetime_to_timestamp}
|
||||||
@@ -1,8 +1,10 @@
|
|||||||
|
from collections.abc import Sequence
|
||||||
|
from http import HTTPStatus
|
||||||
from urllib.parse import urljoin, urlparse
|
from urllib.parse import urljoin, urlparse
|
||||||
|
|
||||||
from aiofiles import open
|
from aiofiles import open
|
||||||
from httpx import AsyncClient, HTTPError
|
from httpx import AsyncClient, HTTPError
|
||||||
from nonebot import get_driver
|
from nonebot import get_driver, get_plugin_config
|
||||||
from nonebot.log import logger
|
from nonebot.log import logger
|
||||||
from playwright.async_api import Response
|
from playwright.async_api import Response
|
||||||
from ujson import JSONDecodeError, dumps, loads
|
from ujson import JSONDecodeError, dumps, loads
|
||||||
@@ -12,18 +14,18 @@ from .browser import BrowserManager
|
|||||||
from .exception import RequestError
|
from .exception import RequestError
|
||||||
|
|
||||||
driver = get_driver()
|
driver = get_driver()
|
||||||
config = Config.parse_obj(driver.config)
|
config = get_plugin_config(Config)
|
||||||
|
|
||||||
|
|
||||||
@driver.on_startup
|
@driver.on_startup
|
||||||
async def _():
|
async def _():
|
||||||
await Request._init_cache()
|
await Request.init_cache()
|
||||||
await Request._read_cache()
|
await Request.read_cache()
|
||||||
|
|
||||||
|
|
||||||
@driver.on_shutdown
|
@driver.on_shutdown
|
||||||
async def _():
|
async def _():
|
||||||
await Request._write_cache()
|
await Request.write_cache()
|
||||||
|
|
||||||
|
|
||||||
def splice_url(url_list: list[str]) -> str:
|
def splice_url(url_list: list[str]) -> str:
|
||||||
@@ -38,7 +40,7 @@ def splice_url(url_list: list[str]) -> str:
|
|||||||
class Request:
|
class Request:
|
||||||
"""网络请求相关类"""
|
"""网络请求相关类"""
|
||||||
|
|
||||||
_CACHE_FILE = CACHE_PATH.joinpath('cloudflare_cache.json')
|
_CACHE_FILE = CACHE_PATH / 'cloudflare_cache.json'
|
||||||
_headers: dict | None = None
|
_headers: dict | None = None
|
||||||
_cookies: dict | None = None
|
_cookies: dict | None = None
|
||||||
|
|
||||||
@@ -46,71 +48,72 @@ class Request:
|
|||||||
async def _anti_cloudflare(cls, url: str) -> bytes:
|
async def _anti_cloudflare(cls, url: str) -> bytes:
|
||||||
"""用firefox硬穿五秒盾"""
|
"""用firefox硬穿五秒盾"""
|
||||||
browser = await BrowserManager.get_browser()
|
browser = await BrowserManager.get_browser()
|
||||||
context = await browser.new_context()
|
async with await browser.new_context() as context, await context.new_page() as page:
|
||||||
page = await context.new_page()
|
response = await page.goto(url)
|
||||||
response = await page.goto(url)
|
attempts = 0
|
||||||
attempts = 0
|
while attempts < 60: # noqa: PLR2004
|
||||||
while attempts < 60: # noqa: PLR2004
|
attempts += 1
|
||||||
attempts += 1
|
text = await page.locator('body').text_content()
|
||||||
text = await page.locator('body').text_content()
|
if text is None:
|
||||||
if text is None:
|
await page.wait_for_timeout(1000)
|
||||||
await page.wait_for_timeout(1000)
|
continue
|
||||||
continue
|
if await page.title() == 'Please Wait... | Cloudflare':
|
||||||
if await page.title() == 'Please Wait... | Cloudflare':
|
logger.warning('疑似触发了 Cloudflare 的验证码')
|
||||||
logger.warning('疑似触发了 Cloudflare 的验证码')
|
break
|
||||||
break
|
|
||||||
try:
|
|
||||||
loads(text)
|
|
||||||
except JSONDecodeError:
|
|
||||||
await page.wait_for_timeout(1000)
|
|
||||||
else:
|
|
||||||
if not isinstance(response, Response):
|
|
||||||
raise RequestError('api请求失败')
|
|
||||||
cls._headers = await response.request.all_headers()
|
|
||||||
try:
|
try:
|
||||||
cls._cookies = {i['name']: i['value'] for i in await context.cookies()}
|
loads(text)
|
||||||
except KeyError:
|
except JSONDecodeError:
|
||||||
cls._cookies = None
|
await page.wait_for_timeout(1000)
|
||||||
await page.close()
|
else:
|
||||||
await context.close()
|
if not isinstance(response, Response):
|
||||||
return await response.body()
|
msg = 'api请求失败'
|
||||||
await page.close()
|
raise RequestError(msg)
|
||||||
await context.close()
|
cls._headers = await response.request.all_headers()
|
||||||
raise RequestError('绕过五秒盾失败')
|
try:
|
||||||
|
cls._cookies = {
|
||||||
|
name: value
|
||||||
|
for i in await context.cookies()
|
||||||
|
if (name := i.get('name')) is not None and (value := i.get('value')) is not None
|
||||||
|
}
|
||||||
|
except KeyError:
|
||||||
|
cls._cookies = None
|
||||||
|
return await response.body()
|
||||||
|
msg = '绕过五秒盾失败'
|
||||||
|
raise RequestError(msg)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def _init_cache(cls) -> None:
|
async def init_cache(cls) -> None:
|
||||||
"""初始化缓存文件"""
|
"""初始化缓存文件"""
|
||||||
if not cls._CACHE_FILE.exists():
|
if not cls._CACHE_FILE.exists():
|
||||||
async with open(file=cls._CACHE_FILE, mode='w', encoding='UTF-8') as file:
|
async with open(file=cls._CACHE_FILE, mode='w', encoding='UTF-8') as file:
|
||||||
await file.write(dumps({'headers': cls._headers, 'cookies': cls._cookies}))
|
await file.write(dumps({'headers': cls._headers, 'cookies': cls._cookies}))
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def _read_cache(cls) -> None:
|
async def read_cache(cls) -> None:
|
||||||
"""读取缓存文件"""
|
"""读取缓存文件"""
|
||||||
try:
|
try:
|
||||||
async with open(file=cls._CACHE_FILE, mode='r', encoding='UTF-8') as file:
|
async with open(file=cls._CACHE_FILE, mode='r', encoding='UTF-8') as file:
|
||||||
json = loads(await file.read())
|
json = loads(await file.read())
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
await cls._init_cache()
|
await cls.init_cache()
|
||||||
except (PermissionError, JSONDecodeError):
|
except (PermissionError, JSONDecodeError):
|
||||||
cls._CACHE_FILE.unlink()
|
cls._CACHE_FILE.unlink()
|
||||||
await cls._init_cache()
|
await cls.init_cache()
|
||||||
else:
|
else:
|
||||||
cls._headers = json['headers']
|
cls._headers = json['headers']
|
||||||
cls._cookies = json['cookies']
|
cls._cookies = json['cookies']
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def _write_cache(cls) -> None:
|
async def write_cache(cls) -> None:
|
||||||
"""写入缓存文件"""
|
"""写入缓存文件"""
|
||||||
try:
|
try:
|
||||||
async with open(file=cls._CACHE_FILE, mode='r+', encoding='UTF-8') as file:
|
async with open(file=cls._CACHE_FILE, mode='r+', encoding='UTF-8') as file:
|
||||||
await file.write(dumps({'headers': cls._headers, 'cookies': cls._cookies}))
|
await file.write(dumps({'headers': cls._headers, 'cookies': cls._cookies}))
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
await cls._init_cache()
|
await cls.init_cache()
|
||||||
except (PermissionError, JSONDecodeError):
|
except (PermissionError, JSONDecodeError):
|
||||||
cls._CACHE_FILE.unlink()
|
cls._CACHE_FILE.unlink()
|
||||||
await cls._init_cache()
|
await cls.init_cache()
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def request(cls, url: str, *, is_json: bool = True) -> bytes:
|
async def request(cls, url: str, *, is_json: bool = True) -> bytes:
|
||||||
@@ -118,12 +121,47 @@ class Request:
|
|||||||
try:
|
try:
|
||||||
async with AsyncClient(cookies=cls._cookies, timeout=config.tetris_req_timeout) as session:
|
async with AsyncClient(cookies=cls._cookies, timeout=config.tetris_req_timeout) as session:
|
||||||
response = await session.get(url, headers=cls._headers)
|
response = await session.get(url, headers=cls._headers)
|
||||||
|
if response.status_code != HTTPStatus.OK:
|
||||||
|
msg = f'请求错误 code: {response.status_code} {HTTPStatus(response.status_code).phrase}\n{response.text}'
|
||||||
|
raise RequestError(msg, status_code=response.status_code)
|
||||||
if is_json:
|
if is_json:
|
||||||
loads(response.content)
|
loads(response.content)
|
||||||
return response.content
|
return response.content
|
||||||
except HTTPError as e:
|
except HTTPError as e:
|
||||||
raise RequestError(f'请求错误\n{e!r}') from e
|
msg = f'请求错误 \n{e!r}'
|
||||||
|
raise RequestError(msg) from e
|
||||||
except JSONDecodeError:
|
except JSONDecodeError:
|
||||||
if urlparse(url).netloc.lower().endswith('tetr.io'):
|
if urlparse(url).netloc.lower().endswith('tetr.io'):
|
||||||
return await cls._anti_cloudflare(url)
|
return await cls._anti_cloudflare(url)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def failover_request(
|
||||||
|
cls,
|
||||||
|
urls: Sequence[str],
|
||||||
|
*,
|
||||||
|
failover_code: Sequence[int],
|
||||||
|
failover_exc: tuple[type[BaseException], ...],
|
||||||
|
is_json: bool = True,
|
||||||
|
) -> bytes:
|
||||||
|
error_list: list[RequestError] = []
|
||||||
|
for i in urls:
|
||||||
|
logger.debug(f'尝试请求 {i}')
|
||||||
|
try:
|
||||||
|
return await cls.request(i, is_json=is_json)
|
||||||
|
except RequestError as e:
|
||||||
|
if e.status_code in failover_code: # 如果状态码在 failover_code 中, 则继续尝试下一个URL
|
||||||
|
error_list.append(e)
|
||||||
|
continue
|
||||||
|
# 如果状态码不在故障转移列表中, 则查找异常栈, 如果异常栈内有 failover_exc 内的异常类型, 则继续尝试下一个URL
|
||||||
|
tb = e.__traceback__
|
||||||
|
while tb is not None:
|
||||||
|
if isinstance(tb.tb_frame.f_locals.get('exc_value'), failover_exc):
|
||||||
|
error_list.append(e)
|
||||||
|
break
|
||||||
|
tb = tb.tb_next
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
continue
|
||||||
|
msg = f'所有地址皆不可用\n{error_list!r}'
|
||||||
|
raise RequestError(msg)
|
||||||
|
|||||||
38
nonebot_plugin_tetris_stats/utils/retry.py
Normal file
38
nonebot_plugin_tetris_stats/utils/retry.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
from asyncio import sleep
|
||||||
|
from collections.abc import Awaitable, Callable
|
||||||
|
from datetime import timedelta
|
||||||
|
from functools import wraps
|
||||||
|
from typing import TypeVar, cast
|
||||||
|
|
||||||
|
from nonebot.log import logger
|
||||||
|
|
||||||
|
T = TypeVar('T')
|
||||||
|
|
||||||
|
|
||||||
|
def retry(
|
||||||
|
max_attempts: int = 3,
|
||||||
|
exception_type: type[BaseException] | tuple[type[BaseException], ...] = Exception,
|
||||||
|
delay: timedelta | None = None,
|
||||||
|
) -> Callable[[Callable[..., Awaitable[T]]], Callable[..., Awaitable[T]]]:
|
||||||
|
def decorator(func: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
|
||||||
|
@wraps(func)
|
||||||
|
async def wrapper(*args, **kwargs) -> T: # noqa: ANN002, ANN003
|
||||||
|
attempts = 0
|
||||||
|
while attempts < max_attempts + 1:
|
||||||
|
try:
|
||||||
|
return await func(*args, **kwargs)
|
||||||
|
except exception_type as e: # noqa: PERF203
|
||||||
|
logger.exception(e)
|
||||||
|
attempts += 1
|
||||||
|
if attempts <= max_attempts:
|
||||||
|
if delay is not None:
|
||||||
|
await sleep(delay.total_seconds())
|
||||||
|
logger.debug(f'Retrying: {func.__name__} ({attempts}/{max_attempts})')
|
||||||
|
continue
|
||||||
|
raise
|
||||||
|
msg = 'Unexpectedly reached the end of the retry loop'
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
|
return cast(Callable[..., Awaitable[T]], wrapper)
|
||||||
|
|
||||||
|
return decorator
|
||||||
11
nonebot_plugin_tetris_stats/utils/screenshot.py
Normal file
11
nonebot_plugin_tetris_stats/utils/screenshot.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
from .browser import BrowserManager
|
||||||
|
|
||||||
|
|
||||||
|
async def screenshot(url: str) -> bytes:
|
||||||
|
browser = await BrowserManager.get_browser()
|
||||||
|
async with (
|
||||||
|
await browser.new_page(no_viewport=True, viewport={'width': 0, 'height': 0}) as page,
|
||||||
|
):
|
||||||
|
await page.goto(url)
|
||||||
|
await page.wait_for_load_state('networkidle')
|
||||||
|
return await page.screenshot(full_page=True, type='png')
|
||||||
71
nonebot_plugin_tetris_stats/utils/templates.py
Normal file
71
nonebot_plugin_tetris_stats/utils/templates.py
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
from asyncio.subprocess import PIPE, create_subprocess_exec
|
||||||
|
from shutil import rmtree
|
||||||
|
|
||||||
|
from nonebot import get_driver
|
||||||
|
from nonebot.log import logger
|
||||||
|
from nonebot.permission import SUPERUSER
|
||||||
|
from nonebot_plugin_alconna import on_alconna
|
||||||
|
from nonebot_plugin_localstore import get_data_dir # type: ignore[import-untyped]
|
||||||
|
|
||||||
|
driver = get_driver()
|
||||||
|
|
||||||
|
templates_dir = get_data_dir('nonebot_plugin_tetris_stats') / 'templates'
|
||||||
|
|
||||||
|
alc = on_alconna('更新模板', permission=SUPERUSER)
|
||||||
|
|
||||||
|
|
||||||
|
@driver.on_startup
|
||||||
|
async def init_templates() -> None:
|
||||||
|
try:
|
||||||
|
await create_subprocess_exec('git', '--version', stdout=PIPE)
|
||||||
|
except FileNotFoundError as e:
|
||||||
|
msg = '未找到 git, 请确保 git 已安装并在环境变量中\n安装步骤请参阅: https://git-scm.com/book/zh/v2/%E8%B5%B7%E6%AD%A5-%E5%AE%89%E8%A3%85-Git'
|
||||||
|
raise RuntimeError(msg) from e
|
||||||
|
if not templates_dir.exists():
|
||||||
|
logger.info('模板仓库不存在, 正在尝试初始化...')
|
||||||
|
proc = await create_subprocess_exec(
|
||||||
|
'git',
|
||||||
|
'clone',
|
||||||
|
'-b',
|
||||||
|
'gh-pages',
|
||||||
|
'https://github.com/A-Minos/tetris-stats-templates',
|
||||||
|
templates_dir,
|
||||||
|
'--depth=1',
|
||||||
|
stdout=PIPE,
|
||||||
|
stderr=PIPE,
|
||||||
|
)
|
||||||
|
stdout, stderr = await proc.communicate()
|
||||||
|
if proc.returncode != 0:
|
||||||
|
for i in stderr.decode().splitlines():
|
||||||
|
logger.error(i)
|
||||||
|
msg = '初始化模板仓库失败'
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
logger.success('模板仓库初始化成功')
|
||||||
|
return
|
||||||
|
proc = await create_subprocess_exec(
|
||||||
|
'git', 'rev-parse', '--is-inside-work-tree', stdout=PIPE, stderr=PIPE, cwd=templates_dir
|
||||||
|
)
|
||||||
|
stdout, stderr = await proc.communicate()
|
||||||
|
if proc.returncode != 0:
|
||||||
|
for i in stderr.decode().splitlines():
|
||||||
|
logger.error(i)
|
||||||
|
logger.warning('模板仓库状态异常, 尝试重新初始化')
|
||||||
|
rmtree(templates_dir)
|
||||||
|
await init_templates()
|
||||||
|
return
|
||||||
|
logger.info('正在更新模板仓库...')
|
||||||
|
proc = await create_subprocess_exec('git', 'pull', stdout=PIPE, stderr=PIPE, cwd=templates_dir)
|
||||||
|
stdout, stderr = await proc.communicate()
|
||||||
|
logger.info(stdout.decode().strip())
|
||||||
|
if proc.returncode != 0:
|
||||||
|
for i in stderr.decode().splitlines():
|
||||||
|
logger.error(i)
|
||||||
|
msg = '更新模板仓库失败'
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
logger.success('模板仓库更新成功')
|
||||||
|
|
||||||
|
|
||||||
|
@alc.handle()
|
||||||
|
async def _():
|
||||||
|
await init_templates()
|
||||||
|
await alc.finish('模板仓库更新成功')
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
from collections.abc import Awaitable, Callable
|
from collections.abc import Awaitable, Callable
|
||||||
from typing import Any, Literal
|
from typing import Any, Literal
|
||||||
|
|
||||||
Number = int | float
|
Number = float | int
|
||||||
GameType = Literal['IO', 'TOP', 'TOS']
|
GameType = Literal['IO', 'TOP', 'TOS']
|
||||||
CommandType = Literal['bind', 'query']
|
CommandType = Literal['bind', 'query']
|
||||||
AsyncCallable = Callable[..., Awaitable[Any]]
|
AsyncCallable = Callable[..., Awaitable[Any]]
|
||||||
|
|||||||
2735
poetry.lock
generated
2735
poetry.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
|||||||
[tool.poetry]
|
[tool.poetry]
|
||||||
name = 'nonebot-plugin-tetris-stats'
|
name = 'nonebot-plugin-tetris-stats'
|
||||||
version = '1.0.0.a5'
|
version = '1.2.8'
|
||||||
description = '一款基于 NoneBot2 的用于查询 Tetris 相关游戏数据的插件'
|
description = '一款基于 NoneBot2 的用于查询 Tetris 相关游戏数据的插件'
|
||||||
authors = ['scdhh <wallfjjd@gmail.com>']
|
authors = ['scdhh <wallfjjd@gmail.com>']
|
||||||
readme = 'README.md'
|
readme = 'README.md'
|
||||||
@@ -10,43 +10,60 @@ license = 'AGPL-3.0'
|
|||||||
|
|
||||||
[tool.poetry.dependencies]
|
[tool.poetry.dependencies]
|
||||||
python = '^3.10'
|
python = '^3.10'
|
||||||
nonebot2 = '^2.0.0-beta.3'
|
nonebot2 = { extras = ["fastapi"], version = "^2.3.0" }
|
||||||
lxml = '^4.9.1'
|
nonebot-plugin-alconna = ">=0.40"
|
||||||
pandas = '>=1.4.3,<3.0.0'
|
nonebot-plugin-apscheduler = "^0.4.0"
|
||||||
playwright = '^1.24.1'
|
nonebot-plugin-localstore = "^0.6.0"
|
||||||
ujson = '^5.4.0'
|
nonebot-plugin-orm = ">=0.1.1,<0.8.0"
|
||||||
|
nonebot-plugin-session = "^0.3.1"
|
||||||
|
nonebot-plugin-session-orm = "^0.2.0"
|
||||||
|
nonebot-plugin-userinfo = "^0.2.4"
|
||||||
|
aiocache = "^0.12.2"
|
||||||
aiofiles = "^23.2.1"
|
aiofiles = "^23.2.1"
|
||||||
nonebot-plugin-orm = ">=0.1.1,<0.6.0"
|
httpx = "^0.27.0"
|
||||||
nonebot-plugin-localstore = "^0.5.1"
|
jinja2 = "^3.1.3"
|
||||||
httpx = "^0.25.0"
|
lxml = '^5.1.0'
|
||||||
nonebot-plugin-alconna = ">=0.30,<0.34"
|
pandas = '>=1.4.3,<3.0.0'
|
||||||
nonebot-plugin-apscheduler = "^0.3.0"
|
pillow = "^10.3.0"
|
||||||
|
playwright = '^1.41.2'
|
||||||
|
rich = "^13.7.1"
|
||||||
|
ujson = '^5.9.0'
|
||||||
|
zstandard = "^0.22.0"
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
[tool.poetry.group.dev.dependencies]
|
||||||
mypy = '>=0.991,<1.8'
|
mypy = '>=1.9'
|
||||||
types-ujson = '^5.7.0'
|
ruff = '>=0.3.0'
|
||||||
|
types-aiofiles = "^23.2.0.20240106"
|
||||||
|
types-lxml = "^2024.2.9"
|
||||||
|
types-pillow = "^10.2.0.20240423"
|
||||||
|
types-ujson = '^5.9.0'
|
||||||
pandas-stubs = '>=1.5.2,<3.0.0'
|
pandas-stubs = '>=1.5.2,<3.0.0'
|
||||||
ruff = '>=0.0.239,<0.1.6'
|
nonebot-plugin-orm = { extras = ["default"], version = ">=0.3,<0.8" }
|
||||||
types-aiofiles = "^23.2.0.0"
|
nonebot-adapter-discord = "^0.1.3"
|
||||||
nonebot2 = { extras = ["fastapi"], version = "^2.1.1" }
|
nonebot-adapter-kaiheila = "^0.3.4"
|
||||||
types-lxml = "^2023.3.28"
|
nonebot-adapter-onebot = "^2.4.1"
|
||||||
nonebot-plugin-orm = { extras = ["default"], version = ">=0.3,<0.6" }
|
nonebot-adapter-qq = "^1.4.4"
|
||||||
nonebot-adapter-onebot = "^2.3.1"
|
nonebot-adapter-satori = "^0.11.4"
|
||||||
nonebot-adapter-satori = "^0.7.0"
|
|
||||||
|
|
||||||
[tool.poetry.group.debug.dependencies]
|
[tool.poetry.group.debug.dependencies]
|
||||||
|
memory-profiler = "^0.61.0"
|
||||||
objprint = '^0.2.2'
|
objprint = '^0.2.2'
|
||||||
viztracer = "^0.16.0"
|
viztracer = "^0.16.2"
|
||||||
|
|
||||||
[build-system]
|
[build-system]
|
||||||
requires = ['poetry-core>=1.0.0']
|
requires = ['poetry-core>=1.0.0']
|
||||||
build-backend = 'poetry.core.masonry.api'
|
build-backend = 'poetry.core.masonry.api'
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
|
line-length = 120
|
||||||
|
target-version = "py310"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
select = [
|
select = [
|
||||||
'F', # pyflakes
|
'F', # pyflakes
|
||||||
'E', # pycodestyle errors
|
'E', # pycodestyle errors
|
||||||
'W', # pycodestyle warnings
|
'W', # pycodestyle warnings
|
||||||
|
'C90', # mccabe
|
||||||
'I', # isort
|
'I', # isort
|
||||||
'N', # PEP8-naming
|
'N', # PEP8-naming
|
||||||
'UP', # pyupgrade
|
'UP', # pyupgrade
|
||||||
@@ -58,22 +75,33 @@ select = [
|
|||||||
'FBT', # flake8-boolean-trap
|
'FBT', # flake8-boolean-trap
|
||||||
'B', # flake8-bugbear
|
'B', # flake8-bugbear
|
||||||
'A', # flake8-builtins
|
'A', # flake8-builtins
|
||||||
|
'COM', # flake8-commas
|
||||||
'C4', # flake8-comprehensions
|
'C4', # flake8-comprehensions
|
||||||
'DTZ', # flake8-datetimez
|
'DTZ', # flake8-datetimez
|
||||||
|
'T10', # flake8-debugger
|
||||||
|
'EM', # flake8-errmsg
|
||||||
'FA', # flake8-future-annotations
|
'FA', # flake8-future-annotations
|
||||||
'ISC', # flake8-implicit-str-concat
|
'ISC', # flake8-implicit-str-concat
|
||||||
|
'ICN', # flake8-import-conventions
|
||||||
'PIE', # flake8-pie
|
'PIE', # flake8-pie
|
||||||
'T20', # flake8-print
|
'T20', # flake8-print
|
||||||
'Q', # flake8-quotes
|
'Q', # flake8-quotes
|
||||||
'RSE', # flake8-raise
|
'RSE', # flake8-raise
|
||||||
'RET', # flake8-return
|
'RET', # flake8-return
|
||||||
|
'SLF', # flake8-self
|
||||||
'SIM', # flake8-simplify
|
'SIM', # flake8-simplify
|
||||||
|
'TID', # flake8-tidy-imports
|
||||||
|
'TCH', # flake8-type-checking
|
||||||
|
'ARG', # flake8-unused-arguments
|
||||||
'PTH', # flake8-use-pathlib
|
'PTH', # flake8-use-pathlib
|
||||||
|
'ERA', # eradicate
|
||||||
'PD', # pandas-vet
|
'PD', # pandas-vet
|
||||||
|
'PGH', # pygrep-hooks
|
||||||
'PL', # pylint
|
'PL', # pylint
|
||||||
'TRY', # tryceratops
|
'TRY', # tryceratops
|
||||||
'FLY', # flynt
|
'FLY', # flynt
|
||||||
'PERF', # Perflint
|
'PERF', # Perflint
|
||||||
|
'FURB', # refurb
|
||||||
'RUF', # Ruff-specific rules
|
'RUF', # Ruff-specific rules
|
||||||
]
|
]
|
||||||
ignore = [
|
ignore = [
|
||||||
@@ -82,15 +110,16 @@ ignore = [
|
|||||||
'ANN102', # 由 type checker 自动推断
|
'ANN102', # 由 type checker 自动推断
|
||||||
'ANN202', # 向 NoneBot 注册的函数
|
'ANN202', # 向 NoneBot 注册的函数
|
||||||
'TRY003',
|
'TRY003',
|
||||||
|
'COM812', # 强制尾随逗号
|
||||||
|
'TID252', # 相对导入
|
||||||
|
'ISC001', # format warning
|
||||||
]
|
]
|
||||||
line-length = 120
|
|
||||||
target-version = "py310"
|
|
||||||
flake8-quotes = { inline-quotes = 'single', multiline-quotes = 'double' }
|
flake8-quotes = { inline-quotes = 'single', multiline-quotes = 'double' }
|
||||||
|
|
||||||
[tool.ruff.flake8-annotations]
|
[tool.ruff.lint.flake8-annotations]
|
||||||
mypy-init-return = true
|
mypy-init-return = true
|
||||||
|
|
||||||
[tool.ruff.flake8-builtins]
|
[tool.ruff.lint.flake8-builtins]
|
||||||
builtins-ignorelist = ["id"]
|
builtins-ignorelist = ["id"]
|
||||||
|
|
||||||
[tool.ruff.format]
|
[tool.ruff.format]
|
||||||
|
|||||||
Reference in New Issue
Block a user