适配 TETR.IO 新赛季 (#380)

*  新 api 的 schemas

* 👽️ 更新新赛季 api 的 schemas

*  添加依赖 async-lru

* 👽️ 更新新赛季 api 封装

* 👽️ 适配新赛季 api 40l

* 🐛 api_type 忘记更新了

* 👽️ 适配新赛季 api blitz

* 👽️ 适配新赛季 api bind

* 🔥 暂时删除一些指令
等待新赛季开始

* 🚨 auto fix by pre-commit hooks

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
呵呵です
2024-07-29 17:03:05 +08:00
committed by GitHub
parent d8d56b44db
commit 256d13d1df
21 changed files with 600 additions and 1194 deletions

View File

@@ -3,9 +3,12 @@ from nonebot_plugin_alconna import At
from ...utils.exception import MessageFormatError
from ...utils.typing import Me
from .. import add_block_handlers, alc, command
# from .. import add_block_handlers, alc, command
from .. import alc, command
from .api import Player
from .api.typing import ValidRank
# from .api.typing import ValidRank
from .constant import USER_ID, USER_NAME
from .typing import Template
@@ -33,30 +36,30 @@ command.add(
),
help_text='绑定 TETR.IO 账号',
),
Subcommand(
'query',
Args(
Arg(
'target',
At | Me,
notice='@想要查询的人 / 自己',
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
),
Arg(
'account',
get_player,
notice='TETR.IO 用户名 / ID',
flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
),
),
Option(
'--template',
Arg('template', Template),
alias=['-T'],
help_text='要使用的查询模板',
),
help_text='查询 TETR.IO 游戏信息',
),
# Subcommand(
# 'query',
# Args(
# Arg(
# 'target',
# At | Me,
# notice='@想要查询的人 / 自己',
# flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
# ),
# Arg(
# 'account',
# get_player,
# notice='TETR.IO 用户名 / ID',
# flags=[ArgFlag.HIDDEN, ArgFlag.OPTIONAL],
# ),
# ),
# Option(
# '--template',
# Arg('template', Template),
# alias=['-T'],
# help_text='要使用的查询模板',
# ),
# help_text='查询 TETR.IO 游戏信息',
# ),
Subcommand(
'record',
Option(
@@ -82,27 +85,33 @@ command.add(
),
),
),
Subcommand(
'list',
Option('--max-tr', Arg('max_tr', float), help_text='TR的上限'),
Option('--min-tr', Arg('min_tr', float), help_text='TR的下限'),
Option('--limit', Arg('limit', int), help_text='查询数量'),
Option('--country', Arg('country', str), help_text='国家代码'),
help_text='查询 TETR.IO 段位排行榜',
),
Subcommand(
'rank',
Option(
'--all',
dest='all',
),
Option(
'--detail',
Arg('rank', ValidRank),
alias=['-D'],
),
help_text='查询 TETR.IO 段位信息',
),
# Subcommand(
# 'list',
# Option('--max-tr', Arg('max_tr', float), help_text='TR的上限'),
# Option('--min-tr', Arg('min_tr', float), help_text='TR的下限'),
# Option('--limit', Arg('limit', int), help_text='查询数量'),
# Option('--country', Arg('country', str), help_text='国家代码'),
# help_text='查询 TETR.IO 段位排行榜',
# ),
# Subcommand(
# 'rank',
# Subcommand(
# '--all',
# Option(
# '--template',
# Arg('template', Template),
# alias=['-T'],
# help_text='要使用的查询模板',
# ),
# dest='all',
# ),
# Option(
# '--detail',
# Arg('rank', ValidRank),
# alias=['-D'],
# ),
# help_text='查询 TETR.IO 段位信息',
# ),
Subcommand(
'config',
Option(
@@ -111,18 +120,19 @@ command.add(
alias=['-DT', 'DefaultTemplate'],
),
),
alias=['TETRIO', 'tetr.io', 'tetrio', 'io'],
dest='TETRIO',
help_text='TETR.IO 游戏相关指令',
)
)
def rank_wrapper(slot: int | str, content: str | None):
if slot == 'rank' and not content:
return '--all'
if content is not None:
return f'--detail {content.lower()}'
return content
# def rank_wrapper(slot: int | str, content: str | None):
# if slot == 'rank' and not content:
# return '--all'
# if content is not None:
# return f'--detail {content.lower()}'
# return content
alc.shortcut(
@@ -130,11 +140,11 @@ alc.shortcut(
command='tstats TETR.IO bind',
humanized='io绑定',
)
alc.shortcut(
'(?i:io)(?i:查询|查|query|stats)',
command='tstats TETR.IO query',
humanized='io查',
)
# alc.shortcut(
# '(?i:io)(?i:查询|查|query|stats)',
# command='tstats TETR.IO query',
# humanized='io查',
# )
alc.shortcut(
'(?i:io)(?i:记录|record)(?i:40l)',
command='tstats TETR.IO record --40l',
@@ -145,36 +155,37 @@ alc.shortcut(
command='tstats TETR.IO record --blitz',
humanized='io记录blitz',
)
alc.shortcut(
r'(?i:io)(?i:段位|段|rank)\s*(?P<rank>[a-zA-Z+-]{0,2})',
command='tstats TETR.IO rank {rank}',
humanized='iorank',
fuzzy=False,
wrapper=rank_wrapper,
)
# alc.shortcut(
# r'(?i:io)(?i:段位|段|rank)\s*(?P<rank>[a-zA-Z+-]{0,2})',
# command='tstats TETR.IO rank {rank}',
# humanized='iorank',
# fuzzy=False,
# wrapper=rank_wrapper,
# )
alc.shortcut(
'(?i:io)(?i:配置|配|config)',
command='tstats TETR.IO config',
humanized='io配置',
)
alc.shortcut(
'fkosk',
command='tstats TETR.IO query',
arguments=[''],
fuzzy=False,
humanized='An Easter egg!',
)
# alc.shortcut(
# 'fkosk',
# command='tstats TETR.IO query',
# arguments=['我'],
# fuzzy=False,
# humanized='An Easter egg!',
# )
add_block_handlers(alc.assign('TETRIO.query'))
# add_block_handlers(alc.assign('TETRIO.query'))
from . import bind, config, list, query, rank, record # noqa: E402
# from . import bind, config, list, query, rank, record
from . import bind, config, record # noqa: E402
__all__ = [
'bind',
'config',
'list',
'query',
'rank',
# 'list',
# 'query',
# 'rank',
'record',
]

View File

@@ -7,11 +7,12 @@ from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
from ....db.models import PydanticType
from .schemas.base import SuccessModel
from .typing import Summaries
class TETRIOHistoricalData(MappedAsDataclass, Model):
id: Mapped[int] = mapped_column(init=False, primary_key=True)
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
api_type: Mapped[Literal['User Info', 'User Records']] = mapped_column(String(16), index=True)
api_type: Mapped[Literal['User Info', Summaries]] = mapped_column(String(16), index=True)
data: Mapped[SuccessModel] = mapped_column(PydanticType(get_model=[SuccessModel.__subclasses__], models=set()))
update_time: Mapped[datetime] = mapped_column(DateTime, index=True)

View File

@@ -1,5 +1,7 @@
from typing import overload
from types import MappingProxyType
from typing import Literal, overload
from async_lru import alru_cache
from nonebot.compat import type_validate_json
from ....db import anti_duplicate_add
@@ -9,12 +11,31 @@ from ..constant import BASE_URL, USER_ID, USER_NAME
from .cache import Cache
from .models import TETRIOHistoricalData
from .schemas.base import FailedModel
from .schemas.summaries import (
AchievementsSuccessModel,
SoloSuccessModel,
SummariesModel,
ZenithSuccessModel,
ZenSuccessModel,
)
from .schemas.summaries.base import User as SummariesUser
from .schemas.user import User
from .schemas.user_info import UserInfo, UserInfoSuccess
from .schemas.user_records import SoloModeRecord, UserRecords, UserRecordsSuccess, Zen
from .typing import Summaries
class Player:
__SUMMARIES_MAPPING: MappingProxyType[Summaries, type[SummariesModel]] = MappingProxyType(
{
'40l': SoloSuccessModel,
'blitz': SoloSuccessModel,
'zenith': ZenithSuccessModel,
'zenithex': ZenithSuccessModel,
'zen': ZenSuccessModel,
'achievements': AchievementsSuccessModel,
}
)
@overload
def __init__(self, *, user_id: str, trust: bool = False): ...
@overload
@@ -36,7 +57,7 @@ class Player:
raise ValueError(msg)
self.__user: User | None = None
self._user_info: UserInfoSuccess | None = None
self._user_records: UserRecordsSuccess | None = None
self._summaries: dict[Summaries, SummariesModel] = {}
@property
def _request_user_parameter(self) -> str:
@@ -49,14 +70,21 @@ class Player:
@property
async def user(self) -> User:
if self.__user is None:
if self.__user is not None:
return self.__user
if (user := (await self._get_local_summaries_user())) is not None:
self.__user = User(
ID=user.id,
name=user.username,
)
else:
user_info = await self.get_info()
self.__user = User(
ID=user_info.data.user.id,
name=user_info.data.user.username,
ID=user_info.data.id,
name=user_info.data.username,
)
self.user_id = user_info.data.user.id
self.user_name = user_info.data.user.username
self.user_id = user_info.data.id
self.user_name = user_info.data.username
return self.__user
async def get_info(self) -> UserInfoSuccess:
@@ -79,36 +107,85 @@ class Player:
)
return self._user_info
async def get_records(self) -> UserRecordsSuccess:
"""Get User Records"""
if self._user_records is None:
raw_user_records = await Cache.get(
splice_url([BASE_URL, 'users/', f'{self._request_user_parameter}/', 'records'])
@overload
async def get_summaries(self, summaries_type: Literal['40l']) -> SoloSuccessModel: ...
@overload
async def get_summaries(self, summaries_type: Literal['blitz']) -> SoloSuccessModel: ...
@overload
async def get_summaries(self, summaries_type: Literal['zenith']) -> ZenithSuccessModel: ...
@overload
async def get_summaries(self, summaries_type: Literal['zenithex']) -> ZenithSuccessModel: ...
@overload
async def get_summaries(self, summaries_type: Literal['zen']) -> ZenSuccessModel: ...
@overload
async def get_summaries(self, summaries_type: Literal['achievements']) -> AchievementsSuccessModel: ...
async def get_summaries(self, summaries_type: Summaries) -> SummariesModel:
if summaries_type not in self._summaries:
raw_summaries = await Cache.get(
splice_url([BASE_URL, 'users/', f'{self._request_user_parameter}/', 'summaries/', summaries_type])
)
user_records: UserRecords = type_validate_json(UserRecords, raw_user_records) # type: ignore[arg-type]
if isinstance(user_records, FailedModel):
msg = f'用户Solo数据请求错误:\n{user_records.error}'
summaries: SummariesModel | FailedModel = type_validate_json(
self.__SUMMARIES_MAPPING[summaries_type] | FailedModel, # type: ignore[arg-type]
raw_summaries,
)
if isinstance(summaries, FailedModel):
msg = f'用户Summaries数据请求错误:\n{summaries.error}'
raise RequestError(msg)
self._user_records = user_records
self._summaries[summaries_type] = summaries
await anti_duplicate_add(
TETRIOHistoricalData,
TETRIOHistoricalData(
user_unique_identifier=(await self.user).unique_identifier,
api_type='User Records',
data=user_records,
update_time=user_records.cache.cached_at,
api_type=summaries_type,
data=summaries,
update_time=summaries.cache.cached_at,
),
)
return self._user_records
return self._summaries[summaries_type]
@property
async def sprint(self) -> SoloModeRecord:
return (await self.get_records()).data.records.sprint
@alru_cache
async def sprint(self) -> SoloSuccessModel:
return await self.get_summaries('40l')
@property
async def blitz(self) -> SoloModeRecord:
return (await self.get_records()).data.records.blitz
@alru_cache
async def blitz(self) -> SoloSuccessModel:
return await self.get_summaries('blitz')
@property
async def zen(self) -> Zen:
return (await self.get_records()).data.zen
@alru_cache
async def zen(self) -> ZenSuccessModel:
return await self.get_summaries('zen')
async def _get_local_summaries_user(self) -> SummariesUser | None:
allow_summaries: set[Literal['40l', 'blitz', 'zenith', 'zenithex']] = {
'40l',
'blitz',
'zenith',
'zenithex',
}
if has_summaries := (allow_summaries & self._summaries.keys()):
for i in has_summaries:
if (record := (await self.get_summaries(i)).data.record) is not None:
return record.user
return None
@property
@alru_cache
async def avatar_revision(self) -> int | None:
if self._user_info is not None:
return self._user_info.data.avatar_revision
if (user := (await self._get_local_summaries_user())) is not None:
return user.avatar_revision
return (await self.get_info()).data.avatar_revision
@property
@alru_cache
async def banner_revision(self) -> int | None:
if self._user_info is not None:
return self._user_info.data.banner_revision
if (user := (await self._get_local_summaries_user())) is not None:
return user.banner_revision
return (await self.get_info()).data.banner_revision

View File

@@ -0,0 +1,20 @@
from .achievements import Achievements, AchievementsSuccessModel
from .solo import Blitz, SoloSuccessModel, Sprint
from .zen import Zen, ZenSuccessModel
from .zenith import Zenith, ZenithEx, ZenithSuccessModel
# Union of every per-mode success model returned by the summaries endpoints;
# used e.g. as the value type of Player._summaries.
SummariesModel = AchievementsSuccessModel | SoloSuccessModel | ZenSuccessModel | ZenithSuccessModel

# Public re-exports of this package.
__all__ = [
    'Achievements',
    'AchievementsSuccessModel',
    'Blitz',
    'Sprint',
    'SoloSuccessModel',
    'Zen',
    'ZenSuccessModel',
    'Zenith',
    'ZenithEx',
    'ZenithSuccessModel',
    'SummariesModel',
]

View File

@@ -0,0 +1,29 @@
from typing import TypeAlias
from pydantic import BaseModel
from ..base import FailedModel, SuccessModel
class Achievement(BaseModel):
    """A single achievement entry from the users/<id>/summaries/achievements endpoint."""

    # NOTE(review): the upstream API does not document these fields; the short
    # names below mirror the raw JSON keys verbatim — do not rename them.
    k: int
    o: int
    rt: int
    vt: int
    min: int  # shadows builtin `min` on purpose: must match the API key
    deci: int
    name: str
    object: str  # shadows builtin `object` on purpose: must match the API key
    category: str
    hidden: bool
    desc: str
    n: str
    stub: bool


class AchievementsSuccessModel(SuccessModel):
    """Successful response payload for the achievements summary."""

    data: list[Achievement]


# What callers actually receive: the parsed success payload or the API error model.
Achievements: TypeAlias = AchievementsSuccessModel | FailedModel

View File

@@ -0,0 +1,29 @@
from pydantic import BaseModel
class User(BaseModel):
    """Minimal user record embedded in summaries record payloads."""

    id: str
    username: str
    avatar_revision: int
    banner_revision: int
    country: str
    # NOTE(review): the API sends numeric flags here (not bools) — confirm.
    verified: int
    supporter: int


class AggregateStats(BaseModel):
    """Aggregate performance metrics attached to a record.

    NOTE(review): presumably apm = attacks/min, pps = pieces/sec,
    vsscore = VS score — confirm against the TETR.IO API docs.
    """

    apm: float
    pps: float
    vsscore: float


class Finesse(BaseModel):
    """Finesse counters of a record."""

    combo: int
    faults: int
    perfectpieces: int


class P(BaseModel):
    """Opaque ordering triple the API calls ``p``.

    NOTE(review): semantics undocumented upstream (pri/sec/ter look like
    primary/secondary/tertiary sort keys) — confirm before relying on them.
    """

    pri: float
    sec: float
    ter: float

View File

@@ -0,0 +1,103 @@
from datetime import datetime
from typing import Literal, TypeAlias
from pydantic import BaseModel, Field
from ..base import FailedModel, SuccessModel
from .base import AggregateStats, Finesse, P, User
class Time(BaseModel):
    """Timer state stored with a record."""

    start: int
    zero: bool
    locked: bool
    prev: int
    frameoffset: int


class Clears(BaseModel):
    """Line-clear counters of a 40 LINES / BLITZ record."""

    singles: int
    doubles: int
    triples: int
    quads: int
    realtspins: int
    minitspins: int
    minitspinsingles: int
    tspinsingles: int
    minitspindoubles: int
    tspindoubles: int
    tspintriples: int
    tspinquads: int
    allclear: int


class Garbage(BaseModel):
    """Garbage exchange counters."""

    sent: int
    received: int
    attack: int
    cleared: int


class Stats(BaseModel):
    """Raw in-game statistics of the record."""

    seed: int
    lines: int
    level_lines: int
    level_lines_needed: int
    inputs: int
    holds: int
    time: Time
    score: int
    zenlevel: int
    zenprogress: int
    level: int
    combo: int
    currentcombopower: int | None = None  # missing in some payloads
    topcombo: int
    btb: int
    topbtb: int
    currentbtbchainpower: int | None = None  # missing in some payloads
    tspins: int
    piecesplaced: int
    clears: Clears
    garbage: Garbage
    kills: int
    finesse: Finesse
    finaltime: float


class Results(BaseModel):
    """Final results block of the record."""

    aggregatestats: AggregateStats
    stats: Stats
    gameoverreason: str


class Record(BaseModel):
    """A single 40l/blitz record from users/<id>/summaries/{40l,blitz}."""

    id: str = Field(..., alias='_id')  # JSON key is `_id`
    replayid: str
    stub: bool
    gamemode: Literal['40l', 'blitz']
    pb: bool
    oncepb: bool
    ts: datetime
    revolution: None
    user: User
    otherusers: list
    leaderboards: list[str]
    results: Results
    extras: dict
    disputed: bool
    p: P


class Data(BaseModel):
    """Payload of a successful solo summaries response."""

    record: Record | None  # presumably None when the user has no record — confirm
    rank: int
    rank_local: int


class SoloSuccessModel(SuccessModel):
    data: Data


# What callers actually receive: the parsed success payload or the API error model.
Sprint: TypeAlias = SoloSuccessModel | FailedModel
Blitz: TypeAlias = SoloSuccessModel | FailedModel

View File

@@ -0,0 +1,17 @@
from typing import TypeAlias
from pydantic import BaseModel
from ..base import FailedModel, SuccessModel
class Data(BaseModel):
    """Payload of a successful ZEN summaries response."""

    level: int
    score: int


class ZenSuccessModel(SuccessModel):
    data: Data


# What callers actually receive: the parsed success payload or the API error model.
Zen: TypeAlias = ZenSuccessModel | FailedModel

View File

@@ -0,0 +1,131 @@
from datetime import datetime
from typing import Literal, TypeAlias
from pydantic import BaseModel, Field
from ..base import FailedModel, SuccessModel
from .base import AggregateStats, Finesse, P, User
class Clears(BaseModel):
    """Line-clear counters of a QUICK PLAY (zenith) record."""

    singles: int
    doubles: int
    triples: int
    quads: int
    pentas: int
    realtspins: int
    minitspins: int
    minitspinsingles: int
    tspinsingles: int
    minitspindoubles: int
    tspindoubles: int
    minitspintriples: int
    tspintriples: int
    minitspinquads: int
    tspinquads: int
    tspinpentas: int
    allclear: int


class Garbage(BaseModel):
    """Garbage exchange counters."""

    sent: int
    sent_nomult: int
    maxspike: int
    maxspike_nomult: int
    received: int
    attack: int
    cleared: int


class _Zenith(BaseModel):
    """Mode-specific statistics nested under ``stats.zenith``."""

    altitude: float
    rank: float
    peakrank: float
    avgrankpts: float
    floor: int
    targetingfactor: float
    targetinggrace: float
    totalbonus: float
    revives: int
    revives_total: int = Field(..., alias='revivesTotal')  # JSON key is camelCase
    speedrun: bool
    speedrun_seen: bool
    splits: list[int]


class Stats(BaseModel):
    """Raw in-game statistics of the record."""

    lines: int
    level_lines: int
    level_lines_needed: int
    inputs: int
    holds: int
    score: int
    zenlevel: int
    zenprogress: int
    level: int
    combo: int
    topcombo: int
    combopower: int
    btb: int
    topbtb: int
    btbpower: int
    tspins: int
    piecesplaced: int
    clears: Clears
    garbage: Garbage
    kills: int
    finesse: Finesse
    zenith: _Zenith
    finaltime: int


class Results(BaseModel):
    """Final results block of the record."""

    aggregatestats: AggregateStats
    stats: Stats
    gameoverreason: str


class ExtrasZenith(BaseModel):
    # Raw mod identifiers attached to the run (strings as sent by the API).
    mods: list[str]


class Extras(BaseModel):
    zenith: ExtrasZenith


class Record(BaseModel):
    """A single zenith/zenithex record from users/<id>/summaries/{zenith,zenithex}."""

    id: str = Field(..., alias='_id')  # JSON key is `_id`
    replayid: str
    stub: bool
    gamemode: Literal['zenith', 'zenithex']
    pb: bool
    oncepb: bool
    ts: datetime
    revolution: None
    user: User
    otherusers: list
    leaderboards: list[str]
    results: Results
    extras: Extras
    disputed: bool
    p: P


class Best(BaseModel):
    record: None  # NOTE(review): upstream currently always sends null here — confirm
    rank: int


class Data(BaseModel):
    """Payload of a successful zenith summaries response."""

    record: Record | None
    rank: int
    rank_local: int
    best: Best


class ZenithSuccessModel(SuccessModel):
    data: Data


# What callers actually receive: the parsed success payload or the API error model.
Zenith: TypeAlias = ZenithSuccessModel | FailedModel
ZenithEx: TypeAlias = ZenithSuccessModel | FailedModel

View File

@@ -3,7 +3,6 @@ from typing import Literal
from pydantic import BaseModel, Field
from ..typing import Rank
from .base import FailedModel
from .base import SuccessModel as BaseSuccessModel
@@ -15,67 +14,6 @@ class Badge(BaseModel):
ts: datetime | Literal[False] | None = None
class MetaLeague(BaseModel):
decaying: bool
class NeverPlayedLeague(MetaLeague):
gamesplayed: Literal[0]
gameswon: Literal[0]
rating: Literal[-1]
rank: Literal['z']
standing: Literal[-1]
standing_local: Literal[-1]
next_rank: None
prev_rank: None
next_at: Literal[-1]
prev_at: Literal[-1]
percentile: Literal[-1]
percentile_rank: Literal['z']
apm: None = None
pps: None = None
vs: None = None
class NeverRatedLeague(MetaLeague):
gamesplayed: Literal[1, 2, 3, 4, 5, 6, 7, 8, 9]
gameswon: int
rating: Literal[-1]
rank: Literal['z']
standing: Literal[-1]
standing_local: Literal[-1]
next_rank: None
prev_rank: None
next_at: Literal[-1]
prev_at: Literal[-1]
percentile: Literal[-1]
percentile_rank: Literal['z']
apm: float
pps: float
vs: float | None = None
class RatedLeague(MetaLeague):
gamesplayed: int
gameswon: int
rating: float
rank: Rank
bestrank: Rank
standing: int
standing_local: int
next_rank: Rank | None = None
prev_rank: Rank | None = None
next_at: int
prev_at: int
percentile: float
percentile_rank: str
glicko: float
rd: float
apm: float
pps: float
vs: float | None = None
class Discord(BaseModel):
id: str
username: str
@@ -89,7 +27,7 @@ class Distinguishment(BaseModel):
type: str
class User(BaseModel):
class Data(BaseModel):
id: str = Field(..., alias='_id')
username: str
role: Literal['anon', 'user', 'bot', 'halfmod', 'mod', 'admin', 'sysop', 'banned']
@@ -105,7 +43,6 @@ class User(BaseModel):
supporter: bool | None = None # osk说是必有, 但实际上不是 fkosk
supporter_tier: int
verified: bool
league: NeverPlayedLeague | NeverRatedLeague | RatedLeague
avatar_revision: int | None = None
"""This user's avatar ID. Get their avatar at
@@ -122,10 +59,6 @@ class User(BaseModel):
distinguishment: Distinguishment | None = None
class Data(BaseModel):
user: User
class UserInfoSuccess(BaseSuccessModel):
data: Data

View File

@@ -21,3 +21,13 @@ ValidRank = Literal[
]
Rank = ValidRank | Literal['z'] # 未定级
# Summary types accepted by the users/<id>/summaries/<type> endpoint.
Summaries = Literal[
    '40l',
    'blitz',
    'zenith',
    'zenithex',
    # 'league',  # waiting for the official season to start
    'zen',
    'achievements',
]

View File

@@ -45,11 +45,10 @@ async def _(nb_user: User, account: Player, event_session: EventSession, bot_inf
platform='TETR.IO',
status='unknown',
user=People(
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": user_info.data.user.avatar_revision})}'
if user_info.data.user.avatar_revision is not None
and user_info.data.user.avatar_revision != 0
else Avatar(type='identicon', hash=md5(user_info.data.user.id.encode()).hexdigest()), # noqa: S324
name=user_info.data.user.username.upper(),
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": avatar_revision})}'
if (avatar_revision := (await account.avatar_revision)) is not None and avatar_revision != 0
else Avatar(type='identicon', hash=md5(user.ID.encode()).hexdigest()), # noqa: S324
name=user.name.upper(),
),
bot=People(
avatar=await get_avatar(bot_info, 'Data URI', '../../static/logo/logo.svg'),

View File

@@ -1,80 +0,0 @@
from nonebot_plugin_alconna.uniseg import UniMessage
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
from ...db import trigger
from ...utils.host import HostPage, get_self_netloc
from ...utils.metrics import get_metrics
from ...utils.render import render
from ...utils.render.schemas.tetrio.tetrio_user_list_v2 import List, TetraLeague, User
from ...utils.screenshot import screenshot
from .. import alc
from .api.schemas.tetra_league import ValidLeague
from .api.tetra_league import Parameter, leaderboard
from .constant import GAME_TYPE
@alc.assign('TETRIO.list')
async def _(
    event_session: EventSession,
    max_tr: float | None = None,
    min_tr: float | None = None,
    limit: int | None = None,
    country: str | None = None,
):
    """Handle the `TETRIO list` subcommand: render the Tetra League leaderboard
    (optionally filtered by TR bounds, size and country) and reply with a
    screenshot of the rendered page.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        # Record the invocation with only the options the user actually passed.
        command_type='list',
        command_args=[
            f'{key} {value}'
            for key, value in zip(
                ('--max-tr', '--min-tr', '--limit', '--country'), (max_tr, min_tr, limit, country), strict=True
            )
            if value is not None
        ],
    ):
        # Build the leaderboard query: `after`/`before` page by TR value.
        parameter: Parameter = {}
        if max_tr is not None:
            parameter['after'] = max_tr
        if min_tr is not None:
            parameter['before'] = min_tr
        if limit is not None:
            parameter['limit'] = limit
        if country is not None:
            parameter['country'] = country
        league = await leaderboard(parameter)
        async with HostPage(
            await render(
                'v2/tetrio/user/list',
                List(
                    show_index=True,
                    users=[
                        User(
                            id=i.id,
                            name=i.username.upper(),
                            avatar=f'https://tetr.io/user-content/avatars/{i.id}.jpg',
                            country=i.country,
                            verified=i.verified,
                            tetra_league=TetraLeague(
                                rank=i.league.rank,
                                tr=round(i.league.rating, 2),
                                glicko=round(i.league.glicko, 2),
                                rd=round(i.league.rd, 2),
                                decaying=i.league.decaying,
                                # get_metrics derives lpm/apl/adpm-style values once,
                                # then the walrus-bound result is reused below.
                                pps=(metrics := get_metrics(pps=i.league.pps, apm=i.league.apm, vs=i.league.vs)).pps,
                                apm=metrics.apm,
                                apl=metrics.apl,
                                vs=metrics.vs,
                                adpl=metrics.adpl,
                            ),
                            xp=i.xp,
                            join_at=None,
                        )
                        for i in league.data.users
                        # Skip entries whose league data is not a rated/valid league.
                        if isinstance(i.league, ValidLeague)
                    ],
                ),
            )
        ) as page_hash:
            await UniMessage.image(raw=await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')).finish()

View File

@@ -1,554 +0,0 @@
from asyncio import gather
from collections import defaultdict
from datetime import date, datetime, timedelta, timezone
from hashlib import md5
from math import ceil, floor
from typing import ClassVar, TypeVar, overload
from urllib.parse import urlencode
from zoneinfo import ZoneInfo
from aiofiles import open
from nonebot import get_driver
from nonebot.adapters import Event
from nonebot.compat import type_validate_json
from nonebot.matcher import Matcher
from nonebot_plugin_alconna import At
from nonebot_plugin_alconna.uniseg import UniMessage
from nonebot_plugin_apscheduler import scheduler # type: ignore[import-untyped]
from nonebot_plugin_localstore import get_data_file # type: ignore[import-untyped]
from nonebot_plugin_orm import get_session
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
from nonebot_plugin_user import User as NBUser # type: ignore[import-untyped]
from nonebot_plugin_user import get_user # type: ignore[import-untyped]
from sqlalchemy import select
from zstandard import ZstdDecompressor
from ...db import query_bind_info, trigger
from ...utils.exception import FallbackError
from ...utils.host import HostPage, get_self_netloc
from ...utils.metrics import TetrisMetricsProWithPPSVS, get_metrics
from ...utils.render import render
from ...utils.render.schemas.base import Avatar, Ranking
from ...utils.render.schemas.tetrio.tetrio_info import Info as V1TemplateInfo
from ...utils.render.schemas.tetrio.tetrio_info import Radar, TetraLeague, TetraLeagueHistory, TetraLeagueHistoryData
from ...utils.render.schemas.tetrio.tetrio_info import User as V1TemplateUser
from ...utils.render.schemas.tetrio.tetrio_user_info_v2 import Badge, Blitz, Sprint, Statistic, TetraLeagueStatistic
from ...utils.render.schemas.tetrio.tetrio_user_info_v2 import Info as V2TemplateInfo
from ...utils.render.schemas.tetrio.tetrio_user_info_v2 import TetraLeague as V2TemplateTetraLeague
from ...utils.render.schemas.tetrio.tetrio_user_info_v2 import User as V2TemplateUser
from ...utils.screenshot import screenshot
from ...utils.typing import Me, Number
from ..constant import CANT_VERIFY_MESSAGE
from . import alc
from .api import Player, User, UserInfoSuccess
from .api.models import TETRIOHistoricalData
from .api.schemas.tetra_league import TetraLeagueSuccess
from .api.schemas.user_info import NeverPlayedLeague, NeverRatedLeague, RatedLeague
from .constant import GAME_TYPE, TR_MAX, TR_MIN
from .models import IORank, TETRIOUserConfig
from .typing import Template
UTC = timezone.utc
driver = get_driver()
@alc.assign('TETRIO.query')
async def _(  # noqa: PLR0913
    user: NBUser,
    event: Event,
    matcher: Matcher,
    target: At | Me,
    event_session: EventSession,
    template: Template | None = None,
):
    """Handle `TETRIO query` for an @-mentioned user (or the sender): resolve
    the target's bound TETR.IO account from the database and reply with the
    rendered query result.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--default-template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            bind = await query_bind_info(
                session=session,
                user=await get_user(
                    event_session.platform, target.target if isinstance(target, At) else event.get_user_id()
                ),
                game_platform=GAME_TYPE,
            )
            # No explicit template: fall back to the sender's configured default.
            if template is None:
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        if bind is None:
            await matcher.finish('未查询到绑定信息')
        message = UniMessage(CANT_VERIFY_MESSAGE)
        # trust=True: the stored binding is taken as a known-good user id.
        player = Player(user_id=bind.game_account, trust=True)
        await (message + (await make_query_result(player, template or 'v1'))).finish()
@alc.assign('TETRIO.query')
async def _(user: NBUser, account: Player, event_session: EventSession, template: Template | None = None):
    """Handle `TETRIO query` when an explicit account argument was parsed:
    render and send the query result for that account directly.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='query',
        command_args=[f'--default-template {template}'] if template is not None else [],
    ):
        async with get_session() as session:
            # No explicit template: fall back to the sender's configured default.
            if template is None:
                template = await session.scalar(
                    select(TETRIOUserConfig.query_template).where(TETRIOUserConfig.id == user.id)
                )
        await (await make_query_result(account, template or 'v1')).finish()
def get_value_bounds(values: list[int | float]) -> tuple[int, int]:
value_max = 10 * ceil(max(values) / 10)
value_min = 10 * floor(min(values) / 10)
return value_max, value_min
def get_split(value_max: int, value_min: int) -> tuple[int, int]:
    """Widen [value_min, value_max] symmetrically until its span divides into
    four equal chart intervals (span divisible by 40), while keeping both
    bounds inside the global [TR_MIN, TR_MAX] range.

    Returns:
        (split_value, correction): the size of one of the four intervals, and
        the combined widening + clamping correction applied to the bounds.
    """
    offset = 0
    overflow = 0
    while True:
        # `offset` widens the window symmetrically; `overflow` shifts the whole
        # window back inside [TR_MIN, TR_MAX] when a bound would escape it.
        # (overflow cancels out of the span: new_max - new_min = orig span + 2*offset)
        if (new_max_value := value_max + offset + overflow) > TR_MAX:
            overflow -= 1
            continue
        if (new_min_value := value_min - offset + overflow) < TR_MIN:
            overflow += 1
            continue
        # Accept once the widened span splits evenly into 4 intervals of a
        # multiple of 10 (span % 40 == 0).
        if ((new_max_value - new_min_value) / 40).is_integer():
            split_value = int((value_max + offset - (value_min - offset)) / 4)
            break
        offset += 1
    return split_value, offset + overflow
def get_specified_point(
    previous_point: TetraLeagueHistoryData,
    behind_point: TetraLeagueHistoryData,
    point_time: datetime,
) -> TetraLeagueHistoryData:
    """Linearly interpolate (or extrapolate) the TR value at *point_time* from
    the line through *previous_point* and *behind_point*.

    Args:
        previous_point (Data): the earlier data point
        behind_point (Data): the later data point
        point_time (datetime): the instant to evaluate the line at

    Returns:
        Data: the derived data point at *point_time*
    """
    # Slope of the line through the two points, in TR per second.
    slope = (behind_point.tr - previous_point.tr) / (
        datetime.timestamp(behind_point.record_at) - datetime.timestamp(previous_point.record_at)
    )
    return TetraLeagueHistoryData(
        record_at=point_time,
        tr=previous_point.tr + slope * (datetime.timestamp(point_time) - datetime.timestamp(previous_point.record_at)),
    )
async def query_historical_data(user: User, user_info: UserInfoSuccess) -> list[TetraLeagueHistoryData]:
    """Assemble roughly the last 9 days of Tetra League TR history for *user*.

    Combines stored 'User Info' snapshots from the database with full-export
    data, then normalizes the series so it starts exactly at (today - 9 days)
    and ends at today's midnight (Asia/Shanghai), interpolating edge points
    as needed.
    """
    today = datetime.now(ZoneInfo('Asia/Shanghai')).replace(hour=0, minute=0, second=0, microsecond=0)
    forward = timedelta(days=9)
    start_time = (today - forward).astimezone(UTC)
    async with get_session() as session:
        # All 'User Info' snapshots for this user inside the window.
        historical_data = (
            await session.scalars(
                select(TETRIOHistoricalData)
                .where(TETRIOHistoricalData.update_time >= start_time)
                .where(TETRIOHistoricalData.user_unique_identifier == user.unique_identifier)
                .where(TETRIOHistoricalData.api_type == 'User Info')
            )
        ).all()
        if historical_data:
            # Fetch one extra snapshot just before the window so the curve can
            # be interpolated back to the window's left edge.
            extra = (
                await session.scalars(
                    select(TETRIOHistoricalData)
                    .where(TETRIOHistoricalData.user_unique_identifier == user.unique_identifier)
                    .where(TETRIOHistoricalData.api_type == 'User Info')
                    .order_by(TETRIOHistoricalData.id.desc())
                    .where(TETRIOHistoricalData.id < min([i.id for i in historical_data]))
                    .limit(1)
                )
            ).one_or_none()
            if extra is not None:
                historical_data = list(historical_data)
                historical_data.append(extra)
    full_export_data = FullExport.get_data(user.unique_identifier)
    if not historical_data and not full_export_data:
        # No history at all: draw a flat line at the current rating.
        return [
            TetraLeagueHistoryData(record_at=today - forward, tr=user_info.data.user.league.rating),
            TetraLeagueHistoryData(record_at=today.replace(microsecond=1000), tr=user_info.data.user.league.rating),
        ]
    # Keep only snapshots that parsed successfully AND have a rated league.
    histories = [
        TetraLeagueHistoryData(
            record_at=i.update_time.astimezone(ZoneInfo('Asia/Shanghai')),
            tr=i.data.data.user.league.rating,
        )
        for i in historical_data
        if isinstance(i.data, UserInfoSuccess) and isinstance(i.data.data.user.league, RatedLeague)
    ] + full_export_data
    # Sort by time.
    histories = sorted(histories, key=lambda x: x.record_at)
    for index, value in enumerate(histories):
        # Look for a data point past today's midnight; need at least two points
        # so interpolation has something to work with.
        if value.record_at > today and len(histories) >= 2:  # noqa: PLR2004
            histories = histories[:index] + [
                get_specified_point(histories[index - 1], histories[index], today.replace(microsecond=1000))
            ]
            break
    else:
        # Nothing past midnight: extrapolate to midnight using the freshest
        # live value from user_info as the second anchor.
        histories.append(
            get_specified_point(
                histories[-1],
                TetraLeagueHistoryData(record_at=user_info.cache.cached_at, tr=user_info.data.user.league.rating),
                today.replace(microsecond=1000),
            )
        )
    # Pin the left edge of the series exactly to (today - 9 days).
    if histories[0].record_at < (today - forward):
        histories[0] = get_specified_point(
            histories[0],
            histories[1],
            today - forward,
        )
    else:
        histories.insert(0, TetraLeagueHistoryData(record_at=today - forward, tr=histories[0].tr))
    return histories
L = TypeVar('L', NeverPlayedLeague, NeverRatedLeague, RatedLeague)


@overload
def get_league(user_info: UserInfoSuccess, league_type: type[L]) -> L: ...
@overload
def get_league(
    user_info: UserInfoSuccess, league_type: None = None
) -> NeverPlayedLeague | NeverRatedLeague | RatedLeague: ...
def get_league(
    user_info: UserInfoSuccess, league_type: type[L] | None = None
) -> L | NeverPlayedLeague | NeverRatedLeague | RatedLeague:
    """Return the user's league object, optionally narrowed to *league_type*.

    Raises:
        FallbackError: the league is not an instance of the requested type.
    """
    league = user_info.data.user.league
    if league_type is not None and not isinstance(league, league_type):
        raise FallbackError
    return league
async def make_query_image_v1(player: Player) -> bytes:
    """Render the v1 TETR.IO info template for *player* and return the
    screenshot image bytes.

    Raises:
        FallbackError: the player has no rated league or no VS score, so the
            v1 template cannot be rendered (caller falls back to another view).
    """
    # Fetch user identity, profile, and both solo records concurrently.
    user, user_info, sprint, blitz = await gather(player.user, player.get_info(), player.sprint, player.blitz)
    league = get_league(user_info, RatedLeague)
    if league.vs is None:
        raise FallbackError
    histories = await query_historical_data(user, user_info)
    # Chart axis bounds and interval size for the TR history graph.
    value_max, value_min = get_value_bounds([i.tr for i in histories])
    split_value, offset = get_split(value_max, value_min)
    if sprint.record is not None:
        # Format the 40 LINES time as "S.mmm s" under a minute, else "M m S.mmm s".
        duration = timedelta(milliseconds=sprint.record.endcontext.final_time).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'
    blitz_value = f'{blitz.record.endcontext.score:,}' if blitz.record is not None else 'N/A'
    netloc = get_self_netloc()
    async with HostPage(
        page=await render(
            'v1/tetrio/info',
            V1TemplateInfo(
                user=V1TemplateUser(
                    # Real avatar when a non-zero revision exists, otherwise an
                    # identicon derived from the user id.
                    avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": user_info.data.user.avatar_revision})}'
                    if user_info.data.user.avatar_revision is not None and user_info.data.user.avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user_info.data.user.id.encode()).hexdigest(),  # noqa: S324
                    ),
                    name=user.name.upper(),
                    bio=user_info.data.user.bio,
                ),
                ranking=Ranking(
                    rating=round(league.glicko, 2),
                    rd=round(league.rd, 2),
                ),
                tetra_league=TetraLeague(
                    rank=league.rank,
                    tr=round(league.rating, 2),
                    global_rank=league.standing,
                    pps=league.pps,
                    # lpm = lines/min (pps * 24); adpm = attack+defense/min (vs * 0.6).
                    lpm=round(lpm := (league.pps * 24), 2),
                    apm=league.apm,
                    apl=round(league.apm / lpm, 2),
                    vs=league.vs,
                    adpm=round(adpm := (league.vs * 0.6), 2),
                    adpl=round(adpm / lpm, 2),
                ),
                tetra_league_history=TetraLeagueHistory(
                    data=histories,
                    split_interval=split_value,
                    min_tr=value_min,
                    max_tr=value_max,
                    offset=offset,
                ),
                radar=Radar(
                    # Derived radar metrics; formulas follow community stat
                    # conventions (app, dsps, dspp, cheese index, garbage eff.).
                    app=(app := (league.apm / (60 * league.pps))),
                    dsps=(dsps := ((league.vs / 100) - (league.apm / 60))),
                    dspp=(dspp := (dsps / league.pps)),
                    ci=150 * dspp - 125 * app + 50 * (league.vs / league.apm) - 25,
                    ge=2 * ((app * dsps) / league.pps),
                ),
                sprint=sprint_value,
                blitz=blitz_value,
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
N = TypeVar('N', int, float)
def handling_special_value(value: N) -> N | None:
return value if value != -1 else None
async def make_query_image_v2(player: Player) -> bytes:
    """Render the v2 user-info card for *player* and return it as PNG bytes."""
    # Fetch everything the template needs concurrently.
    user, user_info, sprint, blitz, zen = await gather(
        player.user, player.get_info(), player.sprint, player.blitz, player.zen
    )
    league = get_league(user_info)
    histories = await query_historical_data(user, user_info)
    # 40L result: plain seconds below one minute, otherwise "Xm Y.ZZZs".
    if sprint.record is not None:
        duration = timedelta(milliseconds=sprint.record.endcontext.final_time).total_seconds()
        sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s'  # noqa: PLR2004
    else:
        sprint_value = 'N/A'
    # Human-readable total play time; gametime may carry the -1 "unknown" sentinel.
    play_time: str | None
    if (game_time := handling_special_value(user_info.data.user.gametime)) is not None:
        if game_time // 3600 > 0:
            play_time = f'{game_time//3600:.0f}h {game_time % 3600 // 60:.0f}m {game_time % 60:.0f}s'
        elif game_time // 60 > 0:
            play_time = f'{game_time//60:.0f}m {game_time % 60:.0f}s'
        else:
            play_time = f'{game_time:.0f}s'
    else:
        play_time = game_time
    netloc = get_self_netloc()
    async with HostPage(
        await render(
            'v2/tetrio/user/info',
            V2TemplateInfo(
                user=V2TemplateUser(
                    id=user.ID,
                    name=user.name.upper(),
                    # Banner/avatar URLs only when a non-zero revision exists;
                    # otherwise None (banner) or an identicon (avatar).
                    banner=f'http://{netloc}/host/resource/tetrio/banners/{user.ID}?{urlencode({"revision": user_info.data.user.banner_revision})}'
                    if user_info.data.user.banner_revision is not None and user_info.data.user.banner_revision != 0
                    else None,
                    avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": user_info.data.user.avatar_revision})}'
                    if user_info.data.user.avatar_revision is not None and user_info.data.user.avatar_revision != 0
                    else Avatar(
                        type='identicon',
                        hash=md5(user_info.data.user.id.encode()).hexdigest(),  # noqa: S324
                    ),
                    badges=[
                        Badge(
                            id=i.id,
                            description=i.label,
                            group=i.group,
                            # Some badges have no (datetime) timestamp.
                            receive_at=i.ts if isinstance(i.ts, datetime) else None,
                        )
                        for i in user_info.data.user.badges
                    ],
                    country=user_info.data.user.country,
                    role=user_info.data.user.role,
                    xp=user_info.data.user.xp,
                    friend_count=user_info.data.user.friend_count,
                    supporter_tier=user_info.data.user.supporter_tier,
                    bad_standing=user_info.data.user.badstanding or False,
                    verified=user_info.data.user.verified,
                    playtime=play_time,
                    join_at=user_info.data.user.ts,
                ),
                # The Tetra League card is rendered only for rated players.
                tetra_league=V2TemplateTetraLeague(
                    rank=league.rank,
                    highest_rank=league.bestrank,
                    tr=round(league.rating, 2),
                    glicko=round(league.glicko, 2),
                    rd=round(league.rd, 2),
                    global_rank=handling_special_value(league.standing),
                    country_rank=handling_special_value(league.standing_local),
                    # VS is optional; choose the metrics helper accordingly.
                    pps=(
                        metrics := get_metrics(pps=league.pps, apm=league.apm, vs=league.vs)
                        if league.vs is not None
                        else get_metrics(pps=league.pps, apm=league.apm)
                    ).pps,
                    apm=metrics.apm,
                    apl=metrics.apl,
                    vs=metrics.vs if isinstance(metrics, TetrisMetricsProWithPPSVS) else None,
                    adpl=metrics.adpl if isinstance(metrics, TetrisMetricsProWithPPSVS) else None,
                    statistic=TetraLeagueStatistic(
                        total=league.gamesplayed,
                        wins=league.gameswon,
                    ),
                    decaying=league.decaying,
                    history=histories,
                )
                if isinstance(league, RatedLeague)
                else None,
                statistic=Statistic(
                    total=handling_special_value(user_info.data.user.gamesplayed),
                    wins=handling_special_value(user_info.data.user.gameswon),
                ),
                sprint=Sprint(
                    time=sprint_value,
                    global_rank=sprint.rank,
                    play_at=sprint.record.ts,
                )
                if sprint.record is not None
                else None,
                blitz=Blitz(
                    score=blitz.record.endcontext.score,
                    global_rank=blitz.rank,
                    play_at=blitz.record.ts,
                )
                if blitz.record is not None
                else None,
                zen=zen,
            ),
        ),
    ) as page_hash:
        return await screenshot(f'http://{netloc}/host/{page_hash}.html')
async def make_query_text(player: Player) -> UniMessage:
    """Build the plain-text summary for *player* (fallback when images fail)."""
    user, user_info, sprint, blitz = await gather(player.user, player.get_info(), player.sprint, player.blitz)
    league = get_league(user_info)
    user_name = user.name.upper()
    message = ''
    if isinstance(league, NeverPlayedLeague):
        message += f'用户 {user_name} 没有排位统计数据'
    else:
        if isinstance(league, NeverRatedLeague):
            message += f'用户 {user_name} 暂未完成定级赛, 最近十场的数据:'
        else:
            # Rank 'z' means unranked despite having a rating.
            if league.rank == 'z':
                message += f'用户 {user_name} 暂无段位, {round(league.rating,2)} TR'
            else:
                message += f'{league.rank.upper()} 段用户 {user_name} {round(league.rating,2)} TR (#{league.standing})'
            message += f', 段位分 {round(league.glicko,2)}±{round(league.rd,2)}, 最近十场的数据:'
        # VS is optional; choose the metrics helper accordingly.
        metrics = (
            get_metrics(pps=league.pps, apm=league.apm, vs=league.vs)
            if league.vs is not None
            else get_metrics(pps=league.pps, apm=league.apm)
        )
        message += f"\nL'PM: {metrics.lpm} ( {metrics.pps} pps )"
        message += f'\nAPM: {metrics.apm} ( x{metrics.apl} )'
        if isinstance(metrics, TetrisMetricsProWithPPSVS):
            message += f'\nADPM: {metrics.adpm} ( x{metrics.adpl} ) ( {metrics.vs}vs )'
    if sprint.record is not None:
        message += f'\n40L: {round(sprint.record.endcontext.final_time/1000,2)}s'
        message += f' ( #{sprint.rank} )' if sprint.rank is not None else ''
    if blitz.record is not None:
        message += f'\nBlitz: {blitz.record.endcontext.score}'
        message += f' ( #{blitz.rank} )' if blitz.rank is not None else ''
    return UniMessage(message)
async def make_query_result(player: Player, template: Template) -> UniMessage:
    """Build the query reply for *player*, preferring the requested image template.

    Falls back to the plain-text summary when the template cannot be
    rendered (FallbackError) or is not a known image template.
    """
    image_makers = {
        'v1': make_query_image_v1,
        'v2': make_query_image_v2,
    }
    maker = image_makers.get(template)
    if maker is not None:
        try:
            return UniMessage.image(raw=await maker(player))
        except FallbackError:
            pass
    return await make_query_text(player)
class FullExport:
    """In-memory cache of recent Tetra League full-export data.

    Maps each user's unique identifier to the set of ``(record time, TR)``
    points collected from the zstd-compressed full exports referenced by
    the ``IORank`` table, covering roughly the last nine days.
    """

    # user id -> {(record time, TR at that time), ...}
    cache: ClassVar[defaultdict[str, set[tuple[datetime, Number]]]] = defaultdict(set)
    # Asia/Shanghai date of the last completed update(); used to throttle updates.
    latest_update: ClassVar[date | None] = None

    @classmethod
    async def init(cls) -> None:
        """Load every archived export inside the retention window into the cache."""
        async with get_session() as session:
            full_exports = (await session.scalars(select(IORank).where(IORank.update_time >= cls.start_time()))).all()
        await gather(
            *[
                cls._load(update_time, file_hash)
                for file_hash, update_time in {
                    i.file_hash: i.update_time for i in full_exports if i.file_hash is not None
                }.items()
            ]
        )

    @classmethod
    async def update(cls) -> None:
        """Expire points older than the window, then load any newer exports."""
        # Throttle: run at most once per Asia/Shanghai calendar day.
        if cls.latest_update == datetime.now(tz=ZoneInfo('Asia/Shanghai')).date():
            return
        start_time = cls.start_time()
        for i in cls.cache:
            cls.cache[i] = {j for j in cls.cache[i] if j[0] >= start_time}
        # Newest record time currently cached. (Fix: this used to be
        # max(cls.cache), i.e. the lexicographically largest *user-id key*
        # — a string — which made the update_time comparison below
        # meaningless. Fall back to the window start when the cache is empty.)
        latest_time = max(
            (record_at for records in cls.cache.values() for record_at, _ in records),
            default=start_time,
        ).astimezone(UTC)  # compare in UTC, consistent with init()/start_time()
        async with get_session() as session:
            full_exports = (await session.scalars(select(IORank).where(IORank.update_time > latest_time))).all()
        await gather(
            *[
                cls._load(update_time, file_hash)
                for file_hash, update_time in {
                    i.file_hash: i.update_time for i in full_exports if i.file_hash is not None
                }.items()
            ]
        )
        cls.latest_update = datetime.now(tz=ZoneInfo('Asia/Shanghai')).date()

    @classmethod
    def get_data(cls, unique_identifier: str) -> list[TetraLeagueHistoryData]:
        """Return the cached TR history points for one user."""
        return [TetraLeagueHistoryData(record_at=i[0], tr=i[1]) for i in cls.cache[unique_identifier]]

    @classmethod
    def start_time(cls) -> datetime:
        """Start of the retention window: nine days before today's Shanghai midnight, in UTC."""
        return (
            datetime.now(ZoneInfo('Asia/Shanghai')).replace(hour=0, minute=0, second=0, microsecond=0)
            - timedelta(days=9)
        ).astimezone(UTC)

    @classmethod
    async def _load(cls, update_time: datetime, file_hash: str) -> None:
        """Decompress one archived export and merge its ratings into the cache."""
        try:
            users = type_validate_json(TetraLeagueSuccess, await cls.decompress(file_hash)).data.users
        except FileNotFoundError:
            # The archive file is gone; drop its hash so we stop retrying it.
            await cls.clear_invalid(file_hash)
            return
        # NOTE(review): update_time from the DB may be naive; astimezone() on a
        # naive datetime interprets it in the machine-local zone — confirm.
        update_time = update_time.astimezone(ZoneInfo('Asia/Shanghai'))
        for i in users:
            cls.cache[i.id].add((update_time, i.league.rating))

    @classmethod
    async def decompress(cls, file_hash: str) -> bytes:
        """Read and decompress the archived export identified by *file_hash*."""
        async with open(get_data_file('nonebot_plugin_tetris_stats', f'{file_hash}.json.zst'), mode='rb') as file:
            return ZstdDecompressor().decompress(await file.read())

    @classmethod
    async def clear_invalid(cls, file_hash: str) -> None:
        """Null out *file_hash* on every IORank row that references it."""
        async with get_session() as session:
            full_exports = (await session.scalars(select(IORank).where(IORank.file_hash == file_hash))).all()
            for i in full_exports:
                i.file_hash = None
            await session.commit()
@driver.on_startup
async def _():
    # Warm the cache once at startup, then keep it fresh with an hourly refresh.
    await FullExport.init()
    scheduler.add_job(FullExport.update, 'interval', hours=1)

View File

@@ -1,113 +0,0 @@
from collections import defaultdict
from collections.abc import Callable
from datetime import datetime, timedelta, timezone
from hashlib import sha512
from math import floor
from statistics import mean
from typing import TYPE_CHECKING
from aiofiles import open
from nonebot import get_driver
from nonebot.compat import model_dump
from nonebot.utils import run_sync
from nonebot_plugin_apscheduler import scheduler
from nonebot_plugin_localstore import get_data_file
from nonebot_plugin_orm import get_session
from sqlalchemy import select
from zstandard import ZstdCompressor
from ....utils.exception import RequestError
from ....utils.retry import retry
from ..api.schemas.base import FailedModel
from ..api.schemas.tetra_league import ValidUser
from ..api.schemas.user import User
from ..api.tetra_league import full_export
from ..constant import RANK_PERCENTILE
from ..models import IORank
if TYPE_CHECKING:
from ..api.typing import Rank
UTC = timezone.utc
driver = get_driver()
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
@retry(exception_type=RequestError, delay=timedelta(minutes=15))
async def get_tetra_league_data() -> None:
    """Fetch the Tetra League full export, archive it, and store per-rank stats.

    Runs four times a day; retried every 15 minutes on request failure.
    """
    league, original = await full_export(with_original=True)
    if isinstance(league, FailedModel):
        msg = f'排行榜数据请求错误:\n{league.error}'
        raise RequestError(msg)

    # Field accessors used as key functions for the extreme-value lookups below.
    def pps(user: ValidUser) -> float:
        return user.league.pps

    def apm(user: ValidUser) -> float:
        return user.league.apm

    def vs(user: ValidUser) -> float:
        return user.league.vs

    def _min(users: list[ValidUser], field: Callable[[ValidUser], float]) -> ValidUser:
        return min(users, key=field)

    def _max(users: list[ValidUser], field: Callable[[ValidUser], float]) -> ValidUser:
        return max(users, key=field)

    def build_extremes_data(
        users: list[ValidUser],
        field: Callable[[ValidUser], float],
        sort: Callable[[list[ValidUser], Callable[[ValidUser], float]], ValidUser],
    ) -> tuple[dict[str, str], float]:
        # (serialized user, field value) for the extreme user picked by `sort`.
        user = sort(users, field)
        return model_dump(User(ID=user.id, name=user.username)), field(user)

    # Archive the raw export, keyed by its SHA-512, as zstd-compressed JSON.
    data_hash: str | None = await run_sync((await run_sync(sha512)(original)).hexdigest)()
    async with open(get_data_file('nonebot_plugin_tetris_stats', f'{data_hash}.json.zst'), mode='wb') as file:
        await file.write(await run_sync(ZstdCompressor(level=12, threads=-1).compress)(original))
    users = [i for i in league.data.users if isinstance(i, ValidUser)]
    rank_to_users: defaultdict[Rank, list[ValidUser]] = defaultdict(list)
    for i in users:
        rank_to_users[i.league.rank].append(i)
    rank_info: list[IORank] = []
    for rank, percentile in RANK_PERCENTILE.items():
        # TR cut-off: the user at this rank's percentile boundary.
        # Assumes `users` is ordered by rating — TODO confirm against the API.
        offset = floor((percentile / 100) * len(users)) - 1
        tr_line = users[offset].league.rating
        rank_users = rank_to_users[rank]
        rank_info.append(
            IORank(
                rank=rank,
                tr_line=tr_line,
                player_count=len(rank_users),
                low_pps=(build_extremes_data(rank_users, pps, _min)),
                low_apm=(build_extremes_data(rank_users, apm, _min)),
                low_vs=(build_extremes_data(rank_users, vs, _min)),
                # NOTE(review): means are taken over *sets*, so duplicate values
                # are collapsed before averaging — verify this is intended.
                avg_pps=mean({i.league.pps for i in rank_users}),
                avg_apm=mean({i.league.apm for i in rank_users}),
                avg_vs=mean({i.league.vs for i in rank_users}),
                high_pps=(build_extremes_data(rank_users, pps, _max)),
                high_apm=(build_extremes_data(rank_users, apm, _max)),
                high_vs=(build_extremes_data(rank_users, vs, _max)),
                update_time=league.cache.cached_at,
                file_hash=data_hash,
            )
        )
    async with get_session() as session:
        session.add_all(rank_info)
        await session.commit()
@driver.on_startup
async def _() -> None:
    # Fetch fresh league data at startup when the newest DB snapshot is
    # missing or older than 6 hours (i.e. a scheduled run was missed).
    async with get_session() as session:
        latest_time = await session.scalar(select(IORank.update_time).order_by(IORank.id.desc()).limit(1))
    if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
        await get_tetra_league_data()
from . import all, detail # noqa: E402
__all__ = ['all', 'detail']

View File

@@ -1,78 +0,0 @@
from datetime import timedelta
from nonebot_plugin_alconna import UniMessage
from nonebot_plugin_orm import get_session
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
from sqlalchemy import func, select
from ....db import trigger
from ....utils.host import HostPage, get_self_netloc
from ....utils.metrics import get_metrics
from ....utils.render import render
from ....utils.render.schemas.tetrio.tetrio_rank import AverageData, Data, ItemData
from ....utils.screenshot import screenshot
from .. import alc
from ..constant import GAME_TYPE
from ..models import IORank
@alc.assign('TETRIO.rank.all')
async def _(event_session: EventSession):
    """Render and send the all-ranks overview image.

    Compares the latest stored rank snapshot against the snapshot closest
    to 24 hours earlier to show per-rank TR-line trends.
    """
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=['--all'],
    ):
        async with get_session() as session:
            latest_update_time = (
                await session.scalars(select(IORank.update_time).order_by(IORank.id.desc()).limit(1))
            ).one()
            # Snapshot whose update_time is closest to 24h before the latest.
            compare_time = (
                await session.scalars(
                    select(IORank.update_time)
                    .order_by(
                        func.abs(
                            func.julianday(IORank.update_time)
                            - func.julianday(latest_update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                )
            ).one()
            latest_data = (
                await session.scalars(
                    select(IORank).where(IORank.update_time == latest_update_time).order_by(IORank.tr_line.desc())
                )
            ).all()
            compare_data = (
                await session.scalars(
                    select(IORank).where(IORank.update_time == compare_time).order_by(IORank.tr_line.desc())
                )
            ).all()
        async with HostPage(
            await render(
                'v2/tetrio/rank',
                Data(
                    items={
                        i[0].rank: ItemData(
                            require_tr=round(i[0].tr_line, 2),
                            trending=round(i[0].tr_line - i[1].tr_line, 2),
                            average_data=AverageData(
                                pps=(metrics := get_metrics(pps=i[0].avg_pps, apm=i[0].avg_apm, vs=i[0].avg_vs)).pps,
                                apm=metrics.apm,
                                apl=metrics.apl,
                                vs=metrics.vs,
                                adpl=metrics.adpl,
                            ),
                            players=i[0].player_count,
                        )
                        for i in zip(latest_data, compare_data, strict=True)
                    },
                    updated_at=latest_update_time,
                ),
            )
        ) as page_hash:
            # Fix: the page was previously screenshotted twice (the first
            # result was discarded); render once and send that image.
            await UniMessage.image(raw=await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')).finish()

View File

@@ -1,138 +0,0 @@
from datetime import datetime, timedelta, timezone
from zoneinfo import ZoneInfo
from nonebot import get_driver
from nonebot_plugin_alconna import UniMessage
from nonebot_plugin_orm import get_session
from nonebot_plugin_session import EventSession # type: ignore[import-untyped]
from nonebot_plugin_session_orm import get_session_persist_id # type: ignore[import-untyped]
from sqlalchemy import func, select
from ....db import trigger
from ....utils.host import HostPage, get_self_netloc
from ....utils.metrics import get_metrics
from ....utils.render import render
from ....utils.render.schemas.tetrio.tetrio_rank_detail import Data, SpecialData
from ....utils.screenshot import screenshot
from .. import alc
from ..api.typing import ValidRank
from ..constant import GAME_TYPE
from ..models import IORank
UTC = timezone.utc
driver = get_driver()
@alc.assign('TETRIO.rank')
async def _(rank: ValidRank, event_session: EventSession):
    """Render and send the detail image for a single rank."""
    async with trigger(
        session_persist_id=await get_session_persist_id(event_session),
        game_platform=GAME_TYPE,
        command_type='rank',
        command_args=[f'--detail {rank}'],
    ):
        async with get_session() as session:
            latest_data = (
                await session.scalars(select(IORank).where(IORank.rank == rank).order_by(IORank.id.desc()).limit(1))
            ).one()
            # Row for the same rank closest to 24 hours before the latest one.
            compare_data = (
                await session.scalars(
                    select(IORank)
                    .where(IORank.rank == rank)
                    .order_by(
                        func.abs(
                            func.julianday(IORank.update_time)
                            - func.julianday(latest_data.update_time - timedelta(hours=24))
                        )
                    )
                    .limit(1)
                )
            ).one()
        await UniMessage.image(raw=await make_image(latest_data, compare_data)).finish()
async def make_image(latest_data: IORank, compare_data: IORank) -> bytes:
    """Render the rank-detail page and return it as PNG bytes.

    *compare_data* supplies the ~24h-earlier snapshot used for the trend value.
    """
    avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
    # The low_*/high_* columns store (user, value) pairs; index 1 is the value.
    low_pps = get_metrics(pps=latest_data.low_pps[1])
    low_vs = get_metrics(vs=latest_data.low_vs[1])
    max_pps = get_metrics(pps=latest_data.high_pps[1])
    max_vs = get_metrics(vs=latest_data.high_vs[1])
    async with HostPage(
        await render(
            'v2/tetrio/rank/detail',
            Data(
                name=latest_data.rank,
                trending=round(latest_data.tr_line - compare_data.tr_line, 2),
                require_tr=round(latest_data.tr_line, 2),
                players=latest_data.player_count,
                minimum_data=SpecialData(
                    apm=latest_data.low_apm[1],
                    pps=low_pps.pps,
                    lpm=low_pps.lpm,
                    vs=low_vs.vs,
                    adpm=low_vs.adpm,
                    apm_holder=latest_data.low_apm[0]['name'].upper(),
                    pps_holder=latest_data.low_pps[0]['name'].upper(),
                    vs_holder=latest_data.low_vs[0]['name'].upper(),
                ),
                average_data=SpecialData(
                    apm=avg.apm,
                    pps=avg.pps,
                    lpm=avg.lpm,
                    vs=avg.vs,
                    adpm=avg.adpm,
                    apl=avg.apl,
                    adpl=avg.adpl,
                ),
                maximum_data=SpecialData(
                    apm=latest_data.high_apm[1],
                    pps=max_pps.pps,
                    lpm=max_pps.lpm,
                    vs=max_vs.vs,
                    adpm=max_vs.adpm,
                    apm_holder=latest_data.high_apm[0]['name'].upper(),
                    pps_holder=latest_data.high_pps[0]['name'].upper(),
                    vs_holder=latest_data.high_vs[0]['name'].upper(),
                ),
                # DB times are stored naive; presented in Asia/Shanghai.
                updated_at=latest_data.update_time.replace(tzinfo=UTC).astimezone(ZoneInfo('Asia/Shanghai')),
            ),
        )
    ) as page_hash:
        return await screenshot(f'http://{get_self_netloc()}/host/{page_hash}.html')
async def make_text(latest_data: IORank, compare_data: IORank) -> str:
    """Build the plain-text fallback for the rank-detail view."""
    message = ''
    # Warn when the newest snapshot is stale (a scheduled run was likely missed).
    if (datetime.now(UTC) - latest_data.update_time.replace(tzinfo=UTC)) > timedelta(hours=7):
        message += 'Warning: 数据超过7小时未更新, 请联系Bot主人查看后台\n'
    message += f'{latest_data.rank.upper()} 段 分数线 {latest_data.tr_line:.2f} TR, {latest_data.player_count} 名玩家\n'
    if compare_data.id != latest_data.id:
        # NOTE(review): the negative branch prints the absolute value with no
        # direction indicator, so up- and down-trends look identical — confirm
        # whether a sign or arrow was intended here.
        message += f'对比 {(latest_data.update_time-compare_data.update_time).total_seconds()/3600:.2f} 小时前趋势: {f"{difference:.2f}" if (difference:=latest_data.tr_line-compare_data.tr_line) > 0 else f"{-difference:.2f}" if difference < 0 else ""}'
    else:
        message += '暂无对比数据'
    avg = get_metrics(pps=latest_data.avg_pps, apm=latest_data.avg_apm, vs=latest_data.avg_vs)
    # low_*/high_* columns store (user, value) pairs; index 1 is the value.
    low_pps = get_metrics(pps=latest_data.low_pps[1])
    low_vs = get_metrics(vs=latest_data.low_vs[1])
    max_pps = get_metrics(pps=latest_data.high_pps[1])
    max_vs = get_metrics(vs=latest_data.high_vs[1])
    message += (
        '\n'
        '平均数据:\n'
        f"L'PM: {avg.lpm} ( {avg.pps} pps )\n"
        f'APM: {avg.apm} ( x{avg.apl} )\n'
        f'ADPM: {avg.adpm} ( x{avg.adpl} ) ( {avg.vs}vs )\n'
        '\n'
        '最低数据:\n'
        f"L'PM: {low_pps.lpm} ( {low_pps.pps} pps ) By: {latest_data.low_pps[0]['name'].upper()}\n"
        f'APM: {latest_data.low_apm[1]} By: {latest_data.low_apm[0]["name"].upper()}\n'
        f'ADPM: {low_vs.adpm} ( {low_vs.vs}vs ) By: {latest_data.low_vs[0]["name"].upper()}\n'
        '\n'
        '最高数据:\n'
        f"L'PM: {max_pps.lpm} ( {max_pps.pps} pps ) By: {latest_data.high_pps[0]['name'].upper()}\n"
        f'APM: {latest_data.high_apm[1]} By: {latest_data.high_apm[0]["name"].upper()}\n'
        f'ADPM: {max_vs.adpm} ( {max_vs.vs}vs ) By: {latest_data.high_vs[0]["name"].upper()}\n'
        '\n'
        f'数据更新时间: {latest_data.update_time.replace(tzinfo=UTC).astimezone(ZoneInfo("Asia/Shanghai")).strftime("%Y-%m-%d %H:%M:%S")}'
    )
    return message

View File

@@ -68,14 +68,14 @@ async def _(account: Player, event_session: EventSession):
async def make_blitz_image(player: Player) -> bytes:
user, user_info, blitz = await gather(player.user, player.get_info(), player.blitz)
if blitz.record is None:
user, blitz = await gather(player.user, player.blitz)
if blitz.data.record is None:
msg = f'未找到用户 {user.name.upper()} 的 Blitz 记录'
raise RecordNotFoundError(msg)
endcontext = blitz.record.endcontext
clears = endcontext.clears
duration = timedelta(milliseconds=endcontext.final_time).total_seconds()
metrics = get_metrics(pps=endcontext.piecesplaced / duration)
stats = blitz.data.record.results.stats
clears = stats.clears
duration = timedelta(milliseconds=stats.finaltime).total_seconds()
metrics = get_metrics(pps=stats.piecesplaced / duration)
netloc = get_self_netloc()
async with HostPage(
page=await render(
@@ -84,30 +84,30 @@ async def make_blitz_image(player: Player) -> bytes:
user=User(
id=user.ID,
name=user.name.upper(),
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": user_info.data.user.avatar_revision})}'
if user_info.data.user.avatar_revision is not None and user_info.data.user.avatar_revision != 0
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": avatar_revision})}'
if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
else Avatar(
type='identicon',
hash=md5(user.ID.encode()).hexdigest(), # noqa: S324
),
),
replay_id=blitz.record.replayid,
rank=blitz.rank,
replay_id=blitz.data.record.replayid,
rank=blitz.data.rank,
statistic=Statistic(
keys=endcontext.inputs,
kpp=round(endcontext.inputs / endcontext.piecesplaced, 2),
kps=round(endcontext.inputs / duration, 2),
keys=stats.inputs,
kpp=round(stats.inputs / stats.piecesplaced, 2),
kps=round(stats.inputs / duration, 2),
max=Max(
combo=max((0, endcontext.topcombo - 1)),
btb=max((0, endcontext.topbtb - 1)),
combo=max((0, stats.topcombo - 1)),
btb=max((0, stats.topbtb - 1)),
),
pieces=endcontext.piecesplaced,
pieces=stats.piecesplaced,
pps=metrics.pps,
lines=endcontext.lines,
lines=stats.lines,
lpm=metrics.lpm,
holds=endcontext.holds,
score=endcontext.score,
spp=round(endcontext.score / endcontext.piecesplaced, 2),
holds=stats.holds,
score=stats.score,
spp=round(stats.score / stats.piecesplaced, 2),
single=clears.singles,
double=clears.doubles,
triple=clears.triples,
@@ -125,12 +125,12 @@ async def make_blitz_image(player: Player) -> bytes:
),
all_clear=clears.allclear,
finesse=Finesse(
faults=endcontext.finesse.faults,
accuracy=round(endcontext.finesse.perfectpieces / endcontext.piecesplaced * 100, 2),
faults=stats.finesse.faults,
accuracy=round(stats.finesse.perfectpieces / stats.piecesplaced * 100, 2),
),
level=endcontext.level,
level=stats.level,
),
play_at=blitz.record.ts,
play_at=blitz.data.record.ts,
),
)
) as page_hash:

View File

@@ -68,15 +68,15 @@ async def _(account: Player, event_session: EventSession):
async def make_sprint_image(player: Player) -> bytes:
user, user_info, sprint = await gather(player.user, player.get_info(), player.sprint)
if sprint.record is None:
user, sprint = await gather(player.user, player.sprint)
if sprint.data.record is None:
msg = f'未找到用户 {user.name.upper()} 的 40L 记录'
raise RecordNotFoundError(msg)
endcontext = sprint.record.endcontext
clears = endcontext.clears
duration = timedelta(milliseconds=endcontext.final_time).total_seconds()
stats = sprint.data.record.results.stats
clears = stats.clears
duration = timedelta(milliseconds=stats.finaltime).total_seconds()
sprint_value = f'{duration:.3f}s' if duration < 60 else f'{duration // 60:.0f}m {duration % 60:.3f}s' # noqa: PLR2004
metrics = get_metrics(pps=endcontext.piecesplaced / duration)
metrics = get_metrics(pps=stats.piecesplaced / duration)
netloc = get_self_netloc()
async with HostPage(
page=await render(
@@ -85,30 +85,30 @@ async def make_sprint_image(player: Player) -> bytes:
user=User(
id=user.ID,
name=user.name.upper(),
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": user_info.data.user.avatar_revision})}'
if user_info.data.user.avatar_revision is not None and user_info.data.user.avatar_revision != 0
avatar=f'http://{netloc}/host/resource/tetrio/avatars/{user.ID}?{urlencode({"revision": avatar_revision})}'
if (avatar_revision := (await player.avatar_revision)) is not None and avatar_revision != 0
else Avatar(
type='identicon',
hash=md5(user.ID.encode()).hexdigest(), # noqa: S324
),
),
time=sprint_value,
replay_id=sprint.record.replayid,
rank=sprint.rank,
replay_id=sprint.data.record.replayid,
rank=sprint.data.rank,
statistic=Statistic(
keys=endcontext.inputs,
kpp=round(endcontext.inputs / endcontext.piecesplaced, 2),
kps=round(endcontext.inputs / duration, 2),
keys=stats.inputs,
kpp=round(stats.inputs / stats.piecesplaced, 2),
kps=round(stats.inputs / duration, 2),
max=Max(
combo=max((0, endcontext.topcombo - 1)),
btb=max((0, endcontext.topbtb - 1)),
combo=max((0, stats.topcombo - 1)),
btb=max((0, stats.topbtb - 1)),
),
pieces=endcontext.piecesplaced,
pieces=stats.piecesplaced,
pps=metrics.pps,
lines=endcontext.lines,
lines=stats.lines,
lpm=metrics.lpm,
holds=endcontext.holds,
score=endcontext.score,
holds=stats.holds,
score=stats.score,
single=clears.singles,
double=clears.doubles,
triple=clears.triples,
@@ -126,11 +126,11 @@ async def make_sprint_image(player: Player) -> bytes:
),
all_clear=clears.allclear,
finesse=Finesse(
faults=endcontext.finesse.faults,
accuracy=round(endcontext.finesse.perfectpieces / endcontext.piecesplaced * 100, 2),
faults=stats.finesse.faults,
accuracy=round(stats.finesse.perfectpieces / stats.piecesplaced * 100, 2),
),
),
play_at=sprint.record.ts,
play_at=sprint.data.record.ts,
),
)
) as page_hash:

22
poetry.lock generated
View File

@@ -286,6 +286,20 @@ files = [
arclet-alconna = ">=1.8.15"
nepattern = ">=0.7.3,<1.0.0"
[[package]]
name = "async-lru"
version = "2.0.4"
description = "Simple LRU cache for asyncio"
optional = false
python-versions = ">=3.8"
files = [
{file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"},
{file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"},
]
[package.dependencies]
typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
[[package]]
name = "async-timeout"
version = "4.0.3"
@@ -1224,13 +1238,9 @@ files = [
{file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"},
{file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"},
{file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"},
{file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"},
{file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"},
{file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"},
{file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"},
{file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"},
{file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"},
{file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"},
{file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"},
{file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"},
{file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"},
@@ -2072,7 +2082,6 @@ optional = false
python-versions = ">=3.9"
files = [
{file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
{file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
{file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
{file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
@@ -2093,7 +2102,6 @@ files = [
{file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
{file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
{file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
{file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
{file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
{file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
@@ -3761,4 +3769,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "e113b54aa85e884a536bdc47c71884ca21d31441f651b96a94ce3b400e9de1d5"
content-hash = "9090ada80aca0dc6618cb58cd052cff4bc4d691c936bf9cd748655e8c31c658a"

View File

@@ -22,6 +22,7 @@ nonebot-plugin-userinfo = "^0.2.4"
aiocache = "^0.12.2"
aiofiles = ">=23.2.1,<25.0.0"
arclet-alconna = "^1.8.19"
async-lru = "^2.0.4"
httpx = "^0.27.0"
jinja2 = "^3.1.3"
lxml = '^5.1.0'