Compare commits

..

14 Commits

Author SHA1 Message Date
7bbdeacc5e 🔖 1.0.0.a8 2023-11-22 16:11:57 +08:00
dependabot[bot]
782792e455 ⬆️ Bump nonebot-plugin-orm from 0.5.1 to 0.6.0 (#203) 2023-11-22 08:11:15 +00:00
dependabot[bot]
bd10549b4c ⬆️ Bump ruff from 0.1.5 to 0.1.6 (#202) 2023-11-22 08:02:43 +00:00
dependabot[bot]
035e6d4782 ⬆️ Bump nonebot-plugin-alconna from 0.33.3 to 0.33.6 (#201) 2023-11-22 08:02:33 +00:00
003e6619d8 iorank 指令不再去尝试更新数据 2023-11-22 15:58:55 +08:00
c0fa92df30 🚨 fix Incompatible overrides 2023-11-22 15:57:04 +08:00
7cdb0f3547 为 IO Rank 添加重试机制 2023-11-22 15:49:33 +08:00
b773fb44a1 ️ 为 IO 添加缓存 2023-11-22 13:22:18 +08:00
c75c6b73bd 🙈 更新.gitignore 2023-11-22 13:02:17 +08:00
67782c3156 添加依赖 aiocache 2023-11-22 13:01:41 +08:00
1e02858913 💥 🗃️ 将 pydantic 模型序列化后再存数据库 2023-11-21 20:47:56 +08:00
60605d0dca 🐛 修复 IO Z段位 不显示glicko和rd的bug 2023-11-21 13:58:39 +08:00
0d589450bd 将处理过程中的 dataclass 换成 pydantic 2023-11-21 00:50:32 +08:00
dependabot[bot]
2f144acf0c ⬆️ Bump nonebot-adapter-satori from 0.7.0 to 0.8.0 (#200)
Bumps [nonebot-adapter-satori](https://github.com/nonebot/adapter-satori) from 0.7.0 to 0.8.0.
- [Release notes](https://github.com/nonebot/adapter-satori/releases)
- [Commits](https://github.com/nonebot/adapter-satori/compare/v0.7.0...v0.8.0)

---
updated-dependencies:
- dependency-name: nonebot-adapter-satori
  dependency-type: direct:development
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-17 01:49:57 +08:00
17 changed files with 378 additions and 159 deletions

2
.gitignore vendored
View File

@@ -5,6 +5,6 @@ Untitled*
*copy*
.vscode
*dev*
*cache*
*_cache*
*backup*
*.pyc

View File

@@ -0,0 +1,112 @@
"""Recreate HistoricalData
迁移 ID: 9f6582279ce2
父迁移: 9cd1647db502
创建时间: 2023-11-21 08:35:50.393246
"""
from __future__ import annotations
from collections.abc import Sequence
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import sqlite
from nonebot_plugin_tetris_stats.db.models import PydanticType
revision: str = '9f6582279ce2'
down_revision: str | Sequence[str] | None = '9cd1647db502'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Forward migration: rebuild HistoricalData so Pydantic models are stored as JSON.

    ``game_user`` / ``processed_data`` move from PickleType blobs to PydanticType.
    """
    if name:
        return
    table = 'nonebot_plugin_tetris_stats_historicaldata'
    indexed = ('command_type', 'game_platform', 'source_account', 'source_type')
    # Drop the old secondary indexes first so the table can be removed cleanly.
    with op.batch_alter_table(table, schema=None) as batch_op:
        for column in indexed:
            batch_op.drop_index(f'ix_{table}_{column}')
    op.drop_table(table)
    # Recreate the table with the new column types.
    op.create_table(
        table,
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('bot_platform', sa.String(length=32), nullable=True),
        sa.Column('bot_account', sa.String(), nullable=True),
        sa.Column('source_type', sa.String(length=32), nullable=True),
        sa.Column('source_account', sa.String(), nullable=True),
        sa.Column('message', sa.PickleType(), nullable=True),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('game_user', PydanticType(), nullable=False),
        sa.Column('processed_data', PydanticType(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f(f'pk_{table}')),
    )
    # Restore the secondary indexes on the rebuilt table.
    with op.batch_alter_table(table, schema=None) as batch_op:
        for column in indexed:
            batch_op.create_index(batch_op.f(f'ix_{table}_{column}'), [column], unique=False)
def downgrade(name: str = '') -> None:
    """Reverse migration: restore the previous BLOB/PickleType-backed table."""
    if name:
        return
    table = 'nonebot_plugin_tetris_stats_historicaldata'
    indexed = ('source_type', 'source_account', 'game_platform', 'command_type')
    with op.batch_alter_table(table, schema=None) as batch_op:
        for column in indexed:
            batch_op.drop_index(batch_op.f(f'ix_{table}_{column}'))
    op.drop_table(table)
    # Column types below are the SQLite-reflected types of the pre-migration schema.
    op.create_table(
        table,
        sa.Column('id', sa.INTEGER(), nullable=False),
        sa.Column('trigger_time', sa.DATETIME(), nullable=False),
        sa.Column('bot_platform', sa.VARCHAR(length=32), nullable=True),
        sa.Column('bot_account', sa.VARCHAR(), nullable=True),
        sa.Column('source_type', sa.VARCHAR(length=32), nullable=True),
        sa.Column('source_account', sa.VARCHAR(), nullable=True),
        sa.Column('message', sa.BLOB(), nullable=True),
        sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False),
        sa.Column('command_type', sa.VARCHAR(length=16), nullable=False),
        sa.Column('command_args', sqlite.JSON(), nullable=False),
        sa.Column('game_user', sa.BLOB(), nullable=False),
        sa.Column('processed_data', sa.BLOB(), nullable=False),
        sa.Column('finish_time', sa.DATETIME(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=f'pk_{table}'),
    )
    with op.batch_alter_table(table, schema=None) as batch_op:
        for column in indexed:
            batch_op.create_index(f'ix_{table}_{column}', [column], unique=False)

View File

@@ -1,14 +1,32 @@
from datetime import datetime
from typing import Any
from nonebot.adapters import Message
from nonebot_plugin_orm import Model
from sqlalchemy import JSON, DateTime, PickleType, String
from pydantic import BaseModel
from sqlalchemy import JSON, DateTime, Dialect, PickleType, String, TypeDecorator
from sqlalchemy.orm import Mapped, MappedAsDataclass, mapped_column
from ..game_data_processor import ProcessedData, User
from ..game_data_processor.schemas import BaseProcessedData, BaseUser
from ..utils.typing import CommandType, GameType
class PydanticType(TypeDecorator):
    """SQLAlchemy column type that persists Pydantic models as JSON text.

    Pass the concrete model class (``PydanticType(SomeModel)``) so rows read
    back from the database are re-validated into that model. With the default
    (``None``) the legacy behaviour of parsing into the bare ``BaseModel`` is
    kept for backward compatibility.
    """

    impl = JSON
    # The only per-instance state is the (hashable) model class, so compiled
    # statements using this type are safe to cache.
    cache_ok = True

    def __init__(self, model: type[BaseModel] | None = None, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self.model = model

    def process_bind_param(self, value: Any | None, dialect: Dialect) -> str:  # noqa: ANN401
        """Serialize a Pydantic model instance to its JSON string before storage."""
        if isinstance(value, BaseModel):
            return value.json()
        raise TypeError(f'expected a pydantic BaseModel, got {type(value).__name__}')

    def process_result_value(self, value: Any | None, dialect: Dialect) -> BaseModel:  # noqa: ANN401
        """Parse the stored JSON back into a Pydantic model instance."""
        if isinstance(value, str | bytes):
            if self.model is not None:
                return self.model.parse_raw(value)
            # NOTE(review): parsing into the bare BaseModel discards every field
            # of the stored document (BaseModel declares none) — pass the
            # concrete model class to get a faithful round-trip.
            return BaseModel.parse_raw(value)
        raise TypeError(f'expected str | bytes from the database, got {type(value).__name__}')
class Bind(MappedAsDataclass, Model):
id: Mapped[int] = mapped_column(init=False, primary_key=True)
chat_platform: Mapped[str] = mapped_column(String(32), index=True)
@@ -28,6 +46,6 @@ class HistoricalData(MappedAsDataclass, Model):
game_platform: Mapped[GameType] = mapped_column(String(32), index=True, init=False)
command_type: Mapped[CommandType] = mapped_column(String(16), index=True, init=False)
command_args: Mapped[list[str]] = mapped_column(JSON, init=False)
game_user: Mapped[User] = mapped_column(PickleType, init=False)
processed_data: Mapped[ProcessedData] = mapped_column(PickleType, init=False)
game_user: Mapped[BaseUser] = mapped_column(PydanticType, init=False)
processed_data: Mapped[BaseProcessedData] = mapped_column(PydanticType, init=False)
finish_time: Mapped[datetime] = mapped_column(DateTime, init=False)

View File

@@ -1,5 +1,4 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
from datetime import UTC, datetime
from typing import Any
@@ -7,25 +6,11 @@ from nonebot.matcher import Matcher
from nonebot_plugin_alconna import AlcMatches, AlconnaMatcher
from ..utils.exception import MessageFormatError
from ..utils.recorder import Recorder
from ..utils.typing import CommandType, GameType
@dataclass
class User:
"""游戏用户"""
@dataclass
class RawResponse:
"""原始请求数据"""
@dataclass
class ProcessedData:
"""处理/验证后的数据"""
from ..utils.recorder import Recorder # noqa: E402 避免循环导入
from .schemas import BaseProcessedData as ProcessedData
from .schemas import BaseRawResponse as RawResponse
from .schemas import BaseUser as User
class Processor(ABC):

View File

@@ -1,4 +1,4 @@
from datetime import timedelta
from datetime import UTC, datetime, timedelta
from zoneinfo import ZoneInfo
from arclet.alconna import Alconna, AllParam, Arg, ArgFlag, Args, CommandMeta, Option
@@ -17,7 +17,7 @@ from .. import add_default_handlers
from ..constant import BIND_COMMAND, QUERY_COMMAND
from .constant import GAME_TYPE
from .model import IORank
from .processor import Processor, User, check_rank_data, identify_user_info
from .processor import Processor, User, identify_user_info
from .typing import Rank
alc = on_alconna(
@@ -136,10 +136,6 @@ async def _(event: Event, matcher: Matcher, account: User):
async def _(matcher: Matcher, rank: Rank):
if rank == 'z':
await matcher.finish('暂不支持查询未知段位')
try:
await check_rank_data()
except NeedCatchError as e:
await matcher.finish(str(f'段位信息获取失败\n{e}'))
async with get_session() as session:
latest_data = (
await session.scalars(select(IORank).where(IORank.rank == rank).order_by(IORank.id.desc()).limit(1))
@@ -157,7 +153,10 @@ async def _(matcher: Matcher, rank: Rank):
.limit(1)
)
).one()
message = f'{rank.upper()} 段 分数线 {latest_data.tr_line:.2f} TR, {latest_data.player_count} 名玩家\n'
message = ''
if (datetime.now(UTC) - latest_data.create_time) > timedelta(hours=7):
message += 'Warning: 数据超过7小时未更新, 请联系Bot主人查看后台\n'
message += f'{rank.upper()} 段 分数线 {latest_data.tr_line:.2f} TR, {latest_data.player_count} 名玩家\n'
if compare_data.id != latest_data.id:
message += f'对比 {(latest_data.create_time-compare_data.create_time).total_seconds()/3600:.2f} 小时前趋势: {f"{difference:.2f}" if (difference:=latest_data.tr_line-compare_data.tr_line) > 0 else f"{-difference:.2f}" if difference < 0 else ""}'
else:

View File

@@ -0,0 +1,28 @@
from datetime import UTC, datetime
from aiocache import Cache as ACache # type: ignore[import-untyped]
from nonebot.log import logger
from pydantic import parse_raw_as
from ...utils.request import Request
from .schemas.base import FailedModel, SuccessModel
class Cache:
    """Process-wide in-memory TTL cache for API response bodies, keyed by URL."""

    cache = ACache(ACache.MEMORY)

    @classmethod
    async def get(cls, url: str) -> bytes:
        """Return the response body for ``url``, fetching and caching on a miss.

        Only responses that parse as ``SuccessModel`` are cached; the TTL is
        derived from the API-provided ``cache.cached_until`` timestamp.
        """
        cached_data = await cls.cache.get(url)
        if cached_data is not None:
            logger.debug(f'{url}: Cache hit!')
            return cached_data
        response_data = await Request.request(url)
        parsed_data: SuccessModel | FailedModel = parse_raw_as(SuccessModel | FailedModel, response_data)  # type: ignore[arg-type]
        if isinstance(parsed_data, SuccessModel):
            ttl = (parsed_data.cache.cached_until - datetime.now(UTC)).total_seconds()
            # Guard against an already-expired (or clock-skewed) cached_until:
            # aiocache treats a ttl of 0/None as "no expiry", so a non-positive
            # value must not be forwarded.
            if ttl > 0:
                await cls.cache.add(url, response_data, ttl)
        return response_data

View File

@@ -1,6 +1,5 @@
from collections import defaultdict
from collections.abc import Callable
from dataclasses import asdict, dataclass
from datetime import UTC, datetime, timedelta
from math import floor
from re import match
@@ -14,23 +13,20 @@ from sqlalchemy import select
from ...db import create_or_update_bind
from ...utils.exception import MessageFormatError, RequestError, WhatTheFuckError
from ...utils.request import Request, splice_url
from ...utils.request import splice_url
from ...utils.retry import retry
from ...utils.typing import GameType
from .. import ProcessedData as ProcessedDataMeta
from .. import Processor as ProcessorMeta
from .. import RawResponse as RawResponseMeta
from .. import User as UserMeta
from .cache import Cache
from .constant import BASE_URL, GAME_TYPE, RANK_PERCENTILE
from .model import IORank
from .schemas.league_all import FailedModel as LeagueAllFailed
from .schemas.league_all import LeagueAll
from .schemas.league_all import ValidUser as LeagueAllUser
from .schemas.response import ProcessedData, RawResponse
from .schemas.user import User
from .schemas.user_info import FailedModel as InfoFailed
from .schemas.user_info import (
NeverPlayedLeague,
NeverRatedLeague,
UserInfo,
)
from .schemas.user_info import NeverPlayedLeague, NeverRatedLeague, UserInfo
from .schemas.user_info import SuccessModel as InfoSuccess
from .schemas.user_records import FailedModel as RecordsFailed
from .schemas.user_records import SoloRecord, UserRecords
@@ -40,24 +36,6 @@ from .typing import Rank
driver = get_driver()
@dataclass
class User(UserMeta):
ID: str | None = None
name: str | None = None
@dataclass
class RawResponse(RawResponseMeta):
user_info: bytes | None = None
user_records: bytes | None = None
@dataclass
class ProcessedData(ProcessedDataMeta):
user_info: InfoSuccess | None = None
user_records: RecordsSuccess | None = None
def identify_user_info(info: str) -> User | MessageFormatError:
if match(r'^[a-f0-9]{24}$', info):
return User(ID=info)
@@ -113,7 +91,7 @@ class Processor(ProcessorMeta):
async def get_user_info(self) -> InfoSuccess:
"""获取用户数据"""
if self.processed_data.user_info is None:
self.raw_response.user_info = await Request.request(
self.raw_response.user_info = await Cache.get(
splice_url([BASE_URL, 'users/', f'{self.user.ID or self.user.name}'])
)
user_info: UserInfo = parse_raw_as(UserInfo, self.raw_response.user_info) # type: ignore[arg-type]
@@ -125,20 +103,10 @@ class Processor(ProcessorMeta):
async def get_user_records(self) -> RecordsSuccess:
"""获取Solo数据"""
if self.processed_data.user_records is None:
self.raw_response.user_records = await Request.request(
splice_url(
[
BASE_URL,
'users/',
f'{self.user.ID or self.user.name}/',
'records',
]
)
)
user_records: UserRecords = parse_raw_as(
UserRecords, # type: ignore[arg-type]
self.raw_response.user_records,
self.raw_response.user_records = await Cache.get(
splice_url([BASE_URL, 'users/', f'{self.user.ID or self.user.name}/', 'records'])
)
user_records: UserRecords = parse_raw_as(UserRecords, self.raw_response.user_records) # type: ignore[arg-type]
if isinstance(user_records, RecordsFailed):
raise RequestError(f'用户Solo数据请求错误:\n{user_records.error}')
self.processed_data.user_records = user_records
@@ -155,12 +123,13 @@ class Processor(ProcessorMeta):
else:
if isinstance(league, NeverRatedLeague):
ret_message += f'用户 {user_name} 暂未完成定级赛, 最近十场的数据:'
elif league.rank == 'z':
ret_message += f'用户 {user_name} 暂无段位, {round(league.rating,2)} TR'
else:
ret_message += (
f'{league.rank.upper()}用户 {user_name} {round(league.rating,2)} TR (#{league.standing})'
)
if league.rank == 'z':
ret_message += f'用户 {user_name} 暂无段位, {round(league.rating,2)} TR'
else:
ret_message += (
f'{league.rank.upper()} 段用户 {user_name} {round(league.rating,2)} TR (#{league.standing})'
)
ret_message += f', 段位分 {round(league.glicko,2)}±{round(league.rd,2)}, 最近十场的数据:'
lpm = league.pps * 24
ret_message += f"\nL'PM: {round(lpm, 2)} ( {league.pps} pps )"
@@ -185,13 +154,11 @@ class Processor(ProcessorMeta):
@scheduler.scheduled_job('cron', hour='0,6,12,18', minute=0)
@retry(exception_type=RequestError, delay=timedelta(minutes=15))
async def get_io_rank_data() -> None:
league_all: LeagueAll = parse_raw_as(
LeagueAll, # type: ignore[arg-type]
await Request.request(splice_url([BASE_URL, 'users/lists/league/all'])),
)
league_all: LeagueAll = parse_raw_as(LeagueAll, await Cache.get(splice_url([BASE_URL, 'users/lists/league/all']))) # type: ignore[arg-type]
if isinstance(league_all, LeagueAllFailed):
raise RequestError(f'用户Solo数据请求错误:\n{league_all.error}')
raise RequestError(f'排行榜数据请求错误:\n{league_all.error}')
def pps(user: LeagueAllUser) -> float:
return user.league.pps
@@ -214,7 +181,7 @@ async def get_io_rank_data() -> None:
sort: Callable[[list[LeagueAllUser], Callable[[LeagueAllUser], float]], LeagueAllUser],
) -> tuple[dict[str, str], float]:
user = sort(users, field)
return asdict(User(ID=user.id, name=user.username)), field(user)
return User(ID=user.id, name=user.username).dict(), field(user)
users = [i for i in league_all.data.users if isinstance(i, LeagueAllUser)]
rank_to_users: defaultdict[Rank, list[LeagueAllUser]] = defaultdict(list)
@@ -247,8 +214,8 @@ async def get_io_rank_data() -> None:
@driver.on_startup
async def check_rank_data() -> None:
async def _() -> None:
async with get_session() as session:
latest_time = await session.scalar(select(IORank.create_time).order_by(IORank.id.desc()).limit(1))
if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
await get_io_rank_data()
if latest_time is None or datetime.now(tz=UTC) - latest_time.replace(tzinfo=UTC) > timedelta(hours=6):
await get_io_rank_data()

View File

@@ -0,0 +1,14 @@
from ... import ProcessedData as ProcessedDataMeta
from ... import RawResponse as RawResponseMeta
from .user_info import SuccessModel as InfoSuccess
from .user_records import SuccessModel as RecordsSuccess
class RawResponse(RawResponseMeta):
    """Unparsed API response bodies, kept for later reference."""

    user_info: bytes | None = None
    user_records: bytes | None = None
class ProcessedData(ProcessedDataMeta):
    """Validated counterparts of the raw responses."""

    user_info: InfoSuccess | None = None
    user_records: RecordsSuccess | None = None

View File

@@ -0,0 +1,12 @@
from ...schemas import BaseUser
class User(BaseUser):
    """Game user identified by account ID and/or display name."""

    ID: str | None = None
    name: str | None = None

    @property
    def unique_identifier(self) -> str:
        # Only the account ID is a stable identity; a name alone is not enough.
        if self.ID is None:
            raise ValueError('不完整的User!')
        return self.ID

View File

@@ -0,0 +1,25 @@
from abc import ABC, abstractmethod
from pydantic import BaseModel
class BaseUser(ABC, BaseModel):
    """Abstract game user: subclasses supply a stable unique identifier."""

    def __eq__(self, __value: object) -> bool:
        # Two users are the same iff their stable identifiers match,
        # regardless of which optional lookup fields happen to be filled in.
        if isinstance(__value, BaseUser):
            return self.unique_identifier == __value.unique_identifier
        return False

    def __hash__(self) -> int:
        # Defining __eq__ alone would implicitly set __hash__ to None, making
        # instances unhashable; hash on the same key __eq__ compares so equal
        # users hash equally.
        return hash(self.unique_identifier)

    @property
    @abstractmethod
    def unique_identifier(self) -> str:
        raise NotImplementedError
class BaseRawResponse(BaseModel):
    """Raw (unparsed) response data from a game API."""
class BaseProcessedData(BaseModel):
    """Processed / validated response data."""

View File

@@ -13,26 +13,18 @@ from ...db import create_or_update_bind
from ...utils.exception import MessageFormatError, RequestError
from ...utils.request import Request, splice_url
from ...utils.typing import GameType
from .. import ProcessedData as ProcessedDataMeta
from .. import Processor as ProcessorMeta
from .. import RawResponse as RawResponseMeta
from .. import User as UserMeta
from ..schemas import BaseUser
from .constant import BASE_URL, GAME_TYPE
from .schemas.response import ProcessedData, RawResponse
@dataclass
class User(UserMeta):
class User(BaseUser):
name: str
@dataclass
class RawResponse(RawResponseMeta):
user_profile: bytes | None = None
@dataclass
class ProcessedData(ProcessedDataMeta):
user_profile: str | None = None
@property
def unique_identifier(self) -> str:
return self.name
@dataclass

View File

@@ -0,0 +1,9 @@
from ...schemas import BaseProcessedData, BaseRawResponse
class RawResponse(BaseRawResponse):
    """Unparsed API response bodies for this game."""

    user_profile: bytes | None = None
class ProcessedData(BaseProcessedData):
    """Processed profile data (stored as text)."""

    user_profile: str | None = None

View File

@@ -1,6 +1,5 @@
from dataclasses import dataclass
from re import match
from typing import Any
from urllib.parse import urlencode
from nonebot_plugin_orm import get_session
@@ -10,32 +9,24 @@ from ...db import create_or_update_bind
from ...utils.exception import MessageFormatError, RequestError
from ...utils.request import Request, splice_url
from ...utils.typing import GameType
from .. import ProcessedData as ProcessedDataMeta
from .. import Processor as ProcessorMeta
from .. import RawResponse as RawResponseMeta
from .. import User as UserMeta
from ..schemas import BaseUser
from .constant import BASE_URL, GAME_TYPE
from .schemas.response import ProcessedData, RawResponse
from .schemas.user_info import SuccessModel as InfoSuccess
from .schemas.user_info import UserInfo
from .schemas.user_profile import UserProfile
@dataclass
class User(UserMeta):
class User(BaseUser):
teaid: str | None = None
name: str | None = None
@dataclass
class RawResponse(RawResponseMeta):
user_profile: dict[frozenset[tuple[str, Any]], bytes]
user_info: bytes | None = None
@dataclass
class ProcessedData(ProcessedDataMeta):
user_profile: dict[frozenset[tuple[str, Any]], UserProfile]
user_info: InfoSuccess | None = None
@property
def unique_identifier(self) -> str:
if self.teaid is None:
raise ValueError('不完整的User!')
return self.teaid
@dataclass
@@ -135,23 +126,23 @@ class Processor(ProcessorMeta):
self.processed_data.user_info = user_info
return self.processed_data.user_info
async def get_user_profile(self, other_parameter: dict[str, Any] | None = None) -> UserProfile:
async def get_user_profile(self, other_parameter: dict[str, str | bytes] | None = None) -> UserProfile:
"""获取用户数据"""
if other_parameter is None:
other_parameter = {}
fset = frozenset(other_parameter.items())
if self.processed_data.user_profile.get(fset) is None:
self.raw_response.user_profile[fset] = await Request.request(
params = urlencode(dict(sorted(other_parameter.items())))
if self.processed_data.user_profile.get(params) is None:
self.raw_response.user_profile[params] = await Request.request(
splice_url(
[
BASE_URL,
'getProfile',
f'?{urlencode({"id":self.user.teaid or self.user.name},**other_parameter)}',
f'?{urlencode({"id":self.user.teaid or self.user.name,**other_parameter})}',
]
)
)
self.processed_data.user_profile[fset] = UserProfile.parse_raw(self.raw_response.user_profile[fset])
return self.processed_data.user_profile[fset]
self.processed_data.user_profile[params] = UserProfile.parse_raw(self.raw_response.user_profile[params])
return self.processed_data.user_profile[params]
async def get_game_data(self) -> GameData | None:
"""获取游戏数据"""

View File

@@ -0,0 +1,13 @@
from ...schemas import BaseProcessedData, BaseRawResponse
from .user_info import SuccessModel as InfoSuccess
from .user_profile import UserProfile
class RawResponse(BaseRawResponse):
    """Unparsed API responses; profiles keyed by url-encoded query parameters."""

    user_profile: dict[str, bytes]
    user_info: bytes | None = None
class ProcessedData(BaseProcessedData):
    """Validated responses; profiles keyed by the same query-parameter string."""

    user_profile: dict[str, UserProfile]
    user_info: InfoSuccess | None = None

View File

@@ -0,0 +1,37 @@
from asyncio import sleep
from collections.abc import Awaitable, Callable
from datetime import timedelta
from functools import wraps
from typing import TypeVar, cast
from nonebot.log import logger
T = TypeVar('T')
def retry(
max_attempts: int = 3,
exception_type: type[BaseException] | tuple[type[BaseException], ...] = Exception,
delay: timedelta | None = None,
) -> Callable[[Callable[..., Awaitable[T]]], Callable[..., Awaitable[T]]]:
def decorator(func: Callable[..., Awaitable[T]]) -> Callable[..., Awaitable[T]]:
@wraps(func)
async def wrapper(*args, **kwargs) -> T: # noqa: ANN002, ANN003
attempts = 0
while attempts < max_attempts + 1:
try:
return await func(*args, **kwargs)
except exception_type as e: # noqa: PERF203
logger.exception(e)
attempts += 1
if attempts <= max_attempts:
if delay is not None:
await sleep(delay.total_seconds())
logger.debug(f'Retrying: {func.__name__} ({attempts}/{max_attempts})')
continue
raise
raise RuntimeError('Unexpectedly reached the end of the retry loop')
return cast(Callable[..., Awaitable[T]], wrapper)
return decorator

74
poetry.lock generated
View File

@@ -1,5 +1,21 @@
# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]]
name = "aiocache"
version = "0.12.2"
description = "multi backend asyncio cache"
optional = false
python-versions = "*"
files = [
{file = "aiocache-0.12.2-py2.py3-none-any.whl", hash = "sha256:9b6fa30634ab0bfc3ecc44928a91ff07c6ea16d27d55469636b296ebc6eb5918"},
{file = "aiocache-0.12.2.tar.gz", hash = "sha256:b41c9a145b050a5dcbae1599f847db6dd445193b1f3bd172d8e0fe0cb9e96684"},
]
[package.extras]
memcached = ["aiomcache (>=0.5.2)"]
msgpack = ["msgpack (>=0.5.5)"]
redis = ["redis (>=4.2.0)"]
[[package]]
name = "aiofiles"
version = "23.2.1"
@@ -864,13 +880,13 @@ typing-extensions = ">=4.0.0,<5.0.0"
[[package]]
name = "nonebot-adapter-satori"
version = "0.7.0"
version = "0.8.0"
description = "Satori Protocol Adapter for Nonebot2"
optional = false
python-versions = ">=3.8"
files = [
{file = "nonebot_adapter_satori-0.7.0-py3-none-any.whl", hash = "sha256:14a6e846c0f4e46077c28ca403e5f16d18162b63b6aa4eb68f9e6f52153ab138"},
{file = "nonebot_adapter_satori-0.7.0.tar.gz", hash = "sha256:0f62b87182359beaf477e77b12a0ca665273de7a24833695514ff18817b23618"},
{file = "nonebot_adapter_satori-0.8.0-py3-none-any.whl", hash = "sha256:25d9726a961b73b6900a4dba3a8ec4f5d228a6cb96078b542379c9d6c9d3cefe"},
{file = "nonebot_adapter_satori-0.8.0.tar.gz", hash = "sha256:267c5b708ec2dd77b405d3ec779e817506e6d6914cc06688b529b43fe4288727"},
]
[package.dependencies]
@@ -878,13 +894,13 @@ nonebot2 = ">=2.1.0"
[[package]]
name = "nonebot-plugin-alconna"
version = "0.33.3"
version = "0.33.6"
description = "Alconna Adapter for Nonebot"
optional = false
python-versions = ">=3.8"
files = [
{file = "nonebot_plugin_alconna-0.33.3-py3-none-any.whl", hash = "sha256:f04baa7b97a431c815b070bf12abfd5590d2763531eec4464798af2a1e907608"},
{file = "nonebot_plugin_alconna-0.33.3.tar.gz", hash = "sha256:8be755008fcd7277b23f9594135c9f5c2de3bf0856cede60f5baee6f22053afc"},
{file = "nonebot_plugin_alconna-0.33.6-py3-none-any.whl", hash = "sha256:89989dd6dc1bccd8a1603159b57e9e96da438eb0bbbdbbcd2bfcf747594e12c3"},
{file = "nonebot_plugin_alconna-0.33.6.tar.gz", hash = "sha256:54e761ef621c8285cd9ea2f81324f9f52f6fb6292d3ddb87387dbdcaeaa8dc1b"},
]
[package.dependencies]
@@ -926,13 +942,13 @@ typing-extensions = ">=4.0.0"
[[package]]
name = "nonebot-plugin-orm"
version = "0.5.1"
version = "0.6.0"
description = "SQLAlchemy ORM support for nonebot"
optional = false
python-versions = "<4.0,>=3.8"
files = [
{file = "nonebot_plugin_orm-0.5.1-py3-none-any.whl", hash = "sha256:5f1f74aed93a56ea3743b40b9a6b4ffb1d53e6359928f51ef7ffad1ae3d83181"},
{file = "nonebot_plugin_orm-0.5.1.tar.gz", hash = "sha256:7e3f84fcc932f28f4c235bf2f1bd26da11988f4245bc499dfb781d379816b2d9"},
{file = "nonebot_plugin_orm-0.6.0-py3-none-any.whl", hash = "sha256:482f35d102749eda93cd635608ec79e81718d080a80fa325c24d8fa7316f7d83"},
{file = "nonebot_plugin_orm-0.6.0.tar.gz", hash = "sha256:e1b4db52da60b53f33eac95bb4b9875d7000d71f47ec6a034aa908adde41c33e"},
]
[package.dependencies]
@@ -943,7 +959,7 @@ importlib-resources = {version = ">=6.1,<7.0", markers = "python_version < \"3.1
nonebot-plugin-localstore = ">=0.5,<1.0"
nonebot2 = ">=2.1,<3.0"
sqlalchemy = ">=2.0,<3.0"
typing-extensions = {version = ">=4.8,<5.0", markers = "python_version < \"3.12\""}
typing-extensions = {version = ">=4.8,<5.0", markers = "python_version < \"3.11\""}
[package.extras]
aiomysql = ["aiomysql (>=0.2,<1.0)"]
@@ -1325,28 +1341,28 @@ files = [
[[package]]
name = "ruff"
version = "0.1.5"
version = "0.1.6"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.1.5-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:32d47fc69261c21a4c48916f16ca272bf2f273eb635d91c65d5cd548bf1f3d96"},
{file = "ruff-0.1.5-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:171276c1df6c07fa0597fb946139ced1c2978f4f0b8254f201281729981f3c17"},
{file = "ruff-0.1.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ef33cd0bb7316ca65649fc748acc1406dfa4da96a3d0cde6d52f2e866c7b39"},
{file = "ruff-0.1.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b2c205827b3f8c13b4a432e9585750b93fd907986fe1aec62b2a02cf4401eee6"},
{file = "ruff-0.1.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb408e3a2ad8f6881d0f2e7ad70cddb3ed9f200eb3517a91a245bbe27101d379"},
{file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f20dc5e5905ddb407060ca27267c7174f532375c08076d1a953cf7bb016f5a24"},
{file = "ruff-0.1.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aafb9d2b671ed934998e881e2c0f5845a4295e84e719359c71c39a5363cccc91"},
{file = "ruff-0.1.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4894dddb476597a0ba4473d72a23151b8b3b0b5f958f2cf4d3f1c572cdb7af7"},
{file = "ruff-0.1.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00a7ec893f665ed60008c70fe9eeb58d210e6b4d83ec6654a9904871f982a2a"},
{file = "ruff-0.1.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a8c11206b47f283cbda399a654fd0178d7a389e631f19f51da15cbe631480c5b"},
{file = "ruff-0.1.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fa29e67b3284b9a79b1a85ee66e293a94ac6b7bb068b307a8a373c3d343aa8ec"},
{file = "ruff-0.1.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9b97fd6da44d6cceb188147b68db69a5741fbc736465b5cea3928fdac0bc1aeb"},
{file = "ruff-0.1.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:721f4b9d3b4161df8dc9f09aa8562e39d14e55a4dbaa451a8e55bdc9590e20f4"},
{file = "ruff-0.1.5-py3-none-win32.whl", hash = "sha256:f80c73bba6bc69e4fdc73b3991db0b546ce641bdcd5b07210b8ad6f64c79f1ab"},
{file = "ruff-0.1.5-py3-none-win_amd64.whl", hash = "sha256:c21fe20ee7d76206d290a76271c1af7a5096bc4c73ab9383ed2ad35f852a0087"},
{file = "ruff-0.1.5-py3-none-win_arm64.whl", hash = "sha256:82bfcb9927e88c1ed50f49ac6c9728dab3ea451212693fe40d08d314663e412f"},
{file = "ruff-0.1.5.tar.gz", hash = "sha256:5cbec0ef2ae1748fb194f420fb03fb2c25c3258c86129af7172ff8f198f125ab"},
{file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:88b8cdf6abf98130991cbc9f6438f35f6e8d41a02622cc5ee130a02a0ed28703"},
{file = "ruff-0.1.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5c549ed437680b6105a1299d2cd30e4964211606eeb48a0ff7a93ef70b902248"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cf5f701062e294f2167e66d11b092bba7af6a057668ed618a9253e1e90cfd76"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05991ee20d4ac4bb78385360c684e4b417edd971030ab12a4fbd075ff535050e"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87455a0c1f739b3c069e2f4c43b66479a54dea0276dd5d4d67b091265f6fd1dc"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:683aa5bdda5a48cb8266fcde8eea2a6af4e5700a392c56ea5fb5f0d4bfdc0240"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:137852105586dcbf80c1717facb6781555c4e99f520c9c827bd414fac67ddfb6"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd98138a98d48a1c36c394fd6b84cd943ac92a08278aa8ac8c0fdefcf7138f35"},
{file = "ruff-0.1.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0cd909d25f227ac5c36d4e7e681577275fb74ba3b11d288aff7ec47e3ae745"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e8fd1c62a47aa88a02707b5dd20c5ff20d035d634aa74826b42a1da77861b5ff"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:fd89b45d374935829134a082617954120d7a1470a9f0ec0e7f3ead983edc48cc"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:491262006e92f825b145cd1e52948073c56560243b55fb3b4ecb142f6f0e9543"},
{file = "ruff-0.1.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ea284789861b8b5ca9d5443591a92a397ac183d4351882ab52f6296b4fdd5462"},
{file = "ruff-0.1.6-py3-none-win32.whl", hash = "sha256:1610e14750826dfc207ccbcdd7331b6bd285607d4181df9c1c6ae26646d6848a"},
{file = "ruff-0.1.6-py3-none-win_amd64.whl", hash = "sha256:4558b3e178145491e9bc3b2ee3c4b42f19d19384eaa5c59d10acf6e8f8b57e33"},
{file = "ruff-0.1.6-py3-none-win_arm64.whl", hash = "sha256:03910e81df0d8db0e30050725a5802441c2022ea3ae4fe0609b76081731accbc"},
{file = "ruff-0.1.6.tar.gz", hash = "sha256:1b09f29b16c6ead5ea6b097ef2764b42372aebe363722f1605ecbcd2b9207184"},
]
[[package]]
@@ -2108,4 +2124,4 @@ multidict = ">=4.0"
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "90fd550637cb1f381015128bbdeb8e644ab56ca493ed1751bead31a75999b2de"
content-hash = "e523f1239c9c0373499c2ee7a9c04a7400334ea94bec5105a8497bdb7da29f98"

View File

@@ -1,6 +1,6 @@
[tool.poetry]
name = 'nonebot-plugin-tetris-stats'
version = '1.0.0.a7'
version = '1.0.0.a8'
description = '一款基于 NoneBot2 的用于查询 Tetris 相关游戏数据的插件'
authors = ['scdhh <wallfjjd@gmail.com>']
readme = 'README.md'
@@ -16,23 +16,24 @@ pandas = '>=1.4.3,<3.0.0'
playwright = '^1.24.1'
ujson = '^5.4.0'
aiofiles = "^23.2.1"
nonebot-plugin-orm = ">=0.1.1,<0.6.0"
nonebot-plugin-orm = ">=0.1.1,<0.7.0"
nonebot-plugin-localstore = "^0.5.1"
httpx = "^0.25.0"
nonebot-plugin-alconna = ">=0.30,<0.34"
nonebot-plugin-apscheduler = "^0.3.0"
aiocache = "^0.12.2"
[tool.poetry.group.dev.dependencies]
mypy = '>=0.991,<1.8'
types-ujson = '^5.7.0'
pandas-stubs = '>=1.5.2,<3.0.0'
ruff = '>=0.0.239,<0.1.6'
ruff = '>=0.0.239,<0.1.7'
types-aiofiles = "^23.2.0.0"
nonebot2 = { extras = ["fastapi"], version = "^2.1.1" }
types-lxml = "^2023.3.28"
nonebot-plugin-orm = { extras = ["default"], version = ">=0.3,<0.6" }
nonebot-plugin-orm = { extras = ["default"], version = ">=0.3,<0.7" }
nonebot-adapter-onebot = "^2.3.1"
nonebot-adapter-satori = "^0.7.0"
nonebot-adapter-satori = "^0.8.0"
[tool.poetry.group.debug.dependencies]
objprint = '^0.2.2'