Compare commits

...

10 Commits

Author SHA1 Message Date
144c223fe9 🔖 1.10.2
Some checks failed
Code Coverage / Test (macos-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.12) (push) Has been cancelled
TypeCheck / TypeCheck (push) Has been cancelled
CodeQL / Analyze (python) (push) Has been cancelled
Code Coverage / check (push) Has been cancelled
2025-07-19 22:40:47 +08:00
呵呵です
52a6d95434 🐛 移除 julianday 的使用,兼容更多数据库 (#550) 2025-07-19 22:40:04 +08:00
d8255756ca 🔖 1.10.1 2025-07-19 19:55:54 +08:00
呵呵です
13c6d53b6a 🐛 修改用户唯一标识符字段长度 (#549) 2025-07-19 19:54:40 +08:00
6493aba7e0 🔖 1.10.0
Some checks failed
Code Coverage / Test (macos-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.12) (push) Has been cancelled
TypeCheck / TypeCheck (push) Has been cancelled
CodeQL / Analyze (python) (push) Has been cancelled
Code Coverage / check (push) Has been cancelled
2025-07-18 05:53:41 +08:00
pre-commit-ci[bot]
b82053be11 ⬆️ auto update by pre-commit hooks (#548)
* ⬆️ auto update by pre-commit hooks

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.11.13 → v0.12.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.11.13...v0.12.3)

* 🚨 auto fix by pre-commit hooks

* ⬆️ Upgrade dependency ruff to v0.12.4

* 🚨 修复 lint 警告

* 🚨 添加一个 noqa(

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: 呵呵です <51957264+shoucandanghehe@users.noreply.github.com>
Co-authored-by: shoucandanghehe <wallfjjd@gmail.com>
2025-07-17 21:49:16 +00:00
renovate[bot]
11bc486420 ⬆️ Upgrade dependency prettier to v3.6.2 (#547)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-17 21:35:20 +00:00
呵呵です
9916902c10 🐛 修复 postgresql 标识符大于63字符的错误 (#545)
* 🗃️ 自定义表名

*  添加开发依赖 nonebot-plugin-orm[postgresql]

* 🗃️ postgresql 跳过所有旧迁移脚本

* 🗃️ 修正方言

* 🗃️ 添加迁移脚本

* 🚨 auto fix by pre-commit hooks

* 🚨 添加一个 noqa(

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
2025-07-18 05:32:50 +08:00
pre-commit-ci[bot]
e347b41ba6 ⬆️ auto update by pre-commit hooks (#546)
Some checks failed
Code Coverage / Test (macos-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.12) (push) Has been cancelled
TypeCheck / TypeCheck (push) Has been cancelled
CodeQL / Analyze (python) (push) Has been cancelled
Code Coverage / check (push) Has been cancelled
updates:
- [github.com/astral-sh/ruff-pre-commit: v0.11.12 → v0.11.13](https://github.com/astral-sh/ruff-pre-commit/compare/v0.11.12...v0.11.13)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
2025-06-10 20:28:52 +08:00
pre-commit-ci[bot]
40d0bf06bb ⬆️ auto update by pre-commit hooks (#544)
Some checks failed
Code Coverage / Test (macos-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (macos-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (ubuntu-latest, 3.12) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.10) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.11) (push) Has been cancelled
Code Coverage / Test (windows-latest, 3.12) (push) Has been cancelled
TypeCheck / TypeCheck (push) Has been cancelled
CodeQL / Analyze (python) (push) Has been cancelled
Code Coverage / check (push) Has been cancelled
updates:
- [github.com/astral-sh/ruff-pre-commit: v0.11.11 → v0.11.12](https://github.com/astral-sh/ruff-pre-commit/compare/v0.11.11...v0.11.12)

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
2025-06-07 19:32:27 +08:00
43 changed files with 2578 additions and 1177 deletions

View File

@@ -7,7 +7,7 @@ ci:
autoupdate_commit_msg: ':arrow_up: auto update by pre-commit hooks' autoupdate_commit_msg: ':arrow_up: auto update by pre-commit hooks'
repos: repos:
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.11 rev: v0.12.4
hooks: hooks:
- id: ruff - id: ruff
args: [--fix, --exit-non-zero-on-fix] args: [--fix, --exit-non-zero-on-fix]

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.alter_column('create_time', new_column_name='update_time', existing_type=sa.DateTime()) batch_op.alter_column('create_time', new_column_name='update_time', existing_type=sa.DateTime())
@@ -41,6 +43,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.alter_column('update_time', new_column_name='create_time') batch_op.alter_column('update_time', new_column_name='create_time')

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.add_column(sa.Column('file_hash', sa.String(length=128), nullable=True)) batch_op.add_column(sa.Column('file_hash', sa.String(length=128), nullable=True))
@@ -38,6 +40,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_file_hash')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_file_hash'))

View File

@@ -0,0 +1,52 @@
"""modify field length
迁移 ID: 3588702dd3a4
父迁移: bc6abd57928f
创建时间: 2025-07-19 17:21:17.927162
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '3588702dd3a4'
down_revision: str | Sequence[str] | None = 'bc6abd57928f'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Widen ``user_unique_identifier`` on ``nb_t_tos_hist_data`` from
    VARCHAR(24) to String(256).

    ``name`` selects a named Alembic branch; this script only applies to the
    default (unnamed) branch, so any non-empty ``name`` is a no-op.
    """
    if name:
        return
    # ### commands auto generated by Alembic - please adjust! ###
    # batch_alter_table so the ALTER also works on databases (e.g. SQLite)
    # that cannot alter a column type in place.
    with op.batch_alter_table('nb_t_tos_hist_data', schema=None) as batch_op:
        batch_op.alter_column(
            'user_unique_identifier',
            existing_type=sa.VARCHAR(length=24),
            type_=sa.String(length=256),
            existing_nullable=False,
        )
    # ### end Alembic commands ###
def downgrade(name: str = '') -> None:
    """Revert ``user_unique_identifier`` on ``nb_t_tos_hist_data`` back to
    VARCHAR(24) — exact inverse of :func:`upgrade`.

    NOTE(review): shrinking the column may truncate or fail on rows whose
    identifier exceeds 24 characters — behavior is database-dependent.
    """
    if name:
        return
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('nb_t_tos_hist_data', schema=None) as batch_op:
        batch_op.alter_column(
            'user_unique_identifier',
            existing_type=sa.String(length=256),
            type_=sa.VARCHAR(length=24),
            existing_nullable=False,
        )
    # ### end Alembic commands ###

View File

@@ -16,7 +16,6 @@ from alembic import op
from nonebot.log import logger from nonebot.log import logger
from rich.progress import BarColumn, MofNCompleteColumn, Progress, TaskProgressColumn, TextColumn, TimeRemainingColumn from rich.progress import BarColumn, MofNCompleteColumn, Progress, TaskProgressColumn, TextColumn, TimeRemainingColumn
from sqlalchemy import desc, select from sqlalchemy import desc, select
from sqlalchemy.dialects import sqlite
from sqlalchemy.ext.automap import automap_base from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
@@ -30,7 +29,7 @@ depends_on: str | Sequence[str] | None = None
def migrate_old_data() -> None: # noqa: C901 def migrate_old_data() -> None: # noqa: C901
from json import dumps, loads from json import dumps, loads # noqa: PLC0415
Base = automap_base() # noqa: N806 Base = automap_base() # noqa: N806
Base.prepare(autoload_with=op.get_bind()) Base.prepare(autoload_with=op.get_bind())
@@ -109,6 +108,8 @@ def migrate_old_data() -> None: # noqa: C901
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_tetriohistoricaldata', 'nonebot_plugin_tetris_stats_tetriohistoricaldata',
@@ -219,23 +220,25 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_historicaldata', 'nonebot_plugin_tetris_stats_historicaldata',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('trigger_time', sa.DATETIME(), nullable=False), sa.Column('trigger_time', sa.DateTime(), nullable=False),
sa.Column('bot_platform', sa.VARCHAR(length=32), nullable=True), sa.Column('bot_platform', sa.String(length=32), nullable=True),
sa.Column('bot_account', sa.VARCHAR(), nullable=True), sa.Column('bot_account', sa.String(), nullable=True),
sa.Column('source_type', sa.VARCHAR(length=32), nullable=True), sa.Column('source_type', sa.String(length=32), nullable=True),
sa.Column('source_account', sa.VARCHAR(), nullable=True), sa.Column('source_account', sa.String(), nullable=True),
sa.Column('message', sa.BLOB(), nullable=True), sa.Column('message', sa.PickleType(), nullable=True),
sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False), sa.Column('game_platform', sa.String(length=32), nullable=False),
sa.Column('command_type', sa.VARCHAR(length=16), nullable=False), sa.Column('command_type', sa.String(length=16), nullable=False),
sa.Column('command_args', sqlite.JSON(), nullable=False), sa.Column('command_args', sa.JSON(), nullable=False),
sa.Column('game_user', sqlite.JSON(), nullable=False), sa.Column('game_user', sa.JSON(), nullable=False),
sa.Column('processed_data', sqlite.JSON(), nullable=False), sa.Column('processed_data', sa.JSON(), nullable=False),
sa.Column('finish_time', sa.DATETIME(), nullable=False), sa.Column('finish_time', sa.DateTime(), nullable=False),
sa.Column('user_unique_identifier', sa.VARCHAR(length=32), nullable=False), sa.Column('user_unique_identifier', sa.String(length=32), nullable=False),
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'), sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'),
) )
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:

View File

@@ -0,0 +1,82 @@
"""migrate nonebot_plugin_tetris_stats_tetrioleaguestats
迁移 ID: 3d900bb0e8d4
父迁移: 405c6936a164
创建时间: 2025-07-18 02:22:03.771903
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '3d900bb0e8d4'
down_revision: str | Sequence[str] | None = '405c6936a164'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows from the legacy ``nonebot_plugin_tetris_stats_tetrioleaguestats``
    table into the shortened-name table ``nb_t_io_tl_stats``.

    Both tables are reflected at runtime via automap, so this script does not
    depend on the plugin's current ORM models. Silently returns when the
    legacy table does not exist or is empty.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never had the legacy table — nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetrioleaguestats' not in table_names:
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetrioleaguestats  # noqa: N806
    New = Base.classes.nb_t_io_tl_stats  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(1) streams one row at a time — presumably to bound
            # memory on large tables; TODO confirm mid-iteration commits are
            # safe with the target drivers.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches of 100 rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
            # Flush the final partial batch.
            db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the row-copy migration for the default (unnamed) branch.

    A non-empty ``name`` targets a named Alembic branch this script does not
    belong to, so it does nothing in that case.
    """
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op downgrade: rows copied by :func:`upgrade` are left in place.

    ``name`` is accepted only for interface parity with ``upgrade`` and has
    no effect here.
    """
    _ = name

View File

@@ -0,0 +1,85 @@
"""migrate nonebot_plugin_tetris_stats_tetrioleaguehistorical
迁移 ID: 405c6936a164
父迁移: bbbdfd94e6fa
创建时间: 2025-07-18 01:55:27.406032
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '405c6936a164'
down_revision: str | Sequence[str] | None = 'bbbdfd94e6fa'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows from the legacy ``nonebot_plugin_tetris_stats_tetrioleaguehistorical``
    table into the shortened-name table ``nb_t_io_tl_hist``.

    Tables are reflected at runtime via automap so the script is independent
    of the plugin's current ORM models. Silently returns when the legacy
    table does not exist or is empty.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never had the legacy table — nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetrioleaguehistorical' not in table_names:
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetrioleaguehistorical  # noqa: N806
    New = Base.classes.nb_t_io_tl_hist  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(1) streams one row at a time — presumably to bound
            # memory on large tables; TODO confirm mid-iteration commits are
            # safe with the target drivers.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        request_id=i.request_id,
                        data=i.data,
                        update_time=i.update_time,
                        stats_id=i.stats_id,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches of 100 rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
            # Flush the final partial batch.
            db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the row-copy migration for the default (unnamed) branch.

    A non-empty ``name`` targets a named Alembic branch this script does not
    belong to, so it does nothing in that case.
    """
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op downgrade: rows copied by :func:`upgrade` are left in place.

    ``name`` is accepted only for interface parity with ``upgrade`` and has
    no effect here.
    """
    _ = name

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_tetrioleaguestats', 'nonebot_plugin_tetris_stats_tetrioleaguestats',
@@ -102,6 +104,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_tetrioleaguestatsfield', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_tetrioleaguestatsfield_rank'))

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_triggerhistoricaldatav2', 'nonebot_plugin_tetris_stats_triggerhistoricaldatav2',
sa.Column('id', sa.Integer(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
@@ -53,6 +55,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldatav2', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_triggerhistoricaldatav2', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_game_platform')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_game_platform'))
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_command_type')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_triggerhistoricaldatav2_command_type'))

View File

@@ -26,7 +26,9 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
from json import dumps, loads if op.get_bind().dialect.name == 'postgresql':
return
from json import dumps, loads # noqa: PLC0415
Base = automap_base() # noqa: N806 Base = automap_base() # noqa: N806
connection = op.get_bind() connection = op.get_bind()
@@ -50,7 +52,9 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
from json import dumps, loads if op.get_bind().dialect.name == 'postgresql':
return
from json import dumps, loads # noqa: PLC0415
Base = automap_base() # noqa: N806 Base = automap_base() # noqa: N806
connection = op.get_bind() connection = op.get_bind()

View File

@@ -45,7 +45,10 @@ def data_migrate() -> None:
return return
try: try:
from nonebot_session_to_uninfo import check_tables, get_id_map # type: ignore[import-untyped] from nonebot_session_to_uninfo import ( # type: ignore[import-untyped] # noqa: PLC0415
check_tables,
get_id_map,
)
except ImportError as err: except ImportError as err:
msg = '请安装 `nonebot-session-to-uninfo` 以迁移数据' msg = '请安装 `nonebot-session-to-uninfo` 以迁移数据'
raise ValueError(msg) from err raise ValueError(msg) from err
@@ -105,9 +108,13 @@ def data_migrate() -> None:
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
data_migrate() data_migrate()
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return

View File

@@ -0,0 +1,94 @@
"""migrate nonebot_plugin_tetris_stats_tetrioleaguestatsfield
迁移 ID: 8459b2a4b7a3
父迁移: 3d900bb0e8d4
创建时间: 2025-07-18 02:24:59.560252
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
revision: str = '8459b2a4b7a3'
down_revision: str | Sequence[str] | None = '3d900bb0e8d4'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy all rows from the legacy ``nonebot_plugin_tetris_stats_tetrioleaguestatsfield``
    table into the shortened-name table ``nb_t_io_tl_stats_field``.

    Tables are reflected at runtime via automap so the script is independent
    of the plugin's current ORM models. Silently returns when the legacy
    table does not exist or is empty.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    # Fresh installs never had the legacy table — nothing to migrate.
    if 'nonebot_plugin_tetris_stats_tetrioleaguestatsfield' not in table_names:
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetrioleaguestatsfield  # noqa: N806
    New = Base.classes.nb_t_io_tl_stats_field  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(1) streams one row at a time — presumably to bound
            # memory on large tables; TODO confirm mid-iteration commits are
            # safe with the target drivers.
            for i in db_session.query(Old).yield_per(1):
                db_session.add(
                    New(
                        id=i.id,
                        rank=i.rank,
                        tr_line=i.tr_line,
                        player_count=i.player_count,
                        low_pps=i.low_pps,
                        low_apm=i.low_apm,
                        low_vs=i.low_vs,
                        avg_pps=i.avg_pps,
                        avg_apm=i.avg_apm,
                        avg_vs=i.avg_vs,
                        high_pps=i.high_pps,
                        high_apm=i.high_apm,
                        high_vs=i.high_vs,
                        stats_id=i.stats_id,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches of 100 rows to keep transactions small.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
            # Flush the final partial batch.
            db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the row-copy migration for the default (unnamed) branch.

    A non-empty ``name`` targets a named Alembic branch this script does not
    belong to, so it does nothing in that case.
    """
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op downgrade: rows copied by :func:`upgrade` are left in place.

    ``name`` is accepted only for interface parity with ``upgrade`` and has
    no effect here.
    """
    _ = name

View File

@@ -28,10 +28,12 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: # noqa: C901 def upgrade(name: str = '') -> None: # noqa: C901
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
from nonebot.compat import PYDANTIC_V2, type_validate_json from nonebot.compat import PYDANTIC_V2, type_validate_json # noqa: PLC0415
from pydantic import BaseModel, ValidationError from pydantic import BaseModel, ValidationError # noqa: PLC0415
from rich.progress import ( from rich.progress import ( # noqa: PLC0415
BarColumn, BarColumn,
MofNCompleteColumn, MofNCompleteColumn,
Progress, Progress,
@@ -58,14 +60,14 @@ def upgrade(name: str = '') -> None: # noqa: C901
logger.info('空表, 跳过') logger.info('空表, 跳过')
return return
from nonebot_plugin_tetris_stats.version import __version__ from nonebot_plugin_tetris_stats.version import __version__ # noqa: PLC0415
if __version__ != '1.0.3': if __version__ != '1.0.3':
msg = '本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移' msg = '本迁移需要1.0.3版本, 请先锁定版本至1.0.3版本再执行本迁移'
logger.critical(msg) logger.critical(msg)
raise RuntimeError(msg) raise RuntimeError(msg)
from nonebot_plugin_tetris_stats.game_data_processor.schemas import ( # type: ignore[import-untyped] from nonebot_plugin_tetris_stats.game_data_processor.schemas import ( # type: ignore[import-untyped] # noqa: PLC0415
BaseProcessedData, BaseProcessedData,
) )
@@ -101,3 +103,5 @@ def upgrade(name: str = '') -> None: # noqa: C901
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_bind', 'nonebot_plugin_tetris_stats_bind',
@@ -122,6 +124,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_rank')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_iorank_rank'))

View File

@@ -63,6 +63,8 @@ def migrate_old_data(connection: Connection) -> None:
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
try: try:
db_path = Path(config.db_url) db_path = Path(config.db_url)
except AttributeError: except AttributeError:
@@ -91,3 +93,5 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return

View File

@@ -12,7 +12,6 @@ from typing import TYPE_CHECKING
import sqlalchemy as sa import sqlalchemy as sa
from alembic import op from alembic import op
from sqlalchemy.dialects import sqlite
if TYPE_CHECKING: if TYPE_CHECKING:
from collections.abc import Sequence from collections.abc import Sequence
@@ -26,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_command_type') batch_op.drop_index('ix_nonebot_plugin_tetris_stats_historicaldata_command_type')
@@ -71,6 +72,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_type')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_source_type'))
@@ -82,19 +85,19 @@ def downgrade(name: str = '') -> None:
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_historicaldata', 'nonebot_plugin_tetris_stats_historicaldata',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('trigger_time', sa.DATETIME(), nullable=False), sa.Column('trigger_time', sa.DateTime(), nullable=False),
sa.Column('bot_platform', sa.VARCHAR(length=32), nullable=True), sa.Column('bot_platform', sa.String(length=32), nullable=True),
sa.Column('bot_account', sa.VARCHAR(), nullable=True), sa.Column('bot_account', sa.String(), nullable=True),
sa.Column('source_type', sa.VARCHAR(length=32), nullable=True), sa.Column('source_type', sa.String(length=32), nullable=True),
sa.Column('source_account', sa.VARCHAR(), nullable=True), sa.Column('source_account', sa.String(), nullable=True),
sa.Column('message', sa.BLOB(), nullable=True), sa.Column('message', sa.PickleType(), nullable=True),
sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False), sa.Column('game_platform', sa.String(length=32), nullable=False),
sa.Column('command_type', sa.VARCHAR(length=16), nullable=False), sa.Column('command_type', sa.String(length=16), nullable=False),
sa.Column('command_args', sqlite.JSON(), nullable=False), sa.Column('command_args', sa.JSON(), nullable=False),
sa.Column('game_user', sa.BLOB(), nullable=False), sa.Column('game_user', sa.PickleType(), nullable=False),
sa.Column('processed_data', sa.BLOB(), nullable=False), sa.Column('processed_data', sa.PickleType(), nullable=False),
sa.Column('finish_time', sa.DATETIME(), nullable=False), sa.Column('finish_time', sa.DateTime(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'), sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_historicaldata'),
) )
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_tetriouserconfig', 'nonebot_plugin_tetris_stats_tetriouserconfig',
@@ -39,6 +41,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
op.drop_table('nonebot_plugin_tetris_stats_tetriouserconfig') op.drop_table('nonebot_plugin_tetris_stats_tetriouserconfig')
# ### end Alembic commands ### # ### end Alembic commands ###

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_bind_chat_account') batch_op.drop_index('ix_nonebot_plugin_tetris_stats_bind_chat_account')
@@ -49,6 +51,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_bind_user_id')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_bind_user_id'))
@@ -57,11 +61,11 @@ def downgrade(name: str = '') -> None:
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_bind', 'nonebot_plugin_tetris_stats_bind',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('chat_platform', sa.VARCHAR(length=32), nullable=False), sa.Column('chat_platform', sa.String(length=32), nullable=False),
sa.Column('chat_account', sa.VARCHAR(), nullable=False), sa.Column('chat_account', sa.String(), nullable=False),
sa.Column('game_platform', sa.VARCHAR(length=32), nullable=False), sa.Column('game_platform', sa.String(length=32), nullable=False),
sa.Column('game_account', sa.VARCHAR(), nullable=False), sa.Column('game_account', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_bind'), sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_bind'),
) )
with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_bind', schema=None) as batch_op:

View File

@@ -0,0 +1,215 @@
"""create new tables
迁移 ID: b2075a5ce371
父迁移: 766cc7e75a62
创建时间: 2025-07-17 22:57:32.245327
"""
from __future__ import annotations
from typing import TYPE_CHECKING
import sqlalchemy as sa
from alembic import op
if TYPE_CHECKING:
from collections.abc import Sequence
# Alembic revision identifiers: this migration's ID and its parent in the graph.
revision: str = 'b2075a5ce371'
down_revision: str | Sequence[str] | None = '766cc7e75a62'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None:
    """Create the plugin's new short-named tables and their indexes."""
    # Only run against the default (unnamed) plugin database.
    if name:
        return
    # ### commands auto generated by Alembic - please adjust! ###
    # nb_t_bind: id, user_id (indexed), game_platform, game_account.
    op.create_table(
        'nb_t_bind',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('game_account', sa.String(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_bind')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_bind', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_bind_user_id'), ['user_id'], unique=False)
    # nb_t_io_hist_data: per-user historical API data, indexed on
    # api_type, update_time and user_unique_identifier.
    op.create_table(
        'nb_t_io_hist_data',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=32), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_hist_data')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_hist_data', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_hist_data_api_type'), ['api_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_io_hist_data_update_time'), ['update_time'], unique=False)
        batch_op.create_index(
            batch_op.f('ix_nb_t_io_hist_data_user_unique_identifier'), ['user_unique_identifier'], unique=False
        )
    # nb_t_io_tl_stats: parent table referenced by nb_t_io_tl_hist and
    # nb_t_io_tl_stats_field below, so it must be created before them.
    op.create_table(
        'nb_t_io_tl_stats',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_tl_stats')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_tl_stats', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_stats_update_time'), ['update_time'], unique=False)
    # nb_t_io_u_cfg: per-user config, a single 2-char query_template value.
    op.create_table(
        'nb_t_io_u_cfg',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('query_template', sa.String(length=2), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_u_cfg')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    op.create_table(
        'nb_t_top_hist_data',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_top_hist_data')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_top_hist_data', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_top_hist_data_api_type'), ['api_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_top_hist_data_update_time'), ['update_time'], unique=False)
        batch_op.create_index(
            batch_op.f('ix_nb_t_top_hist_data_user_unique_identifier'), ['user_unique_identifier'], unique=False
        )
    op.create_table(
        'nb_t_tos_hist_data',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_unique_identifier', sa.String(length=24), nullable=False),
        sa.Column('api_type', sa.String(length=16), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_tos_hist_data')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_tos_hist_data', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_tos_hist_data_api_type'), ['api_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_tos_hist_data_update_time'), ['update_time'], unique=False)
        batch_op.create_index(
            batch_op.f('ix_nb_t_tos_hist_data_user_unique_identifier'), ['user_unique_identifier'], unique=False
        )
    op.create_table(
        'nb_t_trigger_hist_v2',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('trigger_time', sa.DateTime(), nullable=False),
        sa.Column('session_persist_id', sa.Integer(), nullable=False),
        sa.Column('game_platform', sa.String(length=32), nullable=False),
        sa.Column('command_type', sa.String(length=16), nullable=False),
        sa.Column('command_args', sa.JSON(), nullable=False),
        sa.Column('finish_time', sa.DateTime(), nullable=False),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_trigger_hist_v2')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_trigger_hist_v2', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_trigger_hist_v2_command_type'), ['command_type'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_trigger_hist_v2_game_platform'), ['game_platform'], unique=False)
    # nb_t_io_tl_hist: raw league snapshots, FK to nb_t_io_tl_stats.id.
    op.create_table(
        'nb_t_io_tl_hist',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('request_id', sa.Uuid(), nullable=False),
        sa.Column('data', sa.JSON(), nullable=False),
        sa.Column('update_time', sa.DateTime(), nullable=False),
        sa.Column('stats_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['stats_id'], ['nb_t_io_tl_stats.id'], name=op.f('fk_nb_t_io_tl_hist_stats_id_nb_t_io_tl_stats')
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_tl_hist')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_tl_hist', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_hist_request_id'), ['request_id'], unique=False)
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_hist_update_time'), ['update_time'], unique=False)
    # nb_t_io_tl_stats_field: per-rank aggregates, FK to nb_t_io_tl_stats.id.
    op.create_table(
        'nb_t_io_tl_stats_field',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('rank', sa.String(length=2), nullable=False),
        sa.Column('tr_line', sa.Float(), nullable=False),
        sa.Column('player_count', sa.Integer(), nullable=False),
        sa.Column('low_pps', sa.JSON(), nullable=False),
        sa.Column('low_apm', sa.JSON(), nullable=False),
        sa.Column('low_vs', sa.JSON(), nullable=False),
        sa.Column('avg_pps', sa.Float(), nullable=False),
        sa.Column('avg_apm', sa.Float(), nullable=False),
        sa.Column('avg_vs', sa.Float(), nullable=False),
        sa.Column('high_pps', sa.JSON(), nullable=False),
        sa.Column('high_apm', sa.JSON(), nullable=False),
        sa.Column('high_vs', sa.JSON(), nullable=False),
        sa.Column('stats_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ['stats_id'], ['nb_t_io_tl_stats.id'], name=op.f('fk_nb_t_io_tl_stats_field_stats_id_nb_t_io_tl_stats')
        ),
        sa.PrimaryKeyConstraint('id', name=op.f('pk_nb_t_io_tl_stats_field')),
        info={'bind_key': 'nonebot_plugin_tetris_stats'},
    )
    with op.batch_alter_table('nb_t_io_tl_stats_field', schema=None) as batch_op:
        batch_op.create_index(batch_op.f('ix_nb_t_io_tl_stats_field_rank'), ['rank'], unique=False)
    # ### end Alembic commands ###
def downgrade(name: str = '') -> None:
    """Drop every table created by ``upgrade``, indexes first.

    Tables with foreign keys (nb_t_io_tl_stats_field, nb_t_io_tl_hist)
    are dropped before their referenced parent nb_t_io_tl_stats.
    """
    # Only run against the default (unnamed) plugin database.
    if name:
        return
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('nb_t_io_tl_stats_field', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_stats_field_rank'))
    op.drop_table('nb_t_io_tl_stats_field')
    with op.batch_alter_table('nb_t_io_tl_hist', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_hist_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_hist_request_id'))
    op.drop_table('nb_t_io_tl_hist')
    with op.batch_alter_table('nb_t_trigger_hist_v2', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_trigger_hist_v2_game_platform'))
        batch_op.drop_index(batch_op.f('ix_nb_t_trigger_hist_v2_command_type'))
    op.drop_table('nb_t_trigger_hist_v2')
    with op.batch_alter_table('nb_t_tos_hist_data', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_tos_hist_data_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nb_t_tos_hist_data_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_tos_hist_data_api_type'))
    op.drop_table('nb_t_tos_hist_data')
    with op.batch_alter_table('nb_t_top_hist_data', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_top_hist_data_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nb_t_top_hist_data_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_top_hist_data_api_type'))
    op.drop_table('nb_t_top_hist_data')
    op.drop_table('nb_t_io_u_cfg')
    with op.batch_alter_table('nb_t_io_tl_stats', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_tl_stats_update_time'))
    op.drop_table('nb_t_io_tl_stats')
    with op.batch_alter_table('nb_t_io_hist_data', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_io_hist_data_user_unique_identifier'))
        batch_op.drop_index(batch_op.f('ix_nb_t_io_hist_data_update_time'))
        batch_op.drop_index(batch_op.f('ix_nb_t_io_hist_data_api_type'))
    op.drop_table('nb_t_io_hist_data')
    with op.batch_alter_table('nb_t_bind', schema=None) as batch_op:
        batch_op.drop_index(batch_op.f('ix_nb_t_bind_user_id'))
    op.drop_table('nb_t_bind')
    # ### end Alembic commands ###

View File

@@ -23,13 +23,15 @@ branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None: # noqa: C901
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
from nonebot.compat import type_validate_json from nonebot.compat import type_validate_json # noqa: PLC0415
from pydantic import ValidationError from pydantic import ValidationError # noqa: PLC0415
from rich.progress import ( from rich.progress import ( # noqa: PLC0415
BarColumn, BarColumn,
MofNCompleteColumn, MofNCompleteColumn,
Progress, Progress,
@@ -37,9 +39,9 @@ def upgrade(name: str = '') -> None:
TextColumn, TextColumn,
TimeRemainingColumn, TimeRemainingColumn,
) )
from sqlalchemy import select from sqlalchemy import select # noqa: PLC0415
from sqlalchemy.ext.automap import automap_base from sqlalchemy.ext.automap import automap_base # noqa: PLC0415
from sqlalchemy.orm import Session from sqlalchemy.orm import Session # noqa: PLC0415
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
batch_op.add_column(sa.Column('user_unique_identifier', sa.String(length=32), nullable=True)) batch_op.add_column(sa.Column('user_unique_identifier', sa.String(length=32), nullable=True))
@@ -58,13 +60,15 @@ def upgrade(name: str = '') -> None:
if count == 0: if count == 0:
logger.info('空表, 跳过') logger.info('空表, 跳过')
else: else:
from nonebot_plugin_tetris_stats.version import __version__ from nonebot_plugin_tetris_stats.version import __version__ # noqa: PLC0415
if __version__ != '1.0.4': if __version__ != '1.0.4':
msg = '本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移' msg = '本迁移需要1.0.4版本, 请先锁定版本至1.0.4版本再执行本迁移'
logger.critical(msg) logger.critical(msg)
raise RuntimeError(msg) raise RuntimeError(msg)
from nonebot_plugin_tetris_stats.game_data_processor.schemas import BaseUser # type: ignore[import-untyped] from nonebot_plugin_tetris_stats.game_data_processor.schemas import ( # type: ignore[import-untyped] # noqa: PLC0415
BaseUser,
)
models: list[type[BaseUser]] = BaseUser.__subclasses__() models: list[type[BaseUser]] = BaseUser.__subclasses__()
@@ -103,6 +107,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_historicaldata', schema=None) as batch_op:
batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier')) batch_op.drop_index(batch_op.f('ix_nonebot_plugin_tetris_stats_historicaldata_user_unique_identifier'))

View File

@@ -0,0 +1,82 @@
"""migrate nonebot_plugin_tetris_stats_tetriouserconfig
迁移 ID: b96c8c18b79a
父迁移: 8459b2a4b7a3
创建时间: 2025-07-18 04:25:44.190319
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
# Alembic revision identifiers: this migration's ID and its parent in the graph.
revision: str = 'b96c8c18b79a'
down_revision: str | Sequence[str] | None = '8459b2a4b7a3'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy rows from the legacy user-config table into ``nb_t_io_u_cfg``.

    Returns silently when the legacy table is absent (fresh install) or
    empty, so the migration is safe to run on any database state.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    if 'nonebot_plugin_tetris_stats_tetriouserconfig' not in table_names:
        return
    # Reflect both the legacy and the new table from the live schema.
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetriouserconfig  # noqa: N806
    New = Base.classes.nb_t_io_u_cfg  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(100) streams rows in batches of 100 instead of one
            # per round trip (was yield_per(1)); this matches the sibling
            # bind migration and the 100-row commit cadence below.
            for i in db_session.query(Old).yield_per(100):
                db_session.add(
                    New(
                        id=i.id,
                        query_template=i.query_template,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches so an interruption loses at most 100 rows.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) database."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the data migration performed by ``upgrade`` is not reversed."""
    if name:
        return

View File

@@ -26,6 +26,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
Base = automap_base() # noqa: N806 Base = automap_base() # noqa: N806
connection = op.get_bind() connection = op.get_bind()
@@ -40,3 +42,5 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return

View File

@@ -0,0 +1,85 @@
"""migrate nonebot_plugin_tetris_stats_tetriohistoricaldata
迁移 ID: bbbdfd94e6fa
父迁移: d61e6ae36586
创建时间: 2025-07-18 00:42:33.730885
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
# Alembic revision identifiers: this migration's ID and its parent in the graph.
revision: str = 'bbbdfd94e6fa'
down_revision: str | Sequence[str] | None = 'd61e6ae36586'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy rows from the legacy TETR.IO historical-data table into ``nb_t_io_hist_data``.

    Returns silently when the legacy table is absent (fresh install) or
    empty, so the migration is safe to run on any database state.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    if 'nonebot_plugin_tetris_stats_tetriohistoricaldata' not in table_names:
        return
    # Reflect both the legacy and the new table from the live schema.
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tetriohistoricaldata  # noqa: N806
    New = Base.classes.nb_t_io_hist_data  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(100) streams rows in batches of 100 instead of one
            # per round trip (was yield_per(1)); this matches the sibling
            # bind migration and the 100-row commit cadence below.
            for i in db_session.query(Old).yield_per(100):
                db_session.add(
                    New(
                        id=i.id,
                        user_unique_identifier=i.user_unique_identifier,
                        api_type=i.api_type,
                        data=i.data,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches so an interruption loses at most 100 rows.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) database."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the data migration performed by ``upgrade`` is not reversed."""
    if name:
        return

View File

@@ -0,0 +1,87 @@
"""migrate nonebot_plugin_tetris_stats_triggerhistoricaldatav2
迁移 ID: bc6abd57928f
父迁移: ee76ae37d70a
创建时间: 2025-07-18 04:33:04.222045
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
# Alembic revision identifiers: this migration's ID and its parent in the graph.
revision: str = 'bc6abd57928f'
down_revision: str | Sequence[str] | None = 'ee76ae37d70a'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy rows from the legacy trigger-history table into ``nb_t_trigger_hist_v2``.

    Returns silently when the legacy table is absent (fresh install) or
    empty, so the migration is safe to run on any database state.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    if 'nonebot_plugin_tetris_stats_triggerhistoricaldatav2' not in table_names:
        return
    # Reflect both the legacy and the new table from the live schema.
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_triggerhistoricaldatav2  # noqa: N806
    New = Base.classes.nb_t_trigger_hist_v2  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(100) streams rows in batches of 100 instead of one
            # per round trip (was yield_per(1)); this matches the sibling
            # bind migration and the 100-row commit cadence below.
            for i in db_session.query(Old).yield_per(100):
                db_session.add(
                    New(
                        id=i.id,
                        trigger_time=i.trigger_time,
                        session_persist_id=i.session_persist_id,
                        game_platform=i.game_platform,
                        command_type=i.command_type,
                        command_args=i.command_args,
                        finish_time=i.finish_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches so an interruption loses at most 100 rows.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) database."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the data migration performed by ``upgrade`` is not reversed."""
    if name:
        return

View File

@@ -0,0 +1,85 @@
"""migrate nonebot_plugin_tetris_stats_tophistoricaldata
迁移 ID: ce073d279d19
父迁移: b96c8c18b79a
创建时间: 2025-07-18 04:28:13.820635
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
# Alembic revision identifiers: this migration's ID and its parent in the graph.
revision: str = 'ce073d279d19'
down_revision: str | Sequence[str] | None = 'b96c8c18b79a'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy rows from the legacy TOP historical-data table into ``nb_t_top_hist_data``.

    Returns silently when the legacy table is absent (fresh install) or
    empty, so the migration is safe to run on any database state.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    if 'nonebot_plugin_tetris_stats_tophistoricaldata' not in table_names:
        return
    # Reflect both the legacy and the new table from the live schema.
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_tophistoricaldata  # noqa: N806
    New = Base.classes.nb_t_top_hist_data  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(100) streams rows in batches of 100 instead of one
            # per round trip (was yield_per(1)); this matches the sibling
            # bind migration and the 100-row commit cadence below.
            for i in db_session.query(Old).yield_per(100):
                db_session.add(
                    New(
                        id=i.id,
                        user_unique_identifier=i.user_unique_identifier,
                        api_type=i.api_type,
                        data=i.data,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches so an interruption loses at most 100 rows.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) database."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the data migration performed by ``upgrade`` is not reversed."""
    if name:
        return

View File

@@ -26,6 +26,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_tetriohistoricaldata', schema=None) as batch_op:
batch_op.alter_column( batch_op.alter_column(
@@ -38,6 +40,8 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
# ### commands auto generated by Alembic - please adjust! ### # ### commands auto generated by Alembic - please adjust! ###
logger.warning('新数据可能不支持降级!') logger.warning('新数据可能不支持降级!')
logger.warning('请确认数据库内数据可以迁移到旧版本!') logger.warning('请确认数据库内数据可以迁移到旧版本!')

View File

@@ -0,0 +1,84 @@
"""migrate nonebot_plugin_tetris_stats_bind
迁移 ID: d61e6ae36586
父迁移: b2075a5ce371
创建时间: 2025-07-17 23:58:13.408384
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
# Alembic revision identifiers: this migration's ID and its parent in the graph.
revision: str = 'd61e6ae36586'
down_revision: str | Sequence[str] | None = 'b2075a5ce371'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy every row of the legacy bind table into ``nb_t_bind``."""
    connection = op.get_bind()
    # Nothing to do on a fresh database where the legacy table never existed.
    if 'nonebot_plugin_tetris_stats_bind' not in inspect(connection).get_table_names():
        return
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=connection)
    Old = Base.classes.nonebot_plugin_tetris_stats_bind  # noqa: N806
    New = Base.classes.nb_t_bind  # noqa: N806
    with Session(connection) as db_session:
        total = db_session.query(Old).count()
        if total == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        progress_columns = (
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        )
        with Progress(*progress_columns) as progress:
            task = progress.add_task('迁移数据...', total=total)
            # Stream the legacy rows in batches of 100.
            for row in db_session.query(Old).yield_per(100):
                new_row = New(
                    id=row.id,
                    user_id=row.user_id,
                    game_platform=row.game_platform,
                    game_account=row.game_account,
                )
                db_session.add(new_row)
                progress.update(task, advance=1)
                # Commit in batches so an interruption loses at most 100 rows.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) database."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the data migration performed by ``upgrade`` is not reversed."""
    if name:
        return

View File

@@ -0,0 +1,85 @@
"""migrate nonebot_plugin_tetris_stats_toshistoricaldata
迁移 ID: ee76ae37d70a
父迁移: ce073d279d19
创建时间: 2025-07-18 04:29:52.976624
"""
from __future__ import annotations
from typing import TYPE_CHECKING
from alembic import op
from nonebot.log import logger
from rich.progress import BarColumn, Progress, SpinnerColumn, TaskProgressColumn, TextColumn
from sqlalchemy import inspect
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
if TYPE_CHECKING:
from collections.abc import Sequence
# Alembic revision identifiers: this migration's ID and its parent in the graph.
revision: str = 'ee76ae37d70a'
down_revision: str | Sequence[str] | None = 'ce073d279d19'
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def data_migrate() -> None:
    """Copy rows from the legacy TOS historical-data table into ``nb_t_tos_hist_data``.

    Returns silently when the legacy table is absent (fresh install) or
    empty, so the migration is safe to run on any database state.
    """
    conn = op.get_bind()
    insp = inspect(conn)
    table_names = insp.get_table_names()
    if 'nonebot_plugin_tetris_stats_toshistoricaldata' not in table_names:
        return
    # Reflect both the legacy and the new table from the live schema.
    Base = automap_base()  # noqa: N806
    Base.prepare(autoload_with=conn)
    Old = Base.classes.nonebot_plugin_tetris_stats_toshistoricaldata  # noqa: N806
    New = Base.classes.nb_t_tos_hist_data  # noqa: N806
    with Session(conn) as db_session:
        count = db_session.query(Old).count()
        if count == 0:
            return
        logger.warning('tetris_stats: 正在迁移数据, 请不要关闭程序...')
        with Progress(
            SpinnerColumn(),
            TextColumn('[progress.description]{task.description}'),
            BarColumn(),
            TaskProgressColumn(),
        ) as progress:
            task = progress.add_task('迁移数据...', total=count)
            # yield_per(100) streams rows in batches of 100 instead of one
            # per round trip (was yield_per(1)); this matches the sibling
            # bind migration and the 100-row commit cadence below.
            for i in db_session.query(Old).yield_per(100):
                db_session.add(
                    New(
                        id=i.id,
                        user_unique_identifier=i.user_unique_identifier,
                        api_type=i.api_type,
                        data=i.data,
                        update_time=i.update_time,
                    )
                )
                progress.update(task, advance=1)
                # Commit in batches so an interruption loses at most 100 rows.
                if progress.tasks[task].completed % 100 == 0:
                    db_session.commit()
        db_session.commit()
    logger.success('tetris_stats: 数据迁移完成!')
def upgrade(name: str = '') -> None:
    """Run the data migration, but only for the default (unnamed) database."""
    if not name:
        data_migrate()
def downgrade(name: str = '') -> None:
    """No-op: the data migration performed by ``upgrade`` is not reversed."""
    if name:
        return

View File

@@ -25,6 +25,8 @@ depends_on: str | Sequence[str] | None = None
def upgrade(name: str = '') -> None: def upgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_file_hash') batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_file_hash')
batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_rank') batch_op.drop_index('ix_nonebot_plugin_tetris_stats_iorank_rank')
@@ -66,23 +68,25 @@ def upgrade(name: str = '') -> None:
def downgrade(name: str = '') -> None: def downgrade(name: str = '') -> None:
if name: if name:
return return
if op.get_bind().dialect.name == 'postgresql':
return
op.create_table( op.create_table(
'nonebot_plugin_tetris_stats_iorank', 'nonebot_plugin_tetris_stats_iorank',
sa.Column('id', sa.INTEGER(), nullable=False), sa.Column('id', sa.Integer(), nullable=False),
sa.Column('rank', sa.VARCHAR(length=2), nullable=False), sa.Column('rank', sa.String(length=2), nullable=False),
sa.Column('tr_line', sa.FLOAT(), nullable=False), sa.Column('tr_line', sa.Float(), nullable=False),
sa.Column('player_count', sa.INTEGER(), nullable=False), sa.Column('player_count', sa.Integer(), nullable=False),
sa.Column('low_pps', sa.JSON(), nullable=False), sa.Column('low_pps', sa.JSON(), nullable=False),
sa.Column('low_apm', sa.JSON(), nullable=False), sa.Column('low_apm', sa.JSON(), nullable=False),
sa.Column('low_vs', sa.JSON(), nullable=False), sa.Column('low_vs', sa.JSON(), nullable=False),
sa.Column('avg_pps', sa.FLOAT(), nullable=False), sa.Column('avg_pps', sa.Float(), nullable=False),
sa.Column('avg_apm', sa.FLOAT(), nullable=False), sa.Column('avg_apm', sa.Float(), nullable=False),
sa.Column('avg_vs', sa.FLOAT(), nullable=False), sa.Column('avg_vs', sa.Float(), nullable=False),
sa.Column('high_pps', sa.JSON(), nullable=False), sa.Column('high_pps', sa.JSON(), nullable=False),
sa.Column('high_apm', sa.JSON(), nullable=False), sa.Column('high_apm', sa.JSON(), nullable=False),
sa.Column('high_vs', sa.JSON(), nullable=False), sa.Column('high_vs', sa.JSON(), nullable=False),
sa.Column('update_time', sa.DATETIME(), nullable=False), sa.Column('update_time', sa.DateTime(), nullable=False),
sa.Column('file_hash', sa.VARCHAR(length=128), nullable=True), sa.Column('file_hash', sa.String(length=128), nullable=True),
sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_iorank'), sa.PrimaryKeyConstraint('id', name='pk_nonebot_plugin_tetris_stats_iorank'),
) )
with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op: with op.batch_alter_table('nonebot_plugin_tetris_stats_iorank', schema=None) as batch_op:

View File

@@ -65,6 +65,8 @@ class PydanticType(TypeDecorator):
class Bind(MappedAsDataclass, Model): class Bind(MappedAsDataclass, Model):
__tablename__ = 'nb_t_bind'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
user_id: Mapped[int] = mapped_column(index=True) user_id: Mapped[int] = mapped_column(index=True)
game_platform: Mapped[GameType] = mapped_column(String(32)) game_platform: Mapped[GameType] = mapped_column(String(32))
@@ -72,6 +74,8 @@ class Bind(MappedAsDataclass, Model):
class TriggerHistoricalDataV2(MappedAsDataclass, Model): class TriggerHistoricalDataV2(MappedAsDataclass, Model):
__tablename__ = 'nb_t_trigger_hist_v2'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
trigger_time: Mapped[datetime] = mapped_column(DateTime) trigger_time: Mapped[datetime] = mapped_column(DateTime)
session_persist_id: Mapped[int] session_persist_id: Mapped[int]

View File

@@ -22,3 +22,5 @@ class BaseUser(BaseModel, ABC, Generic[T]):
@abstractmethod @abstractmethod
def unique_identifier(self) -> str: def unique_identifier(self) -> str:
raise NotImplementedError raise NotImplementedError
__hash__ = BaseModel.__hash__

View File

@@ -11,6 +11,8 @@ from .typedefs import Records, Summaries
class TETRIOHistoricalData(MappedAsDataclass, Model): class TETRIOHistoricalData(MappedAsDataclass, Model):
__tablename__ = 'nb_t_io_hist_data'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True) user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
api_type: Mapped[Literal['User Info', Records, Summaries]] = mapped_column(String(32), index=True) api_type: Mapped[Literal['User Info', Records, Summaries]] = mapped_column(String(32), index=True)

View File

@@ -12,11 +12,15 @@ from .typedefs import Template
class TETRIOUserConfig(MappedAsDataclass, Model): class TETRIOUserConfig(MappedAsDataclass, Model):
__tablename__ = 'nb_t_io_u_cfg'
id: Mapped[int] = mapped_column(primary_key=True) id: Mapped[int] = mapped_column(primary_key=True)
query_template: Mapped[Template] = mapped_column(String(2)) query_template: Mapped[Template] = mapped_column(String(2))
class TETRIOLeagueStats(MappedAsDataclass, Model): class TETRIOLeagueStats(MappedAsDataclass, Model):
__tablename__ = 'nb_t_io_tl_stats'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
raw: Mapped[list['TETRIOLeagueHistorical']] = relationship(back_populates='stats', lazy='noload') raw: Mapped[list['TETRIOLeagueHistorical']] = relationship(back_populates='stats', lazy='noload')
fields: Mapped[list['TETRIOLeagueStatsField']] = relationship(back_populates='stats') fields: Mapped[list['TETRIOLeagueStatsField']] = relationship(back_populates='stats')
@@ -24,11 +28,13 @@ class TETRIOLeagueStats(MappedAsDataclass, Model):
class TETRIOLeagueHistorical(MappedAsDataclass, Model): class TETRIOLeagueHistorical(MappedAsDataclass, Model):
__tablename__ = 'nb_t_io_tl_hist'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
request_id: Mapped[UUID] = mapped_column(index=True) request_id: Mapped[UUID] = mapped_column(index=True)
data: Mapped[BySuccessModel] = mapped_column(PydanticType([], {BySuccessModel})) data: Mapped[BySuccessModel] = mapped_column(PydanticType([], {BySuccessModel}))
update_time: Mapped[datetime] = mapped_column(DateTime, index=True) update_time: Mapped[datetime] = mapped_column(DateTime, index=True)
stats_id: Mapped[int] = mapped_column(ForeignKey('nonebot_plugin_tetris_stats_tetrioleaguestats.id'), init=False) stats_id: Mapped[int] = mapped_column(ForeignKey('nb_t_io_tl_stats.id'), init=False)
stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='raw') stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='raw')
@@ -36,6 +42,8 @@ entry_type = PydanticType([], {Entry})
class TETRIOLeagueStatsField(MappedAsDataclass, Model): class TETRIOLeagueStatsField(MappedAsDataclass, Model):
__tablename__ = 'nb_t_io_tl_stats_field'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
rank: Mapped[ValidRank] = mapped_column(String(2), index=True) rank: Mapped[ValidRank] = mapped_column(String(2), index=True)
tr_line: Mapped[float] tr_line: Mapped[float]
@@ -49,5 +57,5 @@ class TETRIOLeagueStatsField(MappedAsDataclass, Model):
high_pps: Mapped[Entry] = mapped_column(entry_type) high_pps: Mapped[Entry] = mapped_column(entry_type)
high_apm: Mapped[Entry] = mapped_column(entry_type) high_apm: Mapped[Entry] = mapped_column(entry_type)
high_vs: Mapped[Entry] = mapped_column(entry_type) high_vs: Mapped[Entry] = mapped_column(entry_type)
stats_id: Mapped[int] = mapped_column(ForeignKey('nonebot_plugin_tetris_stats_tetrioleaguestats.id'), init=False) stats_id: Mapped[int] = mapped_column(ForeignKey('nb_t_io_tl_stats.id'), init=False)
stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='fields') stats: Mapped['TETRIOLeagueStats'] = relationship(back_populates='fields')

View File

@@ -5,7 +5,7 @@ from nonebot_plugin_alconna import Option, Subcommand, UniMessage
from nonebot_plugin_orm import get_session from nonebot_plugin_orm import get_session
from nonebot_plugin_uninfo import Uninfo from nonebot_plugin_uninfo import Uninfo
from nonebot_plugin_uninfo.orm import get_session_persist_id from nonebot_plugin_uninfo.orm import get_session_persist_id
from sqlalchemy import func, select from sqlalchemy import select
from sqlalchemy.orm import selectinload from sqlalchemy.orm import selectinload
from ....db import trigger from ....db import trigger
@@ -41,6 +41,7 @@ async def _(event_session: Uninfo, template: Template | None = None):
command_args=['--all'] + ([f'--template {template}'] if template is not None else []), command_args=['--all'] + ([f'--template {template}'] if template is not None else []),
): ):
async with get_session() as session: async with get_session() as session:
# 获取最新记录
latest_data = ( latest_data = (
await session.scalars( await session.scalars(
select(TETRIOLeagueStats) select(TETRIOLeagueStats)
@@ -49,19 +50,42 @@ async def _(event_session: Uninfo, template: Template | None = None):
.options(selectinload(TETRIOLeagueStats.fields)) .options(selectinload(TETRIOLeagueStats.fields))
) )
).one() ).one()
compare_data = (
await session.scalars( # 计算目标时间点 (24小时前)
target_time = latest_data.update_time - timedelta(hours=24)
# 查询目标时间点之前的最近记录
before = (
await session.scalar(
select(TETRIOLeagueStats) select(TETRIOLeagueStats)
.order_by( .where(TETRIOLeagueStats.update_time <= target_time)
func.abs( .order_by(TETRIOLeagueStats.update_time.desc())
func.julianday(TETRIOLeagueStats.update_time)
- func.julianday(latest_data.update_time - timedelta(hours=24))
)
)
.limit(1) .limit(1)
.options(selectinload(TETRIOLeagueStats.fields)) .options(selectinload(TETRIOLeagueStats.fields))
) )
).one() or latest_data
)
# 查询目标时间点之后的最近记录
after = (
await session.scalar(
select(TETRIOLeagueStats)
.where(TETRIOLeagueStats.update_time >= target_time) # 使用 >= 避免间隙
.order_by(TETRIOLeagueStats.update_time.asc())
.limit(1)
.options(selectinload(TETRIOLeagueStats.fields))
)
or latest_data
)
# 确定最接近的记录
compare_data = (
before
if abs((target_time - before.update_time).total_seconds())
< abs((target_time - after.update_time).total_seconds())
else after
)
match template: match template:
case 'v1' | None: case 'v1' | None:
await UniMessage.image(raw=await make_image_v1(latest_data, compare_data)).finish() await UniMessage.image(raw=await make_image_v1(latest_data, compare_data)).finish()

View File

@@ -7,7 +7,7 @@ from nonebot_plugin_alconna import Option, UniMessage
from nonebot_plugin_orm import get_session from nonebot_plugin_orm import get_session
from nonebot_plugin_uninfo import Uninfo from nonebot_plugin_uninfo import Uninfo
from nonebot_plugin_uninfo.orm import get_session_persist_id from nonebot_plugin_uninfo.orm import get_session_persist_id
from sqlalchemy import func, select from sqlalchemy import select
from sqlalchemy.orm import selectinload from sqlalchemy.orm import selectinload
from ....db import trigger from ....db import trigger
@@ -39,6 +39,7 @@ async def _(rank: ValidRank, event_session: Uninfo):
command_args=[f'--detail {rank}'], command_args=[f'--detail {rank}'],
): ):
async with get_session() as session: async with get_session() as session:
# 获取最新记录
latest_data = ( latest_data = (
await session.scalars( await session.scalars(
select(TETRIOLeagueStats) select(TETRIOLeagueStats)
@@ -47,19 +48,41 @@ async def _(rank: ValidRank, event_session: Uninfo):
.options(selectinload(TETRIOLeagueStats.fields)) .options(selectinload(TETRIOLeagueStats.fields))
) )
).one() ).one()
compare_data = (
await session.scalars( # 计算目标时间点 (24小时前)
target_time = latest_data.update_time - timedelta(hours=24)
# 查询目标时间点之前的最近记录
before = (
await session.scalar(
select(TETRIOLeagueStats) select(TETRIOLeagueStats)
.order_by( .where(TETRIOLeagueStats.update_time <= target_time)
func.abs( .order_by(TETRIOLeagueStats.update_time.desc())
func.julianday(TETRIOLeagueStats.update_time)
- func.julianday(latest_data.update_time - timedelta(hours=24))
)
)
.limit(1) .limit(1)
.options(selectinload(TETRIOLeagueStats.fields)) .options(selectinload(TETRIOLeagueStats.fields))
) )
).one() or latest_data # 回退到最新记录
)
# 查询目标时间点之后的最近记录
after = (
await session.scalar(
select(TETRIOLeagueStats)
.where(TETRIOLeagueStats.update_time >= target_time)
.order_by(TETRIOLeagueStats.update_time.asc())
.limit(1)
.options(selectinload(TETRIOLeagueStats.fields))
)
or latest_data # 回退到最新记录
)
# 确定最接近的记录
compare_data = (
before
if abs((target_time - before.update_time).total_seconds())
< abs((target_time - after.update_time).total_seconds())
else after
)
await UniMessage.image( await UniMessage.image(
raw=await make_image( raw=await make_image(
rank, rank,

View File

@@ -10,6 +10,8 @@ from .schemas.user_profile import UserProfile
class TOPHistoricalData(MappedAsDataclass, Model): class TOPHistoricalData(MappedAsDataclass, Model):
__tablename__ = 'nb_t_top_hist_data'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True) user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True)
api_type: Mapped[Literal['User Profile']] = mapped_column(String(16), index=True) api_type: Mapped[Literal['User Profile']] = mapped_column(String(16), index=True)

View File

@@ -11,8 +11,10 @@ from .schemas.user_profile import UserProfile
class TOSHistoricalData(MappedAsDataclass, Model): class TOSHistoricalData(MappedAsDataclass, Model):
__tablename__ = 'nb_t_tos_hist_data'
id: Mapped[int] = mapped_column(init=False, primary_key=True) id: Mapped[int] = mapped_column(init=False, primary_key=True)
user_unique_identifier: Mapped[str] = mapped_column(String(24), index=True) user_unique_identifier: Mapped[str] = mapped_column(String(256), index=True)
api_type: Mapped[Literal['User Info', 'User Profile']] = mapped_column(String(16), index=True) api_type: Mapped[Literal['User Info', 'User Profile']] = mapped_column(String(16), index=True)
data: Mapped[UserInfoSuccess | UserProfile] = mapped_column( data: Mapped[UserInfoSuccess | UserProfile] = mapped_column(
PydanticType(get_model=[], models={UserInfoSuccess, UserProfile}) PydanticType(get_model=[], models={UserInfoSuccess, UserProfile})

View File

@@ -1,7 +1,7 @@
# This file is @generated by tarina.lang CLI tool # This file is @generated by tarina.lang CLI tool
# It is not intended for manual editing. # It is not intended for manual editing.
# ruff: noqa: E402, F401, PLC0414 # ruff: noqa: E402
from pathlib import Path from pathlib import Path

View File

@@ -43,8 +43,8 @@ class Piece(Enum):
) )
I5 = ( I5 = (
(True, True, True, True, True), # fmt: skip (True, True, True, True, True),
) ) # fmt: skip
V = ( V = (
(True, False, False), (True, False, False),

8
pnpm-lock.yaml generated
View File

@@ -10,15 +10,15 @@ importers:
devDependencies: devDependencies:
prettier: prettier:
specifier: ^3.3.3 specifier: ^3.3.3
version: 3.5.3 version: 3.6.2
packages: packages:
prettier@3.5.3: prettier@3.6.2:
resolution: {integrity: sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==} resolution: {integrity: sha512-I7AIg5boAr5R0FFtJ6rCfD+LFsWHp81dolrFD8S79U9tb8Az2nGrJncnMSnys+bpQJfRUzqs9hnA81OAA3hCuQ==}
engines: {node: '>=14'} engines: {node: '>=14'}
hasBin: true hasBin: true
snapshots: snapshots:
prettier@3.5.3: {} prettier@3.6.2: {}

View File

@@ -2,7 +2,7 @@
[project] [project]
name = "nonebot-plugin-tetris-stats" name = "nonebot-plugin-tetris-stats"
version = "1.9.0" version = "1.10.2"
description = "一款基于 NoneBot2 的用于查询 Tetris 相关游戏数据的插件" description = "一款基于 NoneBot2 的用于查询 Tetris 相关游戏数据的插件"
readme = "README.md" readme = "README.md"
authors = [{ name = "shoucandanghehe", email = "wallfjjd@gmail.com" }] authors = [{ name = "shoucandanghehe", email = "wallfjjd@gmail.com" }]
@@ -59,6 +59,7 @@ dev = [
"nonebot-adapter-kaiheila>=0.3.4", "nonebot-adapter-kaiheila>=0.3.4",
"nonebot-adapter-onebot>=2.4.6", "nonebot-adapter-onebot>=2.4.6",
"nonebot-adapter-qq>=1.5.3", "nonebot-adapter-qq>=1.5.3",
"nonebot-plugin-orm[postgresql]>=0.8.2",
"nonebot-plugin-tarina-lang-turbo>=0.1.1", "nonebot-plugin-tarina-lang-turbo>=0.1.1",
"ruff>=0.7.1", "ruff>=0.7.1",
] ]
@@ -161,7 +162,7 @@ defineConstant = { PYDANTIC_V2 = true }
typeCheckingMode = "standard" typeCheckingMode = "standard"
[tool.bumpversion] [tool.bumpversion]
current_version = "1.9.0" current_version = "1.10.2"
tag = true tag = true
sign_tags = true sign_tags = true
tag_name = "{new_version}" tag_name = "{new_version}"

View File

@@ -7,7 +7,7 @@ from tests.fake_event import FakeGroupMessageEvent
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_invalid_name(app: App) -> None: async def test_invalid_name(app: App) -> None:
from nonebot_plugin_tetris_stats.games import alc from nonebot_plugin_tetris_stats.games import alc # noqa: PLC0415
raw_message = 'tstats tetrio bind 芜湖' raw_message = 'tstats tetrio bind 芜湖'
message = Message(raw_message) message = Message(raw_message)

2390
uv.lock generated

File diff suppressed because it is too large Load Diff