♻️ Clean up code and dependencies, and adjust the directory structure (#181)

* Remove the alembic dependency and raise the datastore requirement to 0.5.6 or later

* Relocate the migration script directory and delete the alembic files that are no longer needed

* Remove old_sched
uy/sun 2023-02-01 17:51:39 +08:00 committed by GitHub
parent 3d9d3b435f
commit 840bfb1bbd
14 changed files with 12 additions and 245 deletions

poetry.lock (generated)

@@ -78,14 +78,14 @@ files = [
 [[package]]
 name = "apscheduler"
-version = "3.9.1.post1"
+version = "3.10.0"
 description = "In-process task scheduler with Cron-like capabilities"
 category = "main"
 optional = false
-python-versions = "!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+python-versions = ">=3.6"
 files = [
-    {file = "APScheduler-3.9.1.post1-py2.py3-none-any.whl", hash = "sha256:c8c618241dbb2785ed5a687504b14cb1851d6f7b5a4edf3a51e39cc6a069967a"},
-    {file = "APScheduler-3.9.1.post1.tar.gz", hash = "sha256:b2bea0309569da53a7261bfa0ce19c67ddbfe151bda776a6a907579fdbd3eb2a"},
+    {file = "APScheduler-3.10.0-py3-none-any.whl", hash = "sha256:575299f20073c60a2cc9d4fa5906024cdde33c5c0ce6087c4e3c14be3b50fdd4"},
+    {file = "APScheduler-3.10.0.tar.gz", hash = "sha256:a49fc23269218416f0e41890eea7a75ed6b284f10630dcfe866ab659621a3696"},
 ]

 [package.dependencies]
@@ -95,14 +95,13 @@ six = ">=1.4.0"
 tzlocal = ">=2.0,<3.0.0 || >=4.0.0"

 [package.extras]
-asyncio = ["trollius"]
 doc = ["sphinx", "sphinx-rtd-theme"]
 gevent = ["gevent"]
 mongodb = ["pymongo (>=3.0)"]
 redis = ["redis (>=3.0)"]
 rethinkdb = ["rethinkdb (>=2.4.0)"]
-sqlalchemy = ["sqlalchemy (>=0.8)"]
-testing = ["mock", "pytest", "pytest-asyncio", "pytest-asyncio (<0.6)", "pytest-cov", "pytest-tornado5"]
+sqlalchemy = ["sqlalchemy (>=1.4)"]
+testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"]
 tornado = ["tornado (>=4.3)"]
 twisted = ["twisted"]
 zookeeper = ["kazoo"]
@@ -188,14 +187,14 @@ files = [
 [[package]]
 name = "beautifulsoup4"
-version = "4.11.1"
+version = "4.11.2"
 description = "Screen-scraping library"
 category = "main"
 optional = false
 python-versions = ">=3.6.0"
 files = [
-    {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"},
-    {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"},
+    {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash = "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"},
+    {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"},
 ]

 [package.dependencies]
@@ -2785,4 +2784,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.10,<4.0.0"
-content-hash = "c1940bc7a8ca387d00a96f337aaf2743391b5930a3e286a63576c50f909df136"
+content-hash = "753472a44dab408e42aa060752185f8c198fe9da9cd69a3f6fc8491ce273b8dd"

pyproject.toml

@@ -39,8 +39,7 @@ aiofiles = "^0.8.0"
 python-socketio = "^5.4.0"
 nonebot-adapter-onebot = "^2.0.0-beta.1"
 nonebot-plugin-htmlrender = ">=0.2.0"
-nonebot-plugin-datastore = "^0.5.2"
-alembic = "^1.7.6"
+nonebot-plugin-datastore = ">=0.5.6"

 [tool.poetry.dev-dependencies]
 ipdb = "^0.13.4"
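Note: the loosened specifier swaps the caret pin for a plain floor, so any nonebot-plugin-datastore release from 0.5.6 onward satisfies it. A hypothetical runtime guard (not part of this commit) expressing the same constraint:

from importlib.metadata import version

from packaging.version import Version

# Illustrative only: the distribution name and the 0.5.6 floor come from
# the pyproject.toml hunk above.
assert Version(version("nonebot-plugin-datastore")) >= Version("0.5.6")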

nonebot_bison/config/db_model.py

@@ -8,7 +8,7 @@ from sqlmodel import JSON, Column, Field, Relationship, UniqueConstraint
 from ..types import Category, Tag

 Model = get_plugin_data().Model
-get_plugin_data().set_migration_dir(Path(__file__).parent / "migrate" / "versions")
+get_plugin_data().set_migration_dir(Path(__file__).parent / "migrations")

 class User(Model, table=True):
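For context, a minimal sketch of what this module header looks like after the hunk; the import of get_plugin_data is an assumption, since the hunk begins below the import block:

from pathlib import Path

from nonebot_plugin_datastore import get_plugin_data  # assumed import path

plugin_data = get_plugin_data()
Model = plugin_data.Model  # declarative base supplied by the datastore plugin

# Register the bundled revision scripts with the datastore plugin, which
# handles the Alembic wiring itself from datastore >= 0.5.6 on; that is
# what lets this commit drop the direct alembic dependency and the
# env.py / script.py.mako files deleted below.
plugin_data.set_migration_dir(Path(__file__).parent / "migrations")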

nonebot_bison/config/migrate/README (deleted)

@@ -1 +0,0 @@
-Generic single-database configuration.

nonebot_bison/config/migrate/env.py (deleted)

@@ -1,113 +0,0 @@
import asyncio
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool
from sqlalchemy.engine.base import Connection

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name:
    fileConfig(config.config_file_name)  # type:ignore

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
import nonebot

try:
    nonebot.get_driver()
    __as_plugin = True
    target_metadata = None
except:
    __as_plugin = False
    nonebot.init()
    from nonebot_bison.config.db_model import Base

    target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migration(connection: Connection):
    if __as_plugin:
        context.configure(connection=connection)
    else:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,
            compare_type=True,
        )

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_async():
    from nonebot_plugin_datastore.db import get_engine

    connectable = get_engine()
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migration)


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    if not __as_plugin:
        connectable = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )
        with connectable.connect() as connection:
            do_run_migration(connection)
    else:
        # asyncio.run(run_migrations_async())
        asyncio.create_task(run_migrations_async())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
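The one non-boilerplate piece of this deleted env.py is its async "online" path: Alembic's migration runner is synchronous, so it is driven through the datastore plugin's async engine via Connection.run_sync(). A distilled sketch of that pattern, using only names that appear in the file above:

from alembic import context
from sqlalchemy.engine.base import Connection


def do_run_migration(connection: Connection):
    # Alembic only needs a sync-style Connection to run revisions.
    context.configure(connection=connection)
    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_async():
    from nonebot_plugin_datastore.db import get_engine

    connectable = get_engine()  # async engine owned by the datastore plugin
    async with connectable.connect() as connection:
        # Bridge async to sync: run the migration callback on the
        # connection's underlying synchronous counterpart.
        await connection.run_sync(do_run_migration)

From datastore >= 0.5.6 this wiring ships with the plugin itself, which is why the file can be deleted here.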

nonebot_bison/config/migrate/script.py.mako (deleted)

@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}

old_sched (deleted)

@@ -1,93 +0,0 @@
import logging

import nonebot
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from nonebot import get_driver
from nonebot.adapters.onebot.v11.bot import Bot
from nonebot.log import LoguruHandler, logger

from .config import config
from .platform import platform_manager
from .plugin_config import plugin_config
from .send import do_send_msgs, send_msgs
from .types import UserSubInfo

scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")


@get_driver().on_startup
async def _start():
    for platform_name, platform in platform_manager.items():
        if platform.schedule_type in ["cron", "interval", "date"]:
            logger.info(
                f"start scheduler for {platform_name} with {platform.schedule_type} {platform.schedule_kw}"
            )
            scheduler.add_job(
                fetch_and_send,
                platform.schedule_type,
                **platform.schedule_kw,
                args=(platform_name,),
            )
    scheduler.configure({"apscheduler.timezone": "Asia/Shanghai"})
    scheduler.start()


# get_driver().on_startup(_start)


async def fetch_and_send(target_type: str):
    target = config.get_next_target(target_type)
    if not target:
        return
    logger.debug(
        "try to fecth new posts from {}, target: {}".format(target_type, target)
    )
    send_user_list = config.target_user_cache[target_type][target]
    send_userinfo_list = list(
        map(
            lambda user: UserSubInfo(
                user,
                lambda target: config.get_sub_category(
                    target_type, target, user.user_type, user.user
                ),
                lambda target: config.get_sub_tags(
                    target_type, target, user.user_type, user.user
                ),
            ),
            send_user_list,
        )
    )
    to_send = await platform_manager[target_type].do_fetch_new_post(
        target, send_userinfo_list
    )
    if not to_send:
        return
    bot = nonebot.get_bot()
    assert isinstance(bot, Bot)
    for user, send_list in to_send:
        for send_post in send_list:
            logger.info("send to {}: {}".format(user, send_post))
            if not bot:
                logger.warning("no bot connected")
            else:
                await send_msgs(
                    bot, user.user, user.user_type, await send_post.generate_messages()
                )


class CustomLogHandler(LoguruHandler):
    def filter(self, record: logging.LogRecord):
        return record.msg != (
            'Execution of job "%s" '
            "skipped: maximum number of running instances reached (%d)"
        )


if plugin_config.bison_use_queue:
    scheduler.add_job(do_send_msgs, "interval", seconds=0.3, coalesce=True)

aps_logger = logging.getLogger("apscheduler")
aps_logger.setLevel(30)
aps_logger.handlers.clear()
aps_logger.addHandler(CustomLogHandler())
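For reference, the AsyncIOScheduler surface this deleted module relied on (add_job with "cron"/"interval"/"date" triggers, configure, start) is unchanged between APScheduler 3.9 and the 3.10.0 pinned in poetry.lock above, so the version bump does not affect the scheduler code that survives this commit. A minimal self-contained sketch of the same pattern, with a placeholder job in place of fetch_and_send:

import asyncio

from apscheduler.schedulers.asyncio import AsyncIOScheduler


async def tick():
    print("fetch and send")  # placeholder job body


async def main():
    scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")
    scheduler.add_job(tick, "interval", seconds=30)  # same trigger style as above
    scheduler.start()  # must be called while an asyncio event loop is running
    await asyncio.sleep(90)  # keep the loop alive long enough for a few runs


asyncio.run(main())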