big update

felinae98 2022-05-31 22:06:21 +08:00
parent 05f1edd7fa
commit cf2ea83528
6 changed files with 80 additions and 98 deletions


@@ -3,7 +3,7 @@ from datetime import datetime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.sql.schema import Column, ForeignKey, UniqueConstraint
from sqlalchemy.sql.sqltypes import JSON, DateTime, Integer, String
from sqlalchemy.sql.sqltypes import JSON, DateTime, Integer, String, Time
Base = declarative_base()
@@ -29,11 +29,22 @@ class Target(Base):
platform_name = Column(String(20), nullable=False)
target = Column(String(1024), nullable=False)
target_name = Column(String(1024), nullable=False)
last_schedule_time = Column(
DateTime(timezone=True), default=datetime(year=2000, month=1, day=1)
)
default_schedule_weight = Column(Integer, default=10)
subscribes = relationship("Subscribe", back_populates="target")
time_weight = relationship("ScheduleTimeWeight", back_populates="target")
class ScheduleTimeWeight(Base):
__tablename__ = "schedule_time_weight"
id = Column(Integer, primary_key=True, autoincrement=True)
target_id = Column(Integer, ForeignKey(Target.id))
start_time = Column(Time)
end_time = Column(Time)
weight = Column(Integer)
target = relationship("Target", back_populates="time_weight")
class Subscribe(Base):
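A minimal usage sketch (assumed, not part of this commit) of how a scheduler could resolve the effective weight for a target from the new columns; get_current_weight is a hypothetical helper and session handling is omitted.

from datetime import time

def get_current_weight(target: Target, now: time) -> int:
    # Hypothetical helper: prefer a ScheduleTimeWeight window that covers
    # the current time, otherwise fall back to default_schedule_weight.
    for tw in target.time_weight:
        if tw.start_time <= now < tw.end_time:
            return tw.weight
    return target.default_schedule_weight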


@@ -0,0 +1,51 @@
"""add time-weight table
Revision ID: 5f3370328e44
Revises: a333d6224193
Create Date: 2022-05-31 22:05:13.235981
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "5f3370328e44"
down_revision = "a333d6224193"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"schedule_time_weight",
sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
sa.Column("target_id", sa.Integer(), nullable=True),
sa.Column("start_time", sa.Time(), nullable=True),
sa.Column("end_time", sa.Time(), nullable=True),
sa.Column("weight", sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
["target_id"],
["target.id"],
),
sa.PrimaryKeyConstraint("id"),
)
with op.batch_alter_table("target", schema=None) as batch_op:
batch_op.add_column(
sa.Column("default_schedule_weight", sa.Integer(), nullable=True)
)
batch_op.drop_column("last_schedule_time")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table("target", schema=None) as batch_op:
batch_op.add_column(
sa.Column("last_schedule_time", sa.DATETIME(), nullable=True)
)
batch_op.drop_column("default_schedule_weight")
op.drop_table("schedule_time_weight")
# ### end Alembic commands ###
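Assuming a standard Alembic setup, this revision is applied with alembic upgrade head and reverted to its declared down_revision with alembic downgrade a333d6224193; note that the downgrade re-adds last_schedule_time as a nullable column, so values dropped by the upgrade are not restored.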


@@ -1,93 +0,0 @@
import logging
import nonebot
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from nonebot import get_driver
from nonebot.adapters.onebot.v11.bot import Bot
from nonebot.log import LoguruHandler, logger
from .config import config
from .platform import platform_manager
from .plugin_config import plugin_config
from .send import do_send_msgs, send_msgs
from .types import UserSubInfo
scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")
@get_driver().on_startup
async def _start():
for platform_name, platform in platform_manager.items():
if platform.schedule_type in ["cron", "interval", "date"]:
logger.info(
f"start scheduler for {platform_name} with {platform.schedule_type} {platform.schedule_kw}"
)
scheduler.add_job(
fetch_and_send,
platform.schedule_type,
**platform.schedule_kw,
args=(platform_name,),
)
scheduler.configure({"apscheduler.timezone": "Asia/Shanghai"})
scheduler.start()
# get_driver().on_startup(_start)
async def fetch_and_send(target_type: str):
target = config.get_next_target(target_type)
if not target:
return
logger.debug(
"try to fecth new posts from {}, target: {}".format(target_type, target)
)
send_user_list = config.target_user_cache[target_type][target]
send_userinfo_list = list(
map(
lambda user: UserSubInfo(
user,
lambda target: config.get_sub_category(
target_type, target, user.user_type, user.user
),
lambda target: config.get_sub_tags(
target_type, target, user.user_type, user.user
),
),
send_user_list,
)
)
to_send = await platform_manager[target_type].do_fetch_new_post(
target, send_userinfo_list
)
if not to_send:
return
bot = nonebot.get_bot()
assert isinstance(bot, Bot)
for user, send_list in to_send:
for send_post in send_list:
logger.info("send to {}: {}".format(user, send_post))
if not bot:
logger.warning("no bot connected")
else:
await send_msgs(
bot, user.user, user.user_type, await send_post.generate_messages()
)
class CustomLogHandler(LoguruHandler):
def filter(self, record: logging.LogRecord):
return record.msg != (
'Execution of job "%s" '
"skipped: maximum number of running instances reached (%d)"
)
if plugin_config.bison_use_queue:
scheduler.add_job(do_send_msgs, "interval", seconds=0.3, coalesce=True)
aps_logger = logging.getLogger("apscheduler")
aps_logger.setLevel(30)
aps_logger.handlers.clear()
aps_logger.addHandler(CustomLogHandler())


@@ -0,0 +1 @@
from .scheduler_config import SchedulerConfig


@@ -58,7 +58,7 @@ async def fetch_and_send(target_type: str):
send_user_list,
)
)
to_send = await platform_manager[target_type].fetch_new_post(
to_send = await platform_manager[target_type].do_fetch_new_post(
target, send_userinfo_list
)
if not to_send:


@@ -0,0 +1,12 @@
from typing import Literal, Type
class SchedulerConfig:
schedule_type: Literal["date", "interval", "cron"]
schedule_setting: dict
registry: dict[str, Type["SchedulerConfig"]] = {}
def __init_subclass__(cls, *, name, **kwargs):
super().__init_subclass__(**kwargs)
cls.registry[name] = cls
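A minimal subclassing sketch (assumed, not part of this diff) showing how the __init_subclass__ hook fills SchedulerConfig.registry; the subclass name and settings below are hypothetical.

class ExampleSchedulerConfig(SchedulerConfig, name="example"):
    # Hypothetical subclass: passing the name keyword at class creation
    # time registers it in SchedulerConfig.registry under that key.
    schedule_type = "interval"
    schedule_setting = {"seconds": 30}

assert SchedulerConfig.registry["example"] is ExampleSchedulerConfig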