mirror of
https://github.com/suyiiyii/nonebot-bison.git
synced 2026-05-09 18:27:56 +08:00
🚚 修改 nonebot_bison 项目结构 (#211)
* 🎨 修改 nonebot_bison 目录位置 * auto fix by pre-commit hooks * 🚚 fix frontend build target * 🚚 use soft link * Revert "🚚 use soft link" This reverts commit de21f79d5ae1bd5515b04f42a4138cb25ddf3e62. * 🚚 modify dockerfile --------- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: felinae98 <731499577@qq.com>
This commit is contained in:
@@ -0,0 +1,2 @@
|
||||
from .db_config import config
|
||||
from .utils import NoSuchSubscribeException, NoSuchTargetException, NoSuchUserException
|
||||
@@ -0,0 +1,273 @@
|
||||
import json
|
||||
import os
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from os import path
|
||||
from pathlib import Path
|
||||
from typing import DefaultDict, Literal, Mapping, TypedDict
|
||||
|
||||
import nonebot
|
||||
from nonebot.log import logger
|
||||
from tinydb import Query, TinyDB
|
||||
|
||||
from ..platform import platform_manager
|
||||
from ..plugin_config import plugin_config
|
||||
from ..types import Target, User
|
||||
from ..utils import Singleton
|
||||
from .utils import NoSuchSubscribeException, NoSuchUserException
|
||||
|
||||
# Platform keys known to the platform manager; legacy subscriptions whose
# target_type is not in this view are treated as stale (see update_send_cache).
supported_target_type = platform_manager.keys()
|
||||
|
||||
|
||||
def get_config_path() -> tuple[str, str]:
    """Resolve the legacy JSON config file path and its deprecation-marker path.

    Uses ``plugin_config.bison_config_path`` when set, otherwise ``<cwd>/data``.
    Creates the data directory if missing and renames a pre-existing
    ``hk_reporter.json`` to ``bison.json`` (one-time upgrade of the old name).

    Returns:
        ``(config_file, deprecation_marker_file)`` as absolute-ish path strings.
    """
    if plugin_config.bison_config_path:
        data_dir = plugin_config.bison_config_path
    else:
        data_dir = path.join(os.getcwd(), "data")
    if not path.isdir(data_dir):
        os.makedirs(data_dir)
    legacy_file = path.join(data_dir, "hk_reporter.json")
    config_file = path.join(data_dir, "bison.json")
    marker_file = path.join(data_dir, "bison.json.deprecated")
    # Migrate the old file name exactly once: only when the new name is absent.
    if os.path.exists(legacy_file) and not os.path.exists(config_file):
        os.rename(legacy_file, config_file)
    return config_file, marker_file
|
||||
|
||||
|
||||
def drop():
    """Retire the legacy TinyDB config: close it and write a deprecation marker.

    Returns:
        True when the legacy ``bison.json`` existed and was marked deprecated,
        False when there was nothing to retire.
    """
    if plugin_config.bison_config_path:
        data_dir = plugin_config.bison_config_path
    else:
        data_dir = path.join(os.getcwd(), "data")
    legacy_file = path.join(data_dir, "bison.json")
    marker_file = path.join(data_dir, "bison.json.deprecated")
    if not os.path.exists(legacy_file):
        return False
    # Shut the TinyDB handle and flag the singleton as unusable before
    # writing the marker, so nothing reads the file mid-retirement.
    config.db.close()
    config.available = False
    with open(marker_file, "w") as file:
        file.write(json.dumps({"migration_time": datetime.now().isoformat()}))
    return True
|
||||
|
||||
|
||||
class SubscribeContent(TypedDict):
    """Shape of one subscription entry inside a legacy TinyDB user record."""

    # Platform-specific identifier of the subscribed account/feed.
    target: str
    # Platform key, e.g. one of platform_manager's keys.
    target_type: str
    # Human-readable display name of the target.
    target_name: str
    # Enabled category ids for this subscription.
    cats: list[int]
    # Tag filters for this subscription.
    tags: list[str]
|
||||
|
||||
|
||||
class ConfigContent(TypedDict):
    """Shape of one legacy TinyDB user record: a user plus their subscriptions."""

    # Numeric user/group id.
    user: int
    # Chat kind the id refers to.
    user_type: Literal["group", "private"]
    # All subscriptions held by this user.
    subs: list[SubscribeContent]
|
||||
|
||||
|
||||
class Config(metaclass=Singleton):
    """Legacy TinyDB-backed subscription store (deprecated; kept only so its
    data can be migrated to the SQL database, then dropped)."""

    # Schema version written to the "kv" table; see start_up() migration.
    migrate_version = 2

    def __init__(self):
        self._do_init()

    def _do_init(self):
        """Open the TinyDB file if present and build in-memory caches.

        Sets ``self.available``: False when the deprecation marker exists or
        the JSON file is absent, True when the legacy DB was opened.
        """
        path, deprecated_marker_path = get_config_path()
        if Path(deprecated_marker_path).exists():
            # Already migrated away from the legacy store.
            self.available = False
        elif Path(path).exists():
            self.available = True
            self.db = TinyDB(path, encoding="utf-8")
            self.kv_config = self.db.table("kv")
            self.user_target = self.db.table("user_target")
            # target_type -> target -> subscribed users
            self.target_user_cache: dict[str, defaultdict[Target, list[User]]] = {}
            # target_type -> target -> "user_type-user" -> cats / tags
            self.target_user_cat_cache = {}
            self.target_user_tag_cache = {}
            # target_type -> list of targets, used for round-robin scheduling
            self.target_list = {}
            # Round-robin cursor per target_type (see get_next_target).
            self.next_index: DefaultDict[str, int] = defaultdict(lambda: 0)
        else:
            self.available = False

    def add_subscribe(
        self, user, user_type, target, target_name, target_type, cats, tags
    ):
        """Append a subscription to the user's record, creating the record if needed."""
        user_query = Query()
        query = (user_query.user == user) & (user_query.user_type == user_type)
        if user_data := self.user_target.get(query):
            # update: user record exists, append to its subs list
            subs: list = user_data.get("subs", [])
            subs.append(
                {
                    "target": target,
                    "target_type": target_type,
                    "target_name": target_name,
                    "cats": cats,
                    "tags": tags,
                }
            )
            self.user_target.update({"subs": subs}, query)
        else:
            # insert: first subscription for this user
            self.user_target.insert(
                {
                    "user": user,
                    "user_type": user_type,
                    "subs": [
                        {
                            "target": target,
                            "target_type": target_type,
                            "target_name": target_name,
                            "cats": cats,
                            "tags": tags,
                        }
                    ],
                }
            )
        self.update_send_cache()

    def list_subscribe(self, user, user_type) -> list[SubscribeContent]:
        """Return the user's subscriptions, or an empty list for unknown users."""
        query = Query()
        if user_sub := self.user_target.get(
            (query.user == user) & (query.user_type == user_type)
        ):
            return user_sub["subs"]
        return []

    def get_all_subscribe(self):
        """Return the raw TinyDB user_target table (callers iterate with .all())."""
        return self.user_target

    def del_subscribe(self, user, user_type, target, target_type):
        """Remove one subscription.

        Raises:
            NoSuchUserException: the user has no record at all.
            NoSuchSubscribeException: the user exists but has no such subscription.
        """
        user_query = Query()
        query = (user_query.user == user) & (user_query.user_type == user_type)
        if not (query_res := self.user_target.get(query)):
            raise NoSuchUserException()
        subs = query_res.get("subs", [])
        for idx, sub in enumerate(subs):
            if sub.get("target") == target and sub.get("target_type") == target_type:
                # Safe to pop while iterating: we return immediately after.
                subs.pop(idx)
                self.user_target.update({"subs": subs}, query)
                self.update_send_cache()
                return
        raise NoSuchSubscribeException()

    def update_subscribe(
        self, user, user_type, target, target_name, target_type, cats, tags
    ):
        """Overwrite name/cats/tags of an existing subscription.

        Raises:
            NoSuchUserException / NoSuchSubscribeException on missing records.
        """
        user_query = Query()
        query = (user_query.user == user) & (user_query.user_type == user_type)
        if user_data := self.user_target.get(query):
            # update
            subs: list = user_data.get("subs", [])
            find_flag = False
            for item in subs:
                if item["target"] == target and item["target_type"] == target_type:
                    item["target_name"], item["cats"], item["tags"] = (
                        target_name,
                        cats,
                        tags,
                    )
                    find_flag = True
                    break
            if not find_flag:
                raise NoSuchSubscribeException()
            self.user_target.update({"subs": subs}, query)
        else:
            raise NoSuchUserException()
        self.update_send_cache()

    def update_send_cache(self):
        """Rebuild all in-memory lookup caches from the user_target table.

        Subscriptions whose target_type is no longer supported are collected
        and deleted at the end (deletion re-enters this method via
        del_subscribe, so it must happen after the caches are assigned).
        """
        res = {target_type: defaultdict(list) for target_type in supported_target_type}
        cat_res = {
            target_type: defaultdict(lambda: defaultdict(list))
            for target_type in supported_target_type
        }
        tag_res = {
            target_type: defaultdict(lambda: defaultdict(list))
            for target_type in supported_target_type
        }
        to_del = []
        for user in self.user_target.all():
            for sub in user.get("subs", []):
                if not sub.get("target_type") in supported_target_type:
                    # Stale platform: remember it for deletion below.
                    to_del.append(
                        {
                            "user": user["user"],
                            "user_type": user["user_type"],
                            "target": sub["target"],
                            "target_type": sub["target_type"],
                        }
                    )
                    continue
                res[sub["target_type"]][sub["target"]].append(
                    User(user["user"], user["user_type"])
                )
                cat_res[sub["target_type"]][sub["target"]][
                    "{}-{}".format(user["user_type"], user["user"])
                ] = sub["cats"]
                tag_res[sub["target_type"]][sub["target"]][
                    "{}-{}".format(user["user_type"], user["user"])
                ] = sub["tags"]
        self.target_user_cache = res
        self.target_user_cat_cache = cat_res
        self.target_user_tag_cache = tag_res
        for target_type in self.target_user_cache:
            self.target_list[target_type] = list(
                self.target_user_cache[target_type].keys()
            )

        logger.info(f"Deleting {to_del}")
        for d in to_del:
            self.del_subscribe(**d)

    def get_sub_category(self, target_type, target, user_type, user):
        """Return the cached category list for one user's subscription."""
        return self.target_user_cat_cache[target_type][target][
            "{}-{}".format(user_type, user)
        ]

    def get_sub_tags(self, target_type, target, user_type, user):
        """Return the cached tag list for one user's subscription."""
        return self.target_user_tag_cache[target_type][target][
            "{}-{}".format(user_type, user)
        ]

    def get_next_target(self, target_type):
        """Round-robin over targets of a platform; None when there are none.

        FIXME: inserting or deleting a target shifts the round-robin cursor
        relative to the list, so some targets may be skipped or repeated once
        (not a big problem in practice).
        """
        if not self.target_list[target_type]:
            return None
        self.next_index[target_type] %= len(self.target_list[target_type])
        res = self.target_list[target_type][self.next_index[target_type]]
        self.next_index[target_type] += 1
        return res
|
||||
|
||||
|
||||
def start_up():
    """Initialize the legacy store and run in-place schema migrations.

    No-op when the legacy DB is unavailable. Otherwise stamps the current
    ``migrate_version`` into the kv table (inserting it on first run) and,
    for version-1 data, backfills empty ``cats``/``tags`` on every
    subscription before bumping the stored version.
    """
    config = Config()
    if not config.available:
        return
    if not (search_res := config.kv_config.search(Query().name == "version")):
        # Fresh legacy DB: record the current schema version.
        config.kv_config.insert({"name": "version", "value": config.migrate_version})
    elif search_res[0].get("value") < config.migrate_version:
        query = Query()
        version_query = query.name == "version"
        cur_version = search_res[0].get("value")
        if cur_version == 1:
            cur_version = 2
            # v1 -> v2: subscriptions gained cats/tags fields; default to empty.
            for user_conf in config.user_target.all():
                conf_id = user_conf.doc_id
                subs = user_conf["subs"]
                for sub in subs:
                    sub["cats"] = []
                    sub["tags"] = []
                config.user_target.update({"subs": subs}, doc_ids=[conf_id])
        config.kv_config.update({"value": config.migrate_version}, version_query)
        # do migration
    config.update_send_cache()
|
||||
|
||||
|
||||
# Module-level instance; the Singleton metaclass makes every Config() call
# return this same object.
config = Config()
|
||||
@@ -0,0 +1,70 @@
|
||||
from nonebot.log import logger
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
from sqlalchemy.ext.asyncio.session import AsyncSession
|
||||
|
||||
from .config_legacy import ConfigContent, config, drop
|
||||
from .db_model import Subscribe, Target, User
|
||||
|
||||
|
||||
async def data_migrate():
    """One-shot migration of legacy TinyDB subscriptions into the SQL database.

    Runs only while the legacy store is still available. Builds User, Target
    and Subscribe rows in memory (deduplicating targets across users via
    ``platform_target_map``), commits them in one session, then retires the
    legacy file with ``drop()``.
    """
    if config.available:
        logger.warning("You are still using legacy db, migrating to sqlite")
        all_subs: list[ConfigContent] = list(
            map(lambda item: ConfigContent(**item), config.get_all_subscribe().all())
        )
        async with AsyncSession(get_engine()) as sess:
            user_to_create = []
            subscribe_to_create = []
            # "target-platform" key -> (Target row, user_type, user) of first seen subscriber
            platform_target_map: dict[str, tuple[Target, str, int]] = {}
            for user in all_subs:
                db_user = User(uid=user["user"], type=user["user_type"])
                user_to_create.append(db_user)
                # Guards against the same user holding duplicate subscriptions.
                user_sub_set = set()
                for sub in user["subs"]:
                    target = sub["target"]
                    platform_name = sub["target_type"]
                    target_name = sub["target_name"]
                    key = f"{target}-{platform_name}"
                    if key in user_sub_set:
                        # a user subscribe a target twice
                        logger.error(
                            f"用户 {user['user_type']}-{user['user']} 订阅了 {platform_name}-{target_name} 两次,"
                            "随机采用了一个订阅"
                        )
                        continue
                    user_sub_set.add(key)
                    if key in platform_target_map.keys():
                        target_obj, ext_user_type, ext_user = platform_target_map[key]
                        if target_obj.target_name != target_name:
                            # Inconsistent legacy data: two names for one target.
                            # Keep the first; tell the operator to re-add manually.
                            logger.error(
                                f"你的旧版本数据库中存在数据不一致问题,请完成迁移后执行重新添加{platform_name}平台的{target}"
                                f"它的名字可能为{target_obj.target_name}或{target_name}"
                            )

                    else:
                        target_obj = Target(
                            platform_name=platform_name,
                            target_name=target_name,
                            target=target,
                        )
                        platform_target_map[key] = (
                            target_obj,
                            user["user_type"],
                            user["user"],
                        )
                    subscribe_obj = Subscribe(
                        user=db_user,
                        target=target_obj,
                        categories=sub["cats"],
                        tags=sub["tags"],
                    )
                    subscribe_to_create.append(subscribe_obj)
            sess.add_all(
                user_to_create
                + list(map(lambda x: x[0], platform_target_map.values()))
                + subscribe_to_create
            )
            await sess.commit()
        drop()
        logger.info("migrate success")
|
||||
@@ -0,0 +1,303 @@
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, time
|
||||
from typing import Awaitable, Callable, Optional, Sequence
|
||||
|
||||
from nonebot_plugin_datastore import create_session
|
||||
from sqlalchemy import delete, func, select
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import selectinload
|
||||
|
||||
from ..types import Category, PlatformWeightConfigResp, Tag
|
||||
from ..types import Target as T_Target
|
||||
from ..types import TimeWeightConfig
|
||||
from ..types import User as T_User
|
||||
from ..types import UserSubInfo, WeightConfig
|
||||
from .db_model import ScheduleTimeWeight, Subscribe, Target, User
|
||||
from .utils import NoSuchTargetException
|
||||
|
||||
|
||||
def _get_time():
|
||||
dt = datetime.now()
|
||||
cur_time = time(hour=dt.hour, minute=dt.minute, second=dt.second)
|
||||
return cur_time
|
||||
|
||||
|
||||
class SubscribeDupException(Exception):
    """Raised when a user tries to subscribe to a target they already follow."""
|
||||
|
||||
|
||||
class DBConfig:
    """SQLAlchemy-backed subscription configuration store.

    Replaces the legacy TinyDB ``Config``; all methods open their own async
    session via ``create_session()``.
    """

    def __init__(self):
        # Optional hooks fired when a target is first created / loses its
        # last subscriber, so the scheduler can start or stop polling it.
        self.add_target_hook: Optional[Callable[[str, T_Target], Awaitable]] = None
        self.delete_target_hook: Optional[Callable[[str, T_Target], Awaitable]] = None

    def register_add_target_hook(self, fun: Callable[[str, T_Target], Awaitable]):
        """Register a coroutine called as ``fun(platform_name, target)`` on new targets."""
        self.add_target_hook = fun

    def register_delete_target_hook(self, fun: Callable[[str, T_Target], Awaitable]):
        """Register a coroutine called as ``fun(platform_name, target)`` on emptied targets."""
        self.delete_target_hook = fun

    async def add_subscribe(
        self,
        user: int,
        user_type: str,
        target: T_Target,
        target_name: str,
        platform_name: str,
        cats: list[Category],
        tags: list[Tag],
    ):
        """Create a subscription, creating User and Target rows on demand.

        Raises:
            SubscribeDupException: the (user, target) pair already exists
                (detected via the UNIQUE constraint on subscribe rows).
        """
        async with create_session() as session:
            db_user_stmt = (
                select(User).where(User.uid == user).where(User.type == user_type)
            )
            db_user: Optional[User] = await session.scalar(db_user_stmt)
            if not db_user:
                db_user = User(uid=user, type=user_type)
                session.add(db_user)
            db_target_stmt = (
                select(Target)
                .where(Target.platform_name == platform_name)
                .where(Target.target == target)
            )
            db_target: Optional[Target] = await session.scalar(db_target_stmt)
            if not db_target:
                db_target = Target(
                    target=target, platform_name=platform_name, target_name=target_name
                )
                if self.add_target_hook:
                    await self.add_target_hook(platform_name, target)
            else:
                # Refresh the display name of an existing target.
                db_target.target_name = target_name
            subscribe = Subscribe(
                categories=cats,
                tags=tags,
                user=db_user,
                target=db_target,
            )
            session.add(subscribe)
            try:
                await session.commit()
            except IntegrityError as e:
                if len(e.args) > 0 and "UNIQUE constraint failed" in e.args[0]:
                    raise SubscribeDupException() from e
                raise e

    async def list_subscribe(self, user: int, user_type: str) -> Sequence[Subscribe]:
        """Return all subscriptions of one user, with targets eagerly loaded."""
        async with create_session() as session:
            query_stmt = (
                select(Subscribe)
                .where(User.type == user_type, User.uid == user)
                .join(User)
                .options(selectinload(Subscribe.target))  # type:ignore
            )
            subs = (await session.scalars(query_stmt)).all()
            return subs

    async def del_subscribe(
        self, user: int, user_type: str, target: str, platform_name: str
    ):
        """Delete one subscription; fires the delete-target hook when the
        target has no subscribers left."""
        async with create_session() as session:
            user_obj = await session.scalar(
                select(User).where(User.uid == user, User.type == user_type)
            )
            target_obj = await session.scalar(
                select(Target).where(
                    Target.platform_name == platform_name, Target.target == target
                )
            )
            await session.execute(
                delete(Subscribe).where(
                    Subscribe.user == user_obj, Subscribe.target == target_obj
                )
            )
            target_count = await session.scalar(
                select(func.count())
                .select_from(Subscribe)
                .where(Subscribe.target == target_obj)
            )
            if target_count == 0:
                # delete empty target
                # await session.delete(target_obj)
                if self.delete_target_hook:
                    await self.delete_target_hook(platform_name, T_Target(target))
            await session.commit()

    async def update_subscribe(
        self,
        user: int,
        user_type: str,
        target: str,
        target_name: str,
        platform_name: str,
        cats: list,
        tags: list,
    ):
        """Overwrite tags/categories of one subscription and the target's name."""
        async with create_session() as sess:
            subscribe_obj: Subscribe = await sess.scalar(
                select(Subscribe)
                .where(
                    User.uid == user,
                    User.type == user_type,
                    Target.target == target,
                    Target.platform_name == platform_name,
                )
                .join(User)
                .join(Target)
                .options(selectinload(Subscribe.target))  # type:ignore
            )
            subscribe_obj.tags = tags  # type:ignore
            subscribe_obj.categories = cats  # type:ignore
            subscribe_obj.target.target_name = target_name
            await sess.commit()

    async def get_platform_target(self, platform_name: str) -> Sequence[Target]:
        """Return targets of a platform that have at least one subscription."""
        async with create_session() as sess:
            # Distinct target ids drawn from subscribe rows filter out
            # targets nobody follows.
            subq = select(Subscribe.target_id).distinct().subquery()
            query = (
                select(Target).join(subq).where(Target.platform_name == platform_name)
            )
            return (await sess.scalars(query)).all()

    async def get_time_weight_config(
        self, target: T_Target, platform_name: str
    ) -> WeightConfig:
        """Return the default weight and all time-window weights of one target."""
        async with create_session() as sess:
            time_weight_conf = (
                await sess.scalars(
                    select(ScheduleTimeWeight)
                    .where(
                        Target.platform_name == platform_name, Target.target == target
                    )
                    .join(Target)
                )
            ).all()
            target_obj = await sess.scalar(
                select(Target).where(
                    Target.platform_name == platform_name, Target.target == target
                )
            )
            return WeightConfig(
                default=target_obj.default_schedule_weight,
                time_config=[
                    TimeWeightConfig(
                        start_time=time_conf.start_time,
                        end_time=time_conf.end_time,
                        weight=time_conf.weight,
                    )
                    for time_conf in time_weight_conf
                ],
            )

    async def update_time_weight_config(
        self, target: T_Target, platform_name: str, conf: WeightConfig
    ):
        """Replace a target's weight config wholesale.

        Raises:
            NoSuchTargetException: no such (platform, target) row exists.
        """
        async with create_session() as sess:
            target_obj = await sess.scalar(
                select(Target).where(
                    Target.platform_name == platform_name, Target.target == target
                )
            )
            if not target_obj:
                raise NoSuchTargetException()
            target_id = target_obj.id
            target_obj.default_schedule_weight = conf.default
            # Drop all existing windows, then recreate from the new config.
            delete_statement = delete(ScheduleTimeWeight).where(
                ScheduleTimeWeight.target_id == target_id
            )
            await sess.execute(delete_statement)
            for time_conf in conf.time_config:
                new_conf = ScheduleTimeWeight(
                    start_time=time_conf.start_time,
                    end_time=time_conf.end_time,
                    weight=time_conf.weight,
                    target=target_obj,
                )
                sess.add(new_conf)

            await sess.commit()

    async def get_current_weight_val(self, platform_list: list[str]) -> dict[str, int]:
        """Return the effective weight of every target on the given platforms,
        keyed by ``"{platform}-{target}"``, evaluated at the current time."""
        res = {}
        cur_time = _get_time()
        async with create_session() as sess:
            targets = (
                await sess.scalars(
                    select(Target)
                    .where(Target.platform_name.in_(platform_list))
                    .options(selectinload(Target.time_weight))
                )
            ).all()
            for target in targets:
                key = f"{target.platform_name}-{target.target}"
                weight = target.default_schedule_weight
                # First matching window [start_time, end_time) wins.
                for time_conf in target.time_weight:
                    if (
                        time_conf.start_time <= cur_time
                        and time_conf.end_time > cur_time
                    ):
                        weight = time_conf.weight
                        break
                res[key] = weight
        return res

    async def get_platform_target_subscribers(
        self, platform_name: str, target: T_Target
    ) -> list[UserSubInfo]:
        """Return every subscriber of one target with their cats and tags."""
        async with create_session() as sess:
            query = (
                select(Subscribe)
                .join(Target)
                .where(Target.platform_name == platform_name, Target.target == target)
                .options(selectinload(Subscribe.user))
            )
            subscribes = (await sess.scalars(query)).all()
            return [
                UserSubInfo(
                    T_User(subscribe.user.uid, subscribe.user.type),
                    subscribe.categories,
                    subscribe.tags,
                )
                for subscribe in subscribes
            ]

    async def get_all_weight_config(
        self,
    ) -> dict[str, dict[str, PlatformWeightConfigResp]]:
        """Return every target's weight config, keyed by platform then target.

        Bug fix: a config entry is now created for *every* target. Previously
        a ``platform_name not in res`` guard meant only the first target of
        each platform was recorded, so later targets were missing and
        appending their time configs raised KeyError.
        """
        res: dict[str, dict[str, PlatformWeightConfigResp]] = defaultdict(dict)
        async with create_session() as sess:
            query = select(Target)
            targets = (await sess.scalars(query)).all()
            query = select(ScheduleTimeWeight).options(
                selectinload(ScheduleTimeWeight.target)
            )
            time_weights = (await sess.scalars(query)).all()

            for target in targets:
                platform_name = target.platform_name
                res[platform_name][target.target] = PlatformWeightConfigResp(
                    target=T_Target(target.target),
                    target_name=target.target_name,
                    platform_name=platform_name,
                    weight=WeightConfig(
                        default=target.default_schedule_weight, time_config=[]
                    ),
                )

            for time_weight_config in time_weights:
                platform_name = time_weight_config.target.platform_name
                target = time_weight_config.target.target
                res[platform_name][target].weight.time_config.append(
                    TimeWeightConfig(
                        start_time=time_weight_config.start_time,
                        end_time=time_weight_config.end_time,
                        weight=time_weight_config.weight,
                    )
                )
        return res
|
||||
|
||||
|
||||
# Shared module-level store instance imported by the rest of the plugin.
config = DBConfig()
|
||||
@@ -0,0 +1,66 @@
|
||||
import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from nonebot_plugin_datastore import get_plugin_data
|
||||
from sqlalchemy import JSON, ForeignKey, String, UniqueConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from ..types import Category, Tag
|
||||
|
||||
# Declarative base supplied by nonebot-plugin-datastore for this plugin's tables.
Model = get_plugin_data().Model
# Point Alembic at the migration scripts shipped next to this module.
get_plugin_data().set_migration_dir(Path(__file__).parent / "migrations")
|
||||
|
||||
|
||||
class User(Model):
    """A push recipient, unique per (type, uid)."""

    __table_args__ = (UniqueConstraint("type", "uid", name="unique-user-constraint"),)

    id: Mapped[int] = mapped_column(primary_key=True)
    # Chat kind; legacy data used "group" / "private" -- TODO confirm current values.
    type: Mapped[str] = mapped_column(String(20))
    # Numeric user/group id on the chat platform.
    uid: Mapped[int]

    subscribes: Mapped[list["Subscribe"]] = relationship(back_populates="user")
|
||||
|
||||
|
||||
class Target(Model):
    """A subscribable account/feed, unique per (target, platform_name)."""

    __table_args__ = (
        UniqueConstraint("target", "platform_name", name="unique-target-constraint"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    # Key of the platform this target lives on.
    platform_name: Mapped[str] = mapped_column(String(20))
    # Platform-specific identifier of the account/feed.
    target: Mapped[str] = mapped_column(String(1024))
    # Human-readable display name.
    target_name: Mapped[str] = mapped_column(String(1024))
    # Base scheduling weight; time_weight windows override it (see db_config).
    default_schedule_weight: Mapped[int | None] = mapped_column(default=10)

    subscribes: Mapped[list["Subscribe"]] = relationship(back_populates="target")
    time_weight: Mapped[list["ScheduleTimeWeight"]] = relationship(
        back_populates="target"
    )
|
||||
|
||||
|
||||
class ScheduleTimeWeight(Model):
    """A scheduling weight that applies to a target within a daily time window."""

    id: Mapped[int] = mapped_column(primary_key=True)
    target_id: Mapped[int | None] = mapped_column(ForeignKey("nonebot_bison_target.id"))
    # Window bounds; consumers treat it as [start_time, end_time)
    # (see DBConfig.get_current_weight_val).
    start_time: Mapped[datetime.time | None]
    end_time: Mapped[datetime.time | None]
    weight: Mapped[int | None]

    target: Mapped[Target] = relationship(back_populates="time_weight")

    class Config:
        # NOTE(review): pydantic-style config allowing datetime.time fields
        # when this model is validated externally -- confirm it is still needed.
        arbitrary_types_allowed = True
|
||||
|
||||
|
||||
class Subscribe(Model):
    """Join row linking a User to a Target, unique per (target_id, user_id)."""

    __table_args__ = (
        UniqueConstraint("target_id", "user_id", name="unique-subscribe-constraint"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    target_id: Mapped[int | None] = mapped_column(ForeignKey("nonebot_bison_target.id"))
    user_id: Mapped[int | None] = mapped_column(ForeignKey("nonebot_bison_user.id"))
    # Enabled category ids, stored as JSON.
    categories: Mapped[list[Category]] = mapped_column(JSON)
    # Tag filters, stored as JSON.
    tags: Mapped[list[Tag]] = mapped_column(JSON)

    target: Mapped[Target] = relationship(back_populates="subscribes")
    user: Mapped[User] = relationship(back_populates="subscribes")
|
||||
@@ -0,0 +1,60 @@
|
||||
"""init db
|
||||
|
||||
Revision ID: 0571870f5222
|
||||
Revises:
|
||||
Create Date: 2022-03-21 19:18:13.762626
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "0571870f5222"
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the initial target / user / subscribe tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "target",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("platform_name", sa.String(length=20), nullable=False),
        sa.Column("target", sa.String(length=1024), nullable=False),
        sa.Column("target_name", sa.String(length=1024), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "user",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("type", sa.String(length=20), nullable=False),
        sa.Column("uid", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "subscribe",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.Column("user_id", sa.Integer(), nullable=True),
        # cats/tags were originally stored as strings; converted to JSON in 4a46ba54a3f3.
        sa.Column("categories", sa.String(length=1024), nullable=True),
        sa.Column("tags", sa.String(length=1024), nullable=True),
        sa.ForeignKeyConstraint(
            ["target_id"],
            ["target.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    """Drop the three initial tables, children first to satisfy foreign keys."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("subscribe")
    op.drop_table("user")
    op.drop_table("target")
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,55 @@
|
||||
"""alter type
|
||||
|
||||
Revision ID: 4a46ba54a3f3
|
||||
Revises: c97c445e2bdb
|
||||
Create Date: 2022-03-27 21:50:10.911649
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "4a46ba54a3f3"
|
||||
down_revision = "c97c445e2bdb"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Convert subscribe.categories and subscribe.tags from VARCHAR to JSON."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.alter_column(
            "categories",
            existing_type=sa.VARCHAR(length=1024),
            type_=sa.JSON(),
            existing_nullable=True,
            # PostgreSQL needs an explicit cast for VARCHAR -> JSON.
            postgresql_using="categories::json",
        )
        batch_op.alter_column(
            "tags",
            existing_type=sa.VARCHAR(length=1024),
            type_=sa.JSON(),
            existing_nullable=True,
            postgresql_using="tags::json",
        )

    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    """Revert subscribe.categories and subscribe.tags from JSON back to VARCHAR."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.alter_column(
            "tags",
            existing_type=sa.JSON(),
            type_=sa.VARCHAR(length=1024),
            existing_nullable=True,
        )
        batch_op.alter_column(
            "categories",
            existing_type=sa.JSON(),
            type_=sa.VARCHAR(length=1024),
            existing_nullable=True,
        )

    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,33 @@
|
||||
"""rename tables
|
||||
|
||||
Revision ID: 5da28f6facb3
|
||||
Revises: 5f3370328e44
|
||||
Create Date: 2023-01-15 19:04:54.987491
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "5da28f6facb3"
|
||||
down_revision = "5f3370328e44"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Prefix all plugin tables with ``nonebot_bison_`` (datastore convention)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.rename_table("target", "nonebot_bison_target")
    op.rename_table("user", "nonebot_bison_user")
    op.rename_table("schedule_time_weight", "nonebot_bison_scheduletimeweight")
    op.rename_table("subscribe", "nonebot_bison_subscribe")
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Restore the original unprefixed table names (reverse order of upgrade)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.rename_table("nonebot_bison_subscribe", "subscribe")
    op.rename_table("nonebot_bison_scheduletimeweight", "schedule_time_weight")
    op.rename_table("nonebot_bison_user", "user")
    op.rename_table("nonebot_bison_target", "target")
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,51 @@
|
||||
"""add time-weight table
|
||||
|
||||
Revision ID: 5f3370328e44
|
||||
Revises: a333d6224193
|
||||
Create Date: 2022-05-31 22:05:13.235981
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "5f3370328e44"
|
||||
down_revision = "a333d6224193"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Add the schedule_time_weight table and swap target.last_schedule_time
    for target.default_schedule_weight."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "schedule_time_weight",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.Column("start_time", sa.Time(), nullable=True),
        sa.Column("end_time", sa.Time(), nullable=True),
        sa.Column("weight", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["target_id"],
            ["target.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("default_schedule_weight", sa.Integer(), nullable=True)
        )
        batch_op.drop_column("last_schedule_time")

    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    """Re-add target.last_schedule_time, drop the weight column and table."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.add_column(
            # NOTE: recreated as plain DATETIME; the timezone=True flag from
            # revision a333d6224193 is not restored here.
            sa.Column("last_schedule_time", sa.DATETIME(), nullable=True)
        )
        batch_op.drop_column("default_schedule_weight")

    op.drop_table("schedule_time_weight")
    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,33 @@
|
||||
"""add last scheduled time
|
||||
|
||||
Revision ID: a333d6224193
|
||||
Revises: 4a46ba54a3f3
|
||||
Create Date: 2022-03-29 21:01:38.213153
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "a333d6224193"
|
||||
down_revision = "4a46ba54a3f3"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Apply migration a333d6224193: add ``target.last_schedule_time``.

    The column is a timezone-aware DateTime and is nullable so existing
    rows need no backfill.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("last_schedule_time", sa.DateTime(timezone=True), nullable=True)
        )

    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    """Revert migration a333d6224193: drop ``target.last_schedule_time``."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.drop_column("last_schedule_time")

    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,52 @@
|
||||
"""alter_json_not_null
|
||||
|
||||
Revision ID: bd92923c218f
|
||||
Revises: 5da28f6facb3
|
||||
Create Date: 2023-03-02 14:04:16.492133
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.automap import automap_base
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
# revision identifiers, used by Alembic.
revision = "bd92923c218f"  # id of this migration
down_revision = "5da28f6facb3"  # parent migration in the revision chain
branch_labels = None
depends_on = None
|
||||
|
||||
|
||||
def set_default_value():
    """Backfill NULL ``categories``/``tags`` columns with empty lists.

    Runs before the NOT NULL constraints are added in :func:`upgrade`, so
    that every existing row satisfies the new constraints.
    """
    # Reflect the live schema via automap instead of importing project
    # models, so the migration stays valid even after the ORM models change.
    Base = automap_base()
    Base.prepare(autoload_with=op.get_bind())
    Subscribe = Base.classes.nonebot_bison_subscribe
    with Session(op.get_bind()) as session:
        select_statement = select(Subscribe)
        results = session.scalars(select_statement)
        for subscribe in results:
            if subscribe.categories is None:
                subscribe.categories = []
            if subscribe.tags is None:
                subscribe.tags = []
        # Single commit after all rows are patched.
        session.commit()
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Apply migration bd92923c218f: make JSON columns NOT NULL.

    ``categories`` and ``tags`` on ``nonebot_bison_subscribe`` become
    non-nullable after NULLs are backfilled with empty lists.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Data fix must run first, otherwise rows holding NULL would violate
    # the new NOT NULL constraints.
    set_default_value()
    with op.batch_alter_table("nonebot_bison_subscribe", schema=None) as batch_op:
        batch_op.alter_column("categories", existing_type=sa.JSON(), nullable=False)
        batch_op.alter_column("tags", existing_type=sa.JSON(), nullable=False)

    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Revert migration bd92923c218f: relax the columns back to nullable."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("nonebot_bison_subscribe", schema=None) as batch_op:
        batch_op.alter_column("tags", existing_type=sa.JSON(), nullable=True)
        batch_op.alter_column("categories", existing_type=sa.JSON(), nullable=True)

    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,47 @@
|
||||
"""add constraint
|
||||
|
||||
Revision ID: c97c445e2bdb
|
||||
Revises: 0571870f5222
|
||||
Create Date: 2022-03-26 19:46:50.910721
|
||||
|
||||
"""
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
|
||||
# revision identifiers, used by Alembic.
revision = "c97c445e2bdb"  # id of this migration
down_revision = "0571870f5222"  # parent migration in the revision chain
branch_labels = None
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Apply migration c97c445e2bdb: add uniqueness constraints.

    Enforces one subscribe row per (target_id, user_id), one target row
    per (target, platform_name), and one user row per (type, uid).
    """
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.create_unique_constraint(
            "unique-subscribe-constraint", ["target_id", "user_id"]
        )

    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.create_unique_constraint(
            "unique-target-constraint", ["target", "platform_name"]
        )

    with op.batch_alter_table("user", schema=None) as batch_op:
        batch_op.create_unique_constraint("unique-user-constraint", ["type", "uid"])

    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    """Revert migration c97c445e2bdb: drop the three unique constraints."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Constraints are dropped in the reverse order of creation.
    with op.batch_alter_table("user", schema=None) as batch_op:
        batch_op.drop_constraint("unique-user-constraint", type_="unique")

    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.drop_constraint("unique-target-constraint", type_="unique")

    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.drop_constraint("unique-subscribe-constraint", type_="unique")

    # ### end Alembic commands ###
|
||||
@@ -0,0 +1,10 @@
|
||||
class NoSuchUserException(Exception):
    """Raised when a lookup refers to a user that does not exist."""
|
||||
|
||||
|
||||
class NoSuchSubscribeException(Exception):
    """Raised when a lookup refers to a subscription that does not exist."""
|
||||
|
||||
|
||||
class NoSuchTargetException(Exception):
    """Raised when a lookup refers to a target that does not exist."""
|
||||
Reference in New Issue
Block a user