Mirror of https://github.com/suyiiyii/nonebot-bison.git, commit f2397aa51c

alembic.ini (new file, 109 lines)
@ -0,0 +1,109 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = src/plugins/nonebot_bison/config/migrate

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = ./src/plugins

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to src/plugins/nonebot_bison/config/migrate/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:src/plugins/nonebot_bison/config/migrate/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os  # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = sqlite:///data/data.db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

hooks = pre-commit

pre-commit.type = console_scripts
pre-commit.entrypoint = pre-commit
pre-commit.options = run --files REVISION_SCRIPT_FILENAME
pre-commit.cwd = %(here)s

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
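
Note: this alembic.ini is consumed both by the plugin's programmatic upgrade (see config/db.py below) and by the standard Alembic CLI during development. A minimal sketch of the latter, assuming it is run from the repository root where this file lives (the command API is standard Alembic, not something defined in this commit):

    from alembic import command
    from alembic.config import Config

    cfg = Config("alembic.ini")  # picks up script_location and sqlalchemy.url from the file above
    command.revision(cfg, message="example change", autogenerate=True)  # draft a new migration
    command.upgrade(cfg, "head")  # apply pending migrations to data/data.db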
poetry.lock (generated, 707 lines changed; file diff suppressed because it is too large)
@ -37,6 +37,8 @@ aiofiles = "^0.8.0"
python-socketio = "^5.4.0"
nonebot-adapter-onebot = "^2.0.0-beta.1"
nonebot-plugin-htmlrender = "^0.0.4"
nonebot-plugin-datastore = "^0.3.0"
alembic = "^1.7.6"

[tool.poetry.dev-dependencies]
ipdb = "^0.13.4"
@ -50,6 +52,7 @@ isort = "^5.10.1"
pre-commit = "^2.17.0"
nb-cli = "^0.6.6"
flaky = "^3.7.0"
sqlalchemy-stubs = "^0.4"

[build-system]
requires = ["poetry>=0.12"]
@ -1,5 +1,8 @@
from nonebot.plugin import require

from . import (
    admin_page,
    bootstrap,
    config,
    config_manager,
    platform,
@ -11,6 +14,20 @@ from . import (
)
from .plugin_config import plugin_config

require("nonebot_plugin_localstore")

__help__version__ = "0.4.3"
__help__plugin__name__ = "nonebot_bison"
__usage__ = f"本bot可以提供b站、微博等社交媒体的消息订阅,详情请查看本bot文档,或者{'at本bot' if plugin_config.bison_to_me else '' }发送“添加订阅”订阅第一个帐号,发送“查询订阅”或“删除订阅”管理订阅"

__all__ = [
    "admin_page",
    "config",
    "config_manager",
    "post",
    "scheduler",
    "send",
    "platform",
    "types",
    "utils",
]
@ -1,7 +1,7 @@
import nonebot
from nonebot.adapters.onebot.v11.bot import Bot

from ..config import Config, NoSuchSubscribeException, NoSuchUserException
from ..config import NoSuchSubscribeException, NoSuchUserException, config
from ..platform import check_sub_target, platform_manager
from .jwt import pack_jwt
from .token_manager import token_manager
@ -83,7 +83,6 @@ async def get_subs_info(jwt_obj: dict):
    res = {}
    for group in groups:
        group_id = group["id"]
        config = Config()
        subs = list(
            map(
                lambda sub: {
@ -112,7 +111,6 @@ async def add_group_sub(
    cats: list[int],
    tags: list[str],
):
    config = Config()
    config.add_subscribe(
        int(group_number), "group", target, target_name, platform_name, cats, tags
    )
@ -120,7 +118,6 @@ async def add_group_sub(


async def del_group_sub(group_number: str, platform_name: str, target: str):
    config = Config()
    try:
        config.del_subscribe(int(group_number), "group", target, platform_name)
    except (NoSuchUserException, NoSuchSubscribeException):
@ -136,7 +133,6 @@ async def update_group_sub(
    cats: list[int],
    tags: list[str],
):
    config = Config()
    try:
        config.update_subscribe(
            int(group_number), "group", target, target_name, platform_name, cats, tags
src/plugins/nonebot_bison/bootstrap.py (new file, 20 lines)
@ -0,0 +1,20 @@
from nonebot import get_driver
from nonebot.log import logger

from .config.config_legacy import start_up as legacy_db_startup
from .config.db import upgrade_db
from .scheduler.aps import start_scheduler
from .scheduler.manager import init_scheduler


@get_driver().on_startup
async def bootstrap():
    # legacy db
    legacy_db_startup()
    # new db
    await upgrade_db()
    # init scheduler
    await init_scheduler()
    # start scheduler
    start_scheduler()
    logger.info("nonebot-bison bootstrap done")
src/plugins/nonebot_bison/config/__init__.py (new file, 3 lines)
@ -0,0 +1,3 @@
from .config_legacy import NoSuchSubscribeException, NoSuchUserException
from .db import DATA
from .db_config import config
@ -1,16 +1,18 @@
import os
from collections import defaultdict
from os import path
from pathlib import Path
from typing import DefaultDict, Literal, Mapping, TypedDict

import nonebot
from nonebot.log import logger
from tinydb import Query, TinyDB

from .platform import platform_manager
from .plugin_config import plugin_config
from .types import Target, User
from .utils import Singleton
from ..platform import platform_manager
from ..plugin_config import plugin_config
from ..types import Target, User
from ..utils import Singleton
from .utils import NoSuchSubscribeException, NoSuchUserException

supported_target_type = platform_manager.keys()

@ -30,12 +32,20 @@ def get_config_path() -> str:
    return new_path


class NoSuchUserException(Exception):
    pass


class NoSuchSubscribeException(Exception):
    pass
def drop():
    if plugin_config.bison_config_path:
        data_dir = plugin_config.bison_config_path
    else:
        working_dir = os.getcwd()
        data_dir = path.join(working_dir, "data")
    old_path = path.join(data_dir, "bison.json")
    new_path = path.join(data_dir, "bison-legacy.json")
    if os.path.exists(old_path):
        config.db.close()
        config.available = False
        os.rename(old_path, new_path)
        return True
    return False


class SubscribeContent(TypedDict):
@ -47,24 +57,33 @@ class SubscribeContent(TypedDict):


class ConfigContent(TypedDict):
    user: str
    user: int
    user_type: Literal["group", "private"]
    subs: list[SubscribeContent]


class Config(metaclass=Singleton):
    "Dropping it!"

    migrate_version = 2

    def __init__(self):
        self.db = TinyDB(get_config_path(), encoding="utf-8")
        self.kv_config = self.db.table("kv")
        self.user_target = self.db.table("user_target")
        self.target_user_cache: dict[str, defaultdict[Target, list[User]]] = {}
        self.target_user_cat_cache = {}
        self.target_user_tag_cache = {}
        self.target_list = {}
        self.next_index: DefaultDict[str, int] = defaultdict(lambda: 0)
        self._do_init()

    def _do_init(self):
        path = get_config_path()
        if Path(path).exists():
            self.available = True
            self.db = TinyDB(get_config_path(), encoding="utf-8")
            self.kv_config = self.db.table("kv")
            self.user_target = self.db.table("user_target")
            self.target_user_cache: dict[str, defaultdict[Target, list[User]]] = {}
            self.target_user_cat_cache = {}
            self.target_user_tag_cache = {}
            self.target_list = {}
            self.next_index: DefaultDict[str, int] = defaultdict(lambda: 0)
        else:
            self.available = False

    def add_subscribe(
        self, user, user_type, target, target_name, target_type, cats, tags
@ -220,6 +239,8 @@ class Config(metaclass=Singleton):

def start_up():
    config = Config()
    if not config.available:
        return
    if not (search_res := config.kv_config.search(Query().name == "version")):
        config.kv_config.insert({"name": "version", "value": config.migrate_version})
    elif search_res[0].get("value") < config.migrate_version:
@ -240,4 +261,4 @@ def start_up():
        config.update_send_cache()


nonebot.get_driver().on_startup(start_up)
config = Config()
src/plugins/nonebot_bison/config/db.py (new file, 101 lines)
@ -0,0 +1,101 @@
from pathlib import Path

import nonebot
from alembic.config import Config
from alembic.runtime.environment import EnvironmentContext
from alembic.script.base import ScriptDirectory
from nonebot.log import logger
from nonebot_plugin_datastore import PluginData, create_session, db
from nonebot_plugin_datastore.db import get_engine
from sqlalchemy.engine.base import Connection
from sqlalchemy.ext.asyncio.session import AsyncSession

from .config_legacy import ConfigContent, config, drop
from .db_model import Base, Subscribe, Target, User

DATA = PluginData("bison")


async def data_migrate():
    if config.available:
        logger.warning("You are still using legacy db, migrating to sqlite")
        all_subs: list[ConfigContent] = list(
            map(lambda item: ConfigContent(**item), config.get_all_subscribe().all())
        )
        async with AsyncSession(get_engine()) as sess:
            user_to_create = []
            subscribe_to_create = []
            platform_target_map: dict[str, tuple[Target, str, int]] = {}
            for user in all_subs:
                db_user = User(uid=user["user"], type=user["user_type"])
                user_to_create.append(db_user)
                for sub in user["subs"]:
                    target = sub["target"]
                    platform_name = sub["target_type"]
                    target_name = sub["target_name"]
                    key = f"{target}-{platform_name}"
                    if key in platform_target_map.keys():
                        target_obj, ext_user_type, ext_user = platform_target_map[key]
                        if target_obj.target_name != target_name:
                            # GG
                            logger.error(
                                f"你的旧版本数据库中存在数据不一致问题,请完成迁移后执行重新添加{platform_name}平台的{target}"
                                f"它的名字可能为{target_obj.target_name}或{target_name}"
                            )

                    else:
                        target_obj = Target(
                            platform_name=platform_name,
                            target_name=target_name,
                            target=target,
                        )
                        platform_target_map[key] = (
                            target_obj,
                            user["user_type"],
                            user["user"],
                        )
                    subscribe_obj = Subscribe(
                        user=db_user,
                        target=target_obj,
                        categories=sub["cats"],
                        tags=sub["tags"],
                    )
                    subscribe_to_create.append(subscribe_obj)
            sess.add_all(
                user_to_create
                + list(map(lambda x: x[0], platform_target_map.values()))
                + subscribe_to_create
            )
            await sess.commit()
        drop()
        logger.info("migrate success")


async def upgrade_db():
    alembic_cfg = Config()
    alembic_cfg.set_main_option(
        "script_location", str(Path(__file__).parent.joinpath("migrate"))
    )

    script = ScriptDirectory.from_config(alembic_cfg)
    engine = db.get_engine()
    env = EnvironmentContext(alembic_cfg, script)

    def migrate_fun(revision, context):
        return script._upgrade_revs("head", revision)

    def do_run_migration(connection: Connection):
        env.configure(
            connection,
            target_metadata=Base.metadata,
            fn=migrate_fun,
            render_as_batch=True,
        )
        with env.begin_transaction():
            env.run_migrations()
        logger.info("Finish auto migrate")

    async with engine.connect() as connection:
        await connection.run_sync(do_run_migration)

    await data_migrate()
src/plugins/nonebot_bison/config/db_config.py (new file, 266 lines)
@ -0,0 +1,266 @@
from dataclasses import dataclass
from datetime import datetime, time
from typing import Any, Awaitable, Callable, Optional

from nonebot_plugin_datastore.db import get_engine
from sqlalchemy.ext.asyncio.session import AsyncSession
from sqlalchemy.orm import selectinload
from sqlalchemy.sql.expression import delete, select
from sqlalchemy.sql.functions import func

from ..types import Category, Tag
from ..types import Target as T_Target
from ..types import User as T_User
from ..types import UserSubInfo
from .db_model import ScheduleTimeWeight, Subscribe, Target, User


def _get_time():
    dt = datetime.now()
    cur_time = time(hour=dt.hour, minute=dt.minute, second=dt.second)
    return cur_time


@dataclass
class TimeWeightConfig:
    start_time: time
    end_time: time
    weight: int


@dataclass
class WeightConfig:

    default: int
    time_config: list[TimeWeightConfig]


class DBConfig:
    def __init__(self):
        self.add_target_hook: Optional[Callable[[str, T_Target], Awaitable]] = None
        self.delete_target_hook: Optional[Callable[[str, T_Target], Awaitable]] = None

    def register_add_target_hook(self, fun: Callable[[str, T_Target], Awaitable]):
        self.add_target_hook = fun

    def register_delete_target_hook(self, fun: Callable[[str, T_Target], Awaitable]):
        self.delete_target_hook = fun

    async def add_subscribe(
        self,
        user: int,
        user_type: str,
        target: T_Target,
        target_name: str,
        platform_name: str,
        cats: list[Category],
        tags: list[Tag],
    ):
        async with AsyncSession(get_engine()) as session:
            db_user_stmt = (
                select(User).where(User.uid == user).where(User.type == user_type)
            )
            db_user: Optional[User] = await session.scalar(db_user_stmt)
            if not db_user:
                db_user = User(uid=user, type=user_type)
                session.add(db_user)
            db_target_stmt = (
                select(Target)
                .where(Target.platform_name == platform_name)
                .where(Target.target == target)
            )
            db_target: Optional[Target] = await session.scalar(db_target_stmt)
            if not db_target:
                db_target = Target(
                    target=target, platform_name=platform_name, target_name=target_name
                )
                if self.add_target_hook:
                    await self.add_target_hook(platform_name, target)
            else:
                db_target.target_name = target_name  # type: ignore
            subscribe = Subscribe(
                categories=cats,
                tags=tags,
                user=db_user,
                target=db_target,
            )
            session.add(subscribe)
            await session.commit()

    async def list_subscribe(self, user: int, user_type: str) -> list[Subscribe]:
        async with AsyncSession(get_engine()) as session:
            query_stmt = (
                select(Subscribe)
                .where(User.type == user_type, User.uid == user)
                .join(User)
                .options(selectinload(Subscribe.target))  # type:ignore
            )
            subs: list[Subscribe] = (await session.scalars(query_stmt)).all()
            return subs

    async def del_subscribe(
        self, user: int, user_type: str, target: str, platform_name: str
    ):
        async with AsyncSession(get_engine()) as session:
            user_obj = await session.scalar(
                select(User).where(User.uid == user, User.type == user_type)
            )
            target_obj = await session.scalar(
                select(Target).where(
                    Target.platform_name == platform_name, Target.target == target
                )
            )
            await session.execute(
                delete(Subscribe).where(
                    Subscribe.user == user_obj, Subscribe.target == target_obj
                )
            )
            target_count = await session.scalar(
                select(func.count())
                .select_from(Subscribe)
                .where(Subscribe.target == target_obj)
            )
            if target_count == 0:
                # delete empty target
                # await session.delete(target_obj)
                if self.delete_target_hook:
                    await self.delete_target_hook(platform_name, T_Target(target))
            await session.commit()

    async def update_subscribe(
        self,
        user: int,
        user_type: str,
        target: str,
        target_name: str,
        platform_name: str,
        cats: list,
        tags: list,
    ):
        async with AsyncSession(get_engine()) as sess:
            subscribe_obj: Subscribe = await sess.scalar(
                select(Subscribe)
                .where(
                    User.uid == user,
                    User.type == user_type,
                    Target.target == target,
                    Target.platform_name == platform_name,
                )
                .join(User)
                .join(Target)
                .options(selectinload(Subscribe.target))  # type:ignore
            )
            subscribe_obj.tags = tags  # type:ignore
            subscribe_obj.categories = cats  # type:ignore
            subscribe_obj.target.target_name = target_name
            await sess.commit()

    async def get_platform_target(self, platform_name: str) -> list[Target]:
        async with AsyncSession(get_engine()) as sess:
            subq = select(Subscribe.target_id).distinct().subquery()
            query = (
                select(Target).join(subq).where(Target.platform_name == platform_name)
            )
            return (await sess.scalars(query)).all()

    async def get_time_weight_config(
        self, target: T_Target, platform_name: str
    ) -> WeightConfig:
        async with AsyncSession(get_engine()) as sess:
            time_weight_conf: list[ScheduleTimeWeight] = (
                await sess.scalars(
                    select(ScheduleTimeWeight)
                    .where(
                        Target.platform_name == platform_name, Target.target == target
                    )
                    .join(Target)
                )
            ).all()
            targetObj: Target = await sess.scalar(
                select(Target).where(
                    Target.platform_name == platform_name, Target.target == target
                )
            )
            return WeightConfig(
                default=targetObj.default_schedule_weight,
                time_config=[
                    TimeWeightConfig(
                        start_time=time_conf.start_time,
                        end_time=time_conf.end_time,
                        weight=time_conf.weight,
                    )
                    for time_conf in time_weight_conf
                ],
            )

    async def update_time_weight_config(
        self, target: T_Target, platform_name: str, conf: WeightConfig
    ):
        async with AsyncSession(get_engine()) as sess:
            targetObj: Target = await sess.scalar(
                select(Target).where(
                    Target.platform_name == platform_name, Target.target == target
                )
            )
            target_id = targetObj.id
            targetObj.default_schedule_weight = conf.default
            delete(ScheduleTimeWeight).where(ScheduleTimeWeight.target_id == target_id)
            for time_conf in conf.time_config:
                new_conf = ScheduleTimeWeight(
                    start_time=time_conf.start_time,
                    end_time=time_conf.end_time,
                    weight=time_conf.weight,
                    target=targetObj,
                )
                sess.add(new_conf)

            await sess.commit()

    async def get_current_weight_val(self, platform_list: list[str]) -> dict[str, int]:
        res = {}
        cur_time = _get_time()
        async with AsyncSession(get_engine()) as sess:
            targets: list[Target] = (
                await sess.scalars(
                    select(Target)
                    .where(Target.platform_name.in_(platform_list))
                    .options(selectinload(Target.time_weight))
                )
            ).all()
            for target in targets:
                key = f"{target.platform_name}-{target.target}"
                weight = target.default_schedule_weight
                for time_conf in target.time_weight:
                    if (
                        time_conf.start_time <= cur_time
                        and time_conf.end_time > cur_time
                    ):
                        weight = time_conf.weight
                        break
                res[key] = weight
        return res

    async def get_platform_target_subscribers(
        self, platform_name: str, target: T_Target
    ) -> list[UserSubInfo]:
        async with AsyncSession(get_engine()) as sess:
            query = (
                select(Subscribe)
                .join(Target)
                .where(Target.platform_name == platform_name, Target.target == target)
                .options(selectinload(Subscribe.user))
            )
            subsribes: list[Subscribe] = (await sess.scalars(query)).all()
            return list(
                map(
                    lambda subscribe: UserSubInfo(
                        T_User(subscribe.user.uid, subscribe.user.type),
                        subscribe.categories,
                        subscribe.tags,
                    ),
                    subsribes,
                )
            )


config = DBConfig()
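
For orientation, a minimal usage sketch of the new DBConfig API added above (the group number, target id and platform name are made-up examples; in the plugin these calls are driven by config_manager and the admin page):

    from nonebot_bison.config import config
    from nonebot_bison.types import Target as T_Target

    async def demo():
        await config.add_subscribe(
            user=123456,               # hypothetical QQ group number
            user_type="group",
            target=T_Target("42"),     # hypothetical platform-specific target id
            target_name="example account",
            platform_name="weibo",
            cats=[],
            tags=[],
        )
        for sub in await config.list_subscribe(123456, "group"):
            print(sub.target.platform_name, sub.target.target_name)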
src/plugins/nonebot_bison/config/db_model.py (new file, 63 lines)
@ -0,0 +1,63 @@
from datetime import datetime

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy.sql.schema import Column, ForeignKey, UniqueConstraint
from sqlalchemy.sql.sqltypes import JSON, DateTime, Integer, String, Time

Base = declarative_base()


class User(Base):
    __tablename__ = "user"
    __table_args__ = (UniqueConstraint("type", "uid", name="unique-user-constraint"),)

    id = Column(Integer, primary_key=True, autoincrement=True)
    type = Column(String(20), nullable=False)
    uid = Column(Integer, nullable=False)

    subscribes = relationship("Subscribe", back_populates="user")


class Target(Base):
    __tablename__ = "target"
    __table_args__ = (
        UniqueConstraint("target", "platform_name", name="unique-target-constraint"),
    )

    id = Column(Integer, primary_key=True, autoincrement=True)
    platform_name = Column(String(20), nullable=False)
    target = Column(String(1024), nullable=False)
    target_name = Column(String(1024), nullable=False)
    default_schedule_weight = Column(Integer, default=10)

    subscribes = relationship("Subscribe", back_populates="target")
    time_weight = relationship("ScheduleTimeWeight", back_populates="target")


class ScheduleTimeWeight(Base):
    __tablename__ = "schedule_time_weight"

    id = Column(Integer, primary_key=True, autoincrement=True)
    target_id = Column(Integer, ForeignKey(Target.id))
    start_time = Column(Time)
    end_time = Column(Time)
    weight = Column(Integer)

    target = relationship("Target", back_populates="time_weight")


class Subscribe(Base):
    __tablename__ = "subscribe"
    __table_args__ = (
        UniqueConstraint("target_id", "user_id", name="unique-subscribe-constraint"),
    )

    id = Column(Integer, primary_key=True, autoincrement=True)
    target_id = Column(Integer, ForeignKey(Target.id))
    user_id = Column(Integer, ForeignKey(User.id))
    categories = Column(JSON)
    tags = Column(JSON)

    target = relationship("Target", back_populates="subscribes")
    user = relationship("User", back_populates="subscribes")
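
A quick, self-contained way to experiment with these models outside the bot (illustrative only; in the plugin the schema is created by the Alembic migrations below, not by create_all, and the subscriber/target values here are invented):

    from sqlalchemy import create_engine
    from sqlalchemy.orm import Session

    from nonebot_bison.config.db_model import Base, Subscribe, Target, User

    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)  # build the tables in a throwaway in-memory DB

    with Session(engine) as session:
        user = User(type="group", uid=123456)
        target = Target(platform_name="weibo", target="42", target_name="example")
        session.add(Subscribe(user=user, target=target, categories=[], tags=[]))
        session.commit()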
src/plugins/nonebot_bison/config/migrate/README (new file, 1 line)
@ -0,0 +1 @@
Generic single-database configuration.
src/plugins/nonebot_bison/config/migrate/env.py (new file, 113 lines)
@ -0,0 +1,113 @@
import asyncio
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool
from sqlalchemy.engine.base import Connection

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name:
    fileConfig(config.config_file_name)  # type:ignore

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

import nonebot

try:
    nonebot.get_driver()
    __as_plugin = True
    target_metadata = None
except:
    __as_plugin = False
    nonebot.init()
    from nonebot_bison.config.db_model import Base

    target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def do_run_migration(connection: Connection):
    if __as_plugin:
        context.configure(connection=connection)
    else:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True,
            compare_type=True,
        )

    with context.begin_transaction():
        context.run_migrations()


async def run_migrations_async():

    from nonebot_plugin_datastore.db import get_engine

    connectable = get_engine()
    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migration)


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    if not __as_plugin:
        connectable = engine_from_config(
            config.get_section(config.config_ini_section),
            prefix="sqlalchemy.",
            poolclass=pool.NullPool,
        )

        with connectable.connect() as connection:
            do_run_migration(connection)
    else:
        # asyncio.run(run_migrations_async())
        asyncio.create_task(run_migrations_async())


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
src/plugins/nonebot_bison/config/migrate/script.py.mako (new file, 24 lines)
@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@ -0,0 +1,60 @@
"""init db

Revision ID: 0571870f5222
Revises:
Create Date: 2022-03-21 19:18:13.762626

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "0571870f5222"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "target",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("platform_name", sa.String(length=20), nullable=False),
        sa.Column("target", sa.String(length=1024), nullable=False),
        sa.Column("target_name", sa.String(length=1024), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "user",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("type", sa.String(length=20), nullable=False),
        sa.Column("uid", sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "subscribe",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.Column("user_id", sa.Integer(), nullable=True),
        sa.Column("categories", sa.String(length=1024), nullable=True),
        sa.Column("tags", sa.String(length=1024), nullable=True),
        sa.ForeignKeyConstraint(
            ["target_id"],
            ["target.id"],
        ),
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("subscribe")
    op.drop_table("user")
    op.drop_table("target")
    # ### end Alembic commands ###
@ -0,0 +1,53 @@
"""alter type

Revision ID: 4a46ba54a3f3
Revises: c97c445e2bdb
Create Date: 2022-03-27 21:50:10.911649

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "4a46ba54a3f3"
down_revision = "c97c445e2bdb"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.alter_column(
            "categories",
            existing_type=sa.VARCHAR(length=1024),
            type_=sa.JSON(),
            existing_nullable=True,
        )
        batch_op.alter_column(
            "tags",
            existing_type=sa.VARCHAR(length=1024),
            type_=sa.JSON(),
            existing_nullable=True,
        )

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.alter_column(
            "tags",
            existing_type=sa.JSON(),
            type_=sa.VARCHAR(length=1024),
            existing_nullable=True,
        )
        batch_op.alter_column(
            "categories",
            existing_type=sa.JSON(),
            type_=sa.VARCHAR(length=1024),
            existing_nullable=True,
        )

    # ### end Alembic commands ###
@ -0,0 +1,51 @@
"""add time-weight table

Revision ID: 5f3370328e44
Revises: a333d6224193
Create Date: 2022-05-31 22:05:13.235981

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "5f3370328e44"
down_revision = "a333d6224193"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "schedule_time_weight",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("target_id", sa.Integer(), nullable=True),
        sa.Column("start_time", sa.Time(), nullable=True),
        sa.Column("end_time", sa.Time(), nullable=True),
        sa.Column("weight", sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(
            ["target_id"],
            ["target.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("default_schedule_weight", sa.Integer(), nullable=True)
        )
        batch_op.drop_column("last_schedule_time")

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("last_schedule_time", sa.DATETIME(), nullable=True)
        )
        batch_op.drop_column("default_schedule_weight")

    op.drop_table("schedule_time_weight")
    # ### end Alembic commands ###
@ -0,0 +1,33 @@
"""add last scheduled time

Revision ID: a333d6224193
Revises: 4a46ba54a3f3
Create Date: 2022-03-29 21:01:38.213153

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "a333d6224193"
down_revision = "4a46ba54a3f3"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column("last_schedule_time", sa.DateTime(timezone=True), nullable=True)
        )

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.drop_column("last_schedule_time")

    # ### end Alembic commands ###
@ -0,0 +1,47 @@
"""add constraint

Revision ID: c97c445e2bdb
Revises: 0571870f5222
Create Date: 2022-03-26 19:46:50.910721

"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "c97c445e2bdb"
down_revision = "0571870f5222"
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.create_unique_constraint(
            "unique-subscribe-constraint", ["target_id", "user_id"]
        )

    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.create_unique_constraint(
            "unique-target-constraint", ["target", "platform_name"]
        )

    with op.batch_alter_table("user", schema=None) as batch_op:
        batch_op.create_unique_constraint("unique-user-constraint", ["type", "uid"])

    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table("user", schema=None) as batch_op:
        batch_op.drop_constraint("unique-user-constraint", type_="unique")

    with op.batch_alter_table("target", schema=None) as batch_op:
        batch_op.drop_constraint("unique-target-constraint", type_="unique")

    with op.batch_alter_table("subscribe", schema=None) as batch_op:
        batch_op.drop_constraint("unique-subscribe-constraint", type_="unique")

    # ### end Alembic commands ###
src/plugins/nonebot_bison/config/utils.py (new file, 6 lines)
@ -0,0 +1,6 @@
class NoSuchUserException(Exception):
    pass


class NoSuchSubscribeException(Exception):
    pass
@ -1,6 +1,6 @@
import asyncio
from datetime import datetime
from typing import Optional, Type
from typing import Optional, Type, cast

from nonebot import on_command
from nonebot.adapters.onebot.v11 import Bot, Event, MessageEvent
@ -9,14 +9,13 @@ from nonebot.adapters.onebot.v11.message import Message
from nonebot.adapters.onebot.v11.permission import GROUP_ADMIN, GROUP_OWNER
from nonebot.internal.params import ArgStr
from nonebot.internal.rule import Rule
from nonebot.log import logger
from nonebot.matcher import Matcher
from nonebot.params import Depends, EventPlainText, EventToMe
from nonebot.permission import SUPERUSER
from nonebot.rule import to_me
from nonebot.typing import T_State

from .config import Config
from .config import config
from .platform import Platform, check_sub_target, platform_manager
from .plugin_config import plugin_config
from .types import Category, Target, User
@ -197,17 +196,16 @@ def do_add_sub(add_sub: Type[Matcher]):

    @add_sub.got("tags", _gen_prompt_template("{_prompt}"), [Depends(parser_tags)])
    async def add_sub_process(event: Event, state: T_State):
        config = Config()
        user = state.get("target_user_info")
        user = cast(User, state.get("target_user_info"))
        assert isinstance(user, User)
        config.add_subscribe(
        await config.add_subscribe(
            # state.get("_user_id") or event.group_id,
            # user_type="group",
            user=user.user,
            user_type=user.user_type,
            target=state["id"],
            target_name=state["name"],
            target_type=state["platform"],
            platform_name=state["platform"],
            cats=state.get("cats", []),
            tags=state.get("tags", []),
        )
@ -219,10 +217,9 @@ def do_query_sub(query_sub: Type[Matcher]):

    @query_sub.handle()
    async def _(state: T_State):
        config: Config = Config()
        user_info = state["target_user_info"]
        assert isinstance(user_info, User)
        sub_list = config.list_subscribe(
        sub_list = await config.list_subscribe(
            # state.get("_user_id") or event.group_id, "group"
            user_info.user,
            user_info.user_type,
@ -230,17 +227,20 @@ def do_query_sub(query_sub: Type[Matcher]):
        res = "订阅的帐号为:\n"
        for sub in sub_list:
            res += "{} {} {}".format(
                sub["target_type"], sub["target_name"], sub["target"]
                # sub["target_type"], sub["target_name"], sub["target"]
                sub.target.platform_name,
                sub.target.target_name,
                sub.target.target,
            )
            platform = platform_manager[sub["target_type"]]
            platform = platform_manager[sub.target.platform_name]
            if platform.categories:
                res += " [{}]".format(
                    ", ".join(
                        map(lambda x: platform.categories[Category(x)], sub["cats"])
                        map(lambda x: platform.categories[Category(x)], sub.categories)
                    )
                )
            if platform.enable_tag:
                res += " {}".format(", ".join(sub["tags"]))
                res += " {}".format(", ".join(sub.tags))
            res += "\n"
        await query_sub.finish(Message(await parse_text(res)))

@ -250,11 +250,10 @@ def do_del_sub(del_sub: Type[Matcher]):

    @del_sub.handle()
    async def send_list(bot: Bot, event: Event, state: T_State):
        config: Config = Config()
        user_info = state["target_user_info"]
        assert isinstance(user_info, User)
        try:
            sub_list = config.list_subscribe(
            sub_list = await config.list_subscribe(
                # state.get("_user_id") or event.group_id, "group"
                user_info.user,
                user_info.user_type,
@ -267,21 +266,27 @@ def do_del_sub(del_sub: Type[Matcher]):
        state["sub_table"] = {}
        for index, sub in enumerate(sub_list, 1):
            state["sub_table"][index] = {
                "target_type": sub["target_type"],
                "target": sub["target"],
                "platform_name": sub.target.platform_name,
                "target": sub.target.target,
            }
            res += "{} {} {} {}\n".format(
                index, sub["target_type"], sub["target_name"], sub["target"]
                index,
                sub.target.platform_name,
                sub.target.target_name,
                sub.target.target,
            )
            platform = platform_manager[sub["target_type"]]
            platform = platform_manager[sub.target.platform_name]
            if platform.categories:
                res += " [{}]".format(
                    ", ".join(
                        map(lambda x: platform.categories[Category(x)], sub["cats"])
                        map(
                            lambda x: platform.categories[Category(x)],
                            sub.categories,
                        )
                    )
                )
            if platform.enable_tag:
                res += " {}".format(", ".join(sub["tags"]))
                res += " {}".format(", ".join(sub.tags))
            res += "\n"
        res += "请输入要删除的订阅的序号\n输入'取消'中止"
        await bot.send(event=event, message=Message(await parse_text(res)))
@ -293,10 +298,9 @@ def do_del_sub(del_sub: Type[Matcher]):
            await del_sub.finish("删除中止")
        try:
            index = int(user_msg)
            config = Config()
            user_info = state["target_user_info"]
            assert isinstance(user_info, User)
            config.del_subscribe(
            await config.del_subscribe(
                # state.get("_user_id") or event.group_id,
                # "group",
                user_info.user,
@ -7,9 +7,16 @@ from nonebot.plugin import require
from ..post import Post
from ..types import Category, RawPost, Target
from ..utils import http_client
from ..utils.scheduler_config import SchedulerConfig
from .platform import CategoryNotSupport, NewMessage, StatusChange


class ArknightsSchedConf(SchedulerConfig, name="arknights"):

    schedule_type = "interval"
    schedule_setting = {"seconds": 30}


class Arknights(NewMessage):

    categories = {1: "游戏公告"}
@ -18,8 +25,7 @@ class Arknights(NewMessage):
    enable_tag = False
    enabled = True
    is_common = False
    schedule_type = "interval"
    schedule_kw = {"seconds": 30}
    scheduler_class = "arknights"
    has_target = False

    async def get_target_name(self, _: Target) -> str:
@ -91,8 +97,7 @@ class AkVersion(StatusChange):
    enable_tag = False
    enabled = True
    is_common = False
    schedule_type = "interval"
    schedule_kw = {"seconds": 30}
    scheduler_class = "arknights"
    has_target = False

    async def get_target_name(self, _: Target) -> str:
@ -147,8 +152,7 @@ class MonsterSiren(NewMessage):
    enable_tag = False
    enabled = True
    is_common = False
    schedule_type = "interval"
    schedule_kw = {"seconds": 30}
    scheduler_class = "arknights"
    has_target = False

    async def get_target_name(self, _: Target) -> str:
@ -199,8 +203,7 @@ class TerraHistoricusComic(NewMessage):
    enable_tag = False
    enabled = True
    is_common = False
    schedule_type = "interval"
    schedule_kw = {"seconds": 30}
    scheduler_class = "arknights"
    has_target = False

    async def get_target_name(self, _: Target) -> str:
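
The pattern repeated across the platform modules in this commit is visible above: the per-class schedule_type/schedule_kw attributes are dropped in favour of a shared SchedulerConfig subclass plus a scheduler_class name. A hedged sketch of what a hypothetical new platform would declare under the new scheme (the platform name and interval are invented for illustration):

    from ..utils import SchedulerConfig
    from .platform import NewMessage

    class ExampleSchedConf(SchedulerConfig, name="example.com"):
        schedule_type = "interval"
        schedule_setting = {"minutes": 5}

    class Example(NewMessage):
        name = "Example"
        enabled = True
        is_common = False
        has_target = False
        scheduler_class = "example.com"  # binds this platform to ExampleSchedConf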
@ -4,10 +4,16 @@ from typing import Any, Optional

from ..post import Post
from ..types import Category, RawPost, Tag, Target
from ..utils import http_client
from ..utils import SchedulerConfig, http_client
from .platform import CategoryNotSupport, NewMessage, StatusChange


class BilibiliSchedConf(SchedulerConfig, name="bilibili.com"):

    schedule_type = "interval"
    schedule_setting = {"seconds": 10}


class Bilibili(NewMessage):

    categories = {
@ -22,8 +28,7 @@ class Bilibili(NewMessage):
    enable_tag = True
    enabled = True
    is_common = True
    schedule_type = "interval"
    schedule_kw = {"seconds": 10}
    scheduler_class = "bilibili.com"
    name = "B站"
    has_target = True
    parse_target_promot = "请输入用户主页的链接"
@ -167,8 +172,7 @@ class Bilibililive(StatusChange):
    enable_tag = True
    enabled = True
    is_common = True
    schedule_type = "interval"
    schedule_kw = {"seconds": 10}
    scheduler_class = "bilibili.com"
    name = "Bilibili直播"
    has_target = True

@ -2,10 +2,16 @@ from typing import Any

from ..post import Post
from ..types import RawPost, Target
from ..utils import http_client
from ..utils import SchedulerConfig, http_client
from .platform import NewMessage


class FF14SchedConf(SchedulerConfig, name="ff14"):

    schedule_type = "interval"
    schedule_setting = {"seconds": 60}


class FF14(NewMessage):

    categories = {}
@ -14,8 +20,7 @@ class FF14(NewMessage):
    enable_tag = False
    enabled = True
    is_common = False
    schedule_type = "interval"
    schedule_kw = {"seconds": 60}
    scheduler_class = "ff14"
    has_target = False

    async def get_target_name(self, _: Target) -> str:
@ -7,9 +7,16 @@ from bs4 import BeautifulSoup, NavigableString, Tag
|
||||
|
||||
from ..post import Post
|
||||
from ..types import Category, RawPost, Target
|
||||
from ..utils import SchedulerConfig
|
||||
from .platform import CategoryNotSupport, NewMessage
|
||||
|
||||
|
||||
class McbbsSchedConf(SchedulerConfig, name="mcbbs"):
|
||||
|
||||
schedule_type = "interval"
|
||||
schedule_setting = {"hours": 1}
|
||||
|
||||
|
||||
def _format_text(rawtext: str, mode: int) -> str:
|
||||
"""处理BeautifulSoup生成的string中奇怪的回车+连续空格
|
||||
mode 0:处理标题
|
||||
@ -38,12 +45,11 @@ class McbbsNews(NewMessage):
|
||||
name = "MCBBS幻翼块讯"
|
||||
enabled = True
|
||||
is_common = False
|
||||
schedule_type = "interval"
|
||||
schedule_kw = {"hours": 1}
|
||||
scheduler_class = "mcbbs"
|
||||
has_target = False
|
||||
|
||||
async def get_target_name(self, _: Target) -> str:
|
||||
return f"{self.name} {self.categories[1]}"
|
||||
return self.name
|
||||
|
||||
async def get_sub_list(self, _: Target) -> list[RawPost]:
|
||||
url = "https://www.mcbbs.net/forum-news-1.html"
|
||||
@ -62,12 +68,37 @@ class McbbsNews(NewMessage):
|
||||
|
||||
return post_list
|
||||
|
||||
@staticmethod
|
||||
def _format_text(rawtext: str, mode: int) -> str:
|
||||
"""处理BeautifulSoup生成的string中奇怪的回车+连续空格
|
||||
mode 0:处理标题
|
||||
mode 1:处理版本资讯类推文
|
||||
mode 2:处理快讯类推文"""
|
||||
if mode == 0:
|
||||
ftext = re.sub(r"\n\s*", " ", rawtext)
|
||||
elif mode == 1:
|
||||
ftext = re.sub(r"[\n\s*]", "", rawtext)
|
||||
elif mode == 2:
|
||||
ftext = re.sub(r"\r\n", "", rawtext)
|
||||
else:
|
||||
raise NotImplementedError
|
||||
return ftext
|
||||
|
||||
@staticmethod
|
||||
def _stamp_date(rawdate: str) -> int:
|
||||
"""将时间转化为时间戳yyyy-mm-dd->timestamp"""
|
||||
time_stamp = int(time.mktime(time.strptime(rawdate, "%Y-%m-%d")))
|
||||
return time_stamp
|
||||
|
||||
def _gen_post_list(self, raw_post_list) -> list[RawPost]:
|
||||
"""解析生成推文列表"""
|
||||
post_list = []
|
||||
for raw_post in raw_post_list:
|
||||
post = {}
|
||||
post["url"] = raw_post.find("a", class_="s xst")["href"]
|
||||
post["title"] = _format_text(raw_post.find("a", class_="s xst").string, 0)
|
||||
post["title"] = self._format_text(
|
||||
raw_post.find("a", class_="s xst").string, 0
|
||||
)
|
||||
post["category"] = raw_post.select("th em a")[0].string
|
||||
post["author"] = raw_post.select("td:nth-of-type(2) cite a")[0].string
|
||||
post["id"] = raw_post["id"]
|
||||
@ -76,7 +107,7 @@ class McbbsNews(NewMessage):
|
||||
if raw_post.select("td:nth-of-type(2) em span span")
|
||||
else raw_post.select("td:nth-of-type(2) em span")[0].string
|
||||
)
|
||||
post["date"] = _stamp_date(rawdate)
|
||||
post["date"] = self._stamp_date(rawdate)
|
||||
post_list.append(post)
|
||||
return post_list
|
||||
|
||||
@ -84,19 +115,20 @@ class McbbsNews(NewMessage):
|
||||
return post["id"]
|
||||
|
||||
def get_date(self, post: RawPost) -> int:
|
||||
# 获取datetime精度只到日期,故暂时舍弃
|
||||
# return post["date"]
|
||||
return None
|
||||
|
||||
def get_category(self, post: RawPost) -> Category:
|
||||
match post["category"]:
|
||||
case "Java版本资讯":
|
||||
return Category(1)
|
||||
case "基岩版本资讯":
|
||||
return Category(2)
|
||||
case _:
|
||||
raise CategoryNotSupport("McbbsNews订阅暂不支持 `{}".format(post["category"]))
|
||||
if post["category"] == "Java版本资讯":
|
||||
return Category(1)
|
||||
elif post["category"] == "基岩版本资讯":
|
||||
return Category(2)
|
||||
else:
|
||||
raise CategoryNotSupport("McbbsNews订阅暂不支持 `{}".format(post["category"]))
|
||||
|
||||
def _check_str_chinese(self, check_str: str) -> bool:
|
||||
@staticmethod
|
||||
def _check_str_chinese(check_str: str) -> bool:
|
||||
"""检测字符串是否含有中文(有一个就算)"""
|
||||
for ch in check_str:
|
||||
if "\u4e00" <= ch <= "\u9fff":
|
||||
@ -107,40 +139,40 @@ class McbbsNews(NewMessage):
|
||||
"""提取Java/Bedrock版本资讯的推送消息"""
|
||||
raw_soup = BeautifulSoup(raw_text.replace("<br />", ""), "html.parser")
|
||||
# 获取头图
|
||||
match news_type:
|
||||
case "Java版本资讯":
|
||||
# 获取头图
|
||||
pic_tag = raw_soup.find(
|
||||
"img", file=re.compile(r"https://www.minecraft.net/\S*header.jpg")
|
||||
)
|
||||
pic_url: list[str] = (
|
||||
[pic_tag.get("src", pic_tag.get("file"))] if pic_tag else []
|
||||
)
|
||||
# 获取blockquote标签下的内容
|
||||
soup = raw_soup.find(
|
||||
"td", id=re.compile(r"postmessage_[0-9]*")
|
||||
).blockquote.blockquote
|
||||
case "基岩版本资讯":
|
||||
# 获取头图
|
||||
pic_tag_0 = raw_soup.find(
|
||||
"img", file=re.compile(r"https://www.minecraft.net/\S*header.jpg")
|
||||
)
|
||||
pic_tag_1 = raw_soup.find(
|
||||
"img",
|
||||
file=re.compile(r"https://feedback.minecraft.net/\S*beta\S*.jpg"),
|
||||
)
|
||||
pic_url: list[str] = [
|
||||
pic_tag_0.get("src", pic_tag_0.get("file")) if pic_tag_0 else None,
|
||||
pic_tag_1.get("src", pic_tag_1.get("file")) if pic_tag_1 else None,
|
||||
]
|
||||
# 获取blockquote标签下的内容
|
||||
soup = (
|
||||
raw_soup.find("td", id=re.compile(r"postmessage_[0-9]*"))
|
||||
.select("blockquote:nth-of-type(2)")[0]
|
||||
.blockquote
|
||||
)
|
||||
case _:
|
||||
raise CategoryNotSupport(f"该函数不支持处理{news_type}")
|
||||
if news_type == "Java版本资讯":
|
||||
# 获取头图
|
||||
pic_tag = raw_soup.find(
|
||||
"img", file=re.compile(r"https://www.minecraft.net/\S*header.jpg")
|
||||
)
|
||||
pic_url: list[str] = (
|
||||
[pic_tag.get("src", pic_tag.get("file"))] if pic_tag else []
|
||||
)
|
||||
# 获取blockquote标签下的内容
|
||||
soup = raw_soup.find(
|
||||
"td", id=re.compile(r"postmessage_[0-9]*")
|
||||
).blockquote.blockquote
|
||||
elif news_type == "基岩版本资讯":
|
||||
# 获取头图
|
||||
pic_tag_0 = raw_soup.find(
|
||||
"img", file=re.compile(r"https://www.minecraft.net/\S*header.jpg")
|
||||
)
|
||||
pic_tag_1 = raw_soup.find(
|
||||
"img",
|
||||
file=re.compile(r"https://feedback.minecraft.net/\S*beta\S*.jpg"),
|
||||
)
|
||||
pic_url: list[str] = [
|
||||
pic_tag_0.get("src", pic_tag_0.get("file")) if pic_tag_0 else None,
|
||||
pic_tag_1.get("src", pic_tag_1.get("file")) if pic_tag_1 else None,
|
||||
]
|
||||
# 获取blockquote标签下的内容
|
||||
soup = (
|
||||
raw_soup.find("td", id=re.compile(r"postmessage_[0-9]*"))
|
||||
.select("blockquote:nth-of-type(2)")[0]
|
||||
.blockquote
|
||||
)
|
||||
else:
|
||||
raise CategoryNotSupport(f"该函数不支持处理{news_type}")
|
||||
|
||||
# 通用步骤
|
||||
# 删除无用的div和span段内容
|
||||
for del_tag in soup.find_all(["div", "span"]):
|
||||
@ -150,45 +182,43 @@ class McbbsNews(NewMessage):
|
||||
# orig_info[0].extract()
|
||||
# 展开所有的a,u和strong标签,展开ul,font标签里的font标签
|
||||
for unwrap_tag in soup.find_all(["a", "strong", "u", "ul", "font"]):
|
||||
match unwrap_tag.name:
|
||||
case "a" | "strong" | "u": # 展开所有的a,u和strong标签
|
||||
unwrap_tag.unwrap()
|
||||
case "ul" | "font": # 展开ul,font里的font标签
|
||||
for font_tag in unwrap_tag.find_all("font"):
|
||||
font_tag.unwrap()
|
||||
if unwrap_tag.name in ["a", "strong", "u"]: # 展开所有的a,u和strong标签
|
||||
unwrap_tag.unwrap()
|
||||
elif unwrap_tag.name in ["ul", "font"]: # 展开ul,font里的font标签
|
||||
for font_tag in unwrap_tag.find_all("font"):
|
||||
font_tag.unwrap()
|
||||
|
||||
# 获取所有的中文句子
|
||||
post_text = ""
|
||||
last_is_empty_line = True
|
||||
for element in soup.contents:
|
||||
if isinstance(element, Tag):
|
||||
match element.name:
|
||||
case "font":
|
||||
if element.name == "font":
|
||||
text = ""
|
||||
for sub in element.contents:
|
||||
if isinstance(sub, NavigableString):
|
||||
text += sub
|
||||
if self._check_str_chinese(text):
|
||||
post_text += "{}\n".format(self._format_text(text, 1))
|
||||
last_is_empty_line = False
|
||||
elif element.name == "ul":
|
||||
for li_tag in element.find_all("li"):
|
||||
text = ""
|
||||
for sub in element.contents:
|
||||
for sub in li_tag.contents:
|
||||
if isinstance(sub, NavigableString):
|
||||
text += sub
|
||||
if self._check_str_chinese(text):
|
||||
post_text += "{}\n".format(_format_text(text, 1))
|
||||
post_text += "{}\n".format(self._format_text(text, 1))
|
||||
last_is_empty_line = False
|
||||
case "ul":
|
||||
for li_tag in element.find_all("li"):
|
||||
text = ""
|
||||
for sub in li_tag.contents:
|
||||
if isinstance(sub, NavigableString):
|
||||
text += sub
|
||||
if self._check_str_chinese(text):
|
||||
post_text += "{}\n".format(_format_text(text, 1))
|
||||
last_is_empty_line = False
|
||||
case _:
|
||||
continue
|
||||
else:
|
||||
continue
|
||||
elif isinstance(element, NavigableString):
|
||||
if str(element) == "\n":
|
||||
if not last_is_empty_line:
|
||||
post_text += "\n"
|
||||
last_is_empty_line = True
|
||||
else:
|
||||
post_text += "{}\n".format(_format_text(element, 1))
|
||||
post_text += "{}\n".format(self._format_text(element, 1))
|
||||
last_is_empty_line = False
|
||||
else:
|
||||
continue
|
||||
@ -211,7 +241,8 @@ class McbbsNews(NewMessage):
|
||||
# 删除无用的span,div段内容
|
||||
for del_tag in soup.find_all("i"):
|
||||
del_tag.extract()
|
||||
soup.find(class_="attach_nopermission attach_tips").extract()
|
||||
if extag := soup.find(class_="attach_nopermission attach_tips"):
|
||||
extag.extract()
|
||||
# 展开所有的a,strong标签
|
||||
for unwrap_tag in soup.find_all(["a", "strong"]):
|
||||
unwrap_tag.unwrap()
|
||||
@ -229,10 +260,11 @@ class McbbsNews(NewMessage):
|
||||
else:
|
||||
for string in soup.stripped_strings:
|
||||
text += "{}\n".format(string)
|
||||
ftext = _format_text(text, 2)
|
||||
ftext = self._format_text(text, 2)
|
||||
return ftext, pic_urls
|
||||
|
||||
async def parse(self, raw_post: RawPost) -> Post:
|
||||
"""获取并分配正式推文交由相应的函数解析"""
|
||||
post_url = "https://www.mcbbs.net/{}".format(raw_post["url"])
|
||||
headers = {
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) "
|
||||
@ -242,20 +274,14 @@ class McbbsNews(NewMessage):
|
||||
async with httpx.AsyncClient() as client:
|
||||
html = await client.get(post_url, headers=headers)
|
||||
|
||||
match raw_post["category"]:
|
||||
case "Java版本资讯":
|
||||
# 事先删除不需要的尾部
|
||||
raw_text = re.sub(r"【本文排版借助了:[\s\S]*】", "", html.text)
|
||||
text, pic_urls = self._news_parser(raw_text, raw_post["category"])
|
||||
case "基岩版本资讯":
|
||||
raw_text = re.sub(r"【本文排版借助了:[\s\S]*】", "", html.text)
|
||||
text, pic_urls = self._news_parser(raw_text, raw_post["category"])
|
||||
case "快讯" | "基岩快讯" | "周边消息":
|
||||
text, pic_urls = self._express_parser(html.text, raw_post["category"])
|
||||
case _:
|
||||
raise CategoryNotSupport(
|
||||
"McbbsNews订阅暂不支持 `{}".format(raw_post["category"])
|
||||
)
|
||||
if raw_post["category"] in ["Java版本资讯", "基岩版本资讯"]:
|
||||
# 事先删除不需要的尾部
|
||||
raw_text = re.sub(r"【本文排版借助了:[\s\S]*】", "", html.text)
|
||||
text, pic_urls = self._news_parser(raw_text, raw_post["category"])
|
||||
elif raw_post["category"] in ["快讯", "基岩快讯", "周边消息"]:
|
||||
text, pic_urls = self._express_parser(html.text, raw_post["category"])
|
||||
else:
|
||||
raise CategoryNotSupport("McbbsNews订阅暂不支持 `{}".format(raw_post["category"]))
|
||||
|
||||
return Post(
|
||||
self.name,
|
||||
|
@ -3,10 +3,16 @@ from typing import Any, Optional
|
||||
|
||||
from ..post import Post
|
||||
from ..types import RawPost, Target
|
||||
from ..utils import http_client
|
||||
from ..utils import SchedulerConfig, http_client
|
||||
from .platform import NewMessage
|
||||
|
||||
|
||||
class NcmSchedConf(SchedulerConfig, name="music.163.com"):
|
||||
|
||||
schedule_type = "interval"
|
||||
schedule_setting = {"minutes": 1}
|
||||
|
||||
|
||||
class NcmArtist(NewMessage):
|
||||
|
||||
categories = {}
|
||||
@ -14,8 +20,7 @@ class NcmArtist(NewMessage):
|
||||
enable_tag = False
|
||||
enabled = True
|
||||
is_common = True
|
||||
schedule_type = "interval"
|
||||
schedule_kw = {"minutes": 1}
|
||||
scheduler_class = "music.163.com"
|
||||
name = "网易云-歌手"
|
||||
has_target = True
|
||||
parse_target_promot = "请输入歌手主页(包含数字ID)的链接"
|
||||
|
@ -14,8 +14,7 @@ class NcmRadio(NewMessage):
|
||||
enable_tag = False
|
||||
enabled = True
|
||||
is_common = False
|
||||
schedule_type = "interval"
|
||||
schedule_kw = {"minutes": 10}
|
||||
scheduler_class = "music.163.com"
|
||||
name = "网易云-电台"
|
||||
has_target = True
|
||||
parse_target_promot = "请输入主播电台主页(包含数字ID)的链接"
|
||||
|
@ -39,8 +39,7 @@ class RegistryABCMeta(RegistryMeta, ABC):
|
||||
|
||||
class Platform(metaclass=RegistryABCMeta, base=True):
|
||||
|
||||
schedule_type: Literal["date", "interval", "cron"]
|
||||
schedule_kw: dict
|
||||
scheduler_class: str
|
||||
is_common: bool
|
||||
enabled: bool
|
||||
name: str
|
||||
@ -136,9 +135,7 @@ class Platform(metaclass=RegistryABCMeta, base=True):
|
||||
self, target: Target, new_posts: list[RawPost], users: list[UserSubInfo]
|
||||
) -> list[tuple[User, list[Post]]]:
|
||||
res: list[tuple[User, list[Post]]] = []
|
||||
for user, category_getter, tag_getter in users:
|
||||
required_tags = tag_getter(target) if self.enable_tag else []
|
||||
cats = category_getter(target)
|
||||
for user, cats, required_tags in users:
|
||||
user_raw_post = await self.filter_user_custom(
|
||||
new_posts, cats, required_tags
|
||||
)
|
||||
@ -332,11 +329,11 @@ class NoTargetGroup(Platform, abstract=True):
|
||||
|
||||
def __init__(self, platform_list: list[Platform]):
|
||||
self.platform_list = platform_list
|
||||
self.platform_name = platform_list[0].platform_name
|
||||
name = self.DUMMY_STR
|
||||
self.categories = {}
|
||||
categories_keys = set()
|
||||
self.schedule_type = platform_list[0].schedule_type
|
||||
self.schedule_kw = platform_list[0].schedule_kw
|
||||
self.scheduler_class = platform_list[0].scheduler_class
|
||||
for platform in platform_list:
|
||||
if platform.has_target:
|
||||
raise RuntimeError(
|
||||
@ -355,10 +352,7 @@ class NoTargetGroup(Platform, abstract=True):
|
||||
)
|
||||
categories_keys |= platform_category_key_set
|
||||
self.categories.update(platform.categories)
|
||||
if (
|
||||
platform.schedule_kw != self.schedule_kw
|
||||
or platform.schedule_type != self.schedule_type
|
||||
):
|
||||
if platform.scheduler_class != self.scheduler_class:
|
||||
raise RuntimeError(
|
||||
"Platform scheduler for {} not fit".format(self.platform_name)
|
||||
)
|
||||
|
@ -6,10 +6,16 @@ from bs4 import BeautifulSoup as bs
|
||||
|
||||
from ..post import Post
|
||||
from ..types import RawPost, Target
|
||||
from ..utils import http_client
|
||||
from ..utils import SchedulerConfig, http_client
|
||||
from .platform import NewMessage
|
||||
|
||||
|
||||
class RssSchedConf(SchedulerConfig, name="rss"):
|
||||
|
||||
schedule_type = "interval"
|
||||
schedule_setting = {"seconds": 30}
|
||||
|
||||
|
||||
class Rss(NewMessage):
|
||||
|
||||
categories = {}
|
||||
@ -18,8 +24,7 @@ class Rss(NewMessage):
|
||||
name = "Rss"
|
||||
enabled = True
|
||||
is_common = True
|
||||
schedule_type = "interval"
|
||||
schedule_kw = {"seconds": 30}
|
||||
scheduler_class = "rss"
|
||||
has_target = True
|
||||
|
||||
async def get_target_name(self, target: Target) -> Optional[str]:
|
||||
|
@ -8,10 +8,15 @@ from nonebot.log import logger
|
||||
|
||||
from ..post import Post
|
||||
from ..types import *
|
||||
from ..utils import http_client
|
||||
from ..utils import SchedulerConfig, http_client
|
||||
from .platform import NewMessage
|
||||
|
||||
|
||||
class WeiboSchedConf(SchedulerConfig, name="weibo.com"):
|
||||
schedule_type = "interval"
|
||||
schedule_setting = {"seconds": 3}
|
||||
|
||||
|
||||
class Weibo(NewMessage):
|
||||
|
||||
categories = {
|
||||
@ -25,8 +30,7 @@ class Weibo(NewMessage):
|
||||
name = "新浪微博"
|
||||
enabled = True
|
||||
is_common = True
|
||||
schedule_type = "interval"
|
||||
schedule_kw = {"seconds": 3}
|
||||
scheduler_class = "weibo.com"
|
||||
has_target = True
|
||||
parse_target_promot = "请输入用户主页(包含数字UID)的链接"
|
||||
|
||||
|
1
src/plugins/nonebot_bison/scheduler/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
from .manager import *
|
31
src/plugins/nonebot_bison/scheduler/aps.py
Normal file
@ -0,0 +1,31 @@
|
||||
import logging
|
||||
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from nonebot.log import LoguruHandler
|
||||
|
||||
from ..plugin_config import plugin_config
|
||||
from ..send import do_send_msgs
|
||||
|
||||
aps = AsyncIOScheduler(timezone="Asia/Shanghai")
|
||||
|
||||
|
||||
class CustomLogHandler(LoguruHandler):
|
||||
def filter(self, record: logging.LogRecord):
|
||||
return record.msg != (
|
||||
'Execution of job "%s" '
|
||||
"skipped: maximum number of running instances reached (%d)"
|
||||
)
|
||||
|
||||
|
||||
if plugin_config.bison_use_queue:
|
||||
aps.add_job(do_send_msgs, "interval", seconds=0.3, coalesce=True)
|
||||
|
||||
aps_logger = logging.getLogger("apscheduler")
|
||||
aps_logger.setLevel(30)
|
||||
aps_logger.handlers.clear()
|
||||
aps_logger.addHandler(CustomLogHandler())
|
||||
|
||||
|
||||
def start_scheduler():
|
||||
aps.configure({"apscheduler.timezone": "Asia/Shanghai"})
|
||||
aps.start()
|
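Nothing in aps.py itself invokes start_scheduler(); it is expected to be called once when the bot starts, and that call happens elsewhere in the plugin, outside this diff. A minimal sketch of how such wiring could look with nonebot's driver startup hook (the import path is assumed for illustration):

    import nonebot

    from nonebot_bison.scheduler.aps import start_scheduler  # assumed import path

    driver = nonebot.get_driver()

    @driver.on_startup
    async def _start_aps():
        # start_scheduler() is synchronous; run it once the bot has started
        start_scheduler()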
43
src/plugins/nonebot_bison/scheduler/manager.py
Normal file
@ -0,0 +1,43 @@
|
||||
from nonebot.log import logger
|
||||
|
||||
from ..config import config
|
||||
from ..config.db_model import Target
|
||||
from ..platform import platform_manager
|
||||
from ..types import Target as T_Target
|
||||
from ..utils import SchedulerConfig
|
||||
from .scheduler import Scheduler
|
||||
|
||||
scheduler_dict: dict[str, Scheduler] = {}
|
||||
_schedule_class_dict: dict[str, list[Target]] = {}
|
||||
|
||||
|
||||
async def init_scheduler():
|
||||
for platform in platform_manager.values():
|
||||
scheduler_class = platform.scheduler_class
|
||||
platform_name = platform.platform_name
|
||||
targets = await config.get_platform_target(platform_name)
|
||||
if scheduler_class not in _schedule_class_dict:
|
||||
_schedule_class_dict[scheduler_class] = targets
|
||||
else:
|
||||
_schedule_class_dict[scheduler_class].extend(targets)
|
||||
for scheduler_class, target_list in _schedule_class_dict.items():
|
||||
schedulable_args = []
|
||||
for target in target_list:
|
||||
schedulable_args.append((target.platform_name, T_Target(target.target)))
|
||||
scheduler_dict[scheduler_class] = Scheduler(scheduler_class, schedulable_args)
|
||||
|
||||
|
||||
async def handle_insert_new_target(platform_name: str, target: T_Target):
|
||||
platform = platform_manager[platform_name]
|
||||
scheduler_obj = scheduler_dict[platform.scheduler_class]
|
||||
scheduler_obj.insert_new_schedulable(platform_name, target)
|
||||
|
||||
|
||||
async def handle_delete_target(platform_name: str, target: T_Target):
|
||||
platform = platform_manager[platform_name]
|
||||
scheduler_obj = scheduler_dict[platform.scheduler_class]
|
||||
scheduler_obj.delete_schedulable(platform_name, target)
|
||||
|
||||
|
||||
config.register_add_target_hook(handle_insert_new_target)
|
||||
config.register_delete_target_hook(handle_delete_target)
|
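The two hooks registered above are what keep the in-memory Scheduler objects in step with the subscription table after startup. A rough usage sketch of the manager API (the platform and target values are made up):

    from nonebot_bison.scheduler.manager import (
        handle_delete_target,
        handle_insert_new_target,
        init_scheduler,
    )
    from nonebot_bison.types import Target as T_Target

    async def demo():
        # build one Scheduler per SchedulerConfig name from the targets already stored
        await init_scheduler()
        # a newly added subscription ends up in the matching Scheduler via the add hook
        await handle_insert_new_target("weibo", T_Target("1234567890"))
        # and removing the last subscription for a target takes it back out
        await handle_delete_target("weibo", T_Target("1234567890"))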
@ -6,7 +6,7 @@ from nonebot import get_driver
|
||||
from nonebot.adapters.onebot.v11.bot import Bot
|
||||
from nonebot.log import LoguruHandler, logger
|
||||
|
||||
from .config import Config
|
||||
from .config import config
|
||||
from .platform import platform_manager
|
||||
from .plugin_config import plugin_config
|
||||
from .send import do_send_msgs, send_msgs
|
||||
@ -37,7 +37,6 @@ async def _start():
|
||||
|
||||
|
||||
async def fetch_and_send(target_type: str):
|
||||
config = Config()
|
||||
target = config.get_next_target(target_type)
|
||||
if not target:
|
||||
return
|
128
src/plugins/nonebot_bison/scheduler/scheduler.py
Normal file
@ -0,0 +1,128 @@
|
||||
from dataclasses import dataclass
|
||||
from typing import Optional
|
||||
|
||||
import nonebot
|
||||
from nonebot.adapters.onebot.v11.bot import Bot
|
||||
from nonebot.log import logger
|
||||
|
||||
from ..config import config
|
||||
from ..platform import platform_manager
|
||||
from ..platform.platform import Platform
|
||||
from ..send import send_msgs
|
||||
from ..types import Target
|
||||
from ..utils import SchedulerConfig
|
||||
from .aps import aps
|
||||
|
||||
|
||||
@dataclass
|
||||
class Schedulable:
|
||||
platform_name: str
|
||||
target: Target
|
||||
current_weight: int
|
||||
|
||||
|
||||
class Scheduler:
|
||||
|
||||
schedulable_list: list[Schedulable]
|
||||
|
||||
def __init__(self, name: str, schedulables: list[tuple[str, Target]]):
|
||||
conf = SchedulerConfig.registry.get(name)
|
||||
self.name = name
|
||||
if not conf:
|
||||
logger.error(f"scheduler config [{name}] not found, exiting")
|
||||
raise RuntimeError(f"{name} not found")
|
||||
self.scheduler_config = conf
|
||||
self.schedulable_list = []
|
||||
platform_name_set = set()
|
||||
for platform_name, target in schedulables:
|
||||
self.schedulable_list.append(
|
||||
Schedulable(
|
||||
platform_name=platform_name, target=target, current_weight=0
|
||||
)
|
||||
)
|
||||
platform_name_set.add(platform_name)
|
||||
self.platform_name_list = list(platform_name_set)
|
||||
self.pre_weight_val = 0 # 轮调度中“本轮”增加权重和的初值
|
||||
logger.info(
|
||||
f"register scheduler for {name} with {self.scheduler_config.schedule_type} {self.scheduler_config.schedule_setting}"
|
||||
)
|
||||
aps.add_job(
|
||||
self.exec_fetch,
|
||||
self.scheduler_config.schedule_type,
|
||||
**self.scheduler_config.schedule_setting,
|
||||
)
|
||||
|
||||
async def get_next_schedulable(self) -> Optional[Schedulable]:
|
||||
if not self.schedulable_list:
|
||||
return None
|
||||
cur_weight = await config.get_current_weight_val(self.platform_name_list)
|
||||
weight_sum = self.pre_weight_val
|
||||
self.pre_weight_val = 0
|
||||
cur_max_schedulable = None
|
||||
for schedulable in self.schedulable_list:
|
||||
schedulable.current_weight += cur_weight[
|
||||
f"{schedulable.platform_name}-{schedulable.target}"
|
||||
]
|
||||
weight_sum += cur_weight[
|
||||
f"{schedulable.platform_name}-{schedulable.target}"
|
||||
]
|
||||
if (
|
||||
not cur_max_schedulable
|
||||
or cur_max_schedulable.current_weight < schedulable.current_weight
|
||||
):
|
||||
cur_max_schedulable = schedulable
|
||||
assert cur_max_schedulable
|
||||
cur_max_schedulable.current_weight -= weight_sum
|
||||
return cur_max_schedulable
|
||||
|
||||
async def exec_fetch(self):
|
||||
if not (schedulable := await self.get_next_schedulable()):
|
||||
return
|
||||
logger.debug(
|
||||
f"scheduler {self.name} fetching next target: [{schedulable.platform_name}]{schedulable.target}"
|
||||
)
|
||||
send_userinfo_list = await config.get_platform_target_subscribers(
|
||||
schedulable.platform_name, schedulable.target
|
||||
)
|
||||
to_send = await platform_manager[schedulable.platform_name].do_fetch_new_post(
|
||||
schedulable.target, send_userinfo_list
|
||||
)
|
||||
if not to_send:
|
||||
return
|
||||
bot = nonebot.get_bot()
|
||||
assert isinstance(bot, Bot)
|
||||
for user, send_list in to_send:
|
||||
for send_post in send_list:
|
||||
logger.info("send to {}: {}".format(user, send_post))
|
||||
if not bot:
|
||||
logger.warning("no bot connected")
|
||||
else:
|
||||
await send_msgs(
|
||||
bot,
|
||||
user.user,
|
||||
user.user_type,
|
||||
await send_post.generate_messages(),
|
||||
)
|
||||
|
||||
def insert_new_schedulable(self, platform_name: str, target: Target):
|
||||
self.pre_weight_val += 1000
|
||||
self.schedulable_list.append(Schedulable(platform_name, target, 1000))
|
||||
logger.info(
|
||||
f"insert [{platform_name}]{target} to Schduler({self.scheduler_config.name})"
|
||||
)
|
||||
|
||||
def delete_schedulable(self, platform_name, target: Target):
|
||||
if not self.schedulable_list:
|
||||
return
|
||||
to_find_idx = None
|
||||
for idx, schedulable in enumerate(self.schedulable_list):
|
||||
if (
|
||||
schedulable.platform_name == platform_name
|
||||
and schedulable.target == target
|
||||
):
|
||||
to_find_idx = idx
|
||||
break
|
||||
if to_find_idx is not None:
|
||||
deleted_schedulable = self.schedulable_list.pop(to_find_idx)
|
||||
self.pre_weight_val -= deleted_schedulable.current_weight
|
||||
return
|
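get_next_schedulable above implements a smooth weighted round-robin: on every tick each target accumulates its configured weight, the target with the largest accumulated value is fetched, and that target is then docked by the total weight handed out during the tick, so higher-weight targets are fetched proportionally more often without starving the rest. A stripped-down, self-contained sketch of the same idea (weights are hard-coded here instead of coming from config.get_current_weight_val):

    from dataclasses import dataclass

    @dataclass
    class Item:
        name: str
        weight: int       # per-tick weight, stands in for the DB-configured value
        current: int = 0  # accumulated weight, same role as Schedulable.current_weight

    def pick(items: list[Item]) -> Item:
        total = 0
        best = None
        for item in items:
            item.current += item.weight
            total += item.weight
            if best is None or item.current > best.current:
                best = item
        best.current -= total  # dock the winner so the others catch up over time
        return best

    items = [Item("weibo-a", 20), Item("weibo-b", 10), Item("bilibili-c", 10)]
    print([pick(items).name for _ in range(8)])
    # "weibo-a" is fetched roughly twice as often as each of the other two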
@ -13,7 +13,14 @@ class User:
|
||||
user_type: Literal["group", "private"]
|
||||
|
||||
|
||||
@dataclass(eq=True, frozen=True)
|
||||
class PlatformTarget:
|
||||
target: str
|
||||
platform_name: str
|
||||
target_name: str
|
||||
|
||||
|
||||
class UserSubInfo(NamedTuple):
|
||||
user: User
|
||||
category_getter: Callable[[Target], list[Category]]
|
||||
tag_getter: Callable[[Target], list[Tag]]
|
||||
categories: list[Category]
|
||||
tags: list[Tag]
|
||||
|
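UserSubInfo now carries the already-resolved categories and tags instead of getter callables, which is what the scheduler reads from the config and hands to the platform filtering code above. A trivial construction example (values are illustrative):

    from nonebot_bison.types import User, UserSubInfo

    info = UserSubInfo(user=User(123, "group"), categories=[1, 2], tags=["tag"])
    user, categories, tags = info  # still a NamedTuple, so positional unpacking works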
@ -10,8 +10,9 @@ from nonebot.plugin import require
|
||||
|
||||
from ..plugin_config import plugin_config
|
||||
from .http import http_client
|
||||
from .scheduler_config import SchedulerConfig
|
||||
|
||||
__all__ = ["http_client", "Singleton", "parse_text", "html_to_text"]
|
||||
__all__ = ["http_client", "Singleton", "parse_text", "html_to_text", "SchedulerConfig"]
|
||||
|
||||
|
||||
class Singleton(type):
|
||||
|
17
src/plugins/nonebot_bison/utils/scheduler_config.py
Normal file
@ -0,0 +1,17 @@
|
||||
from typing import Literal, Type
|
||||
|
||||
|
||||
class SchedulerConfig:
|
||||
|
||||
schedule_type: Literal["date", "interval", "cron"]
|
||||
schedule_setting: dict
|
||||
registry: dict[str, Type["SchedulerConfig"]] = {}
|
||||
name: str
|
||||
|
||||
def __init_subclass__(cls, *, name, **kwargs):
|
||||
super().__init_subclass__(**kwargs)
|
||||
cls.registry[name] = cls
|
||||
cls.name = name
|
||||
|
||||
def __str__(self):
|
||||
return f"[{self.name}]-{self.name}-{self.schedule_setting}"
|
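SchedulerConfig doubles as a small registry: __init_subclass__ records every subclass under the name given at class-definition time, which is how Scheduler.__init__ resolves a platform's scheduler_class string. A minimal sketch (the "example.com" config is hypothetical):

    from nonebot_bison.utils import SchedulerConfig  # re-exported via utils.__all__

    class ExampleSchedConf(SchedulerConfig, name="example.com"):
        schedule_type = "interval"
        schedule_setting = {"seconds": 60}

    # defining the subclass was enough; it is now discoverable by name
    assert SchedulerConfig.registry["example.com"] is ExampleSchedConf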
@ -7,24 +7,24 @@ if typing.TYPE_CHECKING:
|
||||
import sys
|
||||
|
||||
sys.path.append("./src/plugins")
|
||||
import nonebot_bison
|
||||
from nonebot_bison.config import Config
|
||||
from nonebot_bison.config.config_legacy import Config
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def config(app: App):
|
||||
def config_legacy(app: App, use_legacy_config):
|
||||
from nonebot_bison import config
|
||||
from nonebot_bison.config import config_legacy as config
|
||||
|
||||
config.start_up()
|
||||
return config.Config()
|
||||
|
||||
|
||||
def test_create_and_get(config: "Config", app: App):
|
||||
def test_create_and_get(config_legacy: "Config", app: App):
|
||||
from nonebot_bison import types
|
||||
from nonebot_bison.types import Target
|
||||
|
||||
config.add_subscribe(
|
||||
user="123",
|
||||
config_legacy.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target="weibo_id",
|
||||
target_name="weibo_name",
|
||||
@ -32,14 +32,14 @@ def test_create_and_get(config: "Config", app: App):
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
confs = config.list_subscribe("123", "group")
|
||||
confs = config_legacy.list_subscribe(123, "group")
|
||||
assert len(confs) == 1
|
||||
assert config.target_user_cache["weibo"][Target("weibo_id")] == [
|
||||
types.User("123", "group")
|
||||
assert config_legacy.target_user_cache["weibo"][Target("weibo_id")] == [
|
||||
types.User(123, "group")
|
||||
]
|
||||
assert confs[0]["cats"] == []
|
||||
config.update_subscribe(
|
||||
user="123",
|
||||
config_legacy.update_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target="weibo_id",
|
||||
target_name="weibo_name",
|
||||
@ -47,6 +47,6 @@ def test_create_and_get(config: "Config", app: App):
|
||||
cats=["1"],
|
||||
tags=[],
|
||||
)
|
||||
confs = config.list_subscribe("123", "group")
|
||||
confs = config_legacy.list_subscribe(123, "group")
|
||||
assert len(confs) == 1
|
||||
assert confs[0]["cats"] == ["1"]
|
131
tests/config/test_config_operation.py
Normal file
@ -0,0 +1,131 @@
|
||||
from nonebug.app import App
|
||||
from sqlalchemy.ext.asyncio.session import AsyncSession
|
||||
from sqlalchemy.sql.functions import func
|
||||
from sqlmodel.sql.expression import select
|
||||
|
||||
|
||||
async def test_add_subscribe(app: App, init_scheduler):
|
||||
|
||||
from nonebot_bison.config.db_config import config
|
||||
from nonebot_bison.config.db_model import Subscribe, Target, User
|
||||
from nonebot_bison.types import Target as TTarget
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=234,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
confs = await config.list_subscribe(123, "group")
|
||||
assert len(confs) == 1
|
||||
conf: Subscribe = confs[0]
|
||||
async with AsyncSession(get_engine()) as sess:
|
||||
related_user_obj = await sess.scalar(
|
||||
select(User).where(User.id == conf.user_id)
|
||||
)
|
||||
related_target_obj = await sess.scalar(
|
||||
select(Target).where(Target.id == conf.target_id)
|
||||
)
|
||||
assert related_user_obj.uid == 123
|
||||
assert related_target_obj.target_name == "weibo_name"
|
||||
assert related_target_obj.target == "weibo_id"
|
||||
assert conf.target.target == "weibo_id"
|
||||
assert conf.categories == []
|
||||
|
||||
await config.update_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
platform_name="weibo",
|
||||
target_name="weibo_name2",
|
||||
cats=[1],
|
||||
tags=["tag"],
|
||||
)
|
||||
confs = await config.list_subscribe(123, "group")
|
||||
assert len(confs) == 1
|
||||
conf: Subscribe = confs[0]
|
||||
async with AsyncSession(get_engine()) as sess:
|
||||
related_user_obj = await sess.scalar(
|
||||
select(User).where(User.id == conf.user_id)
|
||||
)
|
||||
related_target_obj = await sess.scalar(
|
||||
select(Target).where(Target.id == conf.target_id)
|
||||
)
|
||||
assert related_user_obj.uid == 123
|
||||
assert related_target_obj.target_name == "weibo_name2"
|
||||
assert related_target_obj.target == "weibo_id"
|
||||
assert conf.target.target == "weibo_id"
|
||||
assert conf.categories == [1]
|
||||
assert conf.tags == ["tag"]
|
||||
|
||||
|
||||
async def test_del_subscribe(init_scheduler):
|
||||
from nonebot_bison.config.db_config import config
|
||||
from nonebot_bison.config.db_model import Subscribe, Target, User
|
||||
from nonebot_bison.types import Target as TTarget
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.del_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
platform_name="weibo",
|
||||
)
|
||||
async with AsyncSession(get_engine()) as sess:
|
||||
assert (await sess.scalar(select(func.count()).select_from(Subscribe))) == 0
|
||||
assert (await sess.scalar(select(func.count()).select_from(Target))) == 1
|
||||
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
|
||||
await config.add_subscribe(
|
||||
user=124,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
target_name="weibo_name_new",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
|
||||
await config.del_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=TTarget("weibo_id"),
|
||||
platform_name="weibo",
|
||||
)
|
||||
|
||||
async with AsyncSession(get_engine()) as sess:
|
||||
assert (await sess.scalar(select(func.count()).select_from(Subscribe))) == 1
|
||||
assert (await sess.scalar(select(func.count()).select_from(Target))) == 1
|
||||
target: Target = await sess.scalar(select(Target))
|
||||
assert target.target_name == "weibo_name_new"
|
56
tests/config/test_data_migration.py
Normal file
@ -0,0 +1,56 @@
|
||||
import pytest
|
||||
|
||||
|
||||
async def test_migration(use_legacy_config):
|
||||
from nonebot_bison.config.config_legacy import config as config_legacy
|
||||
from nonebot_bison.config.db import data_migrate, upgrade_db
|
||||
from nonebot_bison.config.db_config import config
|
||||
|
||||
config_legacy.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target="weibo_id",
|
||||
target_name="weibo_name",
|
||||
target_type="weibo",
|
||||
cats=[2, 3],
|
||||
tags=[],
|
||||
)
|
||||
config_legacy.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target="weibo_id2",
|
||||
target_name="weibo_name2",
|
||||
target_type="weibo",
|
||||
cats=[1, 2],
|
||||
tags=["tag"],
|
||||
)
|
||||
config_legacy.add_subscribe(
|
||||
user=234,
|
||||
user_type="group",
|
||||
target="weibo_id",
|
||||
target_name="weibo_name",
|
||||
target_type="weibo",
|
||||
cats=[1],
|
||||
tags=[],
|
||||
)
|
||||
# await data_migrate()
|
||||
await upgrade_db()
|
||||
user123_config = await config.list_subscribe(123, "group")
|
||||
assert len(user123_config) == 2
|
||||
for c in user123_config:
|
||||
if c.target.target == "weibo_id":
|
||||
assert c.categories == [2, 3]
|
||||
assert c.target.target_name == "weibo_name"
|
||||
assert c.target.platform_name == "weibo"
|
||||
assert c.tags == []
|
||||
elif c.target.target == "weibo_id2":
|
||||
assert c.categories == [1, 2]
|
||||
assert c.target.target_name == "weibo_name2"
|
||||
assert c.target.platform_name == "weibo"
|
||||
assert c.tags == ["tag"]
|
||||
user234_config = await config.list_subscribe(234, "group")
|
||||
assert len(user234_config) == 1
|
||||
assert user234_config[0].categories == [1]
|
||||
assert user234_config[0].target.target == "weibo_id"
|
||||
assert user234_config[0].target.target_name == "weibo_name"
|
||||
assert user234_config[0].tags == []
|
218
tests/config/test_scheduler_conf.py
Normal file
@ -0,0 +1,218 @@
|
||||
from datetime import time
|
||||
|
||||
from nonebug import App
|
||||
|
||||
|
||||
async def test_create_config(app: App, init_scheduler):
|
||||
from nonebot_bison.config.db_config import TimeWeightConfig, WeightConfig, config
|
||||
from nonebot_bison.config.db_model import Subscribe, Target, User
|
||||
from nonebot_bison.types import Target as T_Target
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id1"),
|
||||
target_name="weibo_name1",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.update_time_weight_config(
|
||||
target=T_Target("weibo_id"),
|
||||
platform_name="weibo",
|
||||
conf=WeightConfig(
|
||||
default=10,
|
||||
time_config=[
|
||||
TimeWeightConfig(start_time=time(1, 0), end_time=time(2, 0), weight=20)
|
||||
],
|
||||
),
|
||||
)
|
||||
|
||||
test_config = await config.get_time_weight_config(
|
||||
target=T_Target("weibo_id"), platform_name="weibo"
|
||||
)
|
||||
assert test_config.default == 10
|
||||
assert test_config.time_config == [
|
||||
TimeWeightConfig(start_time=time(1, 0), end_time=time(2, 0), weight=20)
|
||||
]
|
||||
test_config1 = await config.get_time_weight_config(
|
||||
target=T_Target("weibo_id1"), platform_name="weibo"
|
||||
)
|
||||
assert test_config1.default == 10
|
||||
assert test_config1.time_config == []
|
||||
|
||||
|
||||
async def test_get_current_weight(app: App, init_scheduler):
|
||||
from datetime import time
|
||||
|
||||
from nonebot_bison.config import db_config
|
||||
from nonebot_bison.config.db_config import TimeWeightConfig, WeightConfig, config
|
||||
from nonebot_bison.config.db_model import Subscribe, Target, User
|
||||
from nonebot_bison.types import Target as T_Target
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id1"),
|
||||
target_name="weibo_name1",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id1"),
|
||||
target_name="weibo_name2",
|
||||
platform_name="bilibili",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.update_time_weight_config(
|
||||
target=T_Target("weibo_id"),
|
||||
platform_name="weibo",
|
||||
conf=WeightConfig(
|
||||
default=10,
|
||||
time_config=[
|
||||
TimeWeightConfig(start_time=time(1, 0), end_time=time(2, 0), weight=20),
|
||||
TimeWeightConfig(start_time=time(4, 0), end_time=time(5, 0), weight=30),
|
||||
],
|
||||
),
|
||||
)
|
||||
app.monkeypatch.setattr(db_config, "_get_time", lambda: time(1, 30))
|
||||
weight = await config.get_current_weight_val(["weibo", "bilibili"])
|
||||
assert len(weight) == 3
|
||||
assert weight["weibo-weibo_id"] == 20
|
||||
assert weight["weibo-weibo_id1"] == 10
|
||||
assert weight["bilibili-weibo_id1"] == 10
|
||||
app.monkeypatch.setattr(db_config, "_get_time", lambda: time(4, 0))
|
||||
weight = await config.get_current_weight_val(["weibo", "bilibili"])
|
||||
assert len(weight) == 3
|
||||
assert weight["weibo-weibo_id"] == 30
|
||||
assert weight["weibo-weibo_id1"] == 10
|
||||
assert weight["bilibili-weibo_id1"] == 10
|
||||
app.monkeypatch.setattr(db_config, "_get_time", lambda: time(5, 0))
|
||||
weight = await config.get_current_weight_val(["weibo", "bilibili"])
|
||||
assert len(weight) == 3
|
||||
assert weight["weibo-weibo_id"] == 10
|
||||
assert weight["weibo-weibo_id1"] == 10
|
||||
assert weight["bilibili-weibo_id1"] == 10
|
||||
|
||||
|
||||
async def test_get_platform_target(app: App, init_scheduler):
|
||||
from nonebot_bison.config import db_config
|
||||
from nonebot_bison.config.db_config import TimeWeightConfig, WeightConfig, config
|
||||
from nonebot_bison.config.db_model import Subscribe, Target, User
|
||||
from nonebot_bison.types import Target as T_Target
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
from sqlalchemy.ext.asyncio.session import AsyncSession
|
||||
from sqlalchemy.sql.expression import select
|
||||
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id1"),
|
||||
target_name="weibo_name1",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=245,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id1"),
|
||||
target_name="weibo_name1",
|
||||
platform_name="weibo",
|
||||
cats=[],
|
||||
tags=[],
|
||||
)
|
||||
res = await config.get_platform_target("weibo")
|
||||
assert len(res) == 2
|
||||
await config.del_subscribe(123, "group", T_Target("weibo_id1"), "weibo")
|
||||
res = await config.get_platform_target("weibo")
|
||||
assert len(res) == 2
|
||||
await config.del_subscribe(123, "group", T_Target("weibo_id"), "weibo")
|
||||
res = await config.get_platform_target("weibo")
|
||||
assert len(res) == 1
|
||||
|
||||
async with AsyncSession(get_engine()) as sess:
|
||||
res = await sess.scalars(select(Target).where(Target.platform_name == "weibo"))
|
||||
assert len(res.all()) == 2
|
||||
|
||||
|
||||
async def test_get_platform_target_subscribers(app: App, init_scheduler):
|
||||
from nonebot_bison.config import db_config
|
||||
from nonebot_bison.config.db_config import TimeWeightConfig, WeightConfig, config
|
||||
from nonebot_bison.config.db_model import Subscribe, Target, User
|
||||
from nonebot_bison.types import Target as T_Target
|
||||
from nonebot_bison.types import User as T_User
|
||||
from nonebot_bison.types import UserSubInfo
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
from sqlalchemy.ext.asyncio.session import AsyncSession
|
||||
from sqlalchemy.sql.expression import select
|
||||
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id"),
|
||||
target_name="weibo_name",
|
||||
platform_name="weibo",
|
||||
cats=[1],
|
||||
tags=["tag1"],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=123,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id1"),
|
||||
target_name="weibo_name1",
|
||||
platform_name="weibo",
|
||||
cats=[2],
|
||||
tags=["tag2"],
|
||||
)
|
||||
await config.add_subscribe(
|
||||
user=245,
|
||||
user_type="group",
|
||||
target=T_Target("weibo_id1"),
|
||||
target_name="weibo_name1",
|
||||
platform_name="weibo",
|
||||
cats=[3],
|
||||
tags=["tag3"],
|
||||
)
|
||||
|
||||
res = await config.get_platform_target_subscribers("weibo", T_Target("weibo_id"))
|
||||
assert len(res) == 1
|
||||
assert res[0] == UserSubInfo(T_User(123, "group"), [1], ["tag1"])
|
||||
|
||||
res = await config.get_platform_target_subscribers("weibo", T_Target("weibo_id1"))
|
||||
assert len(res) == 2
|
||||
assert UserSubInfo(T_User(123, "group"), [2], ["tag2"]) in res
|
||||
assert UserSubInfo(T_User(245, "group"), [3], ["tag3"]) in res
|
@ -5,6 +5,8 @@ from pathlib import Path
|
||||
import nonebot
|
||||
import pytest
|
||||
from nonebug.app import App
|
||||
from sqlalchemy.ext.asyncio.session import AsyncSession
|
||||
from sqlalchemy.sql.expression import delete
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
@ -12,7 +14,10 @@ async def app(nonebug_init: None, tmp_path: Path, monkeypatch: pytest.MonkeyPatc
|
||||
import nonebot
|
||||
|
||||
config = nonebot.get_driver().config
|
||||
config.bison_config_path = str(tmp_path)
|
||||
config.bison_config_path = str(tmp_path / "legacy_config")
|
||||
config.datastore_config_dir = str(tmp_path / "config")
|
||||
config.datastore_cache_dir = str(tmp_path / "cache")
|
||||
config.datastore_data_dir = str(tmp_path / "data")
|
||||
config.command_start = {""}
|
||||
config.superusers = {"10001"}
|
||||
config.log_level = "TRACE"
|
||||
@ -25,23 +30,37 @@ def dummy_user_subinfo(app: App):
|
||||
from nonebot_bison.types import User, UserSubInfo
|
||||
|
||||
user = User(123, "group")
|
||||
return UserSubInfo(user=user, category_getter=lambda _: [], tag_getter=lambda _: [])
|
||||
return UserSubInfo(user=user, categories=[], tags=[])
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def task_watchdog(request):
|
||||
def cancel_test_on_exception(task: asyncio.Task):
|
||||
def maybe_cancel_clbk(t: asyncio.Task):
|
||||
exception = t.exception()
|
||||
if exception is None:
|
||||
return
|
||||
async def db_migration(app: App):
|
||||
from nonebot_bison.config.db import upgrade_db
|
||||
from nonebot_bison.config.db_model import Subscribe, Target, User
|
||||
from nonebot_plugin_datastore.db import get_engine
|
||||
|
||||
for task in asyncio.all_tasks():
|
||||
coro = task.get_coro()
|
||||
if coro.__qualname__ == request.function.__qualname__:
|
||||
task.cancel()
|
||||
return
|
||||
await upgrade_db()
|
||||
async with AsyncSession(get_engine()) as sess:
|
||||
await sess.execute(delete(User))
|
||||
await sess.execute(delete(Subscribe))
|
||||
await sess.execute(delete(Target))
|
||||
await sess.commit()
|
||||
await sess.close()
|
||||
|
||||
task.add_done_callback(maybe_cancel_clbk)
|
||||
|
||||
return cancel_test_on_exception
|
||||
@pytest.fixture
|
||||
async def init_scheduler(db_migration):
|
||||
from nonebot_bison.scheduler.manager import init_scheduler
|
||||
|
||||
await init_scheduler()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def use_legacy_config(app: App):
|
||||
import aiofiles
|
||||
from nonebot_bison.config.config_legacy import config, get_config_path
|
||||
|
||||
async with aiofiles.open(get_config_path(), "w") as f:
|
||||
await f.write("{}")
|
||||
|
||||
config._do_init()
|
||||
|
218
tests/platforms/static/mcbbsnews/mcbbsnews_raw_post_list.json
Normal file
@ -0,0 +1,218 @@
|
||||
[
|
||||
{
|
||||
"url": "thread-1340080-1-1.html",
|
||||
"title": "Mojang Status:服务器出现一些小问题",
|
||||
"category": "快讯",
|
||||
"author": "DreamVoid",
|
||||
"id": "normalthread_1340080",
|
||||
"date": 1652630400
|
||||
},
|
||||
{
|
||||
"url": "thread-1339940-1-1.html",
|
||||
"title": "kinbdogz 就近期荒野更新的风波发表看法",
|
||||
"category": "快讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1339940",
|
||||
"date": 1652630400
|
||||
},
|
||||
{
|
||||
"url": "thread-1339097-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.33 发布(仅 Switch)",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "电量量",
|
||||
"id": "normalthread_1339097",
|
||||
"date": 1652457600
|
||||
},
|
||||
{
|
||||
"url": "thread-1338607-1-1.html",
|
||||
"title": "Minecraft Java版 22w19a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "寂华",
|
||||
"id": "normalthread_1338607",
|
||||
"date": 1652371200
|
||||
},
|
||||
{
|
||||
"url": "thread-1338592-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.32/33 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "苦力怕553",
|
||||
"id": "normalthread_1338592",
|
||||
"date": 1652371200
|
||||
},
|
||||
{
|
||||
"url": "thread-1338588-1-1.html",
|
||||
"title": "请给我们一个真正的“荒野更新”",
|
||||
"category": "时评",
|
||||
"author": "斯乌",
|
||||
"id": "normalthread_1338588",
|
||||
"date": 1652371200
|
||||
},
|
||||
{
|
||||
"url": "thread-1338496-1-1.html",
|
||||
"title": "slicedlime:周三无快照,推迟至周四",
|
||||
"category": "快讯",
|
||||
"author": "橄榄Chan",
|
||||
"id": "normalthread_1338496",
|
||||
"date": 1652198400
|
||||
},
|
||||
{
|
||||
"url": "thread-1336371-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.32 发布(仅 Android、NS)【新增 NS 平台】",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "电量量",
|
||||
"id": "normalthread_1336371",
|
||||
"date": 1651766400
|
||||
},
|
||||
{
|
||||
"url": "thread-1335897-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.30/31 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "AzureZeng",
|
||||
"id": "normalthread_1335897",
|
||||
"date": 1651680000
|
||||
},
|
||||
{
|
||||
"url": "thread-1335891-1-1.html",
|
||||
"title": "Minecraft Java版 22w18a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "Aurora_Feather",
|
||||
"id": "normalthread_1335891",
|
||||
"date": 1651680000
|
||||
},
|
||||
{
|
||||
"url": "thread-1333196-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.28/29 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1333196",
|
||||
"date": 1651161600
|
||||
},
|
||||
{
|
||||
"url": "thread-1332834-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.31 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1332834",
|
||||
"date": 1651075200
|
||||
},
|
||||
{
|
||||
"url": "thread-1332811-1-1.html",
|
||||
"title": "Minecraft Java版 22w17a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1332811",
|
||||
"date": 1651075200
|
||||
},
|
||||
{
|
||||
"url": "thread-1332424-1-1.html",
|
||||
"title": "Mojang Status:正在寻找1.18.30更新问题的解决方案",
|
||||
"category": "基岩快讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1332424",
|
||||
"date": 1650988800
|
||||
},
|
||||
{
|
||||
"url": "thread-1329712-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.26/27 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1329712",
|
||||
"date": 1650470400
|
||||
},
|
||||
{
|
||||
"url": "thread-1329651-1-1.html",
|
||||
"title": "Minecraft Java版 22w16b 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1329651",
|
||||
"date": 1650470400
|
||||
},
|
||||
{
|
||||
"url": "thread-1329644-1-1.html",
|
||||
"title": "Minecraft Java版 22w16a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1329644",
|
||||
"date": 1650470400
|
||||
},
|
||||
{
|
||||
"url": "thread-1329335-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.30 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1329335",
|
||||
"date": 1650384000
|
||||
},
|
||||
{
|
||||
"url": "thread-1328892-1-1.html",
|
||||
"title": "“海王” 杰森·莫玛 有望主演《我的世界》大电影",
|
||||
"category": "快讯",
|
||||
"author": "广药",
|
||||
"id": "normalthread_1328892",
|
||||
"date": 1650297600
|
||||
},
|
||||
{
|
||||
"url": "thread-1327089-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.24/25 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1327089",
|
||||
"date": 1649952000
|
||||
},
|
||||
{
|
||||
"url": "thread-1326640-1-1.html",
|
||||
"title": "Minecraft Java版 22w15a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1326640",
|
||||
"date": 1649865600
|
||||
},
|
||||
{
|
||||
"url": "thread-1323762-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.20 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1323762",
|
||||
"date": 1649260800
|
||||
},
|
||||
{
|
||||
"url": "thread-1323662-1-1.html",
|
||||
"title": "Minecraft Java版 22w14a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1323662",
|
||||
"date": 1649260800
|
||||
},
|
||||
{
|
||||
"url": "thread-1321419-1-1.html",
|
||||
"title": "[愚人节] Minecraft Java版 22w13oneBlockAtATime 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1321419",
|
||||
"date": 1648742400
|
||||
},
|
||||
{
|
||||
"url": "thread-1320986-1-1.html",
|
||||
"title": "Minecraft:近期没有为主机平台添加光线追踪的计划",
|
||||
"category": "基岩快讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1320986",
|
||||
"date": 1648742400
|
||||
},
|
||||
{
|
||||
"url": "thread-1320931-1-1.html",
|
||||
"title": "Minecraft Java版 22w13a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1320931",
|
||||
"date": 1648742400
|
||||
},
|
||||
{
|
||||
"url": "thread-1342236-1-1.html",
|
||||
"title": "Minecraft: 加入Microsoft Rewards赢取限量Xbox Series S",
|
||||
"category": "周边消息",
|
||||
"author": "ETW_Derp",
|
||||
"id": "normalthread_1342236",
|
||||
"date": 1648742400
|
||||
}
|
||||
]
|
@ -0,0 +1,218 @@
|
||||
[
|
||||
{
|
||||
"url": "thread-1340927-1-1.html",
|
||||
"title": "Minecraft Java版 1.19-pre1 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1340927",
|
||||
"date": 1652889600
|
||||
},
|
||||
{
|
||||
"url": "thread-1340080-1-1.html",
|
||||
"title": "Mojang Status:服务器出现一些小问题",
|
||||
"category": "快讯",
|
||||
"author": "DreamVoid",
|
||||
"id": "normalthread_1340080",
|
||||
"date": 1652630400
|
||||
},
|
||||
{
|
||||
"url": "thread-1339940-1-1.html",
|
||||
"title": "kinbdogz 就近期荒野更新的风波发表看法",
|
||||
"category": "快讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1339940",
|
||||
"date": 1652630400
|
||||
},
|
||||
{
|
||||
"url": "thread-1339097-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.33 发布(仅 Switch)",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "电量量",
|
||||
"id": "normalthread_1339097",
|
||||
"date": 1652457600
|
||||
},
|
||||
{
|
||||
"url": "thread-1338607-1-1.html",
|
||||
"title": "Minecraft Java版 22w19a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "寂华",
|
||||
"id": "normalthread_1338607",
|
||||
"date": 1652371200
|
||||
},
|
||||
{
|
||||
"url": "thread-1338592-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.32/33 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "苦力怕553",
|
||||
"id": "normalthread_1338592",
|
||||
"date": 1652371200
|
||||
},
|
||||
{
|
||||
"url": "thread-1338588-1-1.html",
|
||||
"title": "请给我们一个真正的“荒野更新”",
|
||||
"category": "时评",
|
||||
"author": "斯乌",
|
||||
"id": "normalthread_1338588",
|
||||
"date": 1652371200
|
||||
},
|
||||
{
|
||||
"url": "thread-1338496-1-1.html",
|
||||
"title": "slicedlime:周三无快照,推迟至周四",
|
||||
"category": "快讯",
|
||||
"author": "橄榄Chan",
|
||||
"id": "normalthread_1338496",
|
||||
"date": 1652198400
|
||||
},
|
||||
{
|
||||
"url": "thread-1336371-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.32 发布(仅 Android、NS)【新增 NS 平台】",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "电量量",
|
||||
"id": "normalthread_1336371",
|
||||
"date": 1651766400
|
||||
},
|
||||
{
|
||||
"url": "thread-1335897-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.30/31 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "AzureZeng",
|
||||
"id": "normalthread_1335897",
|
||||
"date": 1651680000
|
||||
},
|
||||
{
|
||||
"url": "thread-1335891-1-1.html",
|
||||
"title": "Minecraft Java版 22w18a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "Aurora_Feather",
|
||||
"id": "normalthread_1335891",
|
||||
"date": 1651680000
|
||||
},
|
||||
{
|
||||
"url": "thread-1333196-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.28/29 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1333196",
|
||||
"date": 1651161600
|
||||
},
|
||||
{
|
||||
"url": "thread-1332834-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.31 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1332834",
|
||||
"date": 1651075200
|
||||
},
|
||||
{
|
||||
"url": "thread-1332811-1-1.html",
|
||||
"title": "Minecraft Java版 22w17a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1332811",
|
||||
"date": 1651075200
|
||||
},
|
||||
{
|
||||
"url": "thread-1332424-1-1.html",
|
||||
"title": "Mojang Status:正在寻找1.18.30更新问题的解决方案",
|
||||
"category": "基岩快讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1332424",
|
||||
"date": 1650988800
|
||||
},
|
||||
{
|
||||
"url": "thread-1329712-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.26/27 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1329712",
|
||||
"date": 1650470400
|
||||
},
|
||||
{
|
||||
"url": "thread-1329651-1-1.html",
|
||||
"title": "Minecraft Java版 22w16b 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1329651",
|
||||
"date": 1650470400
|
||||
},
|
||||
{
|
||||
"url": "thread-1329644-1-1.html",
|
||||
"title": "Minecraft Java版 22w16a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1329644",
|
||||
"date": 1650470400
|
||||
},
|
||||
{
|
||||
"url": "thread-1329335-1-1.html",
|
||||
"title": "Minecraft 基岩版 1.18.30 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1329335",
|
||||
"date": 1650384000
|
||||
},
|
||||
{
|
||||
"url": "thread-1328892-1-1.html",
|
||||
"title": "“海王” 杰森·莫玛 有望主演《我的世界》大电影",
|
||||
"category": "快讯",
|
||||
"author": "广药",
|
||||
"id": "normalthread_1328892",
|
||||
"date": 1650297600
|
||||
},
|
||||
{
|
||||
"url": "thread-1327089-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.24/25 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1327089",
|
||||
"date": 1649952000
|
||||
},
|
||||
{
|
||||
"url": "thread-1326640-1-1.html",
|
||||
"title": "Minecraft Java版 22w15a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1326640",
|
||||
"date": 1649865600
|
||||
},
|
||||
{
|
||||
"url": "thread-1323762-1-1.html",
|
||||
"title": "Minecraft 基岩版 Beta & Preview 1.19.0.20 发布",
|
||||
"category": "基岩版本资讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1323762",
|
||||
"date": 1649260800
|
||||
},
|
||||
{
|
||||
"url": "thread-1323662-1-1.html",
|
||||
"title": "Minecraft Java版 22w14a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1323662",
|
||||
"date": 1649260800
|
||||
},
|
||||
{
|
||||
"url": "thread-1321419-1-1.html",
|
||||
"title": "[愚人节] Minecraft Java版 22w13oneBlockAtATime 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "希铁石z",
|
||||
"id": "normalthread_1321419",
|
||||
"date": 1648742400
|
||||
},
|
||||
{
|
||||
"url": "thread-1320986-1-1.html",
|
||||
"title": "Minecraft:近期没有为主机平台添加光线追踪的计划",
|
||||
"category": "基岩快讯",
|
||||
"author": "ArmorRush",
|
||||
"id": "normalthread_1320986",
|
||||
"date": 1648742400
|
||||
},
|
||||
{
|
||||
"url": "thread-1320931-1-1.html",
|
||||
"title": "Minecraft Java版 22w13a 发布",
|
||||
"category": "Java版本资讯",
|
||||
"author": "卡狗",
|
||||
"id": "normalthread_1320931",
|
||||
"date": 1648742400
|
||||
}
|
||||
]
|
4406
tests/platforms/static/mcbbsnews/mock/mcbbsnews_bedrock_express.html
Normal file
File diff suppressed because one or more lines are too long
4112
tests/platforms/static/mcbbsnews/mock/mcbbsnews_bedrocknews.html
Normal file
File diff suppressed because one or more lines are too long
1437
tests/platforms/static/mcbbsnews/mock/mcbbsnews_java_express.html
Normal file
File diff suppressed because one or more lines are too long
6724
tests/platforms/static/mcbbsnews/mock/mcbbsnews_javanews.html
Normal file
File diff suppressed because one or more lines are too long
2354
tests/platforms/static/mcbbsnews/mock/mcbbsnews_merch.html
Normal file
File diff suppressed because one or more lines are too long
7429
tests/platforms/static/mcbbsnews/mock/mcbbsnews_new_post_html.html
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,12 @@
|
||||
Mojang Status:正在寻找1.18.30更新问题的解决方案
|
||||
|
||||
Mojangstatus
|
||||
@Mojangstatus
|
||||
We are aware that the 1.18.30 update caused issues for some Bedrock players. We are actively looking into solutions and hope to have solutions out soon. Thank you for your patience! jhp
|
||||
由 ArmorRush 翻译自 英文
|
||||
我们注意到1.18.30版本的更新导致了一些基岩版玩家出现了(游戏中的)问题。我们正在积极寻找解决方案,并希望能尽快解决问题。感谢您的耐心等待! jhp
|
||||
Twitter
|
||||
· SPXX
|
||||
2022年
|
||||
4月27日
|
||||
上午 7:41 · HipChat Villager
|
@ -0,0 +1,68 @@
|
||||
Minecraft 基岩版 Beta & Preview 1.19.0.32/33 发布
|
||||
|
||||
这里便是本周测试版的新内容啦!一如往常的,请搜索你能发现的bug,在
|
||||
|
||||
报告给我们,并在
|
||||
|
||||
这里
|
||||
|
||||
留下你的反馈。
|
||||
|
||||
MinecraftBeta
|
||||
|
||||
Windows平台上的MinecraftBeta将要退出历史舞台了!如果想要继续体验我们先行版本上的新特性的话,你将需要安装MinecraftPreview版本。你可以在这里
|
||||
|
||||
取得更多详细信息。
|
||||
|
||||
特性和漏洞修复
|
||||
|
||||
悦灵
|
||||
|
||||
现在悦灵不会随其主人一同被传送到下界了。这个更改将暂时保留至我们修复悦灵在传送至其他维度后卡进方块憋死的bug。(MCPE-155678)
|
||||
|
||||
方块
|
||||
|
||||
与自然生成的相对应,非自然生成的幽匿尖啸体现在将在多次激活间间隔一定的冷却时间(MCPE-153944)
|
||||
使用精准采集破坏时,幽匿尖啸体和幽匿感测体将不再掉落经验值(MCPE-153359,MCPE-153965)
|
||||
增加了破坏强化深板岩所需要的时间,并使其与所使用的工具相独立,以更好地与Java版相匹配(MCPE-154097)
|
||||
幽匿块现在无法通过火和灵魂火传播了
|
||||
|
||||
红树沼泽
|
||||
|
||||
红树现在将在负Y维度的位置正确生长(MCPE-154983)
|
||||
|
||||
图像
|
||||
|
||||
修复了Android平台上图像崩坏的问题(MCPE-155509)
|
||||
修复了多次使用表情符号引起的视觉故障问题(MCPE-155049)
|
||||
|
||||
移动
|
||||
|
||||
使用移动预测的Actor现在将再次被平滑传送
|
||||
|
||||
稳定性和性能
|
||||
|
||||
优化了某些Android设备上的游戏性能(MCPE-142934)
|
||||
修复了一个尝试渲染依靠生物群系数据着色的方块时可能引起的崩溃
|
||||
|
||||
用户界面
|
||||
|
||||
现在按下Shift键并点击物品将再次能够将相同物品合并进同一槽位了(MCPE-153992)
|
||||
添加了修改通知持续时间的设置项
|
||||
|
||||
技术性更新
|
||||
|
||||
方块
|
||||
|
||||
修复了复制一个命令方块到另一个后,需要再次切换红石信号才能使其生效的问题
|
||||
|
||||
Gametest框架(实验性游戏内容)
|
||||
|
||||
专用服务器已经更新,现在允许服务器在运行脚本时显式列出他们想要加载的脚本模块。默认配置文件位于/config/default/permissions.json。如果没有这个新文件,默认情况下将禁用所有脚本模块
|
||||
|
||||
【苦力怕553译自
|
||||
|
||||
feedback.minecraft.net2022年5月12日发布的MinecraftBeta&Preview-1.19.0.32/33
|
||||
|
||||
】
|
||||
|
@ -0,0 +1,14 @@
|
||||
Mojang Status:服务器出现一些小问题
|
||||
|
||||
Mojang Status
|
||||
@MojangStatus
|
||||
Our services have returned to normal operations. Thank you for your patience. - Martin
|
||||
由 DreamVoid 翻译自英语
|
||||
我们的服务已恢复正常,感谢你的耐心等待。——Martin
|
||||
下午7:43 · 2022年5月16日 · HipChat Villager · SPX
|
||||
Mojang Status
|
||||
@MojangStatus
|
||||
Some of our services are having issues with increased response times. We are looking into the issue. - Martin
|
||||
由 DreamVoid 翻译自英语
|
||||
我们的一些服务存在响应时间过长的问题。我们正在调查这个问题。——Martin
|
||||
下午7:01 · 2022年5月16日 · Twitter Web App · SPX
|
@ -0,0 +1,68 @@
|
||||
Minecraft Java版 22w19a 发布
|
||||
|
||||
稀有的周四快照!除修复了一些错误以及对一些花里胡哨的标签和命令进行了更改以外,我们还引入了“聊天预览”作为对服务器动态样式的聊天消息进行加密的一种方式。针对这个快照,我们在
|
||||
中保留了测试选项
|
||||
,该选项可以用于测试的
|
||||
选项一起设置为
|
||||
。如果你对此感兴趣,尤其是如果你在开服务器的话,我们希望您对此提供反馈
|
||||
玩的愉快!
|
||||
|
||||
22W19A的修改内容
|
||||
|
||||
监守者和铁傀儡现在只能在固体方块上生成
|
||||
|
||||
22W19A的技术性修改
|
||||
|
||||
服务器现在可以启用聊天预览,这会在聊天框上显示一个受服务器控制的预览
|
||||
对locate和place命令的更改
|
||||
PointofInteresttagsCHATPREVIEW聊天预览COMMANDS命令PLACETEMPLATEPLACE模板Theplacecommandcannowalsoplacetemplatesatagivenlocation.Syntax:placetemplate<template>[pos][rotation][mirror][integrity][-seed](seed前面没有-,加-只是因为不加会变成论坛表情)place命令现在还可以将模板放置在指定位置。用法:placetemplate<template>[pos][rotation][mirror][integrity][-seed]Parameters:参数:POINTOFINTERESTTYPES兴趣点类型FIXEDBUGSIN22W19A22W19A修复的漏洞【寂华、满床迷离译自官网2022年05月12日发布的MinecraftSnapshot22w19a;原作者AdrianÖstergård】
|
||||
服务器现在可以在server.properties中设置previews-chat=true来启用聊天预览
|
||||
当它启动后,聊天框上将出现一个受服务器控制的预览界面,显示消息发送时的样子
|
||||
服务器里可以使用这个功能来预览消息,例如表情和彩色聊天
|
||||
聊天预览会在你输入聊天消息时,甚至是发送之前将其发送给服务器
|
||||
然后服务器将实时返回带样式的预览
|
||||
这允许服务器使用动态消息样式,同时仍允许对聊天进行安全签名
|
||||
当你使用聊天预览进入服务器的时候,客户端上将显示一个警告窗口,但你可以在“聊天设置”中完全禁用这个窗口
|
||||
动态聊天的的样式可以有服务器决定,这只在启用聊天预览启动后才会生效
|
||||
玩家可以在“聊天设置”中启用“仅显示已签名的聊天”来始终显示原始签名的消息
|
||||
添加了用于place命令的模板子命令
|
||||
locate命令移动到了locatestructure,locatebiome移动到了locatebiome
|
||||
添加了locatepoi<type:point_of_interest_type>
|
||||
服务器现在还将在玩家连接后发送一个额外的图标和MOTD数据包
|
||||
这允许设置enable-status=false的服务器给已上线的玩家设置图标和MOTD
|
||||
placetemplate现在的使用方式类似于在UI中使用结构方块的加载按钮
|
||||
template:需要加载和放置的模板(“结构方块文件”)命名空间ID
|
||||
rotation:需要应用的旋转参数(如果省略,则不会选择)
|
||||
mirror:需要应用的镜像参数(如果省略,则不会镜像)
|
||||
integrity:结构完整性介于0和1之间
|
||||
seed:当结构完整性小于1时用于随机补全的种子(?)
|
||||
移除了unemployed和nitwitpoint_of_interest_type标签
|
||||
为所有没有职业的村民添加了point_of_interest_type/acquirable_job_site标签
|
||||
为村庄中的兴趣点添加了point_of_interest_type/village标签
|
||||
为蜜蜂的兴趣点添加了point_of_interest_type/bee_home标签
|
||||
MC-197647-如果有一个方块在头顶,在按住shift键时,玩家无法从方块边缘跳下
|
||||
MC-231600-在被红石充能的大型垂滴叶旁边时,幽匿感测体持续收到震动
|
||||
MC-249130-蝌蚪会在邻近的方块内部孵化,导致他们窒息死亡
|
||||
MC-249161-在睡莲下方时,青蛙会频繁地被卡住
|
||||
MC-249634-监守者被分散注意力后仍会进行闻嗅动作
|
||||
MC-249664-监守者在远离之后会被刷新掉
|
||||
MC-249801-废弃矿井可以分割古代城市
|
||||
MC-249888-监守者在被火球击中时不会被激怒
|
||||
MC-249910-监守者的“迫近”音效未被使用
|
||||
MC-249966-监守者可能停止追逐一个刚刚咆哮过的目标
|
||||
MC-250172-监守者在发射音波时不会转向
|
||||
MC-250233-通过刷怪蛋召唤的监守者会突然丢失AI
|
||||
MC-250255-监守者的音波不会伤害末影龙,只会推开他
|
||||
MC-250272-在方块中生成的监守者没有碰撞箱
|
||||
MC-250353-监守者无法像其他怪物一样在一层雪上生成
|
||||
MC-250357-幽匿感测体和监守者会探测到玩家举起盾牌的动作
|
||||
MC-250948-监守者的攻击范围不会被游戏难度影响
|
||||
MC-250966-监守者的声波造成的死亡不算监守者的击杀
|
||||
MC-251029-监守者会停下并且取消与玩家的敌对状态
|
||||
MC-251263-在打开一个单人游戏时,会显示“Invalidsignatureforprofilepublickey”
|
||||
MC-251316-游戏会在加载含有拼图方块的的区块时会崩溃
|
||||
MC-251321-在生成时,监守者可以被爆炸推开
|
||||
MC-251350-执行/give@sgoat_horn会给予玩家一个没有属性的山羊角
|
||||
MC-251396java.lang.IllegalArgumentException:名字和身份识别号不可以同时是空的
|
||||
MC-251464-中立生物在被监守者的声波打中时,他们不会因为恐慌而逃逸
|
||||
|
@ -0,0 +1,78 @@
Minecraft Java Edition 1.19-pre1 released

The first pre-release for 1.19: The Wild Update is out!
From here on, the changes should all be bug fixes. Pre-releases therefore don't follow the usual Wednesday snapshot schedule, so keep an eye out for news on the following pre-releases ;)
As always, we sincerely thank the community for the feedback, bug reports and great ideas raised for the snapshots. Get ready for the pre-releases!

CHANGES IN 1.19-PRE1

Slightly decreased the number of mangrove trees generating in mangrove swamps
Endermen, skeletons, wither skeletons and piglins now spawn in the Nether across a wider range of light levels (from light level 0 to 11)
Interacting with items now emits vibrations when starting or finishing "using" an item (for example bows, crossbows, goat horns, shields and food)
Item interactions no longer emit vibrations while sneaking
Equipping non-armor items (such as pumpkins and heads) in the armor slots now has its own equip sound

TECHNICAL CHANGES IN 1.19-PRE1

Auto-completion is now available for the template argument of place template
Custom servers can now enable or disable chat preview for specific clients by sending a new network packet
Chat preview is now also shown for chat-related commands, such as /say and /msg
test-rainbow-chat has been removed from server.properties

ADDED GAME EVENTS

note_block_play, with vibration frequency 6
instrument_play, with vibration frequency 15

FIXED BUGS IN 1.19-PRE1

MC-94060 - No sound plays when equipping armor/elytra via the inventory or a dispenser
MC-134892 - PacketBuffer.writeString checks the maximum length in bytes, while readString checks it against the string length
MC-209222 - Trying to open the Minecraft Realms menu claims the client is outdated, even when the snapshot is newer than the release
MC-210279 - Sculk sensors aren't activated when a spawner spawns entities
MC-213915 - Equipping armor via the inventory isn't counted as a vibration
MC-218222 - The sculk sensor's distance value is limited to integers, so some values are never output
MC-225195 - Goats don't panic while being tempted with their favorite food
MC-230735 - The "FOV Effects" description in the settings is inaccurate
MC-249141 - Frogs walking has no matching subtitle
MC-249164 - The sound name entity.frog.tounge is misspelled
MC-249209 - Frogs don't panic while being tempted with their favorite food
MC-249260 - Tadpoles aren't attracted by slimeballs
MC-249328 - Frogs jump around while being tempted with slimeballs
MC-249456 - Unlike other baby mobs, tadpoles drop experience when they die
MC-249619 - A sculk sensor with an entity resting directly on top of it outputs a redstone signal with the strength of the last sound it detected
MC-249711 - Items picked up from the ground by an allay fly to a position higher than the allay's collision box
MC-249757 - The "It Spreads" advancement isn't a child of "Monster Hunter"
MC-249834 - Swapping items with the player's offhand emits vibrations
MC-249980 - The description of the "Birthday Song" advancement has incorrect capitalization
MC-250006 - The British Shorthair cat's texture name doesn't match its ID
MC-250019 - Sculk catalysts are triggered when a villager is converted into a zombie villager by a zombie
MC-250317 - Collecting a tadpole with a bucket uses the generic "Bucket fills" subtitle
MC-250351 - /tp "arguments" are duplicated in the tab-completion options
MC-250919 - The server crashes when trying to load chunks containing command blocks whose previous-output field is made up of a huge number of characters
MC-250932 - The goat horn subtitle isn't capitalized correctly
MC-250940 - Using a goat horn isn't detected as a vibration
MC-251132 - "Game test server" messages appear in the server log
MC-251312 - Entity selectors in /say commands are no longer evaluated
MC-251355 - The potted mangrove propagule model is incorrect
MC-251405 - Structure block messages are formatted like chat messages
MC-251479 - Duplicate key-value pairs appear in the language files
MC-251550 - The game can't be launched on 32-bit operating systems
MC-251640 - io.netty.handler.codec.EncoderException is thrown when using special characters in chat messages
MC-251641 - Game crash related to warden anger
MC-251647 - The chat closes automatically if the key binding for opening chat is set to Enter
MC-251649 - Clicking the "Incomplete command" hint removes the slash from the input box
MC-251650 - Iron golems can spawn on leaves, glass, sea lanterns and other non-spawnable blocks
MC-251652 - The warden's emerging/roaring/charging/digging animations don't start unless the player has looked at it first
MC-251656 - Unlike /msg, /say fails to apply the server message format when run from a command block, the server console or RCON
MC-251690 - Wardens can spawn on any non-full solid block
MC-251736 - Ghast fireballs can't hit ghasts after being deflected
MC-251762 - Commands can also be executed when prefixed with two slashes
MC-251773 - The data generator's --dev argument no longer correctly converts NBT to SNBT

[Translated by 希铁石z from Minecraft 1.19 Pre-Release 1, published on minecraft.net on May 18, 2022; original author Adrian Östergård]
@ -0,0 +1,16 @@
Minecraft: Join Microsoft Rewards for a chance to win a limited-edition Xbox Series S

Minecraft
@Minecraft
Here's one warden you'll want to awaken...

Join Microsoft Rewards and get a chance to win this exclusive Deep Dark Minecraft Xbox Series S!
https://www.microsoft.com/en-us/rewards/minecraft-xbox-series-s-sweeps?rtc=1&ocid=Wild_Update_soc_omc_min_tw_Link_no_

Translated from English by ETW_Derp
Here's a warden waiting for you to awaken...

Join Microsoft Rewards for a chance to win this one-of-a-kind "Deep Dark" Minecraft-themed Xbox Series S!
https://www.microsoft.com/en-us/rewards/minecraft-xbox-series-s-sweeps?rtc=1&ocid=Wild_Update_soc_omc_min_tw_Link_no_
2:42 AM · May 21, 2022
@ -15,44 +15,94 @@ def mcbbsnews(app: App):

@pytest.fixture(scope="module")
def raw_post_list():
return get_json("mcbbsnews_raw_post_list.json")
return get_json("mcbbsnews/mcbbsnews_raw_post_list.json")


@pytest.fixture(scope="module")
def javanews_post_0():
return get_file("mcbbsnews_java_post-0.txt")
return get_file("mcbbsnews/post/mcbbsnews_java_post-0.txt")


@pytest.fixture(scope="module")
def javanews_post_1():
return get_file("mcbbsnews_java_post-1.txt")
return get_file("mcbbsnews/post/mcbbsnews_java_post-1.txt")


@pytest.fixture(scope="module")
def bedrocknews_post():
return get_file("mcbbsnews_bedrock_post.txt")
return get_file("mcbbsnews/post/mcbbsnews_bedrock_post.txt")


@pytest.mark.asyncio
@respx.mock
async def test_javanews_parser(mcbbsnews, raw_post_list, javanews_post_0):
javanews_mock = respx.get("https://www.mcbbs.net/thread-1338607-1-1.html")
javanews_mock.mock(
return_value=Response(
200, text=get_file("mcbbsnews/mock/mcbbsnews_javanews.html")
)
)

post = await mcbbsnews.parse(raw_post_list[3])
assert post.text == javanews_post_0


@pytest.mark.asyncio
@respx.mock
async def test_bedrocknews_parser(mcbbsnews, raw_post_list, bedrocknews_post):
bedrocknews_mock = respx.get("https://www.mcbbs.net/thread-1338592-1-1.html")
bedrocknews_mock.mock(
return_value=Response(
200, text=get_file("mcbbsnews/mock/mcbbsnews_bedrocknews.html")
)
)

post = await mcbbsnews.parse(raw_post_list[4])
assert post.text == bedrocknews_post


@pytest.mark.asyncio
async def test_express_merch_parser(mcbbsnews, raw_post_list):
java_express_post = await mcbbsnews.parse(raw_post_list[0])
@respx.mock
async def test_bedrock_express_parser(mcbbsnews, raw_post_list):
bedrock_express_mock = respx.get("https://www.mcbbs.net/thread-1332424-1-1.html")
bedrock_express_mock.mock(
return_value=Response(
200, text=get_file("mcbbsnews/mock/mcbbsnews_bedrock_express.html")
)
)

bedrock_express_post = await mcbbsnews.parse(raw_post_list[13])
assert bedrock_express_post.text == get_file(
"mcbbsnews/post/mcbbsnews_bedrock_express_post.txt"
)


@pytest.mark.asyncio
@respx.mock
async def test_java_express_parser(mcbbsnews, raw_post_list):
java_express_mock = respx.get("https://www.mcbbs.net/thread-1340080-1-1.html")
java_express_mock.mock(
return_value=Response(
200, text=get_file("mcbbsnews/mock/mcbbsnews_java_express.html")
)
)

java_express_post = await mcbbsnews.parse(raw_post_list[0])
assert java_express_post.text == get_file(
"mcbbsnews/post/mcbbsnews_java_express_post.txt"
)


@pytest.mark.asyncio
@respx.mock
async def test_merch_parser(mcbbsnews, raw_post_list):
mc_merch_mock = respx.get("https://www.mcbbs.net/thread-1342236-1-1.html")
mc_merch_mock.mock(
return_value=Response(200, text=get_file("mcbbsnews/mock/mcbbsnews_merch.html"))
)

mc_merch_post = await mcbbsnews.parse(raw_post_list[26])
assert java_express_post.text == get_file("mcbbsnews_java_express_post.txt")
assert bedrock_express_post.text == get_file("mcbbsnews_bedrock_express_post.txt")
assert mc_merch_post.text == get_file("mcbbsnews_merch_post.txt")
assert mc_merch_post.text == get_file("mcbbsnews/post/mcbbsnews_merch_post.txt")


@pytest.mark.asyncio
@ -60,18 +110,24 @@ async def test_express_merch_parser(mcbbsnews, raw_post_list):
async def test_fetch_new(mcbbsnews, dummy_user_subinfo, javanews_post_1):
news_router = respx.get("https://www.mcbbs.net/forum-news-1.html")
news_router.mock(
return_value=Response(200, text=get_file("mcbbsnews_post_list_html-0.html"))
return_value=Response(
200, text=get_file("mcbbsnews/mock/mcbbsnews_post_list_html-0.html")
)
)
new_post = respx.get("https://www.mcbbs.net/thread-1340927-1-1.html")
new_post.mock(
return_value=Response(200, text=get_file("mcbbsnews_new_post_html.html"))
return_value=Response(
200, text=get_file("mcbbsnews/mock/mcbbsnews_new_post_html.html")
)
)
target = ""
res = await mcbbsnews.fetch_new_post(target, [dummy_user_subinfo])
assert news_router.called
assert len(res) == 0
news_router.mock(
return_value=Response(200, text=get_file("mcbbsnews_post_list_html-1.html"))
return_value=Response(
200, text=get_file("mcbbsnews/mock/mcbbsnews_post_list_html-1.html")
)
)
res = await mcbbsnews.fetch_new_post(target, [dummy_user_subinfo])
assert news_router.called
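The fixtures above load HTML and expected-output files through the get_file/get_json helpers. As a rough sketch (the real helpers live in the project's test utilities; the directory layout and encoding below are assumptions for illustration), they can be implemented as:

import json
from pathlib import Path

DATA_DIR = Path(__file__).parent / "static"  # assumed location of the fixture files

def get_file(name: str) -> str:
    # read a text fixture such as "mcbbsnews/post/mcbbsnews_java_post-0.txt"
    return (DATA_DIR / name).read_text(encoding="utf-8")

def get_json(name: str) -> dict:
    # parse a JSON fixture such as "mcbbsnews/mcbbsnews_raw_post_list.json"
    return json.loads(get_file(name))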
@ -22,7 +22,7 @@ raw_post_list_2 = raw_post_list_1 + [
def dummy_user(app: App):
from nonebot_bison.types import User

user = User("123", "group")
user = User(123, "group")
return user

@ -90,6 +90,12 @@ def mock_platform(app: App):
from nonebot_bison.platform.platform import NewMessage
from nonebot_bison.post import Post
from nonebot_bison.types import Category, RawPost, Tag, Target
from nonebot_bison.utils import SchedulerConfig

class MockPlatformSchedConf(SchedulerConfig, name="mock"):

schedule_type = "interval"
schedule_setting = {"seconds": 100}

class MockPlatform(NewMessage):

@ -97,9 +103,9 @@
name = "Mock Platform"
enabled = True
is_common = True
schedule_interval = 10
enable_tag = True
has_target = True
scheduler_class = "mock"
categories = {
Category(1): "转发",
Category(2): "视频",
@ -148,6 +154,12 @@ def mock_platform_no_target(app: App):
from nonebot_bison.platform.platform import CategoryNotSupport, NewMessage
from nonebot_bison.post import Post
from nonebot_bison.types import Category, RawPost, Tag, Target
from nonebot_bison.utils import SchedulerConfig

class MockPlatformSchedConf(SchedulerConfig, name="mock"):

schedule_type = "interval"
schedule_setting = {"seconds": 100}

class MockPlatform(NewMessage):

@ -155,8 +167,7 @@
name = "Mock Platform"
enabled = True
is_common = True
schedule_type = "interval"
schedule_kw = {"seconds": 30}
scheduler_class = "mock"
enable_tag = True
has_target = False
categories = {Category(1): "转发", Category(2): "视频", Category(3): "不支持"}
@ -206,14 +217,19 @@ def mock_platform_no_target_2(app: App):
from nonebot_bison.platform.platform import NewMessage
from nonebot_bison.post import Post
from nonebot_bison.types import Category, RawPost, Tag, Target
from nonebot_bison.utils import SchedulerConfig

class MockPlatformSchedConf(SchedulerConfig, name="mock"):

schedule_type = "interval"
schedule_setting = {"seconds": 100}

class MockPlatform(NewMessage):

platform_name = "mock_platform"
name = "Mock Platform"
enabled = True
schedule_type = "interval"
schedule_kw = {"seconds": 30}
scheduler_class = "mock"
is_common = True
enable_tag = True
has_target = False
@ -324,13 +340,13 @@ async def test_new_message_target_without_cats_tags(
mock_platform_without_cats_tags, user_info_factory
):
res1 = await mock_platform_without_cats_tags.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 2], lambda _: [])]
"dummy", [user_info_factory([1, 2], [])]
)
assert len(res1) == 0
res2 = await mock_platform_without_cats_tags.fetch_new_post(
"dummy",
[
user_info_factory(lambda _: [], lambda _: []),
user_info_factory([], []),
],
)
assert len(res2) == 1
@ -342,16 +358,14 @@

@pytest.mark.asyncio
async def test_new_message_target(mock_platform, user_info_factory):
res1 = await mock_platform.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 2], lambda _: [])]
)
res1 = await mock_platform.fetch_new_post("dummy", [user_info_factory([1, 2], [])])
assert len(res1) == 0
res2 = await mock_platform.fetch_new_post(
"dummy",
[
user_info_factory(lambda _: [1, 2], lambda _: []),
user_info_factory(lambda _: [1], lambda _: []),
user_info_factory(lambda _: [1, 2], lambda _: ["tag1"]),
user_info_factory([1, 2], []),
user_info_factory([1], []),
user_info_factory([1, 2], ["tag1"]),
],
)
assert len(res2) == 3
@ -372,15 +386,15 @@ async def test_new_message_target(mock_platform, user_info_factory):
@pytest.mark.asyncio
async def test_new_message_no_target(mock_platform_no_target, user_info_factory):
res1 = await mock_platform_no_target.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 2], lambda _: [])]
"dummy", [user_info_factory([1, 2], [])]
)
assert len(res1) == 0
res2 = await mock_platform_no_target.fetch_new_post(
"dummy",
[
user_info_factory(lambda _: [1, 2], lambda _: []),
user_info_factory(lambda _: [1], lambda _: []),
user_info_factory(lambda _: [1, 2], lambda _: ["tag1"]),
user_info_factory([1, 2], []),
user_info_factory([1], []),
user_info_factory([1, 2], ["tag1"]),
],
)
assert len(res2) == 3
@ -397,7 +411,7 @@ async def test_new_message_no_target(mock_platform_no_target, user_info_factory)
assert "p2" in id_set_2
assert "p2" in id_set_3
res3 = await mock_platform_no_target.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 2], lambda _: [])]
"dummy", [user_info_factory([1, 2], [])]
)
assert len(res3) == 0

@ -405,11 +419,11 @@ async def test_new_message_no_target(mock_platform_no_target, user_info_factory)
@pytest.mark.asyncio
async def test_status_change(mock_status_change, user_info_factory):
res1 = await mock_status_change.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 2], lambda _: [])]
"dummy", [user_info_factory([1, 2], [])]
)
assert len(res1) == 0
res2 = await mock_status_change.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 2], lambda _: [])]
"dummy", [user_info_factory([1, 2], [])]
)
assert len(res2) == 1
posts = res2[0][1]
@ -418,8 +432,8 @@ async def test_status_change(mock_status_change, user_info_factory):
res3 = await mock_status_change.fetch_new_post(
"dummy",
[
user_info_factory(lambda _: [1, 2], lambda _: []),
user_info_factory(lambda _: [1], lambda _: []),
user_info_factory([1, 2], []),
user_info_factory([1], []),
],
)
assert len(res3) == 2
@ -427,7 +441,7 @@ async def test_status_change(mock_status_change, user_info_factory):
assert res3[0][1][0].text == "off"
assert len(res3[1][1]) == 0
res4 = await mock_status_change.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 2], lambda _: [])]
"dummy", [user_info_factory([1, 2], [])]
)
assert len(res4) == 0

@ -445,19 +459,13 @@ async def test_group(
from nonebot_bison.types import Category, RawPost, Tag, Target

group_platform = NoTargetGroup([mock_platform_no_target, mock_platform_no_target_2])
res1 = await group_platform.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 4], lambda _: [])]
)
res1 = await group_platform.fetch_new_post("dummy", [user_info_factory([1, 4], [])])
assert len(res1) == 0
res2 = await group_platform.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 4], lambda _: [])]
)
res2 = await group_platform.fetch_new_post("dummy", [user_info_factory([1, 4], [])])
assert len(res2) == 1
posts = res2[0][1]
assert len(posts) == 2
id_set_2 = set(map(lambda x: x.text, posts))
assert "p2" in id_set_2 and "p6" in id_set_2
res3 = await group_platform.fetch_new_post(
"dummy", [user_info_factory(lambda _: [1, 4], lambda _: [])]
)
res3 = await group_platform.fetch_new_post("dummy", [user_info_factory([1, 4], [])])
assert len(res3) == 0
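In the updated tests above, user_info_factory is now called with plain category and tag lists instead of callables. The factory fixture itself is not part of this diff, so the following is only a hedged sketch of what it might look like (field names and the UserSubInfo constructor are assumptions for illustration):

import pytest

@pytest.fixture
def user_info_factory(app, dummy_user):
    from nonebot_bison.types import UserSubInfo

    def factory(categories: list, tags: list):
        # bundle the dummy user with the requested categories and tags
        return UserSubInfo(user=dummy_user, categories=categories, tags=tags)

    return factory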
@ -10,16 +10,12 @@ from .utils import BotReply, fake_admin_user, fake_group_message_event
# abort while selecting the platform
@pytest.mark.asyncio
@respx.mock
async def test_abort_add_on_platform(app: App):
async def test_abort_add_on_platform(app: App, db_migration):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager

config = Config()
config.user_target.truncate()

ak_list_router = respx.get(
"https://m.weibo.cn/api/container/getIndex?containerid=1005056279793937"
)
@ -61,17 +57,13 @@ async def test_abort_add_on_platform(app: App):
# abort while entering the id
@pytest.mark.asyncio
@respx.mock
async def test_abort_add_on_id(app: App):
async def test_abort_add_on_id(app: App, db_migration):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager
from nonebot_bison.platform.weibo import Weibo

config = Config()
config.user_target.truncate()

ak_list_router = respx.get(
"https://m.weibo.cn/api/container/getIndex?containerid=1005056279793937"
)
@ -122,17 +114,13 @@ async def test_abort_add_on_id(app: App):
# abort while entering the subscription categories
@pytest.mark.asyncio
@respx.mock
async def test_abort_add_on_cats(app: App):
async def test_abort_add_on_cats(app: App, db_migration):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager
from nonebot_bison.platform.weibo import Weibo

config = Config()
config.user_target.truncate()

ak_list_router = respx.get(
"https://m.weibo.cn/api/container/getIndex?containerid=1005056279793937"
)
@ -203,17 +191,13 @@ async def test_abort_add_on_cats(app: App):
# abort while entering the tags
@pytest.mark.asyncio
@respx.mock
async def test_abort_add_on_tag(app: App):
async def test_abort_add_on_tag(app: App, db_migration):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager
from nonebot_bison.platform.weibo import Weibo

config = Config()
config.user_target.truncate()

ak_list_router = respx.get(
"https://m.weibo.cn/api/container/getIndex?containerid=1005056279793937"
)
@ -288,19 +272,18 @@ async def test_abort_add_on_tag(app: App):

# abort while deleting a subscription
@pytest.mark.asyncio
async def test_abort_del_sub(app: App):
async def test_abort_del_sub(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.bot import Bot
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import del_sub_matcher
from nonebot_bison.platform import platform_manager
from nonebot_bison.types import Target as T_Target

config = Config()
config.user_target.truncate()
config.add_subscribe(
await config.add_subscribe(
10000,
"group",
"6279793937",
T_Target("6279793937"),
"明日方舟Arknights",
"weibo",
[platform_manager["weibo"].reverse_category["图文"]],
@ -328,5 +311,5 @@ async def test_abort_del_sub(app: App):
ctx.receive_event(bot, event_abort)
ctx.should_call_send(event_abort, "删除中止", True)
ctx.should_finished()
subs = config.list_subscribe(10000, "group")
subs = await config.list_subscribe(10000, "group")
assert subs
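Several tests above now request a db_migration (or init_scheduler) fixture instead of truncating the old user_target table by hand. The fixture is defined elsewhere in the project's conftest and is not shown in this diff; as a hedged sketch, a db_migration fixture could prepare a throwaway database roughly like this (the alembic invocation and temporary-path handling below are assumptions):

import pytest
from alembic import command
from alembic.config import Config as AlembicConfig

@pytest.fixture
def db_migration(tmp_path):
    # point alembic at a disposable sqlite database and bring its schema up to date
    cfg = AlembicConfig("alembic.ini")
    cfg.set_main_option("sqlalchemy.url", f"sqlite:///{tmp_path / 'data.db'}")
    command.upgrade(cfg, "head")
    yield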
@ -58,17 +58,14 @@ async def test_configurable_at_me_false(app: App):

@pytest.mark.asyncio
@respx.mock
async def test_add_with_target(app: App):
async def test_add_with_target(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager
from nonebot_bison.platform.weibo import Weibo

config = Config()
config.user_target.truncate()

ak_list_router = respx.get(
"https://m.weibo.cn/api/container/getIndex?containerid=1005056279793937"
)
@ -162,31 +159,28 @@
event_6, BotReply.add_reply_subscribe_success("明日方舟Arknights"), True
)
ctx.should_finished()
subs = config.list_subscribe(10000, "group")
subs = await config.list_subscribe(10000, "group")
assert len(subs) == 1
sub = subs[0]
assert sub["target"] == "6279793937"
assert sub["tags"] == []
assert sub["cats"] == [platform_manager["weibo"].reverse_category["图文"]] + [
assert sub.target.target == "6279793937"
assert sub.tags == []
assert sub.categories == [platform_manager["weibo"].reverse_category["图文"]] + [
platform_manager["weibo"].reverse_category["文字"]
]
assert sub["target_type"] == "weibo"
assert sub["target_name"] == "明日方舟Arknights"
assert sub.target.platform_name == "weibo"
assert sub.target.target_name == "明日方舟Arknights"


@pytest.mark.asyncio
@respx.mock
async def test_add_with_target_no_cat(app: App):
async def test_add_with_target_no_cat(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager
from nonebot_bison.platform.ncm_artist import NcmArtist

config = Config()
config.user_target.truncate()

ncm_router = respx.get("https://music.163.com/api/artist/albums/32540734")
ncm_router.mock(return_value=Response(200, json=get_json("ncm_siren.json")))

@ -226,28 +220,25 @@ async def test_add_with_target_no_cat(app: App):
event_4_ok, BotReply.add_reply_subscribe_success("塞壬唱片-MSR"), True
)
ctx.should_finished()
subs = config.list_subscribe(10000, "group")
subs = await config.list_subscribe(10000, "group")
assert len(subs) == 1
sub = subs[0]
assert sub["target"] == "32540734"
assert sub["tags"] == []
assert sub["cats"] == []
assert sub["target_type"] == "ncm-artist"
assert sub["target_name"] == "塞壬唱片-MSR"
assert sub.target.target == "32540734"
assert sub.tags == []
assert sub.categories == []
assert sub.target.platform_name == "ncm-artist"
assert sub.target.target_name == "塞壬唱片-MSR"


@pytest.mark.asyncio
@respx.mock
async def test_add_no_target(app: App):
async def test_add_no_target(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager

config = Config()
config.user_target.truncate()

async with app.test_matcher(add_sub_matcher) as ctx:
bot = ctx.create_bot()
event_1 = fake_group_message_event(
@ -279,26 +270,23 @@ async def test_add_no_target(app: App):
event_4, BotReply.add_reply_subscribe_success("明日方舟游戏信息"), True
)
ctx.should_finished()
subs = config.list_subscribe(10000, "group")
subs = await config.list_subscribe(10000, "group")
assert len(subs) == 1
sub = subs[0]
assert sub["target"] == "default"
assert sub["tags"] == []
assert sub["cats"] == [platform_manager["arknights"].reverse_category["游戏公告"]]
assert sub["target_type"] == "arknights"
assert sub["target_name"] == "明日方舟游戏信息"
assert sub.target.target == "default"
assert sub.tags == []
assert sub.categories == [platform_manager["arknights"].reverse_category["游戏公告"]]
assert sub.target.platform_name == "arknights"
assert sub.target.target_name == "明日方舟游戏信息"


@pytest.mark.asyncio
async def test_platform_name_err(app: App):
async def test_platform_name_err(app: App, db_migration):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager

config = Config()
config.user_target.truncate()
async with app.test_matcher(add_sub_matcher) as ctx:
bot = ctx.create_bot()
event_1 = fake_group_message_event(
@ -328,17 +316,14 @@ async def test_platform_name_err(app: App):

@pytest.mark.asyncio
@respx.mock
async def test_add_with_get_id(app: App):
async def test_add_with_get_id(app: App, db_migration):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message, MessageSegment
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager
from nonebot_bison.platform.weibo import Weibo

config = Config()
config.user_target.truncate()

ak_list_router = respx.get(
"https://m.weibo.cn/api/container/getIndex?containerid=1005056279793937"
)
@ -406,23 +391,20 @@
True,
)
ctx.should_finished()
subs = config.list_subscribe(10000, "group")
subs = await config.list_subscribe(10000, "group")
assert len(subs) == 0


@pytest.mark.asyncio
@respx.mock
async def test_add_with_bilibili_target_parser(app: App):
async def test_add_with_bilibili_target_parser(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.event import Sender
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import add_sub_matcher, common_platform
from nonebot_bison.platform import platform_manager
from nonebot_bison.platform.bilibili import Bilibili

config = Config()
config.user_target.truncate()

ak_list_router = respx.get(
"https://api.bilibili.com/x/space/acc/info?mid=161775300"
)
@ -522,11 +504,11 @@ async def test_add_with_bilibili_target_parser(app: App):
event_6, BotReply.add_reply_subscribe_success("明日方舟"), True
)
ctx.should_finished()
subs = config.list_subscribe(10000, "group")
subs = await config.list_subscribe(10000, "group")
assert len(subs) == 1
sub = subs[0]
assert sub["target"] == "161775300"
assert sub["tags"] == []
assert sub["cats"] == [platform_manager["bilibili"].reverse_category["视频"]]
assert sub["target_type"] == "bilibili"
assert sub["target_name"] == "明日方舟"
assert sub.target.target == "161775300"
assert sub.tags == []
assert sub.categories == [platform_manager["bilibili"].reverse_category["视频"]]
assert sub.target.platform_name == "bilibili"
assert sub.target.target_name == "明日方舟"
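The assertions above reflect the new database-backed config API: add_subscribe and list_subscribe are now coroutines, and each returned subscription exposes its target through an object rather than a dict. A short usage sketch based only on the calls visible in this diff (the trailing categories/tags arguments and the surrounding setup are assumptions):

from nonebot_bison.config import config
from nonebot_bison.types import Target

async def example():
    await config.add_subscribe(
        10000,
        "group",
        Target("6279793937"),
        "明日方舟Arknights",
        "weibo",
        [],  # categories (assumed placeholder)
        [],  # tags (assumed placeholder)
    )
    subs = await config.list_subscribe(10000, "group")
    sub = subs[0]
    print(sub.target.target, sub.target.platform_name, sub.categories, sub.tags)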
@ -8,18 +8,17 @@ from .utils import fake_admin_user, fake_group_message_event

@pytest.mark.asyncio
async def test_query_sub(app: App):
async def test_query_sub(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import query_sub_matcher
from nonebot_bison.platform import platform_manager
from nonebot_bison.types import Target

config = Config()
config.user_target.truncate()
config.add_subscribe(
await config.add_subscribe(
10000,
"group",
"6279793937",
Target("6279793937"),
"明日方舟Arknights",
"weibo",
[platform_manager["weibo"].reverse_category["图文"]],
@ -37,19 +36,18 @@


@pytest.mark.asyncio
async def test_del_sub(app: App):
async def test_del_sub(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.bot import Bot
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import del_sub_matcher
from nonebot_bison.platform import platform_manager
from nonebot_bison.types import Target

config = Config()
config.user_target.truncate()
config.add_subscribe(
await config.add_subscribe(
10000,
"group",
"6279793937",
Target("6279793937"),
"明日方舟Arknights",
"weibo",
[platform_manager["weibo"].reverse_category["图文"]],
@ -83,20 +81,18 @@ async def test_del_sub(app: App):
ctx.receive_event(bot, event_1_ok)
ctx.should_call_send(event_1_ok, "删除成功", True)
ctx.should_finished()
subs = config.list_subscribe(10000, "group")
subs = await config.list_subscribe(10000, "group")
assert len(subs) == 0


@pytest.mark.asyncio
async def test_del_empty_sub(app: App):
async def test_del_empty_sub(app: App, init_scheduler):
from nonebot.adapters.onebot.v11.bot import Bot
from nonebot.adapters.onebot.v11.message import Message
from nonebot_bison.config import Config
from nonebot_bison.config import config
from nonebot_bison.config_manager import del_sub_matcher
from nonebot_bison.platform import platform_manager

config = Config()
config.user_target.truncate()
async with app.test_matcher(del_sub_matcher) as ctx:
bot = ctx.create_bot(base=Bot)
assert isinstance(bot, Bot)