Merge branch 'main' into feat/db

This commit is contained in:
felinae98
2022-05-29 00:19:40 +08:00
47 changed files with 7271 additions and 8830 deletions
@@ -129,8 +129,7 @@ def register_router_fastapi(driver: Driver, socketio):
def init():
driver = get_driver()
if driver.type == "fastapi":
assert isinstance(driver, Driver)
if isinstance(driver, Driver):
register_router_fastapi(driver, socket_app)
else:
logger.warning(f"Driver {driver.type} not supported")
+52 -35
View File
@@ -9,7 +9,6 @@ from nonebot.adapters.onebot.v11.message import Message
from nonebot.adapters.onebot.v11.permission import GROUP_ADMIN, GROUP_OWNER
from nonebot.internal.params import ArgStr
from nonebot.internal.rule import Rule
from nonebot.log import logger
from nonebot.matcher import Matcher
from nonebot.params import Depends, EventPlainText, EventToMe
from nonebot.permission import SUPERUSER
@@ -17,7 +16,7 @@ from nonebot.rule import to_me
from nonebot.typing import T_State
from .config import config
from .platform import check_sub_target, platform_manager
from .platform import Platform, check_sub_target, platform_manager
from .plugin_config import plugin_config
from .types import Category, Target, User
from .utils import parse_text
@@ -108,8 +107,13 @@ def do_add_sub(add_sub: Type[Matcher]):
"platform", _gen_prompt_template("{_prompt}"), [Depends(parse_platform)]
)
async def init_id(state: T_State):
if platform_manager[state["platform"]].has_target:
state["_prompt"] = "请输入订阅用户的id:\n查询id获取方法请回复:“查询”"
cur_platform = platform_manager[state["platform"]]
if cur_platform.has_target:
state["_prompt"] = (
("1." + cur_platform.parse_target_promot + "\n2.")
if cur_platform.parse_target_promot
else ""
) + "请输入订阅用户的id\n查询id获取方法请回复:“查询”"
else:
state["id"] = "default"
state["name"] = await platform_manager[state["platform"]].get_target_name(
@@ -125,6 +129,8 @@ def do_add_sub(add_sub: Type[Matcher]):
raise LookupError
if target == "取消":
raise KeyboardInterrupt
platform = platform_manager[state["platform"]]
target = await platform.parse_target(target)
name = await check_sub_target(state["platform"], target)
if not name:
raise ValueError
@@ -141,6 +147,8 @@ def do_add_sub(add_sub: Type[Matcher]):
await add_sub.finish("已中止订阅")
except (ValueError):
await add_sub.reject("id输入错误")
except (Platform.ParseTargetException):
await add_sub.reject("不能从你的输入中提取出id,请检查你输入的内容是否符合预期")
else:
await add_sub.send(
"即将订阅的用户为:{} {} {}\n如有错误请输入“取消”重新订阅".format(
@@ -244,41 +252,46 @@ def do_del_sub(del_sub: Type[Matcher]):
async def send_list(bot: Bot, event: Event, state: T_State):
user_info = state["target_user_info"]
assert isinstance(user_info, User)
sub_list = await config.list_subscribe(
# state.get("_user_id") or event.group_id, "group"
user_info.user,
user_info.user_type,
)
res = "订阅的帐号为:\n"
state["sub_table"] = {}
for index, sub in enumerate(sub_list, 1):
state["sub_table"][index] = {
"platform_name": sub.target.platform_name,
"target": sub.target.target,
}
res += "{} {} {} {}\n".format(
index,
sub.target.platform_name,
sub.target.target_name,
sub.target.target,
try:
sub_list = await config.list_subscribe(
# state.get("_user_id") or event.group_id, "group"
user_info.user,
user_info.user_type,
)
platform = platform_manager[sub.target.platform_name]
if platform.categories:
res += " [{}]".format(
", ".join(
map(lambda x: platform.categories[Category(x)], sub.categories)
)
assert sub_list
except AssertionError:
await del_sub.finish("暂无已订阅账号\n请使用“添加订阅”命令添加订阅")
else:
res = "订阅的帐号为:\n"
state["sub_table"] = {}
for index, sub in enumerate(sub_list, 1):
state["sub_table"][index] = {
"target_type": sub["target_type"],
"target": sub["target"],
}
res += "{} {} {} {}\n".format(
index, sub["target_type"], sub["target_name"], sub["target"]
)
if platform.enable_tag:
res += " {}".format(", ".join(sub.tags))
res += "\n"
res += "请输入要删除的订阅的序号"
await bot.send(event=event, message=Message(await parse_text(res)))
platform = platform_manager[sub["target_type"]]
if platform.categories:
res += " [{}]".format(
", ".join(
map(lambda x: platform.categories[Category(x)], sub["cats"])
)
)
if platform.enable_tag:
res += " {}".format(", ".join(sub["tags"]))
res += "\n"
res += "请输入要删除的订阅的序号\n输入'取消'中止"
await bot.send(event=event, message=Message(await parse_text(res)))
@del_sub.receive()
async def do_del(event: Event, state: T_State):
user_msg = str(event.get_message()).strip()
if user_msg == "取消":
await del_sub.finish("删除中止")
try:
index = int(str(event.get_message()).strip())
index = int(user_msg)
user_info = state["target_user_info"]
assert isinstance(user_info, User)
await config.del_subscribe(
@@ -299,12 +312,13 @@ add_sub_matcher = on_command(
rule=configurable_to_me,
permission=GROUP_ADMIN | GROUP_OWNER | SUPERUSER,
priority=5,
block=True,
)
add_sub_matcher.handle()(set_target_user_info)
do_add_sub(add_sub_matcher)
query_sub_matcher = on_command("查询订阅", rule=configurable_to_me, priority=5)
query_sub_matcher = on_command("查询订阅", rule=configurable_to_me, priority=5, block=True)
query_sub_matcher.handle()(set_target_user_info)
do_query_sub(query_sub_matcher)
@@ -314,11 +328,14 @@ del_sub_matcher = on_command(
rule=configurable_to_me,
permission=GROUP_ADMIN | GROUP_OWNER | SUPERUSER,
priority=5,
block=True,
)
del_sub_matcher.handle()(set_target_user_info)
do_del_sub(del_sub_matcher)
group_manage_matcher = on_command("群管理", rule=to_me(), permission=SUPERUSER, priority=4)
group_manage_matcher = on_command(
"群管理", rule=to_me(), permission=SUPERUSER, priority=4, block=True
)
@group_manage_matcher.handle()
@@ -1,12 +1,12 @@
import json
from typing import Any
import httpx
from bs4 import BeautifulSoup as bs
from nonebot.plugin import require
from ..post import Post
from ..types import Category, RawPost, Target
from ..utils import http_client
from .platform import CategoryNotSupport, NewMessage, StatusChange
@@ -26,7 +26,7 @@ class Arknights(NewMessage):
return "明日方舟游戏信息"
async def get_sub_list(self, _) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
raw_data = await client.get(
"https://ak-conf.hypergryph.com/config/prod/announce_meta/IOS/announcement.meta.json"
)
@@ -44,7 +44,7 @@ class Arknights(NewMessage):
async def parse(self, raw_post: RawPost) -> Post:
announce_url = raw_post["webUrl"]
text = ""
async with httpx.AsyncClient() as client:
async with http_client() as client:
raw_html = await client.get(announce_url)
soup = bs(raw_html.text, "html.parser")
pics = []
@@ -99,7 +99,7 @@ class AkVersion(StatusChange):
return "明日方舟游戏信息"
async def get_status(self, _):
async with httpx.AsyncClient() as client:
async with http_client() as client:
res_ver = await client.get(
"https://ak-conf.hypergryph.com/config/prod/official/IOS/version"
)
@@ -155,7 +155,7 @@ class MonsterSiren(NewMessage):
return "明日方舟游戏信息"
async def get_sub_list(self, _) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
raw_data = await client.get("https://monster-siren.hypergryph.com/api/news")
return raw_data.json()["data"]["list"]
@@ -170,7 +170,7 @@ class MonsterSiren(NewMessage):
async def parse(self, raw_post: RawPost) -> Post:
url = f'https://monster-siren.hypergryph.com/info/{raw_post["cid"]}'
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.get(
f'https://monster-siren.hypergryph.com/api/news/{raw_post["cid"]}'
)
@@ -189,3 +189,47 @@ class MonsterSiren(NewMessage):
compress=True,
override_use_pic=False,
)
class TerraHistoricusComic(NewMessage):
    """Arknights "Terra Historicus" (泰拉记事社) comic-update feed.

    Polls the official recent-update API every 30 seconds and emits one
    post per newly published comic episode. The feed is site-wide, so
    there is no per-user target (``has_target = False``).
    """

    # Single category: id 4 → Terra Historicus comics.
    categories = {4: "泰拉记事社漫画"}
    platform_name = "arknights"
    name = "明日方舟游戏信息"
    enable_tag = False
    enabled = True
    is_common = False
    schedule_type = "interval"
    schedule_kw = {"seconds": 30}
    has_target = False  # site-wide feed; target argument is ignored

    async def get_target_name(self, _: Target) -> str:
        """Return the fixed display name; the feed has no per-target identity."""
        return "明日方舟游戏信息"

    async def get_sub_list(self, _) -> list[RawPost]:
        """Fetch the list of recently updated episodes from the official API."""
        async with http_client() as client:
            raw_data = await client.get(
                "https://terra-historicus.hypergryph.com/api/recentUpdate"
            )
            return raw_data.json()["data"]

    def get_id(self, post: RawPost) -> Any:
        # Composite key: an episode is unique per (comic, episode) pair.
        return f'{post["comicCid"]}/{post["episodeCid"]}'

    def get_date(self, _) -> None:
        # No usable timestamp in the feed — date-based filtering disabled.
        return None

    def get_category(self, _) -> Category:
        # Everything from this endpoint belongs to the single comic category.
        return Category(4)

    async def parse(self, raw_post: RawPost) -> Post:
        """Build a Post linking to the episode page, with its cover as the image."""
        url = f'https://terra-historicus.hypergryph.com/comic/{raw_post["comicCid"]}/episode/{raw_post["episodeCid"]}'
        return Post(
            "terra-historicus",
            text=f'{raw_post["title"]} - {raw_post["episodeShortTitle"]}',
            pics=[raw_post["coverUrl"]],
            url=url,
            target_name="泰拉记事社漫画",
            compress=True,
            override_use_pic=False,
        )
+15 -4
View File
@@ -1,10 +1,10 @@
import json
import re
from typing import Any, Optional
import httpx
from ..post import Post
from ..types import Category, RawPost, Tag, Target
from ..utils import http_client
from .platform import CategoryNotSupport, NewMessage
@@ -26,9 +26,10 @@ class Bilibili(NewMessage):
schedule_kw = {"seconds": 10}
name = "B站"
has_target = True
parse_target_promot = "请输入用户主页的链接"
async def get_target_name(self, target: Target) -> Optional[str]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.get(
"https://api.bilibili.com/x/space/acc/info", params={"mid": target}
)
@@ -37,8 +38,18 @@ class Bilibili(NewMessage):
return None
return res_data["data"]["name"]
async def parse_target(self, target_text: str) -> Target:
    """Extract a Bilibili numeric UID from raw user input.

    Accepts either a bare numeric UID or a ``space.bilibili.com``
    profile URL (scheme optional).

    Args:
        target_text: user-supplied id or profile link.

    Returns:
        Target: the numeric UID.

    Raises:
        Platform.ParseTargetException: input is neither a pure numeric
            id nor a recognizable profile URL.
    """
    # Bug fix: the original used re.match(r"\d+", ...), which accepts any
    # input that merely *starts* with a digit (e.g. "123abc" → UID "123abc").
    # Fully anchor the numeric case, matching the NcmArtist/NcmRadio parsers.
    if re.fullmatch(r"\d+", target_text):
        return Target(target_text)
    elif match := re.match(
        r"(?:https?://)?space\.bilibili\.com/(\d+)", target_text
    ):
        return Target(match.group(1))
    else:
        raise self.ParseTargetException()
async def get_sub_list(self, target: Target) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
params = {"host_uid": target, "offset": 0, "need_top": 0}
res = await client.get(
"https://api.vc.bilibili.com/dynamic_svr/v1/dynamic_svr/space_history",
+2 -3
View File
@@ -1,9 +1,8 @@
from typing import Any
import httpx
from ..post import Post
from ..types import RawPost, Target
from ..utils import http_client
from .platform import NewMessage
@@ -23,7 +22,7 @@ class FF14(NewMessage):
return "最终幻想XIV官方公告"
async def get_sub_list(self, _) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
raw_data = await client.get(
"https://ff.web.sdo.com/inc/newdata.ashx?url=List?gameCode=ff&category=5309,5310,5311,5312,5313&pageIndex=0&pageSize=5"
)
@@ -1,9 +1,9 @@
import re
from typing import Any, Optional
import httpx
from ..post import Post
from ..types import RawPost, Target
from ..utils import http_client
from .platform import NewMessage
@@ -18,9 +18,10 @@ class NcmArtist(NewMessage):
schedule_kw = {"minutes": 1}
name = "网易云-歌手"
has_target = True
parse_target_promot = "请输入歌手主页(包含数字ID)的链接"
async def get_target_name(self, target: Target) -> Optional[str]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.get(
"https://music.163.com/api/artist/albums/{}".format(target),
headers={"Referer": "https://music.163.com/"},
@@ -30,8 +31,18 @@ class NcmArtist(NewMessage):
return
return res_data["artist"]["name"]
async def parse_target(self, target_text: str) -> Target:
    """Resolve user input to a NetEase Cloud Music artist id.

    Accepts a bare numeric id, or an artist-page URL such as
    ``https://music.163.com/#/artist?id=123`` (scheme optional).

    Raises:
        Platform.ParseTargetException: unrecognizable input.
    """
    # Whole-string digits → already an artist id.
    if re.match(r"^\d+$", target_text):
        return Target(target_text)
    # Otherwise try to pull the id out of an artist-page link.
    url_match = re.match(
        r"(?:https?://)?music\.163\.com/#/artist\?id=(\d+)", target_text
    )
    if url_match is not None:
        return Target(url_match.group(1))
    raise self.ParseTargetException()
async def get_sub_list(self, target: Target) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.get(
"https://music.163.com/api/artist/albums/{}".format(target),
headers={"Referer": "https://music.163.com/"},
@@ -1,9 +1,9 @@
import re
from typing import Any, Optional
import httpx
from ..post import Post
from ..types import RawPost, Target
from ..utils import http_client
from .platform import NewMessage
@@ -18,9 +18,10 @@ class NcmRadio(NewMessage):
schedule_kw = {"minutes": 10}
name = "网易云-电台"
has_target = True
parse_target_promot = "请输入主播电台主页(包含数字ID)的链接"
async def get_target_name(self, target: Target) -> Optional[str]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.post(
"http://music.163.com/api/dj/program/byradio",
headers={"Referer": "https://music.163.com/"},
@@ -31,8 +32,18 @@ class NcmRadio(NewMessage):
return
return res_data["programs"][0]["radio"]["name"]
async def parse_target(self, target_text: str) -> Target:
    """Resolve user input to a NetEase Cloud Music radio (DJ) id.

    Args:
        target_text: a bare numeric radio id, or a djradio page URL
            such as ``https://music.163.com/#/djradio?id=123``.

    Returns:
        Target: the numeric radio id.

    Raises:
        Platform.ParseTargetException: the input is neither a numeric id
            nor a recognizable djradio URL.
    """
    # Fully anchored: the whole input must be digits.
    if re.match(r"^\d+$", target_text):
        return Target(target_text)
    # Scheme is optional so both http and https links are accepted.
    elif match := re.match(
        r"(?:https?://)?music\.163\.com/#/djradio\?id=(\d+)", target_text
    ):
        return Target(match.group(1))
    else:
        raise self.ParseTargetException()
async def get_sub_list(self, target: Target) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.post(
"http://music.163.com/api/dj/program/byradio",
headers={"Referer": "https://music.163.com/"},
@@ -47,6 +47,7 @@ class Platform(metaclass=RegistryABCMeta, base=True):
enable_tag: bool
store: dict[Target, Any]
platform_name: str
parse_target_promot: Optional[str] = None
@abstractmethod
async def get_target_name(self, target: Target) -> Optional[str]:
@@ -73,6 +74,12 @@ class Platform(metaclass=RegistryABCMeta, base=True):
self.reverse_category[val] = key
self.store = dict()
class ParseTargetException(Exception):
    # Raised by parse_target when user input cannot be reduced to a
    # valid target id for this platform.
    pass

async def parse_target(self, target_string: str) -> Target:
    """Default pass-through: treat the raw input as the target id.

    Platforms that also accept profile URLs (Bilibili, Weibo, Ncm*)
    override this and raise ParseTargetException on unrecognized input.
    """
    return Target(target_string)
@abstractmethod
def get_tags(self, raw_post: RawPost) -> Optional[Collection[Tag]]:
"Return Tag list of given RawPost"
+3 -3
View File
@@ -2,11 +2,11 @@ import calendar
from typing import Any, Optional
import feedparser
import httpx
from bs4 import BeautifulSoup as bs
from ..post import Post
from ..types import RawPost, Target
from ..utils import http_client
from .platform import NewMessage
@@ -23,7 +23,7 @@ class Rss(NewMessage):
has_target = True
async def get_target_name(self, target: Target) -> Optional[str]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.get(target, timeout=10.0)
feed = feedparser.parse(res.text)
return feed["feed"]["title"]
@@ -35,7 +35,7 @@ class Rss(NewMessage):
return post.id
async def get_sub_list(self, target: Target) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.get(target, timeout=10.0)
feed = feedparser.parse(res)
entries = feed.entries
+28 -9
View File
@@ -3,12 +3,12 @@ import re
from datetime import datetime
from typing import Any, Optional
import httpx
from bs4 import BeautifulSoup as bs
from nonebot.log import logger
from ..post import Post
from ..types import *
from ..utils import http_client
from .platform import NewMessage
@@ -28,9 +28,10 @@ class Weibo(NewMessage):
schedule_type = "interval"
schedule_kw = {"seconds": 3}
has_target = True
parse_target_promot = "请输入用户主页(包含数字UID)的链接"
async def get_target_name(self, target: Target) -> Optional[str]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
param = {"containerid": "100505" + target}
res = await client.get(
"https://m.weibo.cn/api/container/getIndex", params=param
@@ -41,8 +42,17 @@ class Weibo(NewMessage):
else:
return None
async def parse_target(self, target_text: str) -> Target:
    """Extract a Weibo numeric UID from raw user input.

    Accepts either a bare numeric UID or a ``weibo.com/u/<uid>``
    profile URL (scheme optional).

    Args:
        target_text: user-supplied id or profile link.

    Returns:
        Target: the numeric UID.

    Raises:
        Platform.ParseTargetException: input is neither a pure numeric
            id nor a recognizable profile URL.
    """
    # Bug fix: the original used re.match(r"\d+", ...), which accepts any
    # digit-prefixed string (e.g. "123abc"). Fully anchor the numeric
    # case, matching the NcmArtist/NcmRadio parsers.
    if re.fullmatch(r"\d+", target_text):
        return Target(target_text)
    elif match := re.match(r"(?:https?://)?weibo\.com/u/(\d+)", target_text):
        # It's 2022 — plain-http links should be extinct, but guard anyway.
        return Target(match.group(1))
    else:
        raise self.ParseTargetException()
async def get_sub_list(self, target: Target) -> list[RawPost]:
async with httpx.AsyncClient() as client:
async with http_client() as client:
params = {"containerid": "107603" + target}
res = await client.get(
"https://m.weibo.cn/api/container/getIndex?", params=params, timeout=4.0
@@ -123,15 +133,19 @@ class Weibo(NewMessage):
"Mobile Safari/537.36",
}
info = raw_post["mblog"]
if info["isLongText"] or info["pic_num"] > 9:
async with httpx.AsyncClient() as client:
retweeted = False
if info.get("retweeted_status"):
retweeted = True
pic_num = info["retweeted_status"]["pic_num"] if retweeted else info["pic_num"]
if info["isLongText"] or pic_num > 9:
async with http_client() as client:
res = await client.get(
"https://m.weibo.cn/detail/{}".format(info["mid"]), headers=header
)
try:
full_json_text = re.search(
r'"status": ([\s\S]+),\s+"call"', res.text
).group(1)
match = re.search(r'"status": ([\s\S]+),\s+"call"', res.text)
assert match
full_json_text = match.group(1)
info = json.loads(full_json_text)
except:
logger.info(
@@ -140,7 +154,12 @@ class Weibo(NewMessage):
)
)
parsed_text = self._get_text(info["text"])
pic_urls = [img["large"]["url"] for img in info.get("pics", [])]
raw_pics_list = (
info["retweeted_status"].get("pics", [])
if retweeted
else info.get("pics", [])
)
pic_urls = [img["large"]["url"] for img in raw_pics_list]
detail_url = "https://weibo.com/{}/{}".format(info["user"]["id"], info["bid"])
# return parsed_text, detail_url, pic_urls
return Post(
+4 -1
View File
@@ -1,3 +1,5 @@
from typing import Optional
import nonebot
from pydantic import BaseSettings
@@ -12,9 +14,10 @@ class PlugConfig(BaseSettings):
bison_filter_log: bool = False
bison_to_me: bool = True
bison_skip_browser_check: bool = False
bison_use_pic_merge: int = 0 # 多图片时启用图片合并转发(仅限群),当bison_use_queue为False时该配置不会生效
bison_use_pic_merge: int = 0 # 多图片时启用图片合并转发(仅限群)
# 0:不启用;1:首条消息单独发送,剩余照片合并转发;2以及以上:所有消息全部合并转发
bison_resend_times: int = 0
bison_proxy: Optional[str]
class Config:
extra = "ignore"
+2 -3
View File
@@ -3,13 +3,12 @@ from functools import reduce
from io import BytesIO
from typing import Optional, Union
import httpx
from nonebot.adapters.onebot.v11.message import Message, MessageSegment
from nonebot.log import logger
from PIL import Image
from .plugin_config import plugin_config
from .utils import parse_text
from .utils import http_client, parse_text
@dataclass
@@ -34,7 +33,7 @@ class Post:
async def _pic_url_to_image(self, data: Union[str, bytes]) -> Image.Image:
pic_buffer = BytesIO()
if isinstance(data, str):
async with httpx.AsyncClient() as client:
async with http_client() as client:
res = await client.get(data)
pic_buffer.write(res.content)
else:
@@ -0,0 +1,93 @@
import logging
import nonebot
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from nonebot import get_driver
from nonebot.adapters.onebot.v11.bot import Bot
from nonebot.log import LoguruHandler, logger
from .config import config
from .platform import platform_manager
from .plugin_config import plugin_config
from .send import do_send_msgs, send_msgs
from .types import UserSubInfo
scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")
@get_driver().on_startup
async def _start():
    """Register one polling job per schedulable platform and start APScheduler.

    Runs once at NoneBot startup (registered via the driver hook above).
    """
    for platform_name, platform in platform_manager.items():
        # Only platforms with a recognized APScheduler trigger type get a job.
        if platform.schedule_type in ["cron", "interval", "date"]:
            logger.info(
                f"start scheduler for {platform_name} with {platform.schedule_type} {platform.schedule_kw}"
            )
            scheduler.add_job(
                fetch_and_send,
                platform.schedule_type,
                **platform.schedule_kw,
                args=(platform_name,),
            )
    scheduler.configure({"apscheduler.timezone": "Asia/Shanghai"})
    scheduler.start()
# get_driver().on_startup(_start)
async def fetch_and_send(target_type: str):
    """Poll the next target of *target_type* for new posts and deliver them.

    Round-robins via ``config.get_next_target`` so each scheduler tick
    handles a single target. No-op when there is no target to poll or
    nothing new to send.

    Args:
        target_type: platform name key into ``platform_manager``.
    """
    target = config.get_next_target(target_type)
    if not target:
        return
    logger.debug(
        "try to fecth new posts from {}, target: {}".format(target_type, target)
    )
    send_user_list = config.target_user_cache[target_type][target]
    # Wrap each subscriber with lazy category/tag lookups so the platform
    # can filter posts per-user during the fetch.
    send_userinfo_list = list(
        map(
            lambda user: UserSubInfo(
                user,
                lambda target: config.get_sub_category(
                    target_type, target, user.user_type, user.user
                ),
                lambda target: config.get_sub_tags(
                    target_type, target, user.user_type, user.user
                ),
            ),
            send_user_list,
        )
    )
    to_send = await platform_manager[target_type].fetch_new_post(
        target, send_userinfo_list
    )
    if not to_send:
        return
    bot = nonebot.get_bot()
    assert isinstance(bot, Bot)
    for user, send_list in to_send:
        for send_post in send_list:
            logger.info("send to {}: {}".format(user, send_post))
            if not bot:
                logger.warning("no bot connected")
            else:
                await send_msgs(
                    bot, user.user, user.user_type, await send_post.generate_messages()
                )
class CustomLogHandler(LoguruHandler):
    """Loguru bridge that drops APScheduler's noisy "max instances" warning."""

    def filter(self, record: logging.LogRecord):
        # Suppress only the "skipped: maximum number of running instances
        # reached" message emitted when a job overruns its interval;
        # everything else passes through.
        return record.msg != (
            'Execution of job "%s" '
            "skipped: maximum number of running instances reached (%d)"
        )
# Queue-drain job: when message queuing is enabled, flush outbound
# messages every 0.3 s.
if plugin_config.bison_use_queue:
    scheduler.add_job(do_send_msgs, "interval", seconds=0.3, coalesce=True)

# Quiet APScheduler's logger: WARNING level (30) only, with output routed
# through loguru via CustomLogHandler so the repetitive skip warning is
# filtered out.
aps_logger = logging.getLogger("apscheduler")
aps_logger.setLevel(30)
aps_logger.handlers.clear()
aps_logger.addHandler(CustomLogHandler())
@@ -8,7 +8,10 @@ from nonebot.adapters.onebot.v11.message import MessageSegment
from nonebot.log import default_format, logger
from nonebot.plugin import require
from .plugin_config import plugin_config
from ..plugin_config import plugin_config
from .http import http_client
__all__ = ["http_client", "Singleton", "parse_text", "html_to_text"]
class Singleton(type):
+12
View File
@@ -0,0 +1,12 @@
import functools
import httpx
from ..plugin_config import plugin_config
# ``http_client`` is a drop-in factory for ``httpx.AsyncClient`` used
# throughout the plugin, so proxy support lives in exactly one place:
# when ``bison_proxy`` is configured, every client routes through it;
# otherwise a plain AsyncClient is produced.
if plugin_config.bison_proxy:
    http_client = functools.partial(
        httpx.AsyncClient, proxies=plugin_config.bison_proxy
    )
else:
    http_client = httpx.AsyncClient