⬆️ auto update by pre-commit hooks (#521)

* ⬆️ auto update by pre-commit hooks

updates:
- [github.com/astral-sh/ruff-pre-commit: v0.2.0 → v0.3.5](https://github.com/astral-sh/ruff-pre-commit/compare/v0.2.0...v0.3.5)
- [github.com/psf/black: 24.1.1 → 24.3.0](https://github.com/psf/black/compare/24.1.1...24.3.0)
- [github.com/pre-commit/mirrors-eslint: v9.0.0-beta.1 → v9.0.0-rc.0](https://github.com/pre-commit/mirrors-eslint/compare/v9.0.0-beta.1...v9.0.0-rc.0)

* 💄 auto fix by pre-commit hooks

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
pre-commit-ci[bot] authored 2024-04-02 15:07:29 +08:00, committed by GitHub
parent 230fe8f4a3
commit ddc5e0eed7
4 changed files with 65 additions and 47 deletions
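
These commits are produced by pre-commit.ci: an autoupdate run bumps the `rev:` pins in `.pre-commit-config.yaml` (the first diff below), and the updated hooks then rewrite any files that no longer match the new formatter style (the remaining diffs). A minimal sketch of the `ci:` block that drives this, assuming pre-commit.ci defaults: `autoupdate_commit_msg` is taken from the repository's config visible in the diff below, while `autofix_commit_msg` and `autoupdate_schedule` are assumptions.

```yaml
ci:
  # Commit message for automated version bumps (matches the config shown below).
  autoupdate_commit_msg: ":arrow_up: auto update by pre-commit hooks"
  # Assumption: mirrors the ":lipstick: auto fix" commit message above.
  autofix_commit_msg: ":lipstick: auto fix by pre-commit hooks"
  # Assumption: pre-commit.ci's default update cadence.
  autoupdate_schedule: weekly
```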

.pre-commit-config.yaml

@@ -7,7 +7,7 @@ ci:
   autoupdate_commit_msg: ":arrow_up: auto update by pre-commit hooks"
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.2.0
+    rev: v0.3.5
     hooks:
       - id: ruff
         args: [--fix, --exit-non-zero-on-fix]
@@ -20,7 +20,7 @@ repos:
         stages: [commit]
   - repo: https://github.com/psf/black
-    rev: 24.1.1
+    rev: 24.3.0
     hooks:
       - id: black
         stages: [commit]
@@ -34,7 +34,7 @@ repos:
         stages: [commit]
   - repo: https://github.com/pre-commit/mirrors-eslint
-    rev: v9.0.0-beta.1
+    rev: v9.0.0-rc.0
     hooks:
       - id: eslint
         additional_dependencies:


@@ -99,27 +99,33 @@ class Config(metaclass=Singleton):
             # update
             assert not isinstance(user_data, list)
             subs: list = user_data.get("subs", [])
-            subs.append({
-                "target": target,
-                "target_type": target_type,
-                "target_name": target_name,
-                "cats": cats,
-                "tags": tags,
-            })
-            self.user_target.update({"subs": subs}, query)
-        else:
-            # insert
-            self.user_target.insert({
-                "user": user,
-                "user_type": user_type,
-                "subs": [{
-                    "target": target,
-                    "target_type": target_type,
-                    "target_name": target_name,
-                    "cats": cats,
-                    "tags": tags,
-                }],
-            })
+            subs.append(
+                {
+                    "target": target,
+                    "target_type": target_type,
+                    "target_name": target_name,
+                    "cats": cats,
+                    "tags": tags,
+                }
+            )
+            self.user_target.update({"subs": subs}, query)
+        else:
+            # insert
+            self.user_target.insert(
+                {
+                    "user": user,
+                    "user_type": user_type,
+                    "subs": [
+                        {
+                            "target": target,
+                            "target_type": target_type,
+                            "target_name": target_name,
+                            "cats": cats,
+                            "tags": tags,
+                        }
+                    ],
+                }
+            )
         self.update_send_cache()

     def list_subscribe(self, user, user_type) -> list[SubscribeContent]:
@@ -180,12 +186,14 @@ class Config(metaclass=Singleton):
         for user in self.user_target.all():
             for sub in user.get("subs", []):
                 if sub.get("target_type") not in supported_target_type:
-                    to_del.append({
-                        "user": user["user"],
-                        "user_type": user["user_type"],
-                        "target": sub["target"],
-                        "target_type": sub["target_type"],
-                    })
+                    to_del.append(
+                        {
+                            "user": user["user"],
+                            "user_type": user["user_type"],
+                            "target": sub["target"],
+                            "target_type": sub["target_type"],
+                        }
+                    )
                     continue
                 res[sub["target_type"]][sub["target"]].append(User(user["user"], user["user_type"]))
                 cat_res[sub["target_type"]][sub["target"]]["{}-{}".format(user["user_type"], user["user"])] = sub[


@@ -540,18 +540,22 @@ async def test_batch_fetch_new_message(app: App):
     platform_obj = BatchNewMessage(ProcessContext(), None)  # type:ignore
-    res1 = await platform_obj.batch_fetch_new_post([
-        SubUnit(Target("target1"), [user1]),
-        SubUnit(Target("target2"), [user1, user2]),
-        SubUnit(Target("target3"), [user2]),
-    ])
+    res1 = await platform_obj.batch_fetch_new_post(
+        [
+            SubUnit(Target("target1"), [user1]),
+            SubUnit(Target("target2"), [user1, user2]),
+            SubUnit(Target("target3"), [user2]),
+        ]
+    )
     assert len(res1) == 0
-    res2 = await platform_obj.batch_fetch_new_post([
-        SubUnit(Target("target1"), [user1]),
-        SubUnit(Target("target2"), [user1, user2]),
-        SubUnit(Target("target3"), [user2]),
-    ])
+    res2 = await platform_obj.batch_fetch_new_post(
+        [
+            SubUnit(Target("target1"), [user1]),
+            SubUnit(Target("target2"), [user1, user2]),
+            SubUnit(Target("target3"), [user2]),
+        ]
+    )
     assert len(res2) == 3
     send_set = set()
     for platform_target, posts in res2:
@@ -613,16 +617,20 @@ async def test_batch_fetch_compare_status(app: App):
     user1 = UserSubInfo(TargetQQGroup(group_id=123), [1, 2, 3], [])
     user2 = UserSubInfo(TargetQQGroup(group_id=234), [1, 2, 3], [])
-    res1 = await batch_status_change.batch_fetch_new_post([
-        SubUnit(Target("target1"), [user1]),
-        SubUnit(Target("target2"), [user1, user2]),
-    ])
+    res1 = await batch_status_change.batch_fetch_new_post(
+        [
+            SubUnit(Target("target1"), [user1]),
+            SubUnit(Target("target2"), [user1, user2]),
+        ]
+    )
     assert len(res1) == 0
-    res2 = await batch_status_change.batch_fetch_new_post([
-        SubUnit(Target("target1"), [user1]),
-        SubUnit(Target("target2"), [user1, user2]),
-    ])
+    res2 = await batch_status_change.batch_fetch_new_post(
+        [
+            SubUnit(Target("target1"), [user1]),
+            SubUnit(Target("target2"), [user1, user2]),
+        ]
+    )
     send_set = set()
     for platform_target, posts in res2:


@@ -83,10 +83,12 @@ async def test_scheduler_batch_api(init_scheduler, mocker: MockerFixture):
     await scheduler_dict[BililiveSchedConf].exec_fetch()
-    batch_fetch_mock.assert_called_once_with([
-        (T_Target("t1"), [UserSubInfo(user=TargetQQGroup(group_id=123), categories=[], tags=[])]),
-        (T_Target("t2"), [UserSubInfo(user=TargetQQGroup(group_id=123), categories=[], tags=[])]),
-    ])
+    batch_fetch_mock.assert_called_once_with(
+        [
+            (T_Target("t1"), [UserSubInfo(user=TargetQQGroup(group_id=123), categories=[], tags=[])]),
+            (T_Target("t2"), [UserSubInfo(user=TargetQQGroup(group_id=123), categories=[], tags=[])]),
+        ]
+    )

 async def test_scheduler_with_time(app: App, init_scheduler, mocker: MockerFixture):