Skip to content

Commit 420d82d

Browse files
committed
chore: ruff format
1 parent d87cf89 commit 420d82d

2 files changed

Lines changed: 200 additions & 1 deletion

File tree

astrbot/core/computer/tools/neo_skills.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -164,7 +164,10 @@ class CreateSkillPayloadTool(NeoSkillToolBase):
164164
"type": "object",
165165
"properties": {
166166
"payload": {
167-
"anyOf": [{"type": "object"}, {"type": "array", "items": {"type": "object"}}],
167+
"anyOf": [
168+
{"type": "object"},
169+
{"type": "array", "items": {"type": "object"}},
170+
],
168171
"description": (
169172
"Skill payload JSON. Typical schema: {skill_markdown, inputs, outputs, meta}. "
170173
"This only stores content and returns payload_ref; it does not create a candidate or release."
Lines changed: 196 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,196 @@
1+
from __future__ import annotations
2+
3+
import argparse
4+
import json
5+
import subprocess
6+
import sys
7+
from collections import defaultdict
8+
from dataclasses import dataclass
9+
from datetime import datetime
10+
11+
12+
@dataclass(frozen=True)
class Issue:
    """Immutable snapshot of one GitHub issue as returned by `gh issue list`."""

    number: int  # issue number within the repository
    title: str  # raw issue title (normalized separately for duplicate grouping)
    created_at: datetime  # creation time; parsed timezone-aware in load_open_issues
    url: str  # canonical web URL of the issue
18+
19+
20+
def parse_args() -> argparse.Namespace:
    """Define and evaluate this script's command-line interface."""
    cli = argparse.ArgumentParser(
        description="Close duplicate open plugin-publish issues while keeping the latest one."
    )
    cli.add_argument(
        "--repo",
        default="AstrBotDevs/AstrBot",
        help="GitHub repository in owner/name format.",
    )
    cli.add_argument(
        "--label",
        default="plugin-publish",
        help="Issue label to target.",
    )
    cli.add_argument(
        "--limit",
        type=int,
        default=1000,
        help="Maximum number of open issues to inspect.",
    )
    # Safety default: without --apply the script only reports what it would do.
    cli.add_argument(
        "--apply",
        action="store_true",
        help="Actually close duplicate issues. Defaults to dry-run.",
    )
    return cli.parse_args()
48+
49+
50+
def run_gh_command(args: list[str]) -> str:
    """Run *args* as a subprocess and return its captured stdout.

    Raises RuntimeError when the executable is missing from PATH or the
    command exits with a non-zero status.
    """
    try:
        result = subprocess.run(
            args,
            check=True,
            capture_output=True,
            text=True,
        )
        return result.stdout
    except FileNotFoundError as exc:
        raise RuntimeError("GitHub CLI `gh` is not installed or not in PATH.") from exc
    except subprocess.CalledProcessError as exc:
        # Prefer stderr, then stdout, then the exception text for diagnostics.
        detail = exc.stderr.strip() or exc.stdout.strip() or str(exc)
        raise RuntimeError(f"`{' '.join(args)}` failed: {detail}") from exc
66+
67+
68+
def load_open_issues(repo: str, label: str, limit: int) -> list[Issue]:
    """Fetch up to *limit* open issues carrying *label* in *repo* via the `gh` CLI."""

    def to_issue(entry: dict) -> Issue:
        # `gh` emits RFC 3339 timestamps with a trailing "Z"; rewrite it as an
        # explicit UTC offset so datetime.fromisoformat accepts it.
        created = datetime.fromisoformat(entry["createdAt"].replace("Z", "+00:00"))
        return Issue(
            number=entry["number"],
            title=entry["title"],
            created_at=created,
            url=entry["url"],
        )

    command = [
        "gh",
        "issue",
        "list",
        "--repo",
        repo,
        "--label",
        label,
        "--state",
        "open",
        "--limit",
        str(limit),
        "--json",
        "number,title,createdAt,url",
    ]
    return [to_issue(entry) for entry in json.loads(run_gh_command(command))]
96+
97+
98+
def normalize_title(title: str) -> str:
    """Collapse every run of whitespace in *title* to a single space.

    `str.split()` with no arguments already discards leading and trailing
    whitespace, so the joined result never needs an extra strip.
    """
    return " ".join(title.split())
100+
101+
102+
def find_duplicates(
    issues: list[Issue],
) -> list[tuple[Issue, list[Issue]]]:
    """Group *issues* by normalized title and plan which duplicates to close.

    For every title that occurs more than once, returns a pair of
    (newest issue to keep, older issues to close). Groups are ordered
    newest-first by the kept issue's creation time.
    """
    by_title: dict[str, list[Issue]] = defaultdict(list)
    for issue in issues:
        by_title[normalize_title(issue.title)].append(issue)

    def recency(issue: Issue) -> tuple:
        # Newest creation time wins; issue number breaks exact-timestamp ties.
        return (issue.created_at, issue.number)

    plan: list[tuple[Issue, list[Issue]]] = []
    for bucket in by_title.values():
        if len(bucket) > 1:
            newest, *older = sorted(bucket, key=recency, reverse=True)
            plan.append((newest, older))

    plan.sort(key=lambda pair: recency(pair[0]), reverse=True)
    return plan
127+
128+
129+
def print_plan(duplicate_groups: list[tuple[Issue, list[Issue]]], apply: bool) -> None:
    """Print a human-readable summary of which issues stay open and which close."""
    if not duplicate_groups:
        print("No duplicate open issues found.")
        return

    verb = "Will close" if apply else "Would close"
    group_count = len(duplicate_groups)
    close_count = sum(len(victims) for _, victims in duplicate_groups)
    print(f"Found {group_count} duplicate title groups.")
    print(
        f"{verb} {close_count} issues and keep {group_count} latest issues."
    )

    for keep, victims in duplicate_groups:
        print()
        print(f'Keep #{keep.number} [{keep.created_at.isoformat()}] "{keep.title}"')
        print(f" {keep.url}")
        for dup in victims:
            print(
                f'Close #{dup.number} [{dup.created_at.isoformat()}] "{dup.title}"'
            )
            print(f" {dup.url}")
150+
151+
152+
def close_duplicates(
    repo: str, duplicate_groups: list[tuple[Issue, list[Issue]]]
) -> None:
    """Close every planned duplicate via `gh issue close`, commenting with a
    pointer to the issue that is being kept."""
    for keep, victims in duplicate_groups:
        comment = (
            f"Closing as duplicate of #{keep.number}. "
            "Keeping the latest open issue with this title."
        )
        for dup in victims:
            print(f"Closing #{dup.number} as duplicate of #{keep.number}...")
            command = [
                "gh",
                "issue",
                "close",
                str(dup.number),
                "--repo",
                repo,
                "--comment",
                comment,
            ]
            run_gh_command(command)
174+
175+
176+
def main() -> int:
    """Entry point: plan (and with --apply, perform) duplicate-issue cleanup.

    Returns the process exit code: 0 on success, 1 when a `gh` call failed.
    """
    options = parse_args()
    try:
        groups = find_duplicates(
            load_open_issues(options.repo, options.label, options.limit)
        )
        print_plan(groups, apply=options.apply)
        if options.apply and groups:
            print()
            close_duplicates(options.repo, groups)
            print("Done.")
        elif not options.apply:
            print()
            print("Dry-run only. Re-run with `--apply` to close the duplicates.")
    except RuntimeError as exc:
        # All gh/CLI failures surface as RuntimeError; report and exit non-zero.
        print(str(exc), file=sys.stderr)
        return 1
    return 0
193+
194+
195+
if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code.
    sys.exit(main())

0 commit comments

Comments
 (0)