Skip to content

Commit b96c6af

Browse files
committed
chore: add some skills and update agents doc
1 parent ad74ef7 commit b96c6af

File tree

6 files changed

+346
-2
lines changed

6 files changed

+346
-2
lines changed
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
---
2+
name: buildkite-get-results
3+
description: Gets Buildkite build results
4+
---
5+
6+
Pass the PR number to the `scripts/get_buildkite_results.py` script.
7+
8+
The `--jobs` flag can do glob-style filtering of jobs.
9+
10+
The `--download` flag will download job logs.
Lines changed: 203 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,203 @@
1+
#!/usr/bin/env python3
2+
import argparse
3+
import json
4+
import re
5+
import subprocess
6+
import sys
7+
import urllib.request
8+
9+
10+
def get_pr_checks(pr_number):
    """Return the JSON check results for a PR via the GitHub CLI.

    Exits the process with status 1 when `gh` is not installed or when
    `gh pr checks` itself fails.
    """
    # Verify the GitHub CLI is available before doing any real work.
    try:
        subprocess.run(
            ["gh", "--version"],
            check=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
    except FileNotFoundError:
        print(
            "Error: 'gh' (GitHub CLI) is not installed or not in PATH.", file=sys.stderr
        )
        sys.exit(1)
    except subprocess.CalledProcessError:
        print("Error: 'gh' command failed. Is it installed?", file=sys.stderr)
        sys.exit(1)

    try:
        completed = subprocess.run(
            ["gh", "pr", "checks", str(pr_number), "--json", "bucket,name,link,state"],
            capture_output=True,
            text=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print(f"Error fetching PR checks: {e.stderr}", file=sys.stderr)
        sys.exit(1)
    return json.loads(completed.stdout)
35+
36+
37+
def get_buildkite_build_url(checks):
    """Return the link of the first check whose name mentions Buildkite.

    Returns None when no check matches. Checks without a name never match.
    """
    candidates = (
        c.get("link") for c in checks if "buildkite" in c.get("name", "").lower()
    )
    return next(candidates, None)
43+
44+
45+
def fetch_buildkite_data(build_url):
    """Fetch the JSON metadata for a Buildkite build page.

    Buildkite serves build metadata at the build URL with a ``.json``
    suffix. Returns the parsed payload, or None on any failure (details
    are printed to stderr).
    """
    json_url = build_url if build_url.endswith(".json") else build_url + ".json"

    try:
        with urllib.request.urlopen(json_url) as response:
            if response.status == 200:
                return json.loads(response.read().decode())
            print(
                f"Error fetching data from {json_url}: Status {response.status}",
                file=sys.stderr,
            )
            return None
    except Exception as e:
        print(f"Error fetching data from {json_url}: {e}", file=sys.stderr)
        return None
65+
66+
67+
def download_log(job_url, output_path):
    """Download a Buildkite job's raw log to *output_path*.

    *job_url* is the anchor-style job URL shown in build pages, e.g.
    ``https://buildkite.com/org/pipeline/builds/14394#<job-id>``.
    Buildkite serves the raw log at
    ``.../builds/14394/jobs/<job-id>/raw``, so the URL fragment is
    rewritten into a path segment.

    Returns True on success, False on any failure (details printed to
    stderr).
    """
    if "#" not in job_url:
        print(f"Could not parse job URL for download: {job_url}", file=sys.stderr)
        return False

    # Split only on the first '#' so a malformed URL containing more than
    # one fragment separator does not raise ValueError from unpacking.
    base, job_id = job_url.split("#", 1)
    raw_url = f"{base.rstrip('/')}/jobs/{job_id}/raw"

    try:
        with urllib.request.urlopen(raw_url) as response:
            if response.status != 200:
                print(
                    f"Error downloading log from {raw_url}: Status {response.status}",
                    file=sys.stderr,
                )
                return False
            with open(output_path, "wb") as f:
                f.write(response.read())
            return True
    except Exception as e:
        print(f"Error downloading log from {raw_url}: {e}", file=sys.stderr)
        return False
104+
105+
106+
def _filter_jobs(jobs, patterns):
    """Return the jobs whose name matches any of *patterns* (case-insensitive regex).

    With no patterns, all jobs are returned unchanged. Jobs without a
    name are skipped when filtering.
    """
    if not patterns:
        return jobs
    selected = []
    for job in jobs:
        job_name = job.get("name")
        if not job_name:
            continue
        if any(re.search(p, job_name, re.IGNORECASE) for p in patterns):
            selected.append(job)
    return selected


def _job_result(job):
    """Return a short human-readable result string for a job."""
    if job.get("passed", False):
        return "PASSED"
    outcome = job.get("outcome")
    if outcome:
        return outcome.upper()
    return job.get("state", "Unknown").upper()


def _download_job_log(job):
    """Download the log of a single job to ``<sanitized-name>.log``."""
    name = job.get("name", "unknown_job")
    # Sanitize the job name so it is safe to use as a filename.
    safe_name = re.sub(r"[^a-zA-Z0-9_\-]", "_", name)
    output_path = f"{safe_name}.log"

    path = job.get("path")
    if not path:
        print("Job has no URL path, cannot download.", file=sys.stderr)
        return
    full_url = f"https://buildkite.com{path}"
    print(f"Downloading log for '{name}'...", file=sys.stderr)
    if download_log(full_url, output_path):
        print(f"Downloaded log to: {output_path}")
    else:
        print("Failed to download log.", file=sys.stderr)


def main():
    """Print (and optionally download) Buildkite job results for a PR.

    Progress/diagnostics go to stderr; results go to stdout.
    """
    parser = argparse.ArgumentParser(description="Get Buildkite CI results for a PR.")
    parser.add_argument("pr_number", help="The PR number.")
    parser.add_argument(
        "--jobs",
        action="append",
        help="Filter by job name (regex match). Can be specified multiple times.",
    )
    parser.add_argument(
        "--download",
        action="store_true",
        help="If exactly one job is matched, download its log to a local file.",
    )

    args = parser.parse_args()

    print(f"Fetching checks for PR #{args.pr_number}...", file=sys.stderr)
    checks = get_pr_checks(args.pr_number)

    build_url = get_buildkite_build_url(checks)
    if not build_url:
        print("No Buildkite check found for this PR.", file=sys.stderr)
        sys.exit(1)

    print(f"Found Buildkite URL: {build_url}", file=sys.stderr)

    data = fetch_buildkite_data(build_url)
    if not data:
        sys.exit(1)

    print(f"Build State: {data.get('state')}")
    print("-" * 40)

    filtered_jobs = _filter_jobs(data.get("jobs", []), args.jobs)

    for job in filtered_jobs:
        name = job.get("name", "Unknown")
        path = job.get("path")
        full_url = f"https://buildkite.com{path}" if path else "N/A"

        print(f"Job: {name}")
        print(f" Result: {_job_result(job)}")
        print(f" URL: {full_url}")
        print("")

    if args.download:
        if len(filtered_jobs) == 1:
            _download_job_log(filtered_jobs[0])
        elif len(filtered_jobs) == 0:
            print("No jobs matched to download.", file=sys.stderr)
        else:
            print(
                f"Matched {len(filtered_jobs)} jobs. Please filter to exactly one job to download.",
                file=sys.stderr,
            )


if __name__ == "__main__":
    main()
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
---
2+
name: buildkite-retry-job
3+
description: Retry a failed Buildkite job
4+
---
5+
6+
Use `scripts/retry_buildkite_jobs.py` to retry a job. This is best used
7+
when there are network failures.
8+
9+
example:
10+
11+
```
12+
retry_buildkite_jobs.py org pipeline build
13+
```
14+
15+
The `--jobs` flag can be used to retry specific jobs.
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
#!/usr/bin/env python3
2+
import argparse
3+
import json
4+
import os
5+
import sys
6+
import urllib.request
7+
from urllib.error import HTTPError
8+
9+
10+
def make_request(url, method="GET", data=None, token=None):
    """Issue an authenticated JSON request and return the decoded response.

    *data*, when truthy, is JSON-encoded and sent as the request body.
    Returns the parsed JSON payload, or None on any error (details are
    printed to stderr).
    """
    headers = {
        "Authorization": f"Bearer {token}",
        "Accept": "application/json",
    }

    body = None
    if data:
        body = json.dumps(data).encode("utf-8")
        headers["Content-Type"] = "application/json"

    request = urllib.request.Request(url, data=body, headers=headers, method=method)
    try:
        with urllib.request.urlopen(request) as response:
            return json.loads(response.read().decode())
    except HTTPError as e:
        print(f"HTTP Error: {e.code} - {e.reason}", file=sys.stderr)
        # Surface the error body the API returned, when one is available.
        if e.fp:
            print(e.fp.read().decode(), file=sys.stderr)
        return None
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        return None
31+
32+
33+
def main():
    """Retry failed jobs in a Buildkite build via the REST API.

    Requires BUILDKITE_API_TOKEN in the environment. Exits 1 on a missing
    token or fetch failure; exits 0 when there is nothing to retry.
    """
    parser = argparse.ArgumentParser(
        description="Retry failed jobs in a Buildkite build."
    )
    parser.add_argument("org", help="Organization slug")
    parser.add_argument("pipeline", help="Pipeline slug")
    parser.add_argument("build", help="Build number")
    # "--jobs" is accepted as an alias so the flag matches the skill doc
    # (which tells agents to use `--jobs`); dest remains `job_name`.
    parser.add_argument(
        "--job-name",
        "--jobs",
        help="Specific job name to retry (if failed). Regex/substring allowed.",
    )

    args = parser.parse_args()
    token = os.environ.get("BUILDKITE_API_TOKEN")

    if not token:
        print(
            "Please set the BUILDKITE_API_TOKEN environment variable.", file=sys.stderr
        )
        sys.exit(1)

    url = f"https://api.buildkite.com/v2/organizations/{args.org}/pipelines/{args.pipeline}/builds/{args.build}"
    print(f"Fetching build details from {url}...")
    build_data = make_request(url, token=token)

    if not build_data:
        print("Failed to fetch build details.", file=sys.stderr)
        sys.exit(1)

    jobs = build_data.get("jobs", [])
    failed_jobs = [j for j in jobs if j.get("state") == "failed"]

    if not failed_jobs:
        print("No failed jobs found in this build.")
        sys.exit(0)

    for job in failed_jobs:
        job_id = job.get("id")
        # The API can report "name": null / "step_key": null; `.get`'s
        # default does not apply to an explicit None, so coalesce with
        # `or` to keep the .lower() calls below from crashing.
        job_name = job.get("name") or "Unknown"
        step_key = job.get("step_key") or ""

        if (
            args.job_name
            and args.job_name.lower() not in job_name.lower()
            and args.job_name.lower() not in step_key.lower()
        ):
            continue

        print(f"Retrying job: {job_name} ({job_id})")
        retry_url = f"https://api.buildkite.com/v2/organizations/{args.org}/pipelines/{args.pipeline}/builds/{args.build}/jobs/{job_id}/retry"

        result = make_request(retry_url, method="PUT", token=token)
        if result:
            print(f" Successfully triggered retry for {job_name}")
        else:
            print(f" Failed to trigger retry for {job_name}")


if __name__ == "__main__":
    main()

.agents/skills/hello/SKILL.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
---
2+
name: hello
3+
description: say hello
4+
---
5+
6+
Say hello with a made up name and greeting.
7+

AGENTS.md

Lines changed: 20 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,14 @@ Act as an expert in Bazel, rules_python, Starlark, and Python.
77

88
DO NOT `git commit` or `git push`.
99

10+
## RULES TO ALWAYS FOLLOW AND NEVER IGNORE
11+
12+
ALWAYS FOLLOW THESE RULES. NEVER VIOLATE THEM.
13+
14+
Ask for user input and provide a justification if trying to violate them.
15+
16+
* NEVER run `bazel clean --expunge`.
17+
1018
## Style and conventions
1119

1220
Read `.editorconfig` for line length wrapping
@@ -121,12 +129,22 @@ bzl_library(
121129

122130
Tests are under the `tests/` directory.
123131

124-
When testing, add `--test_tag_filters=-integration-test`.
132+
When testing, add `--config=fast-tests`.
125133

126-
When building, add `--build_tag_filters=-integration-test`.
134+
When building, add `--config=fast-tests`.
135+
136+
The `--config=fast-tests` flag avoids running expensive and slow tests that can
137+
freeze the host machine or cause flakiness.
127138

128139
## Understanding the code base
129140

141+
This repository contains 3 Bazel bzlmod modules.
142+
143+
* `sphinxdocs/` is for the `@sphinxdocs` module.
144+
* `gazelle/` is for the `@rules_python_gazelle_plugin` module.
145+
* All other code is part of `@rules_python`.
146+
147+
130148
`python/config_settings/BUILD.bazel` contains build flags that are part of the
131149
public API. DO NOT add, remove, or modify these build flags unless specifically
132150
instructed to.

0 commit comments

Comments
 (0)