Skip to content

Commit 16828a8

Browse files
Merge branch 'master' into unify-mailbox-ttl
2 parents 58ac6ca + 5bd68a0 commit 16828a8

File tree

24 files changed

+706
-340
lines changed

24 files changed

+706
-340
lines changed

.github/CONTRIBUTING.md

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ As such, contributions are greatly valued, necessary, and impactful: whether it'
1212

1313
## Communication Channels
1414

15-
Most discussion about Payjoin research and development happens on [Discord](https://discord.gg/X8RRV2VS), or in Github [issues](https://github.com/payjoin/rust-payjoin/issues) or [pull requests](https://github.com/payjoin/rust-payjoin/pulls).
15+
Most discussion about Payjoin research and development happens on [Discord](https://discord.gg/6rJD9R684h), or in Github [issues](https://github.com/payjoin/rust-payjoin/issues) or [pull requests](https://github.com/payjoin/rust-payjoin/pulls).
1616

1717
---
1818

@@ -67,6 +67,23 @@ RUSTDOCFLAGS="-D warnings" cargo doc --no-deps --all-features --document-private
6767
echo "▶ ./contrib/test_local.sh"
6868
./contrib/test_local.sh
6969

70+
# -------- 4. lock file verification --------
71+
changed_tomls=$(git diff --cached --name-only --diff-filter=ACMR | grep -E '(^|/)Cargo\.toml$' || true)
72+
73+
if [ -n "$changed_tomls" ]; then
74+
echo "▶ Checking if lockfiles need updating…"
75+
./contrib/update-lock-files.sh
76+
stale_locks=$(git diff --name-only -- Cargo-minimal.lock Cargo-recent.lock)
77+
if [ -n "$stale_locks" ]; then
78+
git checkout -- Cargo-minimal.lock Cargo-recent.lock
79+
echo "pre-commit: Cargo.toml changed and lockfiles are stale!"
80+
echo "Stale lockfiles:"
81+
echo "$stale_locks"
82+
echo "Run './contrib/update-lock-files.sh' and stage the lockfiles."
83+
exit 1
84+
fi
85+
fi
86+
7087
echo "✓ Pre-commit hook passed"
7188
```
7289

.github/scripts/create_standup_discussion.py

Lines changed: 169 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@
3939
"payjoin/multiparty-protocol-docs",
4040
"payjoin/btsim",
4141
"payjoin/tx-indexer",
42+
"Uniffi-Dart/uniffi-dart",
4243
]
4344

4445
REPO_FILTER = " ".join(f"repo:{r}" for r in REPOS)
@@ -120,6 +121,66 @@ def add_discussion_comment(discussion_id, body):
120121
)
121122

122123

124+
def find_latest_checkin():
    """Return the newest Discussion titled as a Weekly Check-in, or None."""
    repo_owner, repo_name = REPO.split("/")
    result = graphql(
        """
        query($owner: String!, $name: String!) {
          repository(owner: $owner, name: $name) {
            discussions(first: 10, orderBy: {field: CREATED_AT, direction: DESC}) {
              nodes {
                id
                title
              }
            }
          }
        }
        """,
        {"owner": repo_owner, "name": repo_name},
    )
    recent = result["repository"]["discussions"]["nodes"]
    # Nodes arrive newest-first, so the first title match is the latest check-in.
    return next(
        (d for d in recent if d["title"].startswith("Weekly Check-in:")),
        None,
    )
146+
147+
148+
def get_discussion_comments(discussion_id):
    """Fetch the first 50 top-level comments (body + url) of a Discussion."""
    query = """
    query($id: ID!) {
      node(id: $id) {
        ... on Discussion {
          comments(first: 50) {
            nodes {
              body
              url
            }
          }
        }
      }
    }
    """
    payload = graphql(query, {"id": discussion_id})
    return payload["node"]["comments"]["nodes"]
168+
169+
170+
def get_previous_thread_links():
    """Map contributors to the previous check-in thread created for them.

    Scans the most recent "Weekly Check-in" Discussion and pairs each
    contributor with the URL of the comment that mentions them.

    Returns:
        dict: contributor login -> comment URL. Empty when no prior
        check-in Discussion exists.
    """
    import re  # local import keeps this fix self-contained

    discussion = find_latest_checkin()
    if not discussion:
        return {}

    links = {}
    for comment in get_discussion_comments(discussion["id"]):
        body = comment["body"]
        # Anchor the mention so "@alice" does not also claim a comment that
        # mentions "@alicesmith" or "@alice-smith" (plain substring matching
        # had that false-positive). GitHub logins may contain hyphens, so the
        # lookahead rejects both word characters and "-" after the login.
        user = next(
            (
                u
                for u in CONTRIBUTORS
                if re.search(rf"@{re.escape(u)}(?![\w-])", body)
            ),
            None,
        )
        if user:
            # NOTE(review): a later comment overwrites an earlier one for the
            # same contributor — presumably intentional (newest wins); confirm.
            links[user] = comment["url"]
    return links
182+
183+
123184
SEARCH_QUERY = """
124185
query($q: String!) {
125186
search(query: $q, type: ISSUE, first: 30) {
@@ -128,18 +189,28 @@ def add_discussion_comment(discussion_id, body):
128189
id
129190
title
130191
url
192+
number
193+
repository {
194+
nameWithOwner
195+
}
131196
author { login }
132197
}
133198
... on Issue {
134199
id
135200
title
136201
url
202+
number
203+
repository {
204+
nameWithOwner
205+
}
137206
author { login }
138207
}
139208
}
140209
}
141210
}
142211
"""
212+
# This avoids refetching review history for the same PR multiple times in one run.
213+
PR_REVIEWS_CACHE = {}
143214

144215

145216
def search_issues(query):
@@ -155,6 +226,8 @@ def search_issues(query):
155226
"id": node["id"],
156227
"title": node["title"],
157228
"html_url": node["url"],
229+
"number": node["number"],
230+
"repository": node["repository"]["nameWithOwner"],
158231
"user": {
159232
"login": node["author"]["login"] if node.get("author") else ""
160233
},
@@ -163,17 +236,91 @@ def search_issues(query):
163236
return items
164237

165238

239+
def parse_github_datetime(value):
    """Convert a GitHub ISO 8601 timestamp into a timezone-aware datetime."""
    # Older fromisoformat() versions reject a trailing "Z", so normalize it
    # to the equivalent explicit UTC offset before parsing.
    normalized = value.replace("Z", "+00:00")
    return datetime.fromisoformat(normalized)
242+
243+
244+
def get_paginated(url, params=None):
    """GET every page of a paginated REST collection and return all items.

    Transient failures (403, 429, or any 5xx) are retried up to five times
    with exponential backoff; any other error status raises immediately.
    """
    collected = []
    page_url = url
    page_params = params or {}
    while page_url:
        resp = None
        for attempt in range(5):
            resp = requests.get(
                page_url,
                headers=HEADERS,
                params=page_params,
                timeout=30,
            )
            retryable = resp.status_code in {403, 429} or 500 <= resp.status_code < 600
            if not retryable:
                # Non-retryable 4xx raises here; a 2xx passes through.
                resp.raise_for_status()
                break
            delay = 2**attempt
            print(
                f"REST request failed ({resp.status_code}), retrying in {delay}s..."
            )
            time.sleep(delay)
        else:
            # Every attempt hit a retryable status; surface the last failure.
            resp.raise_for_status()
        collected.extend(resp.json())
        # GitHub's Link header carries the next page; params are already
        # baked into that URL, so drop them after the first request.
        page_url = resp.links.get("next", {}).get("url")
        page_params = None
    return collected
272+
273+
274+
def get_pull_request_reviews(pr):
    """Return all submitted reviews for a pull request, memoized per run."""
    key = (pr["repository"], pr["number"])
    if key not in PR_REVIEWS_CACHE:
        endpoint = f"{API}/repos/{pr['repository']}/pulls/{pr['number']}/reviews"
        PR_REVIEWS_CACHE[key] = get_paginated(endpoint, {"per_page": 100})
    return PR_REVIEWS_CACHE[key]
286+
287+
288+
def latest_reviewed_at(pr, reviewer):
    """Return the reviewer's latest submitted review timestamp for a PR.

    Args:
        pr: dict with "repository" and "number" keys identifying the PR.
        reviewer: GitHub login whose reviews are being looked up.

    Returns:
        An aware datetime of the most recent submitted review, or None when
        the reviewer has no submitted review on this PR.
    """
    latest = None
    for review in get_pull_request_reviews(pr):
        # "user" can be present but null (e.g. deleted accounts), in which
        # case review.get("user", {}) returns None and crashes on .get();
        # the `or {}` guard handles that.
        login = (review.get("user") or {}).get("login")
        if login != reviewer:
            continue
        submitted_at = review.get("submitted_at")
        if not submitted_at:
            # Pending (unsubmitted) reviews carry no timestamp; skip them.
            continue
        submitted = parse_github_datetime(submitted_at)
        if latest is None or submitted > latest:
            latest = submitted
    return latest
301+
302+
166303
def gather_activity(user, since_date):
167304
"""Gather a contributor's past-week activity across the org."""
168305
since = since_date.strftime("%Y-%m-%d")
169306

170307
# PRs merged (authored)
171308
merged_prs = search_issues(f"author:{user} type:pr merged:>{since}")
172309

173-
# PRs reviewed
174-
reviewed_prs = search_issues(f"reviewed-by:{user} type:pr updated:>{since}")
175-
# Exclude PRs the user authored (already counted above)
176-
reviewed_prs = [pr for pr in reviewed_prs if pr["user"]["login"] != user]
310+
# PRs reviewed: use search to find candidate PRs, then confirm
311+
# the reviewer actually submitted a review during the standup window.
312+
review_candidates = search_issues(
313+
f"reviewed-by:{user} type:pr updated:>{since} sort:updated-desc"
314+
)
315+
seen_ids = set()
316+
reviewed_prs = []
317+
for pr in review_candidates:
318+
if pr["id"] in seen_ids or pr["user"]["login"] == user:
319+
continue
320+
reviewed_at = latest_reviewed_at(pr, user)
321+
if reviewed_at and reviewed_at > since_date:
322+
seen_ids.add(pr["id"])
323+
reviewed_prs.append(pr)
177324

178325
# Issues opened
179326
issues_opened = search_issues(f"author:{user} type:issue created:>{since}")
@@ -206,7 +353,7 @@ def gather_potential_bottlenecks(user, since_date):
206353

207354

208355
def format_contributor_comment(
209-
user, merged_prs, reviewed_prs, issues_opened, bottlenecks
356+
user, merged_prs, reviewed_prs, issues_opened, bottlenecks, previous_thread_url=None
210357
):
211358
"""Format the threaded reply for a contributor."""
212359
lines = [f"## {user}", "", f"@{user}", ""]
@@ -234,6 +381,16 @@ def format_contributor_comment(
234381
else:
235382
lines.append("_No activity found._")
236383

384+
lines.append("")
385+
lines.append("### Last Week")
386+
if previous_thread_url:
387+
lines.append("")
388+
lines.append(
389+
f"Review your previous thread: [Last week's thread]({previous_thread_url})"
390+
)
391+
else:
392+
lines.append("_No previous thread found._")
393+
237394
if bottlenecks:
238395
lines.append("")
239396
lines.append("_Auto-detected signals:_")
@@ -246,6 +403,7 @@ def main():
246403
today = datetime.now(timezone.utc)
247404
week_label = today.strftime("Week of %Y-%m-%d")
248405
since_date = today - timedelta(days=7)
406+
previous_thread_links = get_previous_thread_links()
249407

250408
dry_run = os.environ.get("DRY_RUN")
251409

@@ -255,7 +413,12 @@ def main():
255413
merged_prs, reviewed_prs, issues_opened = gather_activity(user, since_date)
256414
bottlenecks = gather_potential_bottlenecks(user, since_date)
257415
comment_body = format_contributor_comment(
258-
user, merged_prs, reviewed_prs, issues_opened, bottlenecks
416+
user,
417+
merged_prs,
418+
reviewed_prs,
419+
issues_opened,
420+
bottlenecks,
421+
previous_thread_links.get(user),
259422
)
260423
comments.append((user, comment_body))
261424

.github/workflows/dart.yml

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -35,8 +35,5 @@ jobs:
3535
- name: "Use cache"
3636
uses: Swatinem/rust-cache@v2
3737

38-
- name: Generate bindings and binaries
39-
run: bash ./scripts/generate_bindings.sh
40-
41-
- name: Run tests
42-
run: dart test
38+
- name: "Build and test"
39+
run: bash ./contrib/test.sh

.github/workflows/javascript.yml

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,5 @@ jobs:
4141
- name: Install wasm-bindgen
4242
run: cargo install --locked wasm-bindgen-cli --version 0.2.108
4343

44-
- name: "Install dependencies"
45-
run: npm ci
46-
47-
- name: Generate bindings and binaries
48-
run: bash ./scripts/generate_bindings.sh
49-
50-
- name: Run tests
51-
run: npm test
44+
- name: "Build and test"
45+
run: bash ./contrib/test.sh

.github/workflows/python.yml

Lines changed: 2 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -41,17 +41,5 @@ jobs:
4141
version: "0.8.2"
4242
enable-cache: true
4343

44-
- name: "uv sync"
45-
run: uv sync --all-extras
46-
47-
- name: "Generate payjoin-ffi.py and binaries"
48-
run: bash ./scripts/generate_bindings.sh
49-
50-
- name: "Build wheel"
51-
run: uv build --wheel
52-
53-
- name: "Install wheel"
54-
run: uv run pip install ./dist/*.whl
55-
56-
- name: "Run tests"
57-
run: uv run python -m unittest --verbose
44+
- name: "Build and test"
45+
run: bash ./contrib/test.sh

0 commit comments

Comments
 (0)