3939 "payjoin/multiparty-protocol-docs" ,
4040 "payjoin/btsim" ,
4141 "payjoin/tx-indexer" ,
42+ "Uniffi-Dart/uniffi-dart" ,
4243]
4344
# Space-separated "repo:owner/name" qualifiers appended to every GitHub search query.
REPO_FILTER = " ".join(f"repo:{r}" for r in REPOS)
@@ -120,6 +121,66 @@ def add_discussion_comment(discussion_id, body):
120121 )
121122
122123
def find_latest_checkin():
    """Return the most recent "Weekly Check-in:" Discussion, or None.

    Scans only the 10 newest discussions in REPO (newest first) and picks
    the first whose title starts with the check-in prefix.
    """
    query = """
    query($owner: String!, $name: String!) {
      repository(owner: $owner, name: $name) {
        discussions(first: 10, orderBy: {field: CREATED_AT, direction: DESC}) {
          nodes {
            id
            title
          }
        }
      }
    }
    """
    owner, name = REPO.split("/")
    result = graphql(query, {"owner": owner, "name": name})
    nodes = result["repository"]["discussions"]["nodes"]
    return next(
        (d for d in nodes if d["title"].startswith("Weekly Check-in:")),
        None,
    )
146+
147+
def get_discussion_comments(discussion_id):
    """Return the top-level comments (body + url) of a Discussion node.

    Fetches at most the first 50 comments; older threads beyond that cap
    are not paginated.
    """
    query = """
    query($id: ID!) {
      node(id: $id) {
        ... on Discussion {
          comments(first: 50) {
            nodes {
              body
              url
            }
          }
        }
      }
    }
    """
    result = graphql(query, {"id": discussion_id})
    return result["node"]["comments"]["nodes"]
168+
169+
def get_previous_thread_links():
    """Map contributors to the previous check-in thread created for them.

    Returns a dict of {login: comment_url} built from the latest
    "Weekly Check-in:" Discussion, or {} when no such Discussion exists.
    Each comment is attributed to at most one contributor: the first
    CONTRIBUTORS entry whose @-mention appears in the comment body.
    """
    import re

    discussion = find_latest_checkin()
    if not discussion:
        return {}

    links = {}
    for comment in get_discussion_comments(discussion["id"]):
        body = comment["body"]
        for user in CONTRIBUTORS:
            # Require the mention to end at a login boundary so "@alice"
            # does not also match "@alice-dev" or "@alicedev".
            # (GitHub logins are alphanumerics and hyphens.)
            if re.search(rf"@{re.escape(user)}(?![A-Za-z0-9-])", body):
                links[user] = comment["url"]
                break
    return links
182+
183+
123184SEARCH_QUERY = """
124185query($q: String!) {
125186 search(query: $q, type: ISSUE, first: 30) {
@@ -128,18 +189,28 @@ def add_discussion_comment(discussion_id, body):
128189 id
129190 title
130191 url
192+ number
193+ repository {
194+ nameWithOwner
195+ }
131196 author { login }
132197 }
133198 ... on Issue {
134199 id
135200 title
136201 url
202+ number
203+ repository {
204+ nameWithOwner
205+ }
137206 author { login }
138207 }
139208 }
140209 }
141210}
142211"""
# Cache keyed by (repo nameWithOwner, PR number) holding the PR's review list.
# This avoids refetching review history for the same PR multiple times in one run.
PR_REVIEWS_CACHE = {}
143214
144215
145216def search_issues (query ):
@@ -155,6 +226,8 @@ def search_issues(query):
155226 "id" : node ["id" ],
156227 "title" : node ["title" ],
157228 "html_url" : node ["url" ],
229+ "number" : node ["number" ],
230+ "repository" : node ["repository" ]["nameWithOwner" ],
158231 "user" : {
159232 "login" : node ["author" ]["login" ] if node .get ("author" ) else ""
160233 },
@@ -163,17 +236,91 @@ def search_issues(query):
163236 return items
164237
165238
def parse_github_datetime(value):
    """Parse a GitHub ISO 8601 timestamp into an aware datetime.

    GitHub emits trailing-"Z" UTC timestamps, which
    datetime.fromisoformat rejects before Python 3.11, so the suffix is
    normalized to an explicit "+00:00" offset first.
    """
    normalized = value.replace("Z", "+00:00")
    return datetime.fromisoformat(normalized)
242+
243+
def get_paginated(url, params=None):
    """GET a paginated REST collection and return all items.

    Follows RFC 5988 "next" links until exhausted. Each page request is
    retried up to 5 times with exponential backoff (1, 2, 4, 8, 16s) on
    rate-limit (403/429) and 5xx responses; any other error status
    raises immediately via raise_for_status.
    """
    collected = []
    page_url = url
    page_params = params or {}
    while page_url:
        for attempt in range(5):
            resp = requests.get(
                page_url,
                headers=HEADERS,
                params=page_params,
                timeout=30,
            )
            retryable = (
                resp.status_code in {403, 429} or 500 <= resp.status_code < 600
            )
            if not retryable:
                resp.raise_for_status()
                break
            wait = 2 ** attempt
            print(
                f"REST request failed ({resp.status_code}), retrying in {wait}s..."
            )
            time.sleep(wait)
        else:
            # All retries exhausted on a retryable status: surface it as an error.
            resp.raise_for_status()
        collected.extend(resp.json())
        # The "next" URL already encodes the query string; drop explicit params.
        page_url = resp.links.get("next", {}).get("url")
        page_params = None
    return collected
272+
273+
def get_pull_request_reviews(pr):
    """Return all submitted reviews for a pull request.

    Results are memoized in PR_REVIEWS_CACHE keyed by (repo, PR number)
    so repeated lookups within one run hit the API only once.
    """
    key = (pr["repository"], pr["number"])
    if key not in PR_REVIEWS_CACHE:
        PR_REVIEWS_CACHE[key] = get_paginated(
            f"{API}/repos/{pr['repository']}/pulls/{pr['number']}/reviews",
            {"per_page": 100},
        )
    return PR_REVIEWS_CACHE[key]
286+
287+
def latest_reviewed_at(pr, reviewer):
    """Return the reviewer's latest submitted review timestamp for a PR.

    Reviews missing a submitted_at (e.g. pending reviews) are ignored;
    returns None when the reviewer has no submitted reviews on the PR.
    """
    submissions = [
        parse_github_datetime(review["submitted_at"])
        for review in get_pull_request_reviews(pr)
        if review.get("user", {}).get("login") == reviewer
        and review.get("submitted_at")
    ]
    return max(submissions, default=None)
301+
302+
166303def gather_activity (user , since_date ):
167304 """Gather a contributor's past-week activity across the org."""
168305 since = since_date .strftime ("%Y-%m-%d" )
169306
170307 # PRs merged (authored)
171308 merged_prs = search_issues (f"author:{ user } type:pr merged:>{ since } " )
172309
173- # PRs reviewed
174- reviewed_prs = search_issues (f"reviewed-by:{ user } type:pr updated:>{ since } " )
175- # Exclude PRs the user authored (already counted above)
176- reviewed_prs = [pr for pr in reviewed_prs if pr ["user" ]["login" ] != user ]
310+ # PRs reviewed use search to find candidate PRs then confirm
311+ # the reviewer actually submitted a review during the standup window.
312+ review_candidates = search_issues (
313+ f"reviewed-by:{ user } type:pr updated:>{ since } sort:updated-desc"
314+ )
315+ seen_ids = set ()
316+ reviewed_prs = []
317+ for pr in review_candidates :
318+ if pr ["id" ] in seen_ids or pr ["user" ]["login" ] == user :
319+ continue
320+ reviewed_at = latest_reviewed_at (pr , user )
321+ if reviewed_at and reviewed_at > since_date :
322+ seen_ids .add (pr ["id" ])
323+ reviewed_prs .append (pr )
177324
178325 # Issues opened
179326 issues_opened = search_issues (f"author:{ user } type:issue created:>{ since } " )
@@ -206,7 +353,7 @@ def gather_potential_bottlenecks(user, since_date):
206353
207354
208355def format_contributor_comment (
209- user , merged_prs , reviewed_prs , issues_opened , bottlenecks
356+ user , merged_prs , reviewed_prs , issues_opened , bottlenecks , previous_thread_url = None
210357):
211358 """Format the threaded reply for a contributor."""
212359 lines = [f"## { user } " , "" , f"@{ user } " , "" ]
@@ -234,6 +381,16 @@ def format_contributor_comment(
234381 else :
235382 lines .append ("_No activity found._" )
236383
384+ lines .append ("" )
385+ lines .append ("### Last Week" )
386+ if previous_thread_url :
387+ lines .append ("" )
388+ lines .append (
389+ f"Review your previous thread: [Last week's thread]({ previous_thread_url } )"
390+ )
391+ else :
392+ lines .append ("_No previous thread found._" )
393+
237394 if bottlenecks :
238395 lines .append ("" )
239396 lines .append ("_Auto-detected signals:_" )
@@ -246,6 +403,7 @@ def main():
246403 today = datetime .now (timezone .utc )
247404 week_label = today .strftime ("Week of %Y-%m-%d" )
248405 since_date = today - timedelta (days = 7 )
406+ previous_thread_links = get_previous_thread_links ()
249407
250408 dry_run = os .environ .get ("DRY_RUN" )
251409
@@ -255,7 +413,12 @@ def main():
255413 merged_prs , reviewed_prs , issues_opened = gather_activity (user , since_date )
256414 bottlenecks = gather_potential_bottlenecks (user , since_date )
257415 comment_body = format_contributor_comment (
258- user , merged_prs , reviewed_prs , issues_opened , bottlenecks
416+ user ,
417+ merged_prs ,
418+ reviewed_prs ,
419+ issues_opened ,
420+ bottlenecks ,
421+ previous_thread_links .get (user ),
259422 )
260423 comments .append ((user , comment_body ))
261424
0 commit comments