-
Notifications
You must be signed in to change notification settings - Fork 0
154 lines (132 loc) · 5.25 KB
/
daily-regen.yml
File metadata and controls
154 lines (132 loc) · 5.25 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
name: "Scheduled: Regen oldest specs"
run-name: "Scheduled regen (${{ github.event.inputs.count || '1' }} specs)"

# Selects the N oldest specs (ranked by the most recent implementation
# `updated` timestamp) and re-dispatches `bulk-generate.yml` once per pick.
# N defaults to 1 per cron tick.
#
# Schedule: every two hours, skipping the 20:00-24:00 Berlin (CEST) evening
# window. Berlin CEST start hours 00, 02, ..., 18 map to UTC 22, 00, 02, 04,
# 06, 08, 10, 12, 14, 16; the 20:00 and 22:00 Berlin slots (UTC 18, 20) are
# deliberately omitted so a run never begins during the user's evening.
#
# bulk-generate serialises itself through its own concurrency group. With
# Sonnet and the reduced bulk-generate pace a single spec finishes comfortably
# inside the 2h slot, leaving the user window clean.
#
# Triggers:
#   - schedule: 10x daily (UTC, every 2h except 18:00 and 20:00 UTC)
#   - workflow_dispatch: manual, with inputs for count + dry-run

on:
  schedule:
    - cron: '0 0,2,4,6,8,10,12,14,16,22 * * *'
  workflow_dispatch:
    inputs:
      count:
        description: "How many of the oldest specs to regen (default 1)"
        required: false
        default: '1'
      min_age_hours:
        description: "Skip specs regen'd within this many hours (default 20)"
        required: false
        default: '20'
      dry_run:
        description: "Just print picks, do not trigger bulk-generate"
        type: boolean
        default: false

permissions:
  contents: read
  actions: write  # required to dispatch bulk-generate.yml

# One scheduled-regen run at a time; a queued run waits rather than cancels.
concurrency:
  group: daily-regen
  cancel-in-progress: false
jobs:
  # Scan plots/*/metadata/python/*.yaml, rank each spec by its newest
  # `updated` (or `created`) timestamp, and emit the oldest eligible specs.
  pick:
    runs-on: ubuntu-latest
    outputs:
      specs: ${{ steps.pick.outputs.specs }}  # space-separated spec ids
      count: ${{ steps.pick.outputs.count }}  # number of picks ('0' if none)
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6
      - name: Set up Python
        uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6
        with:
          python-version: '3.13'
      - name: Install PyYAML
        run: pip install pyyaml
      - name: Pick oldest spec(s)
        id: pick
        env:
          # `inputs` is empty on schedule triggers, so fall back to defaults.
          COUNT: ${{ inputs.count || '1' }}
          MIN_AGE_HOURS: ${{ inputs.min_age_hours || '20' }}
        run: |
          python3 <<'PY'
          import os
          from datetime import datetime, timedelta, timezone
          from pathlib import Path

          import yaml

          # Clamp to >= 0: a negative count would make the [:COUNT] slice
          # below drop items from the END, regenerating almost every spec.
          COUNT = max(0, int(os.environ["COUNT"]))
          MIN_AGE = timedelta(hours=int(os.environ["MIN_AGE_HOURS"]))
          NOW = datetime.now(timezone.utc)

          specs_dir = Path("plots")
          candidates: list[tuple[datetime, str]] = []
          # Guard: without plots/ report zero picks instead of failing the
          # scheduled run with FileNotFoundError from iterdir().
          spec_dirs = sorted(specs_dir.iterdir()) if specs_dir.is_dir() else []
          for spec_dir in spec_dirs:
              if not spec_dir.is_dir() or spec_dir.name.startswith("."):
                  continue
              meta_dir = spec_dir / "metadata" / "python"
              if not meta_dir.is_dir():
                  continue
              # Newest timestamp across all implementation metadata files.
              latest_updated: str | None = None
              for yaml_file in meta_dir.glob("*.yaml"):
                  try:
                      data = yaml.safe_load(yaml_file.read_text(encoding="utf-8")) or {}
                  except Exception:
                      continue  # unreadable metadata: ignore, keep scanning
                  updated = data.get("updated") or data.get("created")
                  if not updated:
                      continue
                  # ISO-8601 timestamps compare correctly as plain strings.
                  s = str(updated)
                  if latest_updated is None or s > latest_updated:
                      latest_updated = s
              if not latest_updated:
                  # No metadata yet -> treat as ancient, candidate for regen
                  candidates.append((datetime.min.replace(tzinfo=timezone.utc), spec_dir.name))
                  continue
              try:
                  dt = datetime.fromisoformat(latest_updated.replace("Z", "+00:00"))
                  if dt.tzinfo is None:
                      dt = dt.replace(tzinfo=timezone.utc)
              except Exception:
                  continue  # unparseable timestamp: skip this spec
              if NOW - dt < MIN_AGE:
                  continue  # too fresh to re-roll
              candidates.append((dt, spec_dir.name))

          candidates.sort()  # oldest first
          picks = [name for _, name in candidates[:COUNT]]
          print(f"::notice::Eligible specs: {len(candidates)} picked: {picks}")
          for dt, name in candidates[:COUNT]:
              print(f" - {name:40s} latest_updated={dt.isoformat()}")

          # Always write both outputs so the dispatch job's `if:` can test
          # count even when nothing was picked.
          github_output = os.environ["GITHUB_OUTPUT"]
          with open(github_output, "a", encoding="utf-8") as f:
              f.write(f"specs={' '.join(picks)}\n")
              f.write(f"count={len(picks)}\n")
          PY

  # Fan out: one bulk-generate dispatch per picked spec. Skipped entirely on
  # dry runs or when the pick job found nothing eligible.
  dispatch:
    needs: pick
    if: ${{ needs.pick.outputs.count != '0' && !inputs.dry_run }}
    runs-on: ubuntu-latest
    permissions:
      actions: write  # needed by `gh workflow run`
    steps:
      - name: Trigger bulk-generate for each picked spec
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SPECS: ${{ needs.pick.outputs.specs }}
        run: |
          # SPECS is space-separated; rely on word splitting (ids have no spaces).
          for spec in $SPECS; do
            echo "::notice::Dispatching bulk-generate for $spec (all 9 libs)"
            gh workflow run bulk-generate.yml \
              --repo "${{ github.repository }}" \
              -f specification_id="$spec" \
              -f library=all
            # Small pause between dispatches so GitHub's webhook processing has a moment.
            sleep 5
          done