-
Notifications
You must be signed in to change notification settings - Fork 7
577 lines (520 loc) · 24.9 KB
/
probe.yml
File metadata and controls
577 lines (520 loc) · 24.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
# Probe — builds each HTTP server image, runs the Http11Probe CLI against it,
# scores results against strict RFC 9110/9112 expectations, and publishes a
# comparison (PR comment + data.js for the docs site).
name: Probe
on:
  workflow_dispatch:
  pull_request:
    branches: [ main ]
jobs:
  probe:
    name: Compliance Probe
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write  # post/update the results comment
      contents: write       # push to the latest-results branch
      actions: write        # trigger the docs deploy workflow
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history: needed for the base-ref diff and worktree push
      - name: Discover servers
        id: discover
        run: |
          SERVERS='[]'
          for f in src/Servers/*/probe.json; do
            dir=$(basename "$(dirname "$f")")
            name=$(jq -r .name "$f")
            SERVERS=$(echo "$SERVERS" | jq -c --arg d "$dir" --arg n "$name" '. + [{"dir": $d, "name": $n}]')
          done
          echo "servers=$SERVERS" >> "$GITHUB_OUTPUT"
          # paste joins with ", " — tr '\n' ', ' would map only '\n' -> ','
          echo "Discovered: $(echo "$SERVERS" | jq -r '.[].name' | paste -sd ', ' -)"
      - name: Detect changes
        id: changes
        env:
          # Passed via env (not inline ${{ }}) so quotes/metacharacters in
          # repo-controlled server names cannot break out of the shell string.
          SERVERS: ${{ steps.discover.outputs.servers }}
        run: |
          set_all() {
            echo "servers=$SERVERS" >> "$GITHUB_OUTPUT"
          }
          # workflow_dispatch always runs everything
          if [ "$GITHUB_EVENT_NAME" = "workflow_dispatch" ]; then
            set_all
            exit 0
          fi
          CHANGED=$(git diff --name-only "origin/${GITHUB_BASE_REF}...HEAD")
          # Global triggers → run all
          if echo "$CHANGED" | grep -qE '^(src/Http11Probe/|src/Http11Probe\.Cli/|Directory\.Build\.props|\.dockerignore|\.github/workflows/probe\.yml)'; then
            set_all
            exit 0
          fi
          AFFECTED='[]'
          for row in $(echo "$SERVERS" | jq -r '.[] | @base64'); do
            dir=$(echo "$row" | base64 -d | jq -r '.dir')
            name=$(echo "$row" | base64 -d | jq -r '.name')
            if echo "$CHANGED" | grep -q "^src/Servers/${dir}/"; then
              AFFECTED=$(echo "$AFFECTED" | jq -c --arg d "$dir" --arg n "$name" '. + [{"dir": $d, "name": $n}]')
            fi
          done
          echo "servers=$AFFECTED" >> "$GITHUB_OUTPUT"
      - name: Setup .NET
        if: steps.changes.outputs.servers != '[]'
        uses: actions/setup-dotnet@v4
        with:
          dotnet-version: '10.0'
      - name: Build probe CLI
        if: steps.changes.outputs.servers != '[]'
        run: dotnet build Http11Probe.slnx -c Release
      # ── Build / Run / Probe / Kill — one server at a time ──────────
      - name: Probe servers
        if: steps.changes.outputs.servers != '[]'
        env:
          SERVERS: ${{ steps.changes.outputs.servers }}
        run: |
          PROBE_PORT=8080
          for row in $(echo "$SERVERS" | jq -r '.[] | @base64'); do
            dir=$(echo "$row" | base64 -d | jq -r '.dir')
            name=$(echo "$row" | base64 -d | jq -r '.name')
            tag=$(echo "probe-$dir" | tr '[:upper:]' '[:lower:]')
            echo "::group::$name"
            # Build
            docker build -t "$tag" -f "src/Servers/$dir/Dockerfile" .
            # Run
            docker run -d --name probe-target --network host "$tag"
            # Wait (up to 30 s) for the server to answer on the probe port
            for i in $(seq 1 30); do
              curl -sf "http://localhost:${PROBE_PORT}/" > /dev/null 2>&1 && break
              sleep 1
            done
            # Probe (|| true: one failing server must not abort the rest)
            dotnet run --no-build -c Release --project src/Http11Probe.Cli -- \
              --host localhost --port "$PROBE_PORT" --output "probe-${dir}.json" || true
            # Kill — rm -f also removes a container that already exited
            # (stop && rm would skip rm and break the next iteration's
            # fixed --name probe-target)
            docker rm -f probe-target
            echo "::endgroup::"
          done
      - name: Cleanup
        if: always()
        run: docker rm -f probe-target 2>/dev/null || true
      # ── Process results ────────────────────────────────────────────
      - name: Process results
        if: steps.changes.outputs.servers != '[]'
        env:
          PROBE_SERVERS: ${{ steps.changes.outputs.servers }}
        run: |
          python3 << 'PYEOF'
          import json, sys, os, subprocess, pathlib
          # ── Strict expectations ──────────────────────────────────────
          STRICT = {
              'COMP-BASELINE': {
                  'accept': list(range(200, 300)),
                  'close_ok': False, 'timeout_ok': False,
                  'expected': '2xx',
                  'reason': 'Baseline connectivity — valid GET must receive 2xx'
              },
              'RFC9112-2.2-BARE-LF-REQUEST-LINE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Bare LF — recipient MAY accept but rejection is stricter (RFC 9112 §2.2)'
              },
              'RFC9112-2.2-BARE-LF-HEADER': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Bare LF — recipient MAY accept but rejection is stricter (RFC 9112 §2.2)'
              },
              'RFC9112-5.1-OBS-FOLD': {
                  'accept': [400], 'close_ok': False, 'timeout_ok': False,
                  'expected': '400',
                  'reason': 'MUST reject by sending 400 or replace with SP (RFC 9112 §5.1)'
              },
              'RFC9110-5.6.2-SP-BEFORE-COLON': {
                  'accept': [400], 'close_ok': False, 'timeout_ok': False,
                  'expected': '400',
                  'reason': 'MUST reject with 400 (RFC 9112 §5)'
              },
              'RFC9112-3-MULTI-SP-REQUEST-LINE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'SHOULD respond with 400 (RFC 9112 §3)'
              },
              'RFC9112-7.1-MISSING-HOST': {
                  'accept': [400], 'close_ok': False, 'timeout_ok': False,
                  'expected': '400',
                  'reason': 'MUST respond with 400 (RFC 9112 §3.2)'
              },
              'RFC9112-2.3-INVALID-VERSION': {
                  'accept': [400, 505], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400/505 or close',
                  'reason': 'No MUST — 505 is available but not mandated (RFC 9112 §2.3)'
              },
              'RFC9112-5-EMPTY-HEADER-NAME': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Empty header name (leading colon) is invalid (RFC 9112 §5)'
              },
              'RFC9112-3-CR-ONLY-LINE-ENDING': {
                  'accept': [400], 'close_ok': False, 'timeout_ok': False,
                  'expected': '400',
                  'reason': 'MUST consider invalid or replace with SP (RFC 9112 §2.2)'
              },
              'SMUG-CL-TE-BOTH': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'CL + TE together "ought to" be handled as error (RFC 9112 §6.3)'
              },
              'SMUG-DUPLICATE-CL': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'MUST treat as unrecoverable error (RFC 9112 §6.3)'
              },
              'SMUG-CL-LEADING-ZEROS': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Leading zeros in CL can cause length misinterpretation'
              },
              'SMUG-TE-XCHUNKED': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Unknown TE "xchunked" with CL present is ambiguous — must reject'
              },
              'SMUG-TE-TRAILING-SPACE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'TE "chunked " (trailing space) is obfuscation — must reject'
              },
              'SMUG-TE-SP-BEFORE-COLON': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Space before colon is invalid header syntax (RFC 9110 §5.6.2)'
              },
              'SMUG-CL-NEGATIVE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Negative Content-Length is syntactically invalid'
              },
              'SMUG-CLTE-PIPELINE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'CL.TE smuggling vector — ambiguous framing must be rejected'
              },
              'MAL-BINARY-GARBAGE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': True,
                  'expected': '400/close/timeout',
                  'reason': 'Binary garbage is not valid HTTP — must reject'
              },
              'MAL-LONG-URL': {
                  'accept': [400, 414, 431], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400/414/431 or close',
                  'reason': '100 KB URL exceeds any reasonable limit'
              },
              'MAL-LONG-HEADER-VALUE': {
                  'accept': [400, 431], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400/431 or close',
                  'reason': '100 KB header value exceeds limits'
              },
              'MAL-MANY-HEADERS': {
                  'accept': [400, 431], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400/431 or close',
                  'reason': '10,000 headers exceeds any reasonable limit'
              },
              'MAL-NUL-IN-URL': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'NUL byte in URL is not valid in HTTP request-target'
              },
              'MAL-CONTROL-CHARS-HEADER': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Control characters in header values are invalid (RFC 9110 §5.5)'
              },
              'MAL-INCOMPLETE-REQUEST': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': True,
                  'expected': '400/close/timeout',
                  'reason': 'Incomplete request — server must not crash, may timeout'
              },
              'MAL-EMPTY-REQUEST': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': True,
                  'expected': '400/close/timeout',
                  'reason': 'Empty request — server must not crash, may timeout'
              },
              'RFC9112-3-MISSING-TARGET': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'SHOULD respond with 400 (RFC 9112 §3)'
              },
              'RFC9112-3.2-FRAGMENT-IN-TARGET': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'SHOULD respond with 400 (RFC 9112 §3)'
              },
              'RFC9112-2.3-HTTP09-REQUEST': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': True,
                  'expected': '400/close/timeout',
                  'reason': 'Invalid request-line — SHOULD respond with 400 (RFC 9112 §3)'
              },
              'RFC9112-5-INVALID-HEADER-NAME': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Invalid characters in header name must be rejected (RFC 9112 §5)'
              },
              'RFC9112-5-HEADER-NO-COLON': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Header line without colon is malformed (RFC 9112 §5)'
              },
              'RFC9110-5.4-DUPLICATE-HOST': {
                  'accept': [400], 'close_ok': False, 'timeout_ok': False,
                  'expected': '400',
                  'reason': 'MUST respond with 400 (RFC 9112 §3.2)'
              },
              'RFC9112-6.1-CL-NON-NUMERIC': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'MUST treat as unrecoverable error (RFC 9112 §6.3)'
              },
              'RFC9112-6.1-CL-PLUS-SIGN': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'MUST treat as unrecoverable error (RFC 9112 §6.3)'
              },
              'SMUG-TECL-PIPELINE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'TE.CL smuggling vector — ambiguous framing must be rejected'
              },
              'SMUG-CL-TRAILING-SPACE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'warn_on_2xx': True, 'scored': False,
                  'expected': '400 or 2xx',
                  'reason': 'Trailing space in CL — OWS trimming is valid per RFC 9110 §5.5'
              },
              'SMUG-HEADER-INJECTION': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'warn_on_2xx': True, 'scored': False,
                  'expected': '400 or 2xx',
                  'reason': 'Payload is two valid headers on the wire — 2xx is RFC-compliant'
              },
              'SMUG-TE-DOUBLE-CHUNKED': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'warn_on_2xx': True, 'scored': False,
                  'expected': '400 or 2xx',
                  'reason': 'Duplicate chunked TE with CL — 4xx is strict, 2xx is tolerable'
              },
              'SMUG-CL-EXTRA-LEADING-SP': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'warn_on_2xx': True, 'scored': False,
                  'expected': '400 or 2xx',
                  'reason': 'Extra OWS after colon is valid per RFC 9110 §5.5'
              },
              'SMUG-TE-CASE-MISMATCH': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'warn_on_2xx': True, 'scored': False,
                  'expected': '400 or 2xx',
                  'reason': 'Case-insensitive TE matching is valid per RFC — 2xx is compliant'
              },
              'MAL-LONG-HEADER-NAME': {
                  'accept': [400, 431], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400/431 or close',
                  'reason': '100 KB header name exceeds any reasonable limit'
              },
              'MAL-LONG-METHOD': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': '100 KB method name exceeds any reasonable limit'
              },
              'MAL-NON-ASCII-HEADER-NAME': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Non-ASCII bytes in header name are invalid'
              },
              'MAL-NON-ASCII-URL': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Non-ASCII bytes in URL are invalid'
              },
              'MAL-CL-OVERFLOW': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': False,
                  'expected': '400 or close',
                  'reason': 'Integer overflow in Content-Length must be rejected'
              },
              'MAL-WHITESPACE-ONLY-LINE': {
                  'accept': [400], 'close_ok': True, 'timeout_ok': True,
                  'expected': '400/close/timeout',
                  'reason': 'Whitespace-only request line is not valid HTTP'
              },
          }
          # ── Evaluate one server's results ────────────────────────────
          def evaluate(raw):
              results = []
              for r in raw['results']:
                  tid = r['id']
                  spec = STRICT.get(tid)
                  if not spec:
                      results.append({**r, 'verdict': r['verdict'], 'expected': '?',
                                      'got': str(r.get('statusCode') or r.get('connectionState', '')),
                                      'reason': 'No strict specification defined', 'scored': True})
                      continue
                  status = r.get('statusCode')
                  conn = r.get('connectionState', '')
                  is_scored = spec.get('scored', True)
                  passed = (
                      (status is not None and status in spec['accept']) or
                      (status is None and spec['close_ok'] and conn == 'ClosedByServer') or
                      (status is None and spec['timeout_ok'] and conn == 'TimedOut')
                  )
                  got = str(status) if status is not None else conn
                  # For unscored tests: 2xx is Warn (RFC-compliant), 4xx is Pass
                  if passed:
                      verdict = 'Pass'
                  elif spec.get('warn_on_2xx') and status is not None and 200 <= status < 300:
                      verdict = 'Warn'
                  else:
                      verdict = 'Fail'
                  reason = spec['reason'] if verdict != 'Fail' else f"Expected {spec['expected']}, got {got} — {spec['reason']}"
                  results.append({
                      'id': tid, 'description': r['description'],
                      'category': r['category'], 'rfc': r.get('rfcReference'),
                      'verdict': verdict, 'statusCode': status,
                      'expected': spec['expected'], 'got': got,
                      'connectionState': conn, 'reason': reason,
                      'scored': is_scored,
                      'durationMs': r.get('durationMs', 0),
                  })
              scored_results = [r for r in results if r['scored']]
              total = len(scored_results)
              passed = sum(1 for r in scored_results if r['verdict'] == 'Pass')
              warned = sum(1 for r in results if r['verdict'] == 'Warn')
              return {
                  'summary': {'total': len(results), 'scored': total, 'passed': passed, 'failed': total - passed, 'warnings': warned},
                  'results': results,
              }
          # ── Process each server ──────────────────────────────────────
          servers_config = json.loads(os.environ['PROBE_SERVERS'])
          SERVERS = [(s['name'], f"probe-{s['dir']}.json") for s in servers_config]
          commit_id = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
          commit_msg = subprocess.check_output(['git', 'log', '-1', '--format=%s']).decode().strip()
          commit_time = subprocess.check_output(['git', 'log', '-1', '--format=%cI']).decode().strip()
          server_data = []
          for name, path in SERVERS:
              p = pathlib.Path(path)
              if not p.exists():
                  print(f'::warning::{name}: result file {path} not found, skipping')
                  continue
              with open(path) as f:
                  raw = json.load(f)
              ev = evaluate(raw)
              ev['name'] = name
              server_data.append(ev)
              s = ev['summary']
              print(f"{name}: {s['passed']}/{s['scored']} passed, {s['failed']} failed, {s['warnings']} warnings")
          if not server_data:
              print('::warning::No probe results found — nothing to report')
              sys.exit(0)
          # ── Write data.js ────────────────────────────────────────────
          output = {
              'commit': {'id': commit_id, 'message': commit_msg, 'timestamp': commit_time},
              'servers': server_data,
          }
          with open('probe-data.js', 'w') as f:
              f.write('window.PROBE_DATA = ' + json.dumps(output) + ';')
          # ── Write PR comment ─────────────────────────────────────────
          lines = ['<!-- http11probe-results -->', '## Http11Probe — Compliance Comparison', '']
          # Summary table with bars
          max_scored = max(s['summary']['scored'] for s in server_data)
          BAR_WIDTH = 20
          lines.append('| Server | Score | |')
          lines.append('|--------|------:|---|')
          for sv in sorted(server_data, key=lambda s: s['summary']['passed'], reverse=True):
              s = sv['summary']
              pct = s['passed'] / s['scored'] if s['scored'] else 0
              filled = round(pct * BAR_WIDTH)
              bar = '\u2588' * filled + '\u2591' * (BAR_WIDTH - filled)
              lines.append(f"| **{sv['name']}** | {s['passed']}/{s['scored']} | `{bar}` {pct:.0%} |")
          lines.append('')
          # Collect all test IDs in order from first server
          test_ids = [r['id'] for r in server_data[0]['results']] if server_data else []
          # Build lookup: server_name -> {test_id -> result}
          lookup = {}
          for sv in server_data:
              lookup[sv['name']] = {r['id']: r for r in sv['results']}
          names = [sv['name'] for sv in server_data]
          import re
          def short(tid):
              return re.sub(r'^(RFC\d+-[\d.]+-|COMP-|SMUG-|MAL-)', '', tid)
          for cat_name, title in [('Compliance', 'Compliance'), ('Smuggling', 'Smuggling'), ('MalformedInput', 'Malformed Input')]:
              cat_tests = [tid for tid in test_ids if lookup[names[0]][tid]['category'] == cat_name]
              if not cat_tests:
                  continue
              lines.append(f'### {title}')
              lines.append('')
              # Header row: Server | test1 | test2 | ...
              hdr = '| Server | ' + ' | '.join(f'`{short(tid)}`' for tid in cat_tests) + ' |'
              sep = '|---' + ''.join('|:---:' for _ in cat_tests) + '|'
              lines.append(hdr)
              lines.append(sep)
              # Expected row
              exp_cells = []
              for tid in cat_tests:
                  first = lookup[names[0]][tid]
                  exp_cells.append(first['expected'])
              lines.append('| **Expected** | ' + ' | '.join(exp_cells) + ' |')
              # Server rows
              for n in names:
                  cells = []
                  for tid in cat_tests:
                      r = lookup[n].get(tid)
                      if not r:
                          cells.append('—')
                      else:
                          icon = '✅' if r['verdict'] == 'Pass' else ('⚠️' if r['verdict'] == 'Warn' else '❌')
                          cells.append(f"{icon}`{r['got']}`")
                  lines.append(f"| **{n}** | " + ' | '.join(cells) + ' |')
              lines.append('')
          lines.append(f"<sub>Commit: {commit_id[:7]}</sub>")
          with open('probe-comment.md', 'w') as f:
              f.write('\n'.join(lines))
          PYEOF
      # ── Upload / publish ───────────────────────────────────────────
      - name: Upload results
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: probe-results
          path: probe-*.json
      - name: Comment on PR
        if: github.event_name == 'pull_request' && steps.changes.outputs.servers != '[]'
        run: |
          COMMENT_ID=$(gh api repos/${{ github.repository }}/issues/${{ github.event.number }}/comments \
            --jq '.[] | select(.body | contains("<!-- http11probe-results -->")) | .id' | head -1)
          if [ -n "$COMMENT_ID" ]; then
            gh api repos/${{ github.repository }}/issues/comments/$COMMENT_ID \
              -X PATCH -f body="$(cat probe-comment.md)"
          else
            gh pr comment ${{ github.event.number }} --body-file probe-comment.md
          fi
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: Push to latest-results
        if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main'
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          if git fetch origin latest-results 2>/dev/null; then
            git worktree add /tmp/latest-results origin/latest-results
          else
            git worktree add --detach /tmp/latest-results HEAD
            git -C /tmp/latest-results switch --orphan latest-results
          fi
          mkdir -p /tmp/latest-results/probe
          cp probe-data.js /tmp/latest-results/probe/data.js
          cd /tmp/latest-results
          git add probe/data.js
          if git diff --cached --quiet; then
            echo "No changes to commit."
          else
            git commit -m "Update probe results"
            git push origin HEAD:latest-results
          fi
          cd -
          git worktree remove /tmp/latest-results || true
      - name: Rebuild docs
        if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main'
        run: gh workflow run "Deploy Docs to GitHub Pages"
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}