Probe #13
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: Probe

on:
  workflow_dispatch:
  pull_request:
    branches: [ main ]

jobs:
  probe:
    name: Compliance Probe
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write  # post/update the results comment on the PR
      contents: write       # push probe data to the latest-results branch
      actions: write        # dispatch the docs-rebuild workflow
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          # Full history: the change-detection step diffs against
          # origin/<base_ref>, and the result processor reads git log metadata.
          fetch-depth: 0
| - name: Discover servers | |
| id: discover | |
| run: | | |
| SERVERS='[]' | |
| for f in src/Servers/*/probe.json; do | |
| dir=$(basename "$(dirname "$f")") | |
| name=$(jq -r .name "$f") | |
| SERVERS=$(echo "$SERVERS" | jq -c --arg d "$dir" --arg n "$name" '. + [{"dir": $d, "name": $n}]') | |
| done | |
| echo "servers=$SERVERS" >> "$GITHUB_OUTPUT" | |
| echo "Discovered: $(echo "$SERVERS" | jq -r '.[].name' | tr '\n' ', ')" | |
| - name: Detect changes | |
| id: changes | |
| run: | | |
| SERVERS='${{ steps.discover.outputs.servers }}' | |
| set_all() { | |
| echo "servers=$SERVERS" >> "$GITHUB_OUTPUT" | |
| } | |
| # workflow_dispatch always runs everything | |
| if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then | |
| set_all | |
| exit 0 | |
| fi | |
| CHANGED=$(git diff --name-only origin/${{ github.base_ref }}...HEAD) | |
| # Global triggers → run all | |
| if echo "$CHANGED" | grep -qE '^(src/Http11Probe/|src/Http11Probe\.Cli/|Directory\.Build\.props|\.dockerignore|\.github/workflows/probe\.yml)'; then | |
| set_all | |
| exit 0 | |
| fi | |
| AFFECTED='[]' | |
| for row in $(echo "$SERVERS" | jq -r '.[] | @base64'); do | |
| dir=$(echo "$row" | base64 -d | jq -r '.dir') | |
| name=$(echo "$row" | base64 -d | jq -r '.name') | |
| if echo "$CHANGED" | grep -q "^src/Servers/${dir}/"; then | |
| AFFECTED=$(echo "$AFFECTED" | jq -c --arg d "$dir" --arg n "$name" '. + [{"dir": $d, "name": $n}]') | |
| fi | |
| done | |
| echo "servers=$AFFECTED" >> "$GITHUB_OUTPUT" | |
| - name: Setup .NET | |
| if: steps.changes.outputs.servers != '[]' | |
| uses: actions/setup-dotnet@v4 | |
| with: | |
| dotnet-version: '10.0' | |
| - name: Build probe CLI | |
| if: steps.changes.outputs.servers != '[]' | |
| run: dotnet build Http11Probe.slnx -c Release | |
| # ── Build / Run / Probe / Kill — one server at a time ────────── | |
| - name: Probe servers | |
| if: steps.changes.outputs.servers != '[]' | |
| run: | | |
| SERVERS='${{ steps.changes.outputs.servers }}' | |
| PROBE_PORT=8080 | |
| for row in $(echo "$SERVERS" | jq -r '.[] | @base64'); do | |
| dir=$(echo "$row" | base64 -d | jq -r '.dir') | |
| name=$(echo "$row" | base64 -d | jq -r '.name') | |
| tag=$(echo "probe-$dir" | tr '[:upper:]' '[:lower:]') | |
| echo "::group::$name" | |
| # Build | |
| docker build -t "$tag" -f "src/Servers/$dir/Dockerfile" . | |
| # Run | |
| docker run -d --name probe-target --network host "$tag" | |
| # Wait | |
| for i in $(seq 1 30); do | |
| curl -sf "http://localhost:${PROBE_PORT}/" > /dev/null 2>&1 && break | |
| sleep 1 | |
| done | |
| # Probe | |
| dotnet run --no-build -c Release --project src/Http11Probe.Cli -- \ | |
| --host localhost --port "$PROBE_PORT" --output "probe-${dir}.json" || true | |
| # Kill | |
| docker stop probe-target && docker rm probe-target | |
| echo "::endgroup::" | |
| done | |
| - name: Cleanup | |
| if: always() | |
| run: docker rm -f probe-target 2>/dev/null || true | |
| # ── Process results ──────────────────────────────────────────── | |
| - name: Process results | |
| if: steps.changes.outputs.servers != '[]' | |
| env: | |
| PROBE_SERVERS: ${{ steps.changes.outputs.servers }} | |
| run: | | |
| python3 << 'PYEOF' | |
import json, sys, os, subprocess, pathlib

# ── Strict expectations ──────────────────────────────────────
def _spec(accept, expected, reason, close_ok=True, timeout_ok=False, **extra):
    """Build one expectation entry.

    accept     -- status codes counted as Pass
    expected   -- human-readable expectation shown in the report
    reason     -- rationale attached to the verdict
    close_ok   -- connection closed by server (no status) also passes
    timeout_ok -- timed-out connection (no status) also passes
    extra      -- optional flags: warn_on_2xx, scored
    """
    entry = {'accept': accept, 'close_ok': close_ok, 'timeout_ok': timeout_ok,
             'expected': expected, 'reason': reason}
    entry.update(extra)
    return entry

STRICT = {
    # Baseline — the only test where a close/timeout is never acceptable.
    'COMP-BASELINE': _spec(list(range(200, 300)), '2xx',
        'Baseline connectivity — valid GET must receive 2xx', close_ok=False),
    # Framing / syntax compliance
    'RFC9112-2.2-BARE-LF-REQUEST-LINE': _spec([400], '400 or close',
        'Bare LF in request-line is a framing violation (RFC 9112 §2.2)'),
    'RFC9112-2.2-BARE-LF-HEADER': _spec([400], '400 or close',
        'Bare LF in header field is a framing violation (RFC 9112 §2.2)'),
    'RFC9112-5.1-OBS-FOLD': _spec([400], '400 or close',
        'Obs-fold (line folding) is deprecated and must be rejected (RFC 9112 §5.1)'),
    'RFC9110-5.6.2-SP-BEFORE-COLON': _spec([400], '400 or close',
        'Space between header name and colon is invalid (RFC 9110 §5.6.2)'),
    'RFC9112-3-MULTI-SP-REQUEST-LINE': _spec([400], '400 or close',
        'Multiple SP in request-line is malformed (RFC 9112 §3)'),
    'RFC9112-7.1-MISSING-HOST': _spec([400], '400 or close',
        'Missing Host header requires 400 (RFC 9112 §7.1)'),
    'RFC9112-2.3-INVALID-VERSION': _spec([400, 505], '400/505 or close',
        'Invalid HTTP version must be rejected (RFC 9112 §2.3)'),
    'RFC9112-5-EMPTY-HEADER-NAME': _spec([400], '400 or close',
        'Empty header name (leading colon) is invalid (RFC 9112 §5)'),
    'RFC9112-3-CR-ONLY-LINE-ENDING': _spec([400], '400 or close',
        'CR without LF is a framing violation (RFC 9112 §2.2)'),
    # Request smuggling — scored
    'SMUG-CL-TE-BOTH': _spec([400], '400 or close',
        'CL + TE together enables smuggling — must reject (RFC 9112 §6.1)'),
    'SMUG-DUPLICATE-CL': _spec([400], '400 or close',
        'Conflicting Content-Length values enable smuggling (RFC 9110 §8.6)'),
    'SMUG-CL-LEADING-ZEROS': _spec([400], '400 or close',
        'Leading zeros in CL can cause length misinterpretation'),
    'SMUG-TE-XCHUNKED': _spec([400], '400 or close',
        'Unknown TE "xchunked" with CL present is ambiguous — must reject'),
    'SMUG-TE-TRAILING-SPACE': _spec([400], '400 or close',
        'TE "chunked " (trailing space) is obfuscation — must reject'),
    'SMUG-TE-SP-BEFORE-COLON': _spec([400], '400 or close',
        'Space before colon is invalid header syntax (RFC 9110 §5.6.2)'),
    'SMUG-CL-NEGATIVE': _spec([400], '400 or close',
        'Negative Content-Length is syntactically invalid'),
    'SMUG-CLTE-PIPELINE': _spec([400], '400 or close',
        'CL.TE smuggling vector — ambiguous framing must be rejected'),
    # Malformed input
    'MAL-BINARY-GARBAGE': _spec([400], '400/close/timeout',
        'Binary garbage is not valid HTTP — must reject', timeout_ok=True),
    'MAL-LONG-URL': _spec([400, 414, 431], '400/414/431 or close',
        '100 KB URL exceeds any reasonable limit'),
    'MAL-LONG-HEADER-VALUE': _spec([400, 431], '400/431 or close',
        '100 KB header value exceeds limits'),
    'MAL-MANY-HEADERS': _spec([400, 431], '400/431 or close',
        '10,000 headers exceeds any reasonable limit'),
    'MAL-NUL-IN-URL': _spec([400], '400 or close',
        'NUL byte in URL is not valid in HTTP request-target'),
    'MAL-CONTROL-CHARS-HEADER': _spec([400], '400 or close',
        'Control characters in header values are invalid (RFC 9110 §5.5)'),
    'MAL-INCOMPLETE-REQUEST': _spec([400], '400/close/timeout',
        'Incomplete request — server must not crash, may timeout', timeout_ok=True),
    'MAL-EMPTY-REQUEST': _spec([400], '400/close/timeout',
        'Empty request — server must not crash, may timeout', timeout_ok=True),
    'RFC9112-3-MISSING-TARGET': _spec([400], '400 or close',
        'Missing request-target is malformed (RFC 9112 §3)'),
    'RFC9112-3.2-FRAGMENT-IN-TARGET': _spec([400], '400 or close',
        'Fragment in request-target is invalid (RFC 9112 §3.2)'),
    'RFC9112-2.3-HTTP09-REQUEST': _spec([400], '400/close/timeout',
        'HTTP/0.9 requests must be rejected (RFC 9112 §2.3)', timeout_ok=True),
    'RFC9112-5-INVALID-HEADER-NAME': _spec([400], '400 or close',
        'Invalid characters in header name must be rejected (RFC 9112 §5)'),
    'RFC9112-5-HEADER-NO-COLON': _spec([400], '400 or close',
        'Header line without colon is malformed (RFC 9112 §5)'),
    'RFC9110-5.4-DUPLICATE-HOST': _spec([400], '400 or close',
        'Duplicate Host headers with different values must be rejected (RFC 9110 §5.4)'),
    'RFC9112-6.1-CL-NON-NUMERIC': _spec([400], '400 or close',
        'Non-numeric Content-Length is invalid (RFC 9112 §6.1)'),
    'RFC9112-6.1-CL-PLUS-SIGN': _spec([400], '400 or close',
        'Content-Length with + sign is invalid (RFC 9112 §6.1)'),
    'SMUG-TECL-PIPELINE': _spec([400], '400 or close',
        'TE.CL smuggling vector — ambiguous framing must be rejected'),
    # Unscored probes: strict rejection is ideal, but 2xx is RFC-compliant,
    # so a 2xx yields Warn and the test is excluded from the score.
    'SMUG-CL-TRAILING-SPACE': _spec([400], '400 or 2xx',
        'Trailing space in CL — OWS trimming is valid per RFC 9110 §5.5',
        warn_on_2xx=True, scored=False),
    'SMUG-HEADER-INJECTION': _spec([400], '400 or 2xx',
        'Payload is two valid headers on the wire — 2xx is RFC-compliant',
        warn_on_2xx=True, scored=False),
    'SMUG-TE-DOUBLE-CHUNKED': _spec([400], '400 or 2xx',
        'Duplicate chunked TE with CL — 4xx is strict, 2xx is tolerable',
        warn_on_2xx=True, scored=False),
    'SMUG-CL-EXTRA-LEADING-SP': _spec([400], '400 or 2xx',
        'Extra OWS after colon is valid per RFC 9110 §5.5',
        warn_on_2xx=True, scored=False),
    'SMUG-TE-CASE-MISMATCH': _spec([400], '400 or 2xx',
        'Case-insensitive TE matching is valid per RFC — 2xx is compliant',
        warn_on_2xx=True, scored=False),
    # Resource-limit abuse
    'MAL-LONG-HEADER-NAME': _spec([400, 431], '400/431 or close',
        '100 KB header name exceeds any reasonable limit'),
    'MAL-LONG-METHOD': _spec([400], '400 or close',
        '100 KB method name exceeds any reasonable limit'),
    'MAL-NON-ASCII-HEADER-NAME': _spec([400], '400 or close',
        'Non-ASCII bytes in header name are invalid'),
    'MAL-NON-ASCII-URL': _spec([400], '400 or close',
        'Non-ASCII bytes in URL are invalid'),
    'MAL-CL-OVERFLOW': _spec([400], '400 or close',
        'Integer overflow in Content-Length must be rejected'),
    'MAL-WHITESPACE-ONLY-LINE': _spec([400], '400/close/timeout',
        'Whitespace-only request line is not valid HTTP', timeout_ok=True),
}

# ── Evaluate one server's results ────────────────────────────
def evaluate(raw):
    """Grade a raw probe report against STRICT.

    Returns {'summary': {total, scored, passed, failed, warnings},
             'results': [per-test row, ...]} in input order.
    """
    graded = []
    for rec in raw['results']:
        test_id = rec['id']
        expect = STRICT.get(test_id)
        if expect is None:
            # Unknown test id: keep the probe's own verdict and flag the gap.
            row = dict(rec)
            row.update(verdict=rec['verdict'], expected='?',
                       got=str(rec.get('statusCode') or rec.get('connectionState', '')),
                       reason='No strict specification defined', scored=True)
            graded.append(row)
            continue
        status = rec.get('statusCode')
        conn_state = rec.get('connectionState', '')
        # Pass when the status is in the accept set, or — with no status —
        # when the observed connection outcome is explicitly allowed.
        if status is not None:
            ok = status in expect['accept']
            observed = str(status)
        else:
            ok = ((expect['close_ok'] and conn_state == 'ClosedByServer') or
                  (expect['timeout_ok'] and conn_state == 'TimedOut'))
            observed = conn_state
        if ok:
            verdict = 'Pass'
        elif expect.get('warn_on_2xx') and status is not None and 200 <= status < 300:
            # Unscored leniency: a 2xx is RFC-compliant here, so only warn.
            verdict = 'Warn'
        else:
            verdict = 'Fail'
        if verdict == 'Fail':
            reason = f"Expected {expect['expected']}, got {observed} — {expect['reason']}"
        else:
            reason = expect['reason']
        graded.append({
            'id': test_id, 'description': rec['description'],
            'category': rec['category'], 'rfc': rec.get('rfcReference'),
            'verdict': verdict, 'statusCode': status,
            'expected': expect['expected'], 'got': observed,
            'connectionState': conn_state, 'reason': reason,
            'scored': expect.get('scored', True),
            'durationMs': rec.get('durationMs', 0),
        })
    scored_rows = [row for row in graded if row['scored']]
    n_scored = len(scored_rows)
    n_passed = sum(1 for row in scored_rows if row['verdict'] == 'Pass')
    n_warned = sum(1 for row in graded if row['verdict'] == 'Warn')
    return {
        'summary': {'total': len(graded), 'scored': n_scored, 'passed': n_passed,
                    'failed': n_scored - n_passed, 'warnings': n_warned},
        'results': graded,
    }
# ── Process each server ──────────────────────────────────────
# Reads probe-<dir>.json for every selected server, grades it via evaluate(),
# then writes probe-data.js (dashboard payload) and probe-comment.md (PR body).
import re  # hoisted: was imported mid-script just before first use

servers_config = json.loads(os.environ['PROBE_SERVERS'])
SERVERS = [(s['name'], f"probe-{s['dir']}.json") for s in servers_config]
# Commit metadata for the report footer / data.js header.
commit_id = subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode().strip()
commit_msg = subprocess.check_output(['git', 'log', '-1', '--format=%s']).decode().strip()
commit_time = subprocess.check_output(['git', 'log', '-1', '--format=%cI']).decode().strip()
server_data = []
for name, path in SERVERS:
    p = pathlib.Path(path)
    if not p.exists():
        # A crashed container or failed build leaves no result file — warn, don't abort.
        print(f'::warning::{name}: result file {path} not found, skipping')
        continue
    with open(path) as f:
        raw = json.load(f)
    ev = evaluate(raw)
    ev['name'] = name
    server_data.append(ev)
    s = ev['summary']
    print(f"{name}: {s['passed']}/{s['scored']} passed, {s['failed']} failed, {s['warnings']} warnings")
if not server_data:
    print('::warning::No probe results found — nothing to report')
    sys.exit(0)
# ── Write data.js ────────────────────────────────────────────
output = {
    'commit': {'id': commit_id, 'message': commit_msg, 'timestamp': commit_time},
    'servers': server_data,
}
with open('probe-data.js', 'w') as f:
    f.write('window.PROBE_DATA = ' + json.dumps(output) + ';')
# ── Write PR comment ─────────────────────────────────────────
lines = ['<!-- http11probe-results -->', '## Http11Probe — Compliance Comparison', '']
# Summary table with score bars, best score first.
# (Removed dead `max_scored = max(...)` — it was computed but never used.)
BAR_WIDTH = 20
lines.append('| Server | Score | |')
lines.append('|--------|------:|---|')
for sv in sorted(server_data, key=lambda s: s['summary']['passed'], reverse=True):
    s = sv['summary']
    pct = s['passed'] / s['scored'] if s['scored'] else 0
    filled = round(pct * BAR_WIDTH)
    bar = '\u2588' * filled + '\u2591' * (BAR_WIDTH - filled)
    lines.append(f"| **{sv['name']}** | {s['passed']}/{s['scored']} | `{bar}` {pct:.0%} |")
lines.append('')
# Test IDs in the first server's order (server_data is non-empty past the
# sys.exit guard above, so no fallback needed).
test_ids = [r['id'] for r in server_data[0]['results']]
# Lookup: server_name -> {test_id -> result}
lookup = {sv['name']: {r['id']: r for r in sv['results']} for sv in server_data}
names = [sv['name'] for sv in server_data]
def short(tid):
    # Strip the RFC/category prefix so table headers stay narrow.
    return re.sub(r'^(RFC\d+-[\d.]+-|COMP-|SMUG-|MAL-)', '', tid)
for cat_name, title in [('Compliance', 'Compliance'), ('Smuggling', 'Smuggling'), ('MalformedInput', 'Malformed Input')]:
    cat_tests = [tid for tid in test_ids if lookup[names[0]][tid]['category'] == cat_name]
    if not cat_tests:
        continue
    lines.append(f'### {title}')
    lines.append('')
    # Header row: Server | test1 | test2 | ...
    hdr = '| Server | ' + ' | '.join(f'`{short(tid)}`' for tid in cat_tests) + ' |'
    sep = '|---' + ''.join('|:---:' for _ in cat_tests) + '|'
    lines.append(hdr)
    lines.append(sep)
    # Expected row
    exp_cells = [lookup[names[0]][tid]['expected'] for tid in cat_tests]
    lines.append('| **Expected** | ' + ' | '.join(exp_cells) + ' |')
    # One row per server; '—' when a server is missing a test.
    for n in names:
        cells = []
        for tid in cat_tests:
            r = lookup[n].get(tid)
            if not r:
                cells.append('—')
            else:
                icon = '✅' if r['verdict'] == 'Pass' else ('⚠️' if r['verdict'] == 'Warn' else '❌')
                cells.append(f"{icon}`{r['got']}`")
        lines.append(f"| **{n}** | " + ' | '.join(cells) + ' |')
    lines.append('')
lines.append(f"<sub>Commit: {commit_id[:7]}</sub>")
with open('probe-comment.md', 'w') as f:
    f.write('\n'.join(lines))
| PYEOF | |
| # ── Upload / publish ─────────────────────────────────────────── | |
| - name: Upload results | |
| uses: actions/upload-artifact@v4 | |
| if: always() | |
| with: | |
| name: probe-results | |
| path: probe-*.json | |
| - name: Comment on PR | |
| if: github.event_name == 'pull_request' && steps.changes.outputs.servers != '[]' | |
| run: | | |
| COMMENT_ID=$(gh api repos/${{ github.repository }}/issues/${{ github.event.number }}/comments \ | |
| --jq '.[] | select(.body | contains("<!-- http11probe-results -->")) | .id' | head -1) | |
| if [ -n "$COMMENT_ID" ]; then | |
| gh api repos/${{ github.repository }}/issues/comments/$COMMENT_ID \ | |
| -X PATCH -f body="$(cat probe-comment.md)" | |
| else | |
| gh pr comment ${{ github.event.number }} --body-file probe-comment.md | |
| fi | |
| env: | |
| GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| - name: Push to latest-results | |
| if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main' | |
| run: | | |
| git config user.name "github-actions[bot]" | |
| git config user.email "41898282+github-actions[bot]@users.noreply.github.com" | |
| if git fetch origin latest-results 2>/dev/null; then | |
| git worktree add /tmp/latest-results origin/latest-results | |
| else | |
| git worktree add --detach /tmp/latest-results HEAD | |
| git -C /tmp/latest-results switch --orphan latest-results | |
| fi | |
| mkdir -p /tmp/latest-results/probe | |
| cp probe-data.js /tmp/latest-results/probe/data.js | |
| cd /tmp/latest-results | |
| git add probe/data.js | |
| if git diff --cached --quiet; then | |
| echo "No changes to commit." | |
| else | |
| git commit -m "Update probe results" | |
| git push origin HEAD:latest-results | |
| fi | |
| cd - | |
| git worktree remove /tmp/latest-results || true | |
| - name: Rebuild docs | |
| if: github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/main' | |
| run: gh workflow run "Deploy Docs to GitHub Pages" | |
| env: | |
| GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} |