Skip to content

Commit f0856db

Browse files
authored
ci: fix workflows (#533)
1 parent 9c35ecc commit f0856db

3 files changed

Lines changed: 109 additions & 72 deletions

File tree

.github/workflows/build-on-change.yaml

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,6 @@ on:
77
paths:
88
- 'binaries/**/*.yaml'
99
- 'packages/**/*.yaml'
10-
pull_request:
11-
types: [closed]
12-
branches:
13-
- main
1410
workflow_dispatch:
1511
inputs:
1612
recipe_path:
@@ -34,7 +30,6 @@ concurrency:
3430

3531
jobs:
3632
detect-changes:
37-
if: github.event_name == 'push' || github.event_name == 'workflow_dispatch' || (github.event_name == 'pull_request' && github.event.pull_request.merged == true)
3833
runs-on: ubuntu-latest
3934
outputs:
4035
changed_recipes: ${{ steps.filter.outputs.recipes_to_build }}

.github/workflows/pr-build-test.yaml

Lines changed: 67 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -142,41 +142,44 @@ jobs:
142142
- name: Download sbuild
143143
run: |
144144
curl -fsSL "https://github.com/pkgforge/sbuilder/releases/download/nightly/sbuild-x86_64-linux" \
145-
-o /usr/local/bin/sbuild || exit 0
146-
chmod +x /usr/local/bin/sbuild
145+
-o /usr/local/bin/sbuild && chmod +x /usr/local/bin/sbuild
146+
sbuild --version
147147
148-
- name: Download existing cache
148+
- name: Download build status artifacts
149+
uses: actions/download-artifact@v7
150+
with:
151+
pattern: build-status-*
152+
path: /tmp/build-statuses
153+
merge-multiple: true
149154
continue-on-error: true
150-
env:
151-
GH_TOKEN: ${{ github.token }}
152-
run: |
153-
gh release download build-cache -p build_cache.sdb -D /tmp/ --repo "${{ github.repository }}" || \
154-
sbuild cache --cache /tmp/build_cache.sdb init
155155

156-
- name: Update cache with recipe hashes
156+
- name: Update cache with build results
157+
env:
158+
SBUILD_CACHE_URI: ${{ secrets.SBUILD_CACHE_URI }}
157159
run: |
160+
# sbuild updates cache directly on success via SBUILD_CACHE_URI.
161+
# This step only records failures/skips that sbuild didn't handle.
158162
RECIPES='${{ needs.detect-changes.outputs.changed_recipes }}'
159-
BUILD_RESULT="${{ needs.build.result }}"
160163
161164
echo "$RECIPES" | jq -c '.[]' | while read -r recipe; do
162165
path=$(echo "$recipe" | jq -r '.path')
163166
164-
# Extract pkg_id from recipe YAML for cache operations
167+
# Extract pkg_id and pkg name from recipe YAML for cache operations
165168
pkg_id=""
166169
pkg_name=""
167170
if [ -f "$path" ]; then
168171
pkg_id=$(grep -E '^pkg_id:' "$path" | head -1 | sed 's/pkg_id:[[:space:]]*//; s/^["'"'"']//; s/["'"'"']$//')
169172
pkg_name=$(grep -E '^pkg:' "$path" | head -1 | sed 's/pkg:[[:space:]]*//; s/^["'"'"']//; s/["'"'"']$//')
170173
fi
171-
# Fallback to pkg field then directory name
174+
# Fallback to pkg name from recipe
172175
if [ -z "$pkg_id" ]; then
173176
pkg_id="${pkg_name:-$(basename "$(dirname "$path")")}"
174177
fi
175178
176179
# Extract version from recipe's pkgver field
177180
pkg_version=""
178181
if [ -f "$path" ]; then
179-
pkg_version=$(grep -E "^pkgver:" "$path" | head -1 | sed 's/pkgver:[[:space:]]*//; s/^["'"'"']//; s/["'"'"']$//')
182+
pkg_version=$(grep -E "^pkgver:" "$path" | head -1 | sed 's/pkgver:[[:space:]]*//; s/^["'"'"']//; s/["'"'"']$//' || echo "")
180183
fi
181184
182185
# Skip if we have no version
@@ -185,31 +188,68 @@ jobs:
185188
continue
186189
fi
187190
188-
# Compute recipe hash (excluding version for consistency)
189-
if [ -f "$path" ]; then
191+
# Compute recipe hash
192+
if [ -f "$path" ] && command -v sbuild &>/dev/null; then
190193
recipe_hash=$(sbuild meta hash --exclude-version "$path" 2>/dev/null || sha256sum "$path" | cut -d' ' -f1)
191194
else
192-
recipe_hash=""
195+
recipe_hash=$(sha256sum "$path" 2>/dev/null | cut -d' ' -f1 || echo "")
193196
fi
194197
195-
status="success"
196-
if [ "$BUILD_RESULT" != "success" ]; then
197-
status="failure"
198-
fi
198+
# Process build status for each host architecture
199+
for host in x86_64-linux aarch64-linux; do
200+
status=""
201+
202+
if [ -d "/tmp/build-statuses" ]; then
203+
for status_file in /tmp/build-statuses/build-status.json /tmp/build-statuses/*/build-status.json; do
204+
[ -f "$status_file" ] || continue
205+
206+
recipe_url=$(jq -r '.recipe_url // ""' "$status_file" 2>/dev/null || echo "")
207+
file_host=$(jq -r '.host // ""' "$status_file" 2>/dev/null || echo "")
208+
209+
if echo "$recipe_url" | grep -q "$path" && [ "$file_host" = "$host" ]; then
210+
file_status=$(jq -r '.status // ""' "$status_file" 2>/dev/null || echo "")
211+
case "$file_status" in
212+
failure) status="failed" ;;
213+
skipped) status="skipped" ;;
214+
success) status="" ;; # Already handled by sbuild
215+
*) status="" ;;
216+
esac
217+
break
218+
fi
219+
done
220+
fi
221+
222+
# Only update cache for failed/skipped (success handled by sbuild)
223+
if [ -z "$status" ]; then
224+
continue
225+
fi
199226
200-
echo "Caching: $pkg_id v${pkg_version} (hash: ${recipe_hash:0:16}..., status: $status)"
227+
echo "Package: $pkg_id ($host), Version: $pkg_version, Status: $status"
201228
202-
sbuild cache --cache /tmp/build_cache.sdb update \
203-
--package "$pkg_id" \
204-
--version "$pkg_version" \
205-
--hash "$recipe_hash" \
206-
--status "$status" || true
229+
sbuild cache update \
230+
--package "$pkg_id" \
231+
--host "$host" \
232+
--version "$pkg_version" \
233+
--hash "$recipe_hash" \
234+
--status "$status" || echo "Warning: Failed to update cache for $pkg_id ($host)"
235+
done
207236
done
208237
209-
- name: Upload updated cache
238+
- name: Generate build summary
239+
env:
240+
SBUILD_CACHE_URI: ${{ secrets.SBUILD_CACHE_URI }}
241+
run: |
242+
sbuild cache gh-summary \
243+
--title "PR Build Results" \
244+
--host x86_64-linux || true
245+
246+
- name: Export cache to SQLite and upload
210247
env:
211248
GH_TOKEN: ${{ github.token }}
249+
SBUILD_CACHE_URI: ${{ secrets.SBUILD_CACHE_URI }}
212250
run: |
251+
sbuild cache export --output /tmp/build_cache.sdb || true
252+
213253
if [ -f "/tmp/build_cache.sdb" ]; then
214254
gh release upload build-cache /tmp/build_cache.sdb --clobber --repo "${{ github.repository }}" || {
215255
gh release create build-cache \

.github/workflows/rolling-rebuilds.yaml

Lines changed: 42 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -92,29 +92,21 @@ jobs:
9292
- name: Checkout repository
9393
uses: actions/checkout@v6
9494

95-
- name: Cache remote_pkgver state
96-
id: cache
97-
uses: actions/cache@v5
98-
with:
99-
path: .rolling-state.json
100-
key: rolling-remote-versions-v1
101-
restore-keys: |
102-
rolling-remote-versions-v1
95+
- name: Download sbuild
96+
run: |
97+
curl -fsSL "https://github.com/pkgforge/sbuilder/releases/download/nightly/sbuild-x86_64-linux" \
98+
-o /usr/local/bin/sbuild && chmod +x /usr/local/bin/sbuild
99+
sbuild --version
103100
104101
- name: Check for source updates
105102
id: check
106103
env:
107104
GITHUB_TOKEN: ${{ github.token }}
108105
GHCR_OWNER: ${{ github.repository_owner }}
106+
SBUILD_CACHE_URI: ${{ secrets.SBUILD_CACHE_URI }}
109107
run: |
110108
TO_REBUILD="[]"
111109
PACKAGES='${{ needs.find-rolling-packages.outputs.packages }}'
112-
STATE_FILE=".rolling-state.json"
113-
114-
# Initialize state file if doesn't exist
115-
if [ ! -f "$STATE_FILE" ]; then
116-
echo "{}" > "$STATE_FILE"
117-
fi
118110
119111
# Function to extract pkgver script from x_exec block
120112
extract_pkgver_script() {
@@ -181,6 +173,12 @@ jobs:
181173
grep -E "^ghcr_pkg:" "$file" | head -1 | sed 's/^ghcr_pkg:[[:space:]]*//; s/^"//; s/"$//'
182174
}
183175
176+
# Function to extract pkg_id from recipe
177+
get_pkg_id() {
178+
local file="$1"
179+
grep -E "^pkg_id:" "$file" | head -1 | sed 's/^pkg_id:[[:space:]]*//; s/^"//; s/"$//'
180+
}
181+
184182
echo "$PACKAGES" | jq -c '.[]' | while IFS= read -r pkg; do
185183
path=$(echo "$pkg" | jq -r '.path')
186184
family=$(echo "$pkg" | jq -r '.family')
@@ -192,24 +190,37 @@ jobs:
192190
pkg_name="$family" # fallback to family name
193191
fi
194192
195-
# Check for ghcr_pkg
196-
ghcr_pkg=$(get_ghcr_pkg "$path")
193+
# Extract pkg_id for cache lookup
194+
pkg_id=$(get_pkg_id "$path")
197195
198-
echo "::group::Checking $family/$name (pkg: $pkg_name)"
196+
echo "::group::Checking $family/$name (pkg_id: $pkg_id)"
199197
200-
# Skip if no ghcr_pkg
201-
if [ -z "$ghcr_pkg" ]; then
202-
echo "No ghcr_pkg set - cannot track version, skipping"
198+
# Skip if no pkg_id
199+
if [ -z "$pkg_id" ]; then
200+
echo "No pkg_id set - cannot check cache, skipping"
203201
echo "::endgroup::"
204202
continue
205203
fi
206204
207205
NEEDS_REBUILD=false
208206
NEW_VERSION=""
209-
CACHED_VERSION=""
210-
211-
# Get cached remote_pkgver from state file
212-
CACHED_VERSION=$(jq -r --arg key "$path" '.[$key] // ""' "$STATE_FILE")
207+
STORED_VERSION=""
208+
209+
# Query MongoDB for stored version
210+
CACHE_DATA=$(sbuild cache get --package "$pkg_id" --json 2>/dev/null || echo "{}")
211+
REMOTE_VER=$(echo "$CACHE_DATA" | jq -r '.remote_version // ""')
212+
CURRENT_VER=$(echo "$CACHE_DATA" | jq -r '.current_version // ""')
213+
214+
# Use remote_version if set, otherwise fall back to current_version
215+
if [ -n "$REMOTE_VER" ] && [ "$REMOTE_VER" != "null" ]; then
216+
STORED_VERSION="$REMOTE_VER"
217+
echo "Stored remote_version: $STORED_VERSION"
218+
elif [ -n "$CURRENT_VER" ] && [ "$CURRENT_VER" != "null" ]; then
219+
STORED_VERSION="$CURRENT_VER"
220+
echo "No remote_version, using current_version: $STORED_VERSION"
221+
else
222+
echo "No stored version in cache"
223+
fi
213224
214225
# Try to extract and run pkgver script
215226
PKGVER_SCRIPT=$(extract_pkgver_script "$path")
@@ -220,17 +231,17 @@ jobs:
220231
221232
if [ -n "$NEW_VERSION" ]; then
222233
echo "New version from pkgver: $NEW_VERSION"
223-
echo "Cached version: ${CACHED_VERSION:-<none>}"
234+
echo "Stored version: ${STORED_VERSION:-<none>}"
224235
225-
# Compare new version with cached version
226-
if [ -n "$CACHED_VERSION" ] && [ "$NEW_VERSION" != "$CACHED_VERSION" ]; then
227-
echo "Version changed: $CACHED_VERSION -> $NEW_VERSION"
236+
# Compare new version with stored version
237+
if [ -n "$STORED_VERSION" ] && [ "$NEW_VERSION" != "$STORED_VERSION" ]; then
238+
echo "Version changed: $STORED_VERSION -> $NEW_VERSION"
228239
NEEDS_REBUILD=true
229-
elif [ -z "$CACHED_VERSION" ]; then
230-
echo "No cached version - first build or cache cleared, will trigger rebuild"
240+
elif [ -z "$STORED_VERSION" ]; then
241+
echo "No stored version - first build, will trigger rebuild"
231242
NEEDS_REBUILD=true
232243
else
233-
echo "Version unchanged (matches cache)"
244+
echo "Version unchanged (matches stored)"
234245
fi
235246
else
236247
echo "pkgver script returned empty result"
@@ -248,8 +259,6 @@ jobs:
248259
echo "Marking for rebuild"
249260
# Write to temp file to persist across subshell
250261
echo "$path|$NEW_VERSION" >> /tmp/to_rebuild.txt
251-
# Update state file with new version
252-
jq --arg key "$path" --arg ver "$NEW_VERSION" '.[$key] = $ver' "$STATE_FILE" > "$STATE_FILE.tmp" && mv "$STATE_FILE.tmp" "$STATE_FILE"
253262
else
254263
echo "No rebuild needed"
255264
fi
@@ -277,13 +286,6 @@ jobs:
277286
echo "has_updates=false" >> $GITHUB_OUTPUT
278287
fi
279288
280-
- name: Update rolling state cache
281-
if: always()
282-
run: |
283-
if [ -f .rolling-state.json ]; then
284-
echo "State file updated with $(jq 'length' .rolling-state.json) tracked versions"
285-
fi
286-
287289
rebuild:
288290
needs: [find-rolling-packages, check-for-updates]
289291
if: needs.check-for-updates.outputs.has_updates == 'true'

0 commit comments

Comments (0)