Release Metadata #25
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: Release Metadata

on:
  workflow_dispatch:
    inputs:
      # NOTE(review): this input is never referenced (no use of
      # ${{ inputs.force }} in any job) — wire it up or remove it.
      force:
        description: 'Force regeneration of all metadata'
        type: boolean
        default: false

# Serialize runs; never cancel an in-flight metadata generation.
concurrency:
  group: release-metadata
  cancel-in-progress: false
jobs:
  generate:
    strategy:
      # Let each architecture finish independently even if the other fails.
      fail-fast: false
      matrix:
        include:
          - arch: x86_64-Linux
            runner: ubuntu-latest
          - arch: aarch64-Linux
            runner: ubuntu-24.04-arm
    runs-on: ${{ matrix.runner }}
    permissions:
      contents: read
      packages: read   # needed for GHCR queries via --github-token
| steps: | |
| - name: Checkout repository | |
| uses: actions/checkout@v4 | |
| - name: Install tools | |
| run: | | |
| ARCH="$(uname -m)-linux" | |
| sudo apt-get update | |
| sudo apt-get install -y zstd xz-utils jq | |
| # Download sbuild-meta | |
| curl -fsSL "https://github.com/pkgforge/sbuilder/releases/download/latest/sbuild-meta-${ARCH}" \ | |
| -o /usr/local/bin/sbuild-meta || { | |
| echo "::error::Failed to download sbuild-meta" | |
| exit 1 | |
| } | |
| chmod +x /usr/local/bin/sbuild-meta | |
| sbuild-meta --version | |
| # Download soarql for SDB generation | |
| curl -fsSL "https://github.com/pkgforge/soarql/releases/download/nightly/soarql-${ARCH}" \ | |
| -o /usr/local/bin/soarql || { | |
| echo "::error::Failed to download soarql" | |
| exit 1 | |
| } | |
| chmod +x /usr/local/bin/soarql | |
| - name: Filter recipes by pkg_type | |
| run: | | |
| mkdir -p /tmp/bincache-recipes /tmp/pkgcache-recipes /tmp/output | |
| # Find all recipes and filter by pkg_type | |
| for recipe in $(find binaries packages -name "*.yaml" -type f 2>/dev/null); do | |
| pkg_type=$(grep -E "^pkg_type:" "$recipe" | head -1 | sed 's/pkg_type:[[:space:]]*//; s/^["'"'"']//; s/["'"'"']$//' || echo "") | |
| # Create directory structure and copy recipe | |
| rel_path=$(dirname "$recipe") | |
| if [ "$pkg_type" = "static" ] || [ "$pkg_type" = "dynamic" ]; then | |
| mkdir -p "/tmp/bincache-recipes/$rel_path" | |
| cp "$recipe" "/tmp/bincache-recipes/$recipe" | |
| else | |
| mkdir -p "/tmp/pkgcache-recipes/$rel_path" | |
| cp "$recipe" "/tmp/pkgcache-recipes/$recipe" | |
| fi | |
| done | |
| echo "=== Bincache recipes (static/dynamic) ===" | |
| find /tmp/bincache-recipes -name "*.yaml" 2>/dev/null | wc -l | |
| echo "=== Pkgcache recipes (other pkg_types) ===" | |
| find /tmp/pkgcache-recipes -name "*.yaml" 2>/dev/null | wc -l | |
| - name: Generate bincache metadata | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| # Generate metadata for static/dynamic packages (bincache) | |
| if [ -n "$(find /tmp/bincache-recipes -name '*.yaml' 2>/dev/null)" ]; then | |
| sbuild-meta generate \ | |
| --arch "${{ matrix.arch }}" \ | |
| --recipes /tmp/bincache-recipes \ | |
| --output /tmp/output \ | |
| --cache-type bincache \ | |
| --parallel 4 \ | |
| --ghcr-owner pkgforge-dev \ | |
| --github-token "$GITHUB_TOKEN" || { | |
| echo "::warning::bincache metadata generation failed or no packages found" | |
| } | |
| fi | |
| # Rename to include cache type | |
| if [ -f "/tmp/output/bincache/${{ matrix.arch }}.json" ]; then | |
| mv "/tmp/output/bincache/${{ matrix.arch }}.json" "/tmp/output/bincache-${{ matrix.arch }}.json" | |
| echo "::notice::bincache metadata generated" | |
| jq 'length' "/tmp/output/bincache-${{ matrix.arch }}.json" | |
| else | |
| echo "::warning::No bincache metadata generated" | |
| fi | |
| - name: Generate pkgcache metadata | |
| env: | |
| GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | |
| run: | | |
| # Generate metadata for non-static/dynamic packages (pkgcache) | |
| if [ -n "$(find /tmp/pkgcache-recipes -name '*.yaml' 2>/dev/null)" ]; then | |
| sbuild-meta generate \ | |
| --arch "${{ matrix.arch }}" \ | |
| --recipes /tmp/pkgcache-recipes \ | |
| --output /tmp/output \ | |
| --cache-type pkgcache \ | |
| --parallel 4 \ | |
| --ghcr-owner pkgforge-dev \ | |
| --github-token "$GITHUB_TOKEN" || { | |
| echo "::warning::pkgcache metadata generation failed or no packages found" | |
| } | |
| fi | |
| # Rename to include cache type | |
| if [ -f "/tmp/output/pkgcache/${{ matrix.arch }}.json" ]; then | |
| mv "/tmp/output/pkgcache/${{ matrix.arch }}.json" "/tmp/output/pkgcache-${{ matrix.arch }}.json" | |
| echo "::notice::pkgcache metadata generated" | |
| jq 'length' "/tmp/output/pkgcache-${{ matrix.arch }}.json" | |
| else | |
| echo "::warning::No pkgcache metadata generated" | |
| fi | |
| - name: List outputs | |
| run: | | |
| echo "=== Generated files ===" | |
| ls -lah /tmp/output/ || echo "No files" | |
| for f in /tmp/output/*.json; do | |
| if [ -f "$f" ]; then | |
| echo "=== $(basename $f) ===" | |
| jq '.[0:2]' "$f" 2>/dev/null || cat "$f" | |
| fi | |
| done | |
| - name: Generate SDB metadata | |
| run: | | |
| cd /tmp/output | |
| for json_file in *.json; do | |
| if [ -f "$json_file" ]; then | |
| sdb_file="${json_file%.json}.sdb" | |
| # Extract repo name from filename (e.g., bincache-x86_64-Linux.json -> bincache) | |
| repo_name=$(echo "$json_file" | cut -d'-' -f1) | |
| echo "Converting $json_file to $sdb_file (repo: $repo_name)" | |
| soarql -i "$json_file" -o "$sdb_file" -r "$repo_name" || { | |
| echo "::warning::Failed to convert $json_file to SDB" | |
| continue | |
| } | |
| fi | |
| done | |
| echo "=== SDB files generated ===" | |
| ls -lah *.sdb 2>/dev/null || echo "No SDB files" | |
| - name: Compress outputs | |
| run: | | |
| cd /tmp/output | |
| for file in *.json *.sdb; do | |
| if [ -f "$file" ]; then | |
| # Compress with zstd | |
| zstd -19 "$file" -o "${file}.zstd" | |
| fi | |
| done | |
| ls -lah | |
| - name: Upload artifacts | |
| uses: actions/upload-artifact@v4 | |
| with: | |
| name: metadata-${{ matrix.arch }} | |
| path: | | |
| /tmp/output/*.json | |
| /tmp/output/*.json.zstd | |
| /tmp/output/*.sdb | |
| /tmp/output/*.sdb.zstd | |
| retention-days: 7 | |
| release: | |
| needs: generate | |
| runs-on: ubuntu-latest | |
| permissions: | |
| contents: write | |
| steps: | |
| - name: Download all artifacts | |
| uses: actions/download-artifact@v4 | |
| with: | |
| path: /tmp/artifacts | |
| merge-multiple: true | |
| - name: List artifacts | |
| run: | | |
| echo "=== Downloaded artifacts ===" | |
| ls -lah /tmp/artifacts/ || echo "No artifacts" | |
| - name: Create release | |
| env: | |
| GH_TOKEN: ${{ github.token }} | |
| run: | | |
| VERSION="v$(date -u '+%Y%m%d.%H%M%S')" | |
| # Check if we have any files | |
| if [ -z "$(ls -A /tmp/artifacts/ 2>/dev/null)" ]; then | |
| echo "::warning::No artifacts to release" | |
| exit 0 | |
| fi | |
| # Generate release notes | |
| cat > /tmp/release_notes.md << EOF | |
| ## Metadata Release | |
| **Generated**: $(date -u '+%Y-%m-%d %H:%M:%S UTC') | |
| ### Contents | |
| | File | Size | | |
| |------|------| | |
| EOF | |
| for f in /tmp/artifacts/*; do | |
| if [ -f "$f" ]; then | |
| size=$(ls -lh "$f" | awk '{print $5}') | |
| echo "| $(basename $f) | $size |" >> /tmp/release_notes.md | |
| fi | |
| done | |
| cat >> /tmp/release_notes.md << EOF | |
| --- | |
| *This release was generated automatically from test repository.* | |
| EOF | |
| # Create the release | |
| gh release create "$VERSION" \ | |
| --title "Metadata $VERSION" \ | |
| --notes-file /tmp/release_notes.md \ | |
| --repo "${{ github.repository }}" \ | |
| /tmp/artifacts/* || { | |
| echo "::error::Failed to create release" | |
| exit 1 | |
| } | |
| echo "::notice::Created release $VERSION" |