Skip to content

Commit 1fc5edc

Browse files
committed
update workflow
1 parent 56b65d3 commit 1fc5edc

2 files changed

Lines changed: 143 additions & 51 deletions

File tree

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
import os
2+
import json
3+
import argparse
4+
from typing import List, Dict, Tuple
5+
6+
def calculate_package_weight(pkg_path: str) -> int:
    """Profile a package directory to estimate its test-runtime weight.

    GAPIC-generated clients are treated as uniformly lightweight (weight 1);
    any other package is weighted by the number of ``.py`` files under its
    ``tests/`` directory, so heavy handwritten clients get bigger weights.

    Args:
        pkg_path: Path to the package root directory.

    Returns:
        A positive integer weight (minimum 1).
    """
    base_weight = 1

    # GAPIC_AUTO libraries are auto-generated and uniformly cheap to test.
    meta_path = os.path.join(pkg_path, ".repo-metadata.json")
    if os.path.isfile(meta_path):
        try:
            with open(meta_path, "r") as f:
                metadata = json.load(f)
            # Guard the type: a JSON array/scalar here has no .get().
            if isinstance(metadata, dict) and metadata.get("library_type") == "GAPIC_AUTO":
                return base_weight
        except (OSError, ValueError):
            # Unreadable or malformed metadata (JSONDecodeError is a
            # ValueError): fall through to the test-count heuristic rather
            # than failing the whole matrix generation.
            pass

    # Weight handwritten packages by their test-file count.
    test_dir = os.path.join(pkg_path, "tests")
    test_file_count = 0
    if os.path.isdir(test_dir):
        for _root, _dirs, files in os.walk(test_dir):
            test_file_count += sum(1 for name in files if name.endswith(".py"))

    return base_weight + test_file_count
def create_balanced_buckets(packages: List[str], max_buckets: int) -> List[str]:
    """Distribute packages across buckets using Longest Processing Time (LPT).

    Packages without a ``noxfile.py`` are ignored. Each surviving package is
    weighed via ``calculate_package_weight`` and assigned, heaviest first, to
    the currently lightest bucket.

    Args:
        packages: Candidate package directories.
        max_buckets: Upper bound on the number of buckets to create.

    Returns:
        A list of space-separated package-path strings, one entry per bucket.
        Empty when there is nothing to test or ``max_buckets`` is < 1.
    """
    valid_pkgs = [p for p in packages if os.path.isfile(os.path.join(p, "noxfile.py"))]
    if not valid_pkgs:
        return []

    # BUG FIX: a non-positive max_buckets (possible when the caller's job
    # budget collapses to zero) previously produced an empty bucket list and
    # crashed the min() call below with ValueError. Bail out explicitly.
    if max_buckets < 1:
        return []

    pkg_weights: List[Tuple[str, int]] = [
        (pkg, calculate_package_weight(pkg)) for pkg in valid_pkgs
    ]
    # LPT: placing the heaviest items first yields a tighter balance.
    pkg_weights.sort(key=lambda item: item[1], reverse=True)

    # Do not spin up empty VMs if we have fewer packages than max_buckets.
    num_buckets = min(len(valid_pkgs), max_buckets)
    buckets: List[Dict] = [{"packages": [], "total_weight": 0} for _ in range(num_buckets)]

    for pkg, weight in pkg_weights:
        lightest_bucket = min(buckets, key=lambda b: b["total_weight"])
        lightest_bucket["packages"].append(pkg)
        lightest_bucket["total_weight"] += weight

    return [" ".join(b["packages"]) for b in buckets]
def main():
    """CLI entry point: emit balanced test buckets for the CI matrix.

    Reads changed package directories from the ``CHANGED_DIRS`` environment
    variable and writes a JSON list of buckets either to the step-output
    file named by ``GITHUB_OUTPUT`` (as ``buckets=<json>``) or, when run
    locally, pretty-printed to stdout.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--matrix-multiplier", type=int, required=True,
                        help="Number of matrix permutations (e.g., 6 for Py versions)")
    parser.add_argument("--max-vms", type=int, default=40,
                        help="Hard cap on VMs to protect the organization concurrency limit")
    args = parser.parse_args()

    # BUG FIX: validate before dividing — a multiplier of 0 previously
    # raised ZeroDivisionError, and a negative one produced a bogus budget.
    if args.matrix_multiplier < 1:
        parser.error("--matrix-multiplier must be a positive integer")

    # THE L8 MATH: Never exceed ~250 jobs per workflow run.
    safe_github_limit = 250 // args.matrix_multiplier
    max_allowed_buckets = min(safe_github_limit, args.max_vms)

    changed_dirs = os.environ.get("CHANGED_DIRS", "").split()
    buckets = create_balanced_buckets(changed_dirs, max_allowed_buckets)

    # Inside Actions, append to the step-output file; locally, pretty-print.
    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output:
        with open(github_output, "a") as f:
            f.write(f"buckets={json.dumps(buckets)}\n")
    else:
        print(json.dumps(buckets, indent=2))


if __name__ == "__main__":
    main()

.github/workflows/experiment.yaml

Lines changed: 65 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -1,85 +1,99 @@
1-
name: CI
2-
on: [pull_request]
1+
name: CI Unit
2+
on:
3+
pull_request:
4+
branches: [ main, preview ]
35

46
concurrency:
57
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
68
cancel-in-progress: true
79

810
jobs:
9-
# =========================================================
10-
# 1. DISCOVERY
11-
# =========================================================
11+
# ==========================================
12+
# 1. DISCOVERY & BUCKETING
13+
# ==========================================
1214
discover:
1315
runs-on: ubuntu-latest
1416
outputs:
15-
packages: ${{ steps.parse.outputs.packages }}
17+
buckets: ${{ steps.generate-matrix.outputs.buckets }}
1618
steps:
1719
- uses: actions/checkout@v4
18-
- id: changes
20+
21+
- name: Detect Changed Packages
22+
id: changes
1923
uses: tj-actions/changed-files@v44
2024
with:
2125
files: packages/**
26+
dir_names: true
27+
dir_names_max_depth: 2
28+
matrix: false
2229

23-
# Bulletproof directory extraction (ignores file names)
24-
- id: parse
25-
name: Extract Package Directories
30+
- name: Generate Balanced Buckets
31+
id: generate-matrix
2632
env:
27-
ALL_FILES: ${{ steps.changes.outputs.all_changed_files }}
28-
run: |
29-
PKGS=$(echo "$ALL_FILES" | tr ' ' '\n' | cut -d'/' -f1,2 | sort -u | tr '\n' ' ')
30-
echo "packages=$PKGS" >> $GITHUB_OUTPUT
33+
CHANGED_DIRS: ${{ steps.changes.outputs.all_changed_files }}
34+
run: python .github/scripts/matrix_generator.py --matrix-multiplier 6 --max-vms 40
3135

32-
# =========================================================
33-
# 2. EXECUTION (Parallelized internally via xargs)
34-
# =========================================================
35-
unit:
36+
# ==========================================
37+
# 2. HORIZONTAL EXECUTION
38+
# ==========================================
39+
unit-tests:
3640
needs: discover
37-
if: ${{ needs.discover.outputs.packages != '' }}
41+
if: ${{ needs.discover.outputs.buckets != '[]' }}
3842
runs-on: ubuntu-latest
3943
strategy:
4044
fail-fast: false
45+
max-parallel: 60
4146
matrix:
42-
# Exactly 5 Jobs total. Never violates GitHub limits.
43-
python: ["3.9", "3.10", "3.11", "3.12", "3.14"]
47+
chunk: ${{ fromJSON(needs.discover.outputs.buckets) }}
48+
python: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
4449

45-
name: Unit (Python ${{ matrix.python }})
50+
name: Unit (Py ${{ matrix.python }})
4651
steps:
4752
- uses: actions/checkout@v4
4853
- uses: astral-sh/setup-uv@v5
4954
with:
5055
python-version: ${{ matrix.python }}
5156
enable-cache: true
52-
53-
- name: Run Tests Concurrently
57+
58+
- name: Optimize Core Dependencies
59+
run: git config --global url."${GITHUB_WORKSPACE}".insteadOf "https://github.com/googleapis/google-cloud-python"
60+
61+
- name: Execute Chunk
5462
run: |
5563
export NOX_DEFAULT_VENV_BACKEND=uv
56-
export UV_PRERELEASE=allow
64+
FAILED=0
5765
58-
# Dropped -n 1 to fix the xargs warning.
59-
echo "${{ needs.discover.outputs.packages }}" | tr ' ' '\n' | xargs -P 4 -I {} sh -c '
60-
if [ -f "{}/noxfile.py" ]; then
61-
cd {}
62-
63-
# Run test and capture output
64-
uvx --with "nox[uv]" nox -s "unit-${{ matrix.python }}" > nox_output.log 2>&1
65-
STATUS=$?
66-
67-
# Print output cleanly inside a GitHub group
68-
echo "::group::Testing {} (Python ${{ matrix.python }})"
66+
for pkg in ${{ matrix.chunk }}; do
67+
echo "::group::Testing $pkg (Python ${{ matrix.python }})"
68+
cd "$pkg"
69+
70+
# Run test, pipe to log for clean UI folding
71+
if uvx --with 'nox[uv]' nox -s "unit-${{ matrix.python }}" > nox_output.log 2>&1; then
72+
cat nox_output.log
73+
else
6974
cat nox_output.log
70-
71-
if [ $STATUS -ne 0 ]; then
72-
# Check if it failed just because the Python version isn't supported by this package
73-
if grep -q "Sessions not found:" nox_output.log; then
74-
echo "⏭️ Session unit-${{ matrix.python }} is not defined for this package. Safely skipping."
75-
else
76-
echo "❌ Tests failed in {}!"
77-
echo "::endgroup::"
78-
# Exit 1 allows other packages to finish testing, but ensures the workflow fails
79-
exit 1
80-
fi
81-
fi
82-
83-
echo "::endgroup::"
75+
# Gracefully skip if the Python version isn't supported by this legacy package
76+
grep -q "Sessions not found:" nox_output.log || FAILED=1
8477
fi
85-
'
78+
79+
cd "$GITHUB_WORKSPACE"
80+
echo "::endgroup::"
81+
done
82+
83+
exit $FAILED
84+
85+
# ==========================================
86+
# 3. GATEKEEPER
87+
# ==========================================
88+
presubmit-passed:
89+
if: always()
90+
needs: [discover, unit-tests]
91+
runs-on: ubuntu-latest
92+
steps:
93+
- name: Evaluate Pipeline Status
94+
run: |
95+
if [[ "${{ contains(needs.*.result, 'failure') }}" == "true" || "${{ contains(needs.*.result, 'cancelled') }}" == "true" ]]; then
96+
echo "::error::One or more required CI jobs failed or were cancelled."
97+
exit 1
98+
fi
99+
echo "All dynamically generated CI jobs completed successfully."

0 commit comments

Comments
 (0)