Skip to content

Commit 10d13ea

Browse files
committed
Test new Repo downloader
1 parent 30f30c3 commit 10d13ea

4 files changed

Lines changed: 206 additions & 58 deletions

File tree

Lines changed: 11 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,9 @@ inputs:
4242
required: false
4343
type: boolean
4444
default: false
45+
github_token:
46+
description: 'GitHub Token'
47+
required: true
4548

4649
outputs:
4750
kernel_version:
@@ -298,6 +301,8 @@ runs:
298301
run: |
299302
set -euo pipefail
300303
CONFIG="$OP_MODEL"
304+
echo "Creating folder for configuration: $CONFIG"
305+
mkdir -p "$CONFIG"
301306
echo "CONFIG=$CONFIG" >> "$GITHUB_ENV"
302307
REPO="/usr/local/bin/repo"
303308
if [ ! -x "$REPO" ]; then
@@ -314,42 +319,12 @@ runs:
314319
git config --global core.fsmonitor true
315320
git config --global pack.sparse true
316321
317-
- name: Initialize and Sync Kernel Source
318-
shell: bash
319-
run: |
320-
set -euo pipefail
321-
echo "::group::Initialize kernel source"
322-
echo "Creating folder for configuration: $CONFIG"
323-
mkdir -p "$CONFIG"
324-
cd "$CONFIG"
325-
echo "Initializing and syncing kernel source..."
326-
327-
if [[ "$OP_MANIFEST" == https://* ]]; then
328-
mkdir -p .repo/manifests
329-
curl --fail --show-error --location --proto '=https' "$OP_MANIFEST" -o .repo/manifests/temp_manifest.xml
330-
"$REPO" init -u https://github.com/OnePlusOSS/kernel_manifest.git -b "oneplus/sm8650" -m temp_manifest.xml --repo-rev=v2.16 --depth=1 --no-clone-bundle --no-tags
331-
elif [[ "$OP_BRANCH" == wild/* ]]; then
332-
mkdir -p .repo/manifests
333-
cp "../manifests/$(echo "$OP_OS_VERSION" | tr '[:upper:]' '[:lower:]')/$OP_MANIFEST" .repo/manifests/temp_manifest.xml
334-
"$REPO" init -u https://github.com/OnePlusOSS/kernel_manifest.git -b "oneplus/sm8650" -m temp_manifest.xml --repo-rev=v2.16 --depth=1 --no-clone-bundle --no-tags
335-
else
336-
"$REPO" init -u https://github.com/OnePlusOSS/kernel_manifest.git -b "$OP_BRANCH" -m "$OP_MANIFEST" --repo-rev=v2.16 --depth=1 --no-clone-bundle --no-tags
337-
fi
338-
339-
"$REPO" --version
340-
success=false
341-
for i in 1 2 3; do
342-
if "$REPO" sync -c --no-clone-bundle --no-tags --optimized-fetch \
343-
-j"$(nproc --all)" --fail-fast; then
344-
success=true
345-
break
346-
fi
347-
echo "⚠️ repo sync attempt $i failed; retrying..."
348-
sleep 30
349-
done
350-
$success || { echo "::error::repo sync failed after 3 attempts"; exit 1; }
351-
echo "✅ Kernel source synced"
352-
echo "::endgroup::"
322+
- name: Sync Kernel Source
323+
id: sync-kernel-source
324+
uses: ./.github/actions/repo-downloader
325+
with:
326+
source_location: ${{ env.CONFIG }}
327+
github_token: ${{ inputs.github_token }}
353328

354329
- name: Set Dir Paths
355330
shell: bash
Lines changed: 175 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,175 @@
1+
name: 'Download and configure Source code'
2+
3+
inputs:
4+
source_location:
5+
description: 'Folder path to save Kernel source'
6+
required: true
7+
type: string
8+
github_token:
9+
description: 'GitHub Token'
10+
required: true
11+
12+
runs:
13+
using: 'composite'
14+
steps:
15+
- name: Download and Prepare Manifest
16+
shell: bash
17+
working-directory: ${{ inputs.source_location }}
18+
run: |
19+
if [[ "$OP_MANIFEST" == https://* ]]; then
20+
curl --fail --show-error --location --proto '=https' "$OP_MANIFEST" -o manifest.xml
21+
elif [[ "$OP_BRANCH" == wild/* ]]; then
22+
cp "../manifests/$(echo "$OP_OS_VERSION" | tr '[:upper:]' '[:lower:]')/$OP_MANIFEST" manifest.xml
23+
else
24+
curl --fail --show-error --location --proto '=https' "https://raw.githubusercontent.com/OnePlusOSS/kernel_manifest/refs/heads/$OP_BRANCH/$OP_MANIFEST" -o manifest.xml
25+
fi
26+
27+
- name: Download Manifest Archives (Parallel)
28+
shell: python
29+
env:
30+
PYTHONUNBUFFERED: "1"
31+
GITHUB_TOKEN: ${{ inputs.github_token }}
32+
working-directory: ${{ inputs.source_location }}
33+
run: |
34+
import xml.etree.ElementTree as ET
35+
import subprocess
36+
import os, shutil
37+
import time
38+
import glob
39+
from concurrent.futures import ThreadPoolExecutor
40+
41+
MAX_WORKERS = (os.cpu_count() or 2) * 4
42+
TARGET_REPO = "${{ github.repository }}"
43+
44+
def sync_project(task):
45+
name, path, url, strip, rev = task
46+
if path not in ["./", "."]:
47+
os.makedirs(path, exist_ok=True)
48+
49+
headers = (
50+
"-H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' "
51+
"-H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' "
52+
"-H 'Accept-Encoding: gzip, deflate, br' "
53+
"-H 'Connection: keep-alive' "
54+
"--tcp-fastopen"
55+
)
56+
57+
print(f"🚀 Syncing: {name} -> {path}")
58+
start_time = time.time()
59+
print(f" [PENDING] {name}")
60+
try:
61+
if "clang" in name.lower() or "rust" in name.lower():
62+
label = "rust" if "rust" in name.lower() else "clang"
63+
filename = f"{label}-{rev}.tar.gz"
64+
65+
print(f" [CACHE] Fetching {label} toolchain: {filename}...")
66+
# Download all parts matching the pattern
67+
subprocess.run(
68+
f"gh release download source-cache --repo {TARGET_REPO} --pattern '{filename}*' --clobber",
69+
shell=True, check=True, capture_output=True
70+
)
71+
72+
# Recombine if split parts exist
73+
parts = sorted(glob.glob(f"{filename}.part*"))
74+
if parts:
75+
print(f"🧩 [MERGE] Combining {len(parts)} parts for {rev}...")
76+
with open(filename, 'wb') as outfile:
77+
for part in parts:
78+
with open(part, 'rb') as infile:
79+
shutil.copyfileobj(infile, outfile)
80+
os.remove(part)
81+
82+
# Extract from the local tar.gz
83+
subprocess.run(f"tar -I pigz -x -f {filename} -C {path} {strip}", shell=True, check=True)
84+
if os.path.exists(filename):
85+
os.remove(filename)
86+
else:
87+
cmd = f"curl -LfsS {headers} --retry 5 --connect-timeout 30 '{url}' | tar -I pigz -x -C {path} {strip}"
88+
subprocess.run(cmd, shell=True, check=True)
89+
90+
duration = time.time() - start_time
91+
print(f" [SUCCESS] Synced {name} ({duration:.2f}s)")
92+
return True
93+
except subprocess.CalledProcessError as e:
94+
print(f"❌ [ERROR] Command failed for {name}: {e.cmd}")
95+
print(f" Stderr: {e.stderr.decode() if e.stderr else 'No stderr'}")
96+
return False
97+
except Exception as e:
98+
print(f" [ERROR] Failed to sync {name}")
99+
return False
100+
101+
global_start = time.time()
102+
103+
with open('manifest.xml', 'r') as f:
104+
manifest_content = f.read()
105+
106+
root = ET.fromstring(manifest_content)
107+
top_dir = os.getcwd()
108+
109+
remotes = {r.get('name'): r.get('fetch').rstrip('/') for r in root.findall('remote')}
110+
default = root.find('default')
111+
def_remote = default.get('remote') if default is not None else None
112+
def_rev = default.get('revision') if default is not None else None
113+
114+
sync_tasks = []
115+
post_process_data = []
116+
117+
for project in root.findall('project'):
118+
name = project.get('name')
119+
path = project.get('path', name)
120+
remote_name = project.get('remote', def_remote)
121+
rev = project.get('revision', def_rev)
122+
base_url = remotes.get(remote_name)
123+
124+
if not base_url: continue
125+
126+
if "github.com" in base_url:
127+
url = f"{base_url}/{name}/archive/{rev}.tar.gz"
128+
strip = "--strip-components=1"
129+
elif "googlesource.com" in base_url:
130+
url = f"{base_url}/{name}/+archive/{rev}.tar.gz"
131+
strip = ""
132+
elif "git.codelinaro.org" in base_url:
133+
url = f"{base_url}/{name}/-/archive/{rev}.tar.gz"
134+
strip = "--strip-components=1"
135+
else:
136+
continue
137+
138+
sync_tasks.append((name, path, url, strip, rev))
139+
140+
for child in project:
141+
if child.tag in ['linkfile', 'copyfile']:
142+
post_process_data.append((path, child))
143+
144+
print(f"Starting parallel sync of {len(sync_tasks)} projects...")
145+
with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
146+
success_list = list(executor.map(sync_project, sync_tasks))
147+
148+
if not all(success_list):
149+
print("::error::One or more projects failed to sync!")
150+
exit(1)
151+
152+
print("Processing linkfiles and copyfiles...")
153+
for path, child in post_process_data:
154+
src_rel = child.get('src')
155+
dest_rel = child.get('dest')
156+
if not src_rel or not dest_rel: continue
157+
158+
src_path = os.path.join(top_dir, path, src_rel)
159+
dest_path = os.path.join(top_dir, dest_rel)
160+
os.makedirs(os.path.dirname(dest_path), exist_ok=True)
161+
162+
if child.tag == 'linkfile':
163+
if os.path.lexists(dest_path): os.remove(dest_path)
164+
rel_target = os.path.relpath(src_path, os.path.dirname(dest_path))
165+
os.symlink(rel_target, dest_path)
166+
print(f" [Link] {dest_rel} -> {src_rel}")
167+
elif child.tag == 'copyfile':
168+
shutil.copy2(src_path, dest_path)
169+
print(f" [Copy] {dest_rel} from {src_rel}")
170+
171+
172+
total_duration = time.time() - global_start
173+
minutes = int(total_duration // 60)
174+
seconds = total_duration % 60
175+
print(f"Kernel Sync completed in {minutes}m {seconds:.2f}s")

.github/workflows/build-kernel-release.yml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -571,7 +571,7 @@ jobs:
571571
sudo DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
572572
git curl ca-certificates build-essential clang lld flex bison \
573573
libelf-dev libssl-dev libncurses-dev zlib1g-dev liblz4-tool \
574-
libxml2-utils rsync unzip dwarves file python3 ccache jq bc dos2unix kmod libdw-dev elfutils
574+
libxml2-utils rsync unzip dwarves file python3 ccache jq bc dos2unix kmod libdw-dev elfutils pigz
575575
sudo apt-get clean
576576
echo "✅ Dependencies installed"
577577
echo "::endgroup::"
@@ -604,7 +604,7 @@ jobs:
604604
605605
- name: 🔨 Build Kernel
606606
id: build
607-
uses: ./.github/actions
607+
uses: ./.github/actions/build-kernel
608608
with:
609609
op_config_json: ${{ steps.prepare_config.outputs.config_json }}
610610
ksu_type: ${{ matrix.ksu_type }}
@@ -614,6 +614,7 @@ jobs:
614614
build_timestamp: ${{ inputs.build_timestamp }}
615615
clean: ${{ inputs.clean_build }}
616616
debug: ${{ inputs.debug }}
617+
github_token: ${{ secrets.GITHUB_TOKEN }}
617618

618619
- name: 📊 Build statistics
619620
id: build-stat

.github/workflows/clone-clang.yml

Lines changed: 17 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
name: Mirror clang to GitHub Release
1+
name: Mirror toolchains to GitHub Release
22

33
permissions:
44
contents: write
@@ -17,7 +17,7 @@ jobs:
1717
# Create the release once. If it exists, it fails safely.
1818
gh release create "source-cache" \
1919
--title "Source Mirror Cache" \
20-
--notes "Deduplicated Clang Toolchains" \
20+
--notes "Deduplicated Clang & Rust Toolchains" \
2121
--draft \
2222
--repo "${{ github.repository }}" || echo "Release already exists."
2323
@@ -59,7 +59,7 @@ jobs:
5959
with open('all_configs.json', 'r') as f:
6060
configs = json.load(f)
6161
62-
unique_clang = {}
62+
unique_toolchains = {}
6363
seen_count = 0
6464
6565
print(f"🧐 Processing {len(configs)} configurations...")
@@ -90,26 +90,28 @@ jobs:
9090
9191
for project in root.findall('project'):
9292
name = project.get('name')
93-
if "clang" in name.lower():
93+
if "clang" in name.lower() or "rust" in name.lower():
94+
type_label = "rust" if "rust" in name.lower() else "clang"
9495
remote_name = project.get('remote', def_remote)
9596
rev = project.get('revision', def_rev)
9697
base_url = remotes.get(remote_name, "")
98+
cache_filename = f"{type_label}-{rev}.tar.gz"
9799
98-
if rev not in unique_clang and ("git.codelinaro.org" in base_url or "googlesource.com" in base_url):
100+
if cache_filename not in unique_toolchains and ("git.codelinaro.org" in base_url or "googlesource.com" in base_url):
99101
if "googlesource.com" in base_url:
100102
dl_url = f"{base_url}/{name}/+archive/{rev}.tar.gz"
101103
else:
102104
dl_url = f"{base_url}/{name}/-/archive/{rev}.tar.gz"
103-
unique_clang[rev] = {"rev": rev, "url": dl_url, "name": name}
104-
print(f"🆕 [{op_model}][{op_os_version}] -> New Clang found: {rev}")
105+
unique_toolchains[cache_filename] = {"rev": rev, "url": dl_url, "name": name, "type_label": type_label, "cache_file": cache_filename}
106+
print(f"🆕 [{op_model}][{op_os_version}] -> New {type_label} found: {rev}")
105107
else:
106-
print(f"♻️ [{op_model}][{op_os_version}] -> Using existing Clang: {rev}")
108+
print(f"♻️ [{op_model}][{op_os_version}] -> Using existing {type_label}: {rev}")
107109
except Exception as e:
108110
print(f"⚠️ Failed to process {op_manifest}: {e}")
109-
matrix = {"include": list(unique_clang.values())}
111+
matrix = {"include": list(unique_toolchains.values())}
110112
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
111113
f.write(f"matrix={json.dumps(matrix)}\n")
112-
print(f"✅ Found {len(unique_clang)} unique projects to mirror.")
114+
print(f"✅ Found {len(unique_toolchains)} unique projects to mirror.")
113115
114116
mirror-to-release:
115117
needs: [prepare-release, generate-mirror-matrix]
@@ -124,15 +126,10 @@ jobs:
124126
run: |
125127
TARGET_REPO="${{ github.repository }}"
126128
REV="${{ matrix.rev }}"
127-
FILENAME="${REV}.tar.gz"
129+
FILENAME="${{ matrix.cache_file }}"
128130
129-
# Check cache
130-
HTTP_STATUS=$(curl -I -s -o /dev/null -w "%{http_code}" -L \
131-
-H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" \
132-
"https://github.com/${{ github.repository }}/releases/download/source-cache/${REV}.tar.gz")
133-
134-
if [ "$HTTP_STATUS" != "302" ] && [ "$HTTP_STATUS" != "200" ]; then
135-
echo "📥 Downloading Clang: ${{ matrix.name }}..."
131+
if ! gh release view "source-cache" --repo "$TARGET_REPO" --json assets --jq ".assets[].name" | grep -q "^${FILENAME}$"; then
132+
echo "📥 Downloading ${{ matrix.type_label }} from ${{ matrix.url }}..."
136133
curl -LfsS -H 'User-Agent: Mozilla/5.0' --tcp-fastopen -o "$FILENAME" "${{ matrix.url }}"
137134
138135
FILE_SIZE=$(stat -c%s "$FILENAME")
@@ -142,7 +139,7 @@ jobs:
142139
echo "⚠️ File is > 2GB $FILE_SIZE bytes. Splitting..."
143140
split -b 1500M "$FILENAME" "${FILENAME}.part"
144141
145-
for part in ${FILENAME}.part*; do
142+
for part in "${FILENAME}".part*; do
146143
echo "📤 Uploading $part..."
147144
gh release upload "source-cache" "$part" --clobber --repo "$TARGET_REPO"
148145
done
@@ -151,5 +148,5 @@ jobs:
151148
gh release upload "source-cache" "$FILENAME" --clobber --repo "$TARGET_REPO"
152149
fi
153150
else
154-
echo "✅ Clang revision ${{ matrix.rev }} already cached."
151+
echo "✅ ${{ matrix.type_label }} revision ${{ matrix.rev }} already cached."
155152
fi

0 commit comments

Comments
 (0)