
Commit 9336993

Add workflow to download and configure kernel source
This workflow downloads and configures the kernel source code by fetching the manifest and the necessary archives, handling different source locations and configurations.
1 parent 1298db2 commit 9336993

1 file changed

Lines changed: 257 additions & 0 deletions
@@ -0,0 +1,257 @@
name: 'Download and configure Kernel Source code'

inputs:
  source_location:
    description: 'Folder path to save Kernel source'
    required: true
    type: string
  github_token:
    description: 'GitHub Token'
    required: true
  debug:
    description: 'Enable Logs'
    required: false
    type: boolean
    default: false
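
# Note: the steps below also read OP_MANIFEST, OP_BRANCH and OP_OS_VERSION from
# the environment; these are not declared as inputs here, so the calling
# workflow is expected to export them.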

runs:
  using: 'composite'
  steps:
    - name: Download and Prepare Manifest
      shell: bash
      working-directory: ${{ inputs.source_location }}
      run: |
        # Fetch the manifest from one of three sources:
        #   1. a full https:// URL passed in OP_MANIFEST,
        #   2. a manifest bundled with this repo (wild/* branches),
        #   3. the OnePlusOSS kernel_manifest repository on GitHub.
        if [[ "$OP_MANIFEST" == https://* ]]; then
          curl --fail --show-error --location --proto '=https' "$OP_MANIFEST" -o manifest.xml
        elif [[ "$OP_BRANCH" == wild/* ]]; then
          cp "../manifests/$(echo "$OP_OS_VERSION" | tr '[:upper:]' '[:lower:]')/$OP_MANIFEST" manifest.xml
        else
          curl --fail --show-error --location --proto '=https' "https://raw.githubusercontent.com/OnePlusOSS/kernel_manifest/refs/heads/$OP_BRANCH/$OP_MANIFEST" -o manifest.xml
        fi
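
    # The second step parses manifest.xml and downloads every project archive in parallel.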
    - name: Download Manifest Archives
      shell: python
      env:
        PYTHONUNBUFFERED: "1"
        GITHUB_TOKEN: ${{ inputs.github_token }}
        DEBUG: ${{ inputs.debug }}
      working-directory: ${{ inputs.source_location }}
      run: |
        # Download Manifest Archives
        import glob
        import os
        import shutil
        import subprocess
        import time
        import xml.etree.ElementTree as ET
        from concurrent.futures import ThreadPoolExecutor

        MAX_WORKERS = (os.cpu_count() or 2) * 4
        NPROC = int(subprocess.check_output("nproc", shell=True).strip())
        TARGET_REPO = "${{ github.repository }}"
        # Projects matching these substrings are served from the "toolchain-cache"
        # GitHub release instead of their upstream remotes.
        TOOLCHAIN_MAP = {
            "clang/host/linux-x86": "clang",
            "prebuilts/rust": "rust",
            "prebuilts/clang-tools": "clang-tools",
            "prebuilts/build-tools": "build-tools",
            "AnyKernel3": "AnyKernel3"
        }
        DEBUG = os.environ.get("DEBUG", "false").lower() == "true"
        aria_quiet_flags = "--quiet --summary-interval=0" if not DEBUG else ""

        curl_headers = (
            "-H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' "
            "-H 'Accept: */*' "
            "-H 'Connection: keep-alive' "
            "--tcp-fastopen"
        )

        aria2_headers = (
            "--header='User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' "
            "--header='Accept: */*' "
            "--header='Connection: keep-alive' "
        )

        print("::group::Download Manifest Archives")
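
        # Sync one manifest project. Toolchain projects are fetched from the
        # toolchain-cache GitHub release, either as a single tarball or as split
        # .partaa/.partab/... pieces reassembled with cat; every other project is
        # downloaded directly from its remote. Archives are staged in /dev/shm
        # and extracted with tar + pigz.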
        def sync_project(task):
            name, path, url, strip, rev = task
            if path not in ["./", "."]:
                os.makedirs(path, exist_ok=True)

            start_time = time.time()
            MAX_ATTEMPTS = 3
            for attempt in range(1, MAX_ATTEMPTS + 1):
                # Per-project scratch space on tmpfs.
                shm_workspace = f"/dev/shm/sync_{hash(name) % 1000000}"
                if os.path.exists(shm_workspace):
                    shutil.rmtree(shm_workspace, ignore_errors=True)
                os.makedirs(shm_workspace, exist_ok=True)

                print(f"Syncing: {name} -> {path}")
                print(f"  [PENDING] {name}")
                try:
                    label = None
                    for repo_key, type_label in TOOLCHAIN_MAP.items():
                        if repo_key in name:
                            label = type_label
                            break

                    found_in_cache = False
                    if label:
                        base_filename = f"{label}-{rev}.tar.gz"
                        BASE_URL = f"https://github.com/{TARGET_REPO}/releases/download/toolchain-cache"

                        # Probe for a single cached tarball first.
                        check_single = f"curl -IsL {curl_headers} --retry 10 --connect-timeout 30 -o /dev/null -w '%{{http_code}}' {BASE_URL}/{base_filename}"
                        if subprocess.check_output(check_single, shell=True).decode().strip() == "200":
                            if DEBUG: print(f"  [CACHE] Downloading single file: {base_filename}")
                            aria_cmd = f"aria2c -x16 -s16 -k1M -j16 --retry-wait=5 --max-tries=10 -d {shm_workspace} --file-allocation=none {aria_quiet_flags} {aria2_headers} -o {base_filename} {BASE_URL}/{base_filename}"
                            subprocess.run(aria_cmd, shell=True, check=True, capture_output=not DEBUG)
                            found_in_cache = True
                        else:
                            # Fall back to split parts (.partaa, .partab, ...) until a probe misses.
                            if DEBUG: print(f"  [CACHE] Single file not found, checking parts for {base_filename}...")
                            parts_to_download = []
                            for suffix in [f"a{c}" for c in "abcdefghijklmnopqrstuvwxyz"]:
                                part_name = f"{base_filename}.part{suffix}"
                                check_part = f"curl -IsL {curl_headers} --retry 10 --connect-timeout 30 -o /dev/null -w '%{{http_code}}' {BASE_URL}/{part_name}"
                                if subprocess.check_output(check_part, shell=True).decode().strip() == "200":
                                    parts_to_download.append(f"{BASE_URL}/{part_name}")
                                else:
                                    break

                            if parts_to_download:
                                if DEBUG: print(f"  [CACHE] Found {len(parts_to_download)} parts. Downloading...")

                                input_file_path = f"{base_filename}.urls"
                                with open(input_file_path, "w") as f:
                                    for part_url in parts_to_download:
                                        p_suffix = part_url.split('.part')[-1]
                                        p_name = f"{base_filename}.part{p_suffix}"
                                        f.write(f"{part_url}\n out={p_name}\n")

                                aria_bulk_cmd = (
                                    f"aria2c -x16 -s16 -k1M -j16 --retry-wait=5 --max-tries=10 -d {shm_workspace} --file-allocation=none "
                                    f"{aria_quiet_flags} {aria2_headers} "
                                    f"--input-file={input_file_path}"
                                )

                                subprocess.run(aria_bulk_cmd, shell=True, check=True, capture_output=not DEBUG)
                                os.remove(input_file_path)
                                found_in_cache = True

                        if not found_in_cache:
                            print(f"::error::[FATAL ERROR] Toolchain {base_filename} NOT FOUND in GitHub Release cache!")
                            print("::error::[FATAL ERROR] Please run the Mirror Toolchain workflow or enable toolchain sync during the build!")
                            return False

                    if found_in_cache:
                        # Reassemble any split parts and stream-extract through pigz.
                        parts = sorted(glob.glob(os.path.join(shm_workspace, "*.part*")))
                        source = f"cat {' '.join(parts)} | " if parts else f"cat {os.path.join(shm_workspace, base_filename)} | "
                        extract_cmd = f"{source} tar -I 'pigz -p {NPROC} -b 256' -x --record-size=1M --no-same-owner -C {path} {strip}"
                        subprocess.run(extract_cmd, shell=True, check=True)
                    else:
                        # Not a cached toolchain: download the snapshot archive directly.
                        direct_file = "dynamic_file.tar.gz"
                        archive_path = os.path.join(shm_workspace, direct_file)
                        aria_cmd = f"aria2c -x16 -s16 -k1M -j16 --allow-overwrite=true --retry-wait=5 --max-tries=10 -d {shm_workspace} -o {direct_file} {aria_quiet_flags} {aria2_headers} '{url}'"
                        if DEBUG: print(f"  [DIRECT] Fetching: {url}")
                        subprocess.run(aria_cmd, shell=True, check=True, capture_output=not DEBUG)
                        extract_cmd = f"tar -I 'pigz -p {NPROC} -b 256' -x --record-size=1M -f {archive_path} -C {path} {strip}"
                        subprocess.run(extract_cmd, shell=True, check=True)

                    duration = time.time() - start_time
                    print(f"Synced {name} successfully! ({duration:.2f}s)")
                    return True
                except subprocess.CalledProcessError as e:
                    print(f"  [RETRY {attempt}/{MAX_ATTEMPTS}] {name} failed with exit code {e.returncode}. Command: {e.cmd}")
                    print(f"  Stderr: {e.stderr.decode() if e.stderr else 'No stderr'}")
                    if attempt < MAX_ATTEMPTS:
                        time.sleep(5)
                except Exception as e:
                    print(f"  [RETRY {attempt}/{MAX_ATTEMPTS}] {name} failed: {e}")
                    if attempt < MAX_ATTEMPTS:
                        time.sleep(5)
                finally:
                    if os.path.exists(shm_workspace):
                        shutil.rmtree(shm_workspace, ignore_errors=True)
            return False
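
        # Parse manifest.xml: build one download task per <project> and collect
        # <linkfile>/<copyfile> entries for post-processing.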
        global_start = time.time()

        with open('manifest.xml', 'r') as f:
            manifest_content = f.read()

        root = ET.fromstring(manifest_content)
        top_dir = os.getcwd()

        # Map remote names to fetch URLs and pick up manifest-wide defaults.
        remotes = {r.get('name'): r.get('fetch').rstrip('/') for r in root.findall('remote')}
        default = root.find('default')
        def_remote = default.get('remote') if default is not None else None
        def_rev = default.get('revision') if default is not None else None

        sync_tasks = []
        post_process_data = []

        for project in root.findall('project'):
            name = project.get('name')
            path = project.get('path', name)
            remote_name = project.get('remote', def_remote)
            rev = project.get('revision', def_rev)
            base_url = remotes.get(remote_name)

            if not base_url: continue

            # Each hoster exposes snapshot tarballs under a different URL scheme.
            if "github.com" in base_url:
                url = f"{base_url}/{name}/archive/{rev}.tar.gz"
                strip = "--strip-components=1"
            elif "googlesource.com" in base_url:
                url = f"{base_url}/{name}/+archive/{rev}.tar.gz"
                strip = ""
            elif "git.codelinaro.org" in base_url:
                url = f"{base_url}/{name}/-/archive/{rev}.tar.gz"
                strip = "--strip-components=1"
            else:
                continue

            sync_tasks.append((name, path, url, strip, rev))

            for child in project:
                if child.tag in ['linkfile', 'copyfile']:
                    post_process_data.append((path, child))

        if DEBUG:
            print(f"Starting parallel sync of {len(sync_tasks)} projects...")
        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
            success_list = list(executor.map(sync_project, sync_tasks))

        if not all(success_list):
            print("::error::One or more projects failed to sync!")
            print("::endgroup::")
            exit(1)

        # Recreate the manifest's <linkfile>/<copyfile> entries relative to the top dir.
        print("Processing linkfiles and copyfiles...")
        for path, child in post_process_data:
            src_rel = child.get('src')
            dest_rel = child.get('dest')
            if not src_rel or not dest_rel: continue

            src_path = os.path.join(top_dir, path, src_rel)
            dest_path = os.path.join(top_dir, dest_rel)
            os.makedirs(os.path.dirname(dest_path), exist_ok=True)

            if child.tag == 'linkfile':
                if os.path.lexists(dest_path): os.remove(dest_path)
                rel_target = os.path.relpath(src_path, os.path.dirname(dest_path))
                os.symlink(rel_target, dest_path)
                print(f"  [Link] {dest_rel} -> {src_rel}")
            elif child.tag == 'copyfile':
                shutil.copy2(src_path, dest_path)
                print(f"  [Copy] {dest_rel} from {src_rel}")

        total_duration = time.time() - global_start
        minutes = int(total_duration // 60)
        seconds = total_duration % 60
        print(f"Kernel Sync completed in {minutes}m {seconds:.2f}s")
        print("::endgroup::")

0 commit comments