name: 'Download and configure Kernel Source code'

inputs:
  source_location:
    description: 'Folder path to save Kernel source'
    required: true
  github_token:
    description: 'GitHub Token'
    required: true
  debug:
    description: 'Enable Logs'
    required: false
    default: false

runs:
  using: 'composite'
  steps:
    - name: Download and Prepare Manifest
      shell: bash
      working-directory: ${{ inputs.source_location }}
      run: |
        # Download and Prepare Manifest
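        # OP_MANIFEST, OP_BRANCH and OP_OS_VERSION are not defined by this action;
        # they are expected to come from the calling workflow's environment.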
        if [[ "$OP_MANIFEST" == https://* ]]; then
          curl --fail --show-error --location --proto '=https' "$OP_MANIFEST" -o manifest.xml
        elif [[ "$OP_BRANCH" == wild/* ]]; then
          cp "../manifests/$(echo "$OP_OS_VERSION" | tr '[:upper:]' '[:lower:]')/$OP_MANIFEST" manifest.xml
        else
          curl --fail --show-error --location --proto '=https' "https://raw.githubusercontent.com/OnePlusOSS/kernel_manifest/refs/heads/$OP_BRANCH/$OP_MANIFEST" -o manifest.xml
        fi

    - name: Download Manifest Archives
      shell: python
      env:
        PYTHONUNBUFFERED: "1"
        GITHUB_TOKEN: ${{ inputs.github_token }}
        DEBUG: ${{ inputs.debug }}
      working-directory: ${{ inputs.source_location }}
      run: |
        # Download Manifest Archives
        import xml.etree.ElementTree as ET
        import subprocess
        import os, shutil, sys
        import time
        import glob
        from concurrent.futures import ThreadPoolExecutor
        import requests

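        # MAX_WORKERS sizes the download thread pool; NPROC feeds pigz so extraction uses all cores.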
        MAX_WORKERS = (os.cpu_count() or 2) * 4
        NPROC = int(subprocess.check_output("nproc", shell=True).strip())
        TARGET_REPO = "${{ github.repository }}"
        TOOLCHAIN_KEYWORDS = ["clang", "rust"]
        DEBUG = os.environ.get("DEBUG", "false").lower() == "true"
        aria_quiet_flags = "--quiet --summary-interval=0" if not DEBUG else ""

        print("::group::Download Manifest Archives")

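        # get_release_parts: list the cached toolchain archive assets (possibly split into .partN
        # files) attached to the repo's "Toolchains Mirror Cache" release.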
        def get_release_parts(label, rev):
            url = f"https://api.github.com/repos/{TARGET_REPO}/releases"
            headers = {
                "Authorization": f"token {os.environ['GITHUB_TOKEN']}",
                "Accept": "application/vnd.github.v3+json"
            }

            for attempt in range(3):
                try:
                    response = requests.get(url, headers=headers)
                    response.raise_for_status()
                    releases = response.json()
                    release = next((r for r in releases if r['name'] == "Toolchains Mirror Cache" or r['tag_name'] == "toolchain-cache"), None)
                    if not release: return []

                    prefix = f"{label}-{rev}.tar.gz"
                    return [(a['name'], a['url']) for a in release['assets'] if a['name'].startswith(prefix)]
                except Exception as e:
                    print(f" [RETRY {attempt+1}] API failed: {e}")
                    time.sleep(2)
            return []

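        # sync_project: download and unpack one manifest project, using the toolchain cache release
        # for clang/rust entries and the remote's tarball endpoint for everything else.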
        def sync_project(task):
            name, path, url, strip, rev = task
            if path not in ["./", "."]:
                os.makedirs(path, exist_ok=True)

            headers = (
                "-H 'User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36' "
                "-H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8' "
                "-H 'Accept-Encoding: gzip, deflate, br' "
                "-H 'Connection: keep-alive' "
                "--tcp-fastopen"
            )

            print(f"Syncing: {name} -> {path}")
            start_time = time.time()
            print(f" [PENDING] {name}")
            try:
                match = next((x for x in TOOLCHAIN_KEYWORDS if x in name.lower()), None)
                if match:
                    label = match
                    base_filename = f"{label}-{rev}.tar.gz"

                    asset_data = get_release_parts(label, rev)

                    if not asset_data:
                        print(f" [ERROR] No assets found for {base_filename} in release!")
                        return False

                    if DEBUG:
                        print(f" [CACHE] Fetching {label} toolchain: {base_filename}...")

                    for asset_name, api_url in asset_data:
                        aria_cmd = (
                            f"aria2c -x16 -s16 -k1M --file-allocation=none {aria_quiet_flags} "
                            f"--header='Authorization: token {os.environ['GITHUB_TOKEN']}' "
                            f"--header='Accept: application/octet-stream' "
                            f"-o {asset_name} {api_url}"
                        )
                        subprocess.run(aria_cmd, shell=True, check=True, capture_output=not DEBUG)

                    parts = sorted(glob.glob(f"{base_filename}.part*"))
                    if parts:
                        if DEBUG:
                            print(f" [MERGE] Combining {len(parts)} parts for {rev}...")
                        subprocess.run(f"cat {base_filename}.part* | tar -I 'pigz -p {NPROC} -b 256' -x --record-size=1M --no-same-owner --no-same-permissions -C {path} {strip}", shell=True, check=True)
                        subprocess.run(f"rm {base_filename}.part*", shell=True, check=True)
                    else:
                        if os.path.exists(base_filename):
                            if DEBUG:
                                print(f" [EXTRACT] Single file detected...")
                            subprocess.run(f"tar -I 'pigz -p {NPROC} -b 256' -x --record-size=1M --no-same-owner --no-same-permissions -f {base_filename} -C {path} {strip}", shell=True, check=True)
                            os.remove(base_filename)
                        else:
                            print(f" [ERROR] {base_filename} missing after download!")
                            return False
                else:
                    cmd = f"curl -LfsS {headers} --retry 5 --connect-timeout 30 '{url}' | tar -I 'pigz -p {NPROC} -b 256' -x --record-size=1M -C {path} {strip}"
                    subprocess.run(cmd, shell=True, check=True)

                duration = time.time() - start_time
                print(f"Synced {name} successfully! ({duration:.2f}s)")
                return True
            except subprocess.CalledProcessError as e:
                print(f" [ERROR] Command failed for {name}: {e.cmd}")
                print(f" Stderr: {e.stderr.decode() if e.stderr else 'No stderr'}")
                return False
            except Exception as e:
                print(f" [ERROR] Failed to sync {name}: {e}")
                return False

        global_start = time.time()

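        # Parse the repo-style manifest: collect remotes, defaults, and one download task per <project>.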
        with open('manifest.xml', 'r') as f:
            manifest_content = f.read()

        root = ET.fromstring(manifest_content)
        top_dir = os.getcwd()

        remotes = {r.get('name'): r.get('fetch').rstrip('/') for r in root.findall('remote')}
        default = root.find('default')
        def_remote = default.get('remote') if default is not None else None
        def_rev = default.get('revision') if default is not None else None

        sync_tasks = []
        post_process_data = []

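        # Each supported host exposes a different tarball URL layout; projects on unknown remotes are
        # skipped, and <linkfile>/<copyfile> children are remembered for post-processing.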
        for project in root.findall('project'):
            name = project.get('name')
            path = project.get('path', name)
            remote_name = project.get('remote', def_remote)
            rev = project.get('revision', def_rev)
            base_url = remotes.get(remote_name)

            if not base_url: continue

            if "github.com" in base_url:
                url = f"{base_url}/{name}/archive/{rev}.tar.gz"
                strip = "--strip-components=1"
            elif "googlesource.com" in base_url:
                url = f"{base_url}/{name}/+archive/{rev}.tar.gz"
                strip = ""
            elif "git.codelinaro.org" in base_url:
                url = f"{base_url}/{name}/-/archive/{rev}.tar.gz"
                strip = "--strip-components=1"
            else:
                continue

            sync_tasks.append((name, path, url, strip, rev))

            for child in project:
                if child.tag in ['linkfile', 'copyfile']:
                    post_process_data.append((path, child))

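        # Fan the downloads out over a thread pool; a single failed project fails the whole step.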
        if DEBUG:
            print(f"Starting parallel sync of {len(sync_tasks)} projects...")
        with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
            success_list = list(executor.map(sync_project, sync_tasks))

        if not all(success_list):
            print("::error::One or more projects failed to sync!")
            print("::endgroup::")
            sys.exit(1)

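        # Apply the manifest's <linkfile>/<copyfile> directives (symlinks and copies) now that all projects are extracted.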
        print("Processing linkfiles and copyfiles...")
        for path, child in post_process_data:
            src_rel = child.get('src')
            dest_rel = child.get('dest')
            if not src_rel or not dest_rel: continue

            src_path = os.path.join(top_dir, path, src_rel)
            dest_path = os.path.join(top_dir, dest_rel)
            os.makedirs(os.path.dirname(dest_path), exist_ok=True)

            if child.tag == 'linkfile':
                if os.path.lexists(dest_path): os.remove(dest_path)
                rel_target = os.path.relpath(src_path, os.path.dirname(dest_path))
                os.symlink(rel_target, dest_path)
                print(f" [Link] {dest_rel} -> {src_rel}")
            elif child.tag == 'copyfile':
                shutil.copy2(src_path, dest_path)
                print(f" [Copy] {dest_rel} from {src_rel}")

        total_duration = time.time() - global_start
        minutes = int(total_duration // 60)
        seconds = total_duration % 60
        print(f"Kernel Sync completed in {minutes}m {seconds:.2f}s")
        print("::endgroup::")