forked from fatalcoder524/OnePlus-Remote-Action-Build
-
Notifications
You must be signed in to change notification settings - Fork 0
203 lines (171 loc) · 8.1 KB
/
mirror-toolchains.yml
File metadata and controls
203 lines (171 loc) · 8.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
---
# Mirror heavyweight prebuilt toolchains into a single GitHub Release so
# downstream kernel builds can pull them from this repository instead of
# hammering the upstream mirrors.
name: Mirror static toolchains to GitHub Release

# Only needs to push tags and manage release assets.
permissions:
  contents: write

# Runs on demand, or as a reusable workflow invoked by another pipeline.
# Quoted so generic YAML 1.1 parsers don't read the key as boolean `true`.
"on":
  workflow_dispatch:
  workflow_call:
jobs:
  # Guarantee that the "toolchain-cache" tag and its release exist exactly once.
  prepare-release:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v6
        with:
          # Full history: checkout only fetches tags with fetch-depth 0, and
          # `git rev-parse` below needs the tag to be locally visible.
          fetch-depth: 0

      - name: Configure Git Identity
        run: |
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git config user.name "github-actions[bot]"

      - name: Ensure Single Release
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          TARGET_REPO="${{ github.repository }}"
          TAG_NAME="toolchain-cache"
          # Create the backdated tag only when it is missing.
          if ! git rev-parse "$TAG_NAME" >/dev/null 2>&1; then
            echo "Tag not found. Creating backdated tag..."
            GIT_COMMITTER_DATE="2015-01-01T12:00:00" git tag -a "$TAG_NAME" -m "Internal Toolchains Cache"
            git push origin "$TAG_NAME"
          fi
          # Probe the release independently of the tag: previously a deleted
          # release was never recreated while the tag still existed, which
          # permanently broke every upload in mirror-to-release.
          if ! gh release view "$TAG_NAME" --repo "$TARGET_REPO" >/dev/null 2>&1; then
            echo "Release not found. Creating..."
            gh release create "$TAG_NAME" --title "Toolchains Mirror Cache" --notes "Deduplicated Toolchains for Kernel Build" --repo "$TARGET_REPO"
          else
            echo "Release 'toolchain-cache' already exists. Skipping creation."
          fi
generate-mirror-matrix:
runs-on: ubuntu-latest
needs: prepare-release
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
steps:
- name: Checkout Code (to access configs/)
uses: actions/checkout@v6
with:
ref: devices
sparse-checkout: |
configs/
sparse-checkout-cone-mode: false
- name: Fetch manifests from main repo
shell: bash
run: |
set -euo pipefail
curl -sSfL https://github.com/WildKernels/OnePlus_KernelSU_SUSFS/archive/refs/heads/main.tar.gz \
| tar xz --wildcards '*/manifests/*' --strip-components=1
- name: Generate Mirror Matrix
id: generate-config
shell: bash
run: |
set -euo pipefail
echo "[" > all_configs.json
mapfile -t files < <(find configs/ -name "*.json")
for i in "${!files[@]}"; do
cat "${files[$i]}" >> all_configs.json
[[ $((i+1)) -lt ${#files[@]} ]] && echo "," >> all_configs.json
done
echo "]" >> all_configs.json
- name: Resolve Unique Projects
id: set-matrix
shell: python
env:
PYTHONUNBUFFERED: "1"
run: |
import xml.etree.ElementTree as ET
import json
import os
import subprocess
import shutil
with open('all_configs.json', 'r') as f:
configs = json.load(f)
unique_toolchains = {}
TOOLCHAIN_MAP = {
"clang/host/linux-x86": "clang",
"prebuilts/rust": "rust",
"prebuilts/clang-tools": "clang-tools",
"prebuilts/build-tools": "build-tools"
}
print(f"Processing {len(configs)} configurations...")
print("-" * 50)
for cfg in configs:
op_model = cfg.get('model', 'Unknown')
op_branch = cfg.get('branch', '')
op_manifest = cfg.get('manifest', '')
op_os_version = cfg.get('os_version', '').lower()
xml_file = "temp_manifest.xml"
try:
if op_manifest.startswith("https://"):
subprocess.run(f"curl -LfsS {op_manifest} -o {xml_file}", shell=True, check=True)
elif op_branch.startswith("wild/"):
shutil.copy(f"manifests/{op_os_version}/{op_manifest}", xml_file)
else:
url = f"https://raw.githubusercontent.com/OnePlusOSS/kernel_manifest/refs/heads/{op_branch}/{op_manifest}"
subprocess.run(f"curl -LfsS {url} -o {xml_file}", shell=True, check=True)
root = ET.parse(xml_file).getroot()
remotes = {r.get('name'): r.get('fetch').rstrip('/') for r in root.findall('remote')}
default = root.find('default')
def_remote = default.get('remote')
def_rev = default.get('revision')
for project in root.findall('project'):
name = project.get('name')
type_label = None
for repo_key, label in TOOLCHAIN_MAP.items():
if repo_key in name:
type_label = label
break
if type_label:
remote_name = project.get('remote', def_remote)
rev = project.get('revision', def_rev)
base_url = remotes.get(remote_name, "")
cache_filename = f"{type_label}-{rev}.tar.gz"
if cache_filename not in unique_toolchains and ("git.codelinaro.org" in base_url or "googlesource.com" in base_url):
if "googlesource.com" in base_url:
dl_url = f"{base_url}/{name}/+archive/{rev}.tar.gz"
else:
dl_url = f"{base_url}/{name}/-/archive/{rev}.tar.gz"
unique_toolchains[cache_filename] = {"rev": rev, "url": dl_url, "name": name, "type_label": type_label, "cache_file": cache_filename}
print(f"🆕 [{op_model}][{op_os_version}] -> New {type_label} found: {rev}")
else:
print(f"♻️ [{op_model}][{op_os_version}] -> Using existing {type_label}: {rev}")
except Exception as e:
print(f"⚠️ Failed to process {op_manifest}: {e}")
matrix = {"include": list(unique_toolchains.values())}
with open(os.environ['GITHUB_OUTPUT'], 'a') as f:
f.write(f"matrix={json.dumps(matrix)}\n")
print(f"✅ Found {len(unique_toolchains)} unique projects to mirror.")
mirror-to-release:
name: build (${{ matrix.type_label }}, ${{ matrix.rev }})
needs: [prepare-release, generate-mirror-matrix]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix: ${{ fromJSON(needs.generate-mirror-matrix.outputs.matrix) }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- name: Sync & Upload
run: |
TARGET_REPO="${{ github.repository }}"
REV="${{ matrix.rev }}"
FILENAME="${{ matrix.cache_file }}"
BASE_URL="https://github.com/${TARGET_REPO}/releases/download/toolchain-cache"
echo "🔍 Checking if $FILENAME exists in mirror..."
STATUS=$(curl -IsL -o /dev/null -w "%{http_code}" "$BASE_URL/$FILENAME")
PART_STATUS=$(curl -IsL -o /dev/null -w "%{http_code}" "$BASE_URL/${FILENAME}.partaa")
if [ "$STATUS" -eq 200 ] || [ "$PART_STATUS" -eq 200 ]; then
echo "✅ ${{ matrix.type_label }} revision ${{ matrix.rev }} already cached."
else
echo "📥 Not found. Downloading ${{ matrix.type_label }} from source..."
aria2c -x16 -s16 -k1M -j5 --file-allocation=none --console-log-level=error --summary-interval=0 --retry-wait=5 --max-tries=10 -o "$FILENAME" "${{ matrix.url }}"
FILE_SIZE=$(stat -c%s "$FILENAME")
MAX_SIZE=2000000000 # ~1.86GB
if [ "$FILE_SIZE" -gt "$MAX_SIZE" ]; then
echo "⚠️ File > 2GB. Splitting..."
split -b 1500M -a 2 "$FILENAME" "${FILENAME}.part"
for part in "${FILENAME}".part*; do
echo "📤 Uploading $part..."
gh release upload "toolchain-cache" "$part" --clobber --repo "$TARGET_REPO"
done
else
echo "📤 Uploading single file..."
gh release upload "toolchain-cache" "$FILENAME" --clobber --repo "$TARGET_REPO"
fi
fi