Skip to content

Commit f6c8524

Browse files
committed
webui/kpm: write chunks in parallel in uploadFile
1 parent 6977a7c commit f6c8524

1 file changed

Lines changed: 60 additions & 15 deletions

File tree

webui/page/kpm.js

Lines changed: 60 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -154,35 +154,80 @@ async function renderKpmList() {
154154

155155
async function uploadFile(file, targetPath, onProgress, signal) {
  // Upload `file` to `targetPath` on the device by base64-encoding fixed-size
  // chunks, writing each chunk to its own `${targetPath}.partN` file through
  // the shell (up to CONCURRENCY chunk writes in flight), then concatenating
  // the parts in order into the final file.
  //
  // @param {Blob|File} file         - data to upload (anything with .size/.slice)
  // @param {string} targetPath      - destination path on the device
  // @param {function=} onProgress   - called with a fraction in (0, 1] after each chunk
  // @param {AbortSignal=} signal    - optional cancellation signal
  // @throws {DOMException} 'AbortError' when `signal` is aborted
  // @throws {Error} when a chunk write or the final merge fails
  //
  // NOTE(review): `exec` and `spawn` are assumed to be this webui's remote-shell
  // helpers (spawn takes a shell command string and emits 'exit'/'error') — confirm.
  const CHUNK_SIZE = 96 * 1024; // 96KB raw -> ~128KB base64; keeps each command line bounded
  const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
  const CONCURRENCY = 8;

  await exec(`mkdir -p "$(dirname "${targetPath}")"`);

  let uploadedBytes = 0;
  let nextChunkIdx = 0; // next unclaimed chunk; single-threaded JS, so `++` cannot race
  let failed = false;   // set on first chunk failure so idle workers stop claiming chunks

  const processChunk = async (index) => {
    // Silent return on abort; the post-Promise.all check raises AbortError once.
    if (signal?.aborted) return;

    const start = index * CHUNK_SIZE;
    const end = Math.min(start + CHUNK_SIZE, file.size);
    const chunk = file.slice(start, end);

    // Data URL has the form "data:<mime>;base64,<payload>" — keep the payload.
    const base64 = await new Promise((resolve, reject) => {
      const reader = new FileReader();
      reader.onload = () => resolve(reader.result.split(',')[1]);
      reader.onerror = reject;
      reader.readAsDataURL(chunk);
    });

    // The base64 alphabet contains no single quotes, so '${base64}' is shell-safe.
    const partPath = `${targetPath}.part${index}`;
    const result = await new Promise((resolve) => {
      const child = spawn(`echo '${base64}' | base64 -d > "${partPath}"`);
      child.on('exit', (code) => resolve({ errno: code }));
      // Without an 'error' handler a failed spawn would never settle this
      // promise and the whole upload would hang.
      child.on('error', (err) => resolve({ errno: -1, stderr: err.message }));
    });

    if (result.errno !== 0) {
      throw new Error(result.stderr || `Write error at chunk ${index}`);
    }

    uploadedBytes += end - start;
    if (onProgress) {
      onProgress(uploadedBytes / file.size);
    }
  };

  try {
    // Worker pool: each worker repeatedly claims the next chunk index until
    // all chunks are taken, a chunk fails, or the upload is aborted.
    const workers = [];
    for (let i = 0; i < Math.min(CONCURRENCY, totalChunks); i++) {
      workers.push(
        (async () => {
          while (nextChunkIdx < totalChunks && !failed && !signal?.aborted) {
            const index = nextChunkIdx++;
            try {
              await processChunk(index);
            } catch (err) {
              // Stop the other workers before cleanup removes the part files
              // out from under an in-flight write.
              failed = true;
              throw err;
            }
          }
        })()
      );
    }

    await Promise.all(workers);

    if (signal?.aborted) throw new DOMException('Aborted', 'AbortError');

    // Combine the parts in order; batch so the `cat`/`rm` command lines stay
    // bounded even for very large files.
    await exec(`: > "${targetPath}"`);
    const BATCH_SIZE = 500;
    for (let i = 0; i < totalChunks; i += BATCH_SIZE) {
      const batch = [];
      for (let j = i; j < Math.min(i + BATCH_SIZE, totalChunks); j++) {
        batch.push(`"${targetPath}.part${j}"`);
      }
      const partsStr = batch.join(' ');
      const combineResult = await new Promise((resolve) => {
        const child = spawn(`cat ${partsStr} >> "${targetPath}" && rm -f ${partsStr}`);
        child.on('exit', (code) => resolve({ errno: code }));
        child.on('error', (err) => resolve({ errno: -1, stderr: err.message }));
      });
      if (combineResult.errno !== 0) {
        throw new Error(combineResult.stderr || 'Merge error');
      }
    }
  } catch (err) {
    // Best-effort cleanup of leftover part files, then rethrow the cause.
    await exec(`rm -f "${targetPath}.part"*`);
    throw err;
  }
}
188233

0 commit comments

Comments
 (0)