Skip to content

Commit cee323f

Browse files
committed
webui/kpm: write chunks in parallel in uploadFile, try larger chunk size
1 parent 6977a7c commit cee323f

1 file changed

Lines changed: 55 additions & 16 deletions

File tree

webui/page/kpm.js

Lines changed: 55 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -153,36 +153,75 @@ async function renderKpmList() {
153153
}
154154

155155
async function uploadFile(file, targetPath, onProgress, signal) {
156-
const CHUNK_SIZE = 96 * 1024; // 96KB chunks
157-
let offset = 0;
156+
const CHUNK_SIZE = 256 * 1024; // 256KB chunks
157+
const totalChunks = Math.ceil(file.size / CHUNK_SIZE);
158+
const CONCURRENCY = 8;
158159

159-
await exec(`mkdir -p "$(dirname "${targetPath}")" && : > "${targetPath}"`);
160+
await exec(`mkdir -p "$(dirname "${targetPath}")"`);
161+
162+
let uploadedBytes = 0;
163+
let nextChunkIdx = 0;
164+
165+
const processChunk = async (index) => {
166+
if (signal?.aborted) return;
167+
168+
const start = index * CHUNK_SIZE;
169+
const end = Math.min(start + CHUNK_SIZE, file.size);
170+
const chunk = file.slice(start, end);
160171

161-
while (offset < file.size) {
162-
if (signal?.aborted) throw new DOMException('Aborted', 'AbortError');
163-
const chunk = file.slice(offset, offset + CHUNK_SIZE);
164172
const base64 = await new Promise((resolve, reject) => {
165173
const reader = new FileReader();
166174
reader.onload = () => resolve(reader.result.split(',')[1]);
167175
reader.onerror = reject;
168176
reader.readAsDataURL(chunk);
169177
});
170178

171-
const result = await new Promise((resolve, reject) => {
172-
const child = spawn(`echo '${base64}' | base64 -d >> "${targetPath}"`);
173-
child.on('exit', (code) => {
174-
if (code === 0) resolve({ errno: 0 });
175-
else resolve({ errno: code, stderr: `Exit code ${code}` });
176-
});
177-
child.on('error', (err) => reject(err));
179+
const partPath = `${targetPath}.part${index.toString().padStart(8, '0')}`;
180+
const result = await new Promise((resolve) => {
181+
const child = spawn(`echo '${base64}' | base64 -d > "${partPath}"`);
182+
child.on('exit', (code) => resolve({ errno: code }));
178183
});
179184

180185
if (result.errno !== 0) {
181-
throw new Error(result.stderr || 'Write error');
186+
throw new Error(`Write error at chunk ${index}`);
187+
}
188+
189+
uploadedBytes += (end - start);
190+
if (onProgress) {
191+
onProgress(uploadedBytes / file.size);
192+
}
193+
};
194+
195+
try {
196+
const workers = [];
197+
for (let i = 0; i < Math.min(CONCURRENCY, totalChunks); i++) {
198+
workers.push((async () => {
199+
while (nextChunkIdx < totalChunks && !signal?.aborted) {
200+
const index = nextChunkIdx++;
201+
await processChunk(index);
202+
}
203+
})());
182204
}
183205

184-
offset += CHUNK_SIZE;
185-
if (onProgress) onProgress(Math.min(offset, file.size) / file.size);
206+
await Promise.all(workers);
207+
208+
if (signal?.aborted) throw new DOMException('Aborted', 'AbortError');
209+
210+
if (totalChunks === 0) {
211+
await exec(`: > "${targetPath}"`);
212+
return;
213+
}
214+
215+
const combineResult = await new Promise((resolve) => {
216+
const child = spawn(`cat "${targetPath}.part"* > "${targetPath}" && rm -f "${targetPath}.part"*`);
217+
child.on('exit', (code) => resolve({ errno: code }));
218+
});
219+
if (combineResult.errno !== 0) {
220+
throw new Error('Merge error');
221+
}
222+
} catch (err) {
223+
await exec(`rm -f "${targetPath}.part"*`);
224+
throw err;
186225
}
187226
}
188227

0 commit comments

Comments
 (0)