fix #5058: ensure sequential file and chunk uploads to avoid overload

David Montesinos 2025-02-06 19:21:51 +01:00
parent e173815810
commit 1a7656d8ee


@@ -320,7 +320,7 @@ Oqtane.Interop = {
         }
         let uploadSize = 0;
-        if (!chunksize) {
+        if (!chunksize || chunksize < 1) {
             chunksize = 1; // 1 MB default
         }
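The first hunk tightens the chunk-size validation: the old guard only caught a missing or zero chunksize, so a negative or fractional value (in MB) would slip through and produce a nonsensical byte count. The new condition also normalizes anything below 1 to the 1 MB default. A minimal standalone sketch of the same guard (the helper name is illustrative, not part of the commit):

    // Normalize a requested chunk size, given in MB, to bytes.
    // Missing, zero, negative, or sub-1MB values fall back to the 1 MB default.
    function chunkSizeInBytes(chunksize) {
        if (!chunksize || chunksize < 1) {
            chunksize = 1; // 1 MB default
        }
        return chunksize * 1024 * 1024;
    }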
@@ -336,75 +336,73 @@ Oqtane.Interop = {
             progressbar.value = 0;
         }
-        const uploadFiles = Array.from(fileinput.files).map(file => {
-            const uploadFile = () => {
+        const uploadFile = (file) => {
             const chunkSize = chunksize * (1024 * 1024);
             const totalParts = Math.ceil(file.size / chunkSize);
             let partCount = 0;
             const uploadPart = () => {
                 const start = partCount * chunkSize;
                 const end = Math.min(start + chunkSize, file.size);
                 const chunk = file.slice(start, end);
                 return new Promise((resolve, reject) => {
                     let formdata = new FormData();
                     formdata.append('__RequestVerificationToken', antiforgerytoken);
                     formdata.append('folder', folder);
                     formdata.append('formfile', chunk, file.name);
                     var credentials = 'same-origin';
                     var headers = new Headers();
                     headers.append('PartCount', partCount + 1);
                     headers.append('TotalParts', totalParts);
                     if (jwt !== "") {
                         headers.append('Authorization', 'Bearer ' + jwt);
                         credentials = 'include';
                     }
                     return fetch(posturl, {
                         method: 'POST',
                         headers: headers,
                         credentials: credentials,
                         body: formdata
                     })
                     .then(response => {
                         if (!response.ok) {
                             if (progressinfo !== null) {
                                 progressinfo.innerHTML = 'Error: ' + response.statusText;
                             }
                             throw new Error('Failed');
                         }
                         return;
                     })
                     .then(data => {
                         partCount++;
                         if (progressbar !== null) {
                             uploadSize += chunk.size;
                             var percent = Math.ceil((uploadSize / totalSize) * 100);
                             progressbar.value = (percent / 100);
                         }
                         if (partCount < totalParts) {
                             uploadPart().then(resolve).catch(reject);
                         }
                         else {
                             resolve(data);
                         }
                     })
                     .catch(error => {
                         reject(error);
                     });
                 });
             };
             return uploadPart();
         };
-            return uploadFile();
-        });
         try {
-            await Promise.all(uploadFiles);
+            for (const file of fileinput.files) {
+                await uploadFile(file);
+            }
         } catch (error) {
             success = false;
         }
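The second hunk carries the actual fix for #5058. Previously every selected file was turned into an upload promise up front and all of them ran concurrently under Promise.all, so a large multi-file selection opened a burst of parallel chunked uploads against the server. Now the files are iterated with for...of and each uploadFile(file) is awaited before the next one starts, while the chunks within a file were already sequential through the recursive uploadPart() chain. A minimal sketch of the pattern change, with uploadOne standing in for the real chunked upload (names are illustrative, not part of the commit):

    // After the fix: one file at a time; a rejection stops the loop
    // and propagates to the caller's try/catch.
    async function uploadSequentially(files, uploadOne) {
        for (const file of files) {
            await uploadOne(file);
        }
    }

    // Before the fix: all files uploaded concurrently.
    // await Promise.all(Array.from(files).map(file => uploadOne(file)));

Serializing the awaits trades some throughput for bounded, predictable load on the server, which is the overload the commit title refers to.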