Merge improved upload method with parallel requests

David Montesinos 2025-02-06 11:38:07 +01:00
commit 1ad8d7918b
5 changed files with 129 additions and 179 deletions

View File

@@ -121,9 +121,6 @@
     private MessageType _messagetype;
     private bool _uploading = false;

-    private int _maxChunkSizeMB = 1;
-    private int _maxConcurrentUploads = 0;
-
     [Parameter]
     public string Id { get; set; } // optional - for setting the id of the FileManager component for accessibility
@ -160,6 +157,9 @@
[Parameter] [Parameter]
public bool UploadMultiple { get; set; } = false; // optional - enable multiple file uploads - default false public bool UploadMultiple { get; set; } = false; // optional - enable multiple file uploads - default false
[Parameter]
public int ChunkSize { get; set; } = 1; // optional - size of file chunks to upload in MB
[Parameter] [Parameter]
public EventCallback<int> OnUpload { get; set; } // optional - executes a method in the calling component when a file is uploaded public EventCallback<int> OnUpload { get; set; } // optional - executes a method in the calling component when a file is uploaded
@@ -176,9 +176,6 @@
         _fileinputid = "FileInput_" + _guid;
         _progressinfoid = "ProgressInfo_" + _guid;
         _progressbarid = "ProgressBar_" + _guid;
-
-        int.TryParse(SettingService.GetSetting(PageState.Site.Settings, "MaxChunkSizeMB", "1"), out _maxChunkSizeMB);
-        int.TryParse(SettingService.GetSetting(PageState.Site.Settings, "MaxConcurrentChunkUploads", "0"), out _maxConcurrentUploads);
     }

     protected override async Task OnParametersSetAsync()
@@ -389,51 +386,8 @@
             StateHasChanged();
         }

-        await interop.UploadFiles(posturl, folder, _guid, SiteState.AntiForgeryToken, jwt, _maxChunkSizeMB, _maxConcurrentUploads); // upload files
-
-        // uploading is asynchronous so we need to poll to determine if uploads are completed
-        var success = true;
-        int upload = 0;
-        while (upload < uploads.Length && success)
-        {
-            success = false;
-            var filename = uploads[upload].Split(':')[0];
-            var size = Int64.Parse(uploads[upload].Split(':')[1]); // bytes
-            var megabits = (size / 1048576.0) * 8; // binary conversion
-            var uploadspeed = (PageState.Alias.Name.Contains("localhost")) ? 100 : 3; // 3 Mbps is FCC minimum for broadband upload
-            var uploadtime = (megabits / uploadspeed); // seconds
-            var maxattempts = 5; // polling (minimum timeout duration will be 5 seconds)
-            var sleep = (int)Math.Ceiling(uploadtime / maxattempts) * 1000; // milliseconds
-            int attempts = 0;
-            while (attempts < maxattempts && !success)
-            {
-                attempts += 1;
-                Thread.Sleep(sleep);
-                if (Folder == Constants.PackagesFolder)
-                {
-                    var files = await FileService.GetFilesAsync(folder);
-                    if (files != null && files.Any(item => item.Name == filename))
-                    {
-                        success = true;
-                    }
-                }
-                else
-                {
-                    var file = await FileService.GetFileAsync(int.Parse(folder), filename);
-                    if (file != null)
-                    {
-                        success = true;
-                    }
-                }
-            }
-            if (success)
-            {
-                upload++;
-            }
-        }
+        var success = await interop.UploadFiles(posturl, folder, _guid, SiteState.AntiForgeryToken, jwt, ChunkSize);

         // reset progress indicators
         if (ShowProgress)
@ -458,7 +412,7 @@
} }
else else
{ {
await logger.LogInformation("File Upload Failed Or Is Still In Progress {Files}", uploads); await logger.LogInformation("File Upload Failed {Files}", uploads);
_message = Localizer["Error.File.Upload"]; _message = Localizer["Error.File.Upload"];
_messagetype = MessageType.Error; _messagetype = MessageType.Error;
} }
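For context on the deleted polling loop above: it estimated how long an upload should take from the file size (assuming 3 Mbps, the FCC broadband upload minimum, on non-localhost hosts) and divided that into five poll intervals. A worked example using the same formulas as the removed code, for a hypothetical 10 MB file:

    long size = 10_485_760;                                         // 10 MB in bytes
    double megabits = (size / 1048576.0) * 8;                       // = 80 megabits
    var uploadspeed = 3;                                            // Mbps
    var uploadtime = megabits / uploadspeed;                        // ~26.7 seconds
    var maxattempts = 5;
    var sleep = (int)Math.Ceiling(uploadtime / maxattempts) * 1000; // = 6000 ms between polls

Because the new interop call reports success directly, this estimate and the Thread.Sleep-based polling it drove are no longer needed.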

View File

@ -127,7 +127,7 @@
<value>Error Loading Files</value> <value>Error Loading Files</value>
</data> </data>
<data name="Error.File.Upload" xml:space="preserve"> <data name="Error.File.Upload" xml:space="preserve">
<value>File Upload Failed Or Is Still In Progress</value> <value>File Upload Failed</value>
</data> </data>
<data name="Message.File.NotSelected" xml:space="preserve"> <data name="Message.File.NotSelected" xml:space="preserve">
<value>You Have Not Selected A File To Upload</value> <value>You Have Not Selected A File To Upload</value>

View File

@@ -209,24 +209,23 @@ namespace Oqtane.UI
             }
         }

-        [Obsolete("This function is deprecated. Use UploadFiles with MaxChunkSize and MaxConcurrentUploads parameters instead.", false)]
         public Task UploadFiles(string posturl, string folder, string id, string antiforgerytoken, string jwt)
         {
-            return UploadFiles(posturl, folder, id, antiforgerytoken, jwt, 1, 0);
+            UploadFiles(posturl, folder, id, antiforgerytoken, jwt, 1);
+            return Task.CompletedTask;
         }

-        public Task UploadFiles(string posturl, string folder, string id, string antiforgerytoken, string jwt, int maxChunkSizeMB, int maxConcurrentUploads)
+        public ValueTask<bool> UploadFiles(string posturl, string folder, string id, string antiforgerytoken, string jwt, int chunksize)
         {
             try
             {
-                _jsRuntime.InvokeVoidAsync(
+                return _jsRuntime.InvokeAsync<bool>(
                     "Oqtane.Interop.uploadFiles",
-                    posturl, folder, id, antiforgerytoken, jwt, maxChunkSizeMB, maxConcurrentUploads);
-                return Task.CompletedTask;
+                    posturl, folder, id, antiforgerytoken, jwt, chunksize);
             }
             catch
             {
-                return Task.CompletedTask;
+                return new ValueTask<bool>(Task.FromResult(false));
             }
         }
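The new overload surfaces the JavaScript return value through InvokeAsync&lt;bool&gt;, so callers can await a boolean instead of polling the file service. A minimal caller sketch (the call shape comes from the diff; the surrounding variables are assumed to be populated):

    // assumes posturl, folder, id, antiforgerytoken and jwt are already set up
    bool success = await interop.UploadFiles(posturl, folder, id, antiforgerytoken, jwt, 1); // 1 MB chunks
    if (!success)
    {
        // report the failure immediately - no timeout heuristics required
    }

As a side note, the failure path could likely use the ValueTask&lt;bool&gt;(bool) constructor, i.e. new ValueTask&lt;bool&gt;(false), and skip the Task.FromResult allocation; the committed form is functionally equivalent.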

View File

@@ -21,6 +21,7 @@ using System.Net.Http;
 using Microsoft.AspNetCore.Cors;
 using System.IO.Compression;
 using Oqtane.Services;
+using Microsoft.Extensions.Primitives;

 // ReSharper disable StringIndexOfIsCultureSpecific.1

@@ -427,7 +428,7 @@ namespace Oqtane.Controllers
         // POST api/<controller>/upload
         [EnableCors(Constants.MauiCorsPolicy)]
         [HttpPost("upload")]
-        public async Task<IActionResult> UploadFile(string folder, IFormFile formfile)
+        public async Task<IActionResult> UploadFile([FromForm] string folder, IFormFile formfile)
         {
             if (formfile == null || formfile.Length <= 0)
             {

@@ -435,13 +436,20 @@ namespace Oqtane.Controllers
             }

             // ensure filename is valid
-            string token = ".part_";
-            if (!formfile.FileName.IsPathOrFileValid() || !formfile.FileName.Contains(token) || !HasValidFileExtension(formfile.FileName.Substring(0, formfile.FileName.IndexOf(token))))
+            if (!formfile.FileName.IsPathOrFileValid() || !HasValidFileExtension(formfile.FileName))
             {
                 _logger.Log(LogLevel.Error, this, LogFunction.Security, "File Name Is Invalid Or Contains Invalid Extension {File}", formfile.FileName);
                 return NoContent();
             }

+            // ensure headers exist
+            if (!Request.Headers.TryGetValue("PartCount", out StringValues partCount) || !Request.Headers.TryGetValue("TotalParts", out StringValues totalParts))
+            {
+                _logger.Log(LogLevel.Error, this, LogFunction.Security, "File Upload Request Is Missing Required Headers");
+                return NoContent();
+            }
+
+            string fileName = formfile.FileName + ".part_" + int.Parse(partCount).ToString("000") + "_" + int.Parse(totalParts).ToString("000");
+
             string folderPath = "";
             int FolderId;

@@ -465,12 +473,12 @@ namespace Oqtane.Controllers
             if (!string.IsNullOrEmpty(folderPath))
             {
                 CreateDirectory(folderPath);
-                using (var stream = new FileStream(Path.Combine(folderPath, formfile.FileName), FileMode.Create))
+                using (var stream = new FileStream(Path.Combine(folderPath, fileName), FileMode.Create))
                 {
                     await formfile.CopyToAsync(stream);
                 }
-                string upload = await MergeFile(folderPath, formfile.FileName);
+                string upload = await MergeFile(folderPath, fileName);
                 if (upload != "" && FolderId != -1)
                 {
                     var file = CreateFile(upload, FolderId, Path.Combine(folderPath, upload));
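Two things change in the controller: the part index and count now arrive as PartCount/TotalParts request headers instead of being encoded in the client-supplied filename, so the extension check runs against the real filename rather than a substring split on ".part_"; and the server composes the part name itself. A small sketch of the naming scheme implied by the diff (the helper name is illustrative, not from the commit):

    // a 3-part upload of "photo.jpg" is stored as:
    //   photo.jpg.part_001_003, photo.jpg.part_002_003, photo.jpg.part_003_003
    static string PartName(string fileName, int partCount, int totalParts) =>
        fileName + ".part_" + partCount.ToString("000") + "_" + totalParts.ToString("000");

MergeFile (unchanged by this commit) then reassembles the original file once all parts are present.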

View File

@@ -308,139 +308,128 @@ Oqtane.Interop = {
         }
         return files;
     },
-    uploadFiles: function (posturl, folder, id, antiforgerytoken, jwt, maxChunkSizeMB, maxConcurrentUploads) {
+    uploadFiles: async function (posturl, folder, id, antiforgerytoken, jwt, chunksize) {
+        var success = true;
         var fileinput = document.getElementById('FileInput_' + id);
         var progressinfo = document.getElementById('ProgressInfo_' + id);
         var progressbar = document.getElementById('ProgressBar_' + id);
+        var totalSize = 0;
+        for (var i = 0; i < fileinput.files.length; i++) {
+            totalSize += fileinput.files[i].size;
+        }
+        let uploadSize = 0;
+        if (!chunksize || chunksize < 1) {
+            chunksize = 1; // 1 MB default
+        }
+        let maxConcurrentUploads = 5;
         if (progressinfo !== null && progressbar !== null) {
-            progressinfo.setAttribute("style", "display: inline;");
-            progressinfo.innerHTML = '';
-            progressbar.setAttribute("style", "width: 100%; display: inline;");
+            progressinfo.setAttribute('style', 'display: inline;');
+            if (fileinput.files.length > 1) {
+                progressinfo.innerHTML = fileinput.files[0].name + ', ...';
+            }
+            else {
+                progressinfo.innerHTML = fileinput.files[0].name;
+            }
+            progressbar.setAttribute('style', 'width: 100%; display: inline;');
             progressbar.value = 0;
         }
-        var files = fileinput.files;
-        var totalSize = 0;
-        for (var i = 0; i < files.length; i++) {
-            totalSize = totalSize + files[i].size;
-        }
-        maxChunkSizeMB = Math.ceil(maxChunkSizeMB);
-        if (maxChunkSizeMB < 1) {
-            maxChunkSizeMB = 1;
-        }
-        else if (maxChunkSizeMB > 50) {
-            maxChunkSizeMB = 50;
-        }
-        var bufferChunkSize = maxChunkSizeMB * (1024 * 1024);
-        var uploadedSize = 0;
-        maxConcurrentUploads = Math.ceil(maxConcurrentUploads);
-        var hasConcurrencyLimit = maxConcurrentUploads > 0;
-        var uploadQueue = [];
-        var activeUploads = 0;
-        for (var i = 0; i < files.length; i++) {
-            var fileChunk = [];
-            var file = files[i];
-            var fileStreamPos = 0;
-            var endPos = bufferChunkSize;
-            while (fileStreamPos < file.size) {
-                fileChunk.push(file.slice(fileStreamPos, endPos));
-                fileStreamPos = endPos;
-                endPos = fileStreamPos + bufferChunkSize;
-            }
-            var totalParts = fileChunk.length;
-            var partCount = 0;
-            while (chunk = fileChunk.shift()) {
-                partCount++;
-                var fileName = file.name + ".part_" + partCount.toString().padStart(3, '0') + "_" + totalParts.toString().padStart(3, '0');
-                var data = new FormData();
-                data.append('__RequestVerificationToken', antiforgerytoken);
-                data.append('folder', folder);
-                data.append('formfile', chunk, fileName);
-                var request = new XMLHttpRequest();
-                request.open('POST', posturl, true);
-                if (jwt !== "") {
-                    request.setRequestHeader('Authorization', 'Bearer ' + jwt);
-                    request.withCredentials = true;
-                }
-                request.upload.onloadstart = function (e) {
-                    if (progressinfo !== null && progressbar !== null && progressinfo.innerHTML === '') {
-                        if (files.length === 1) {
-                            progressinfo.innerHTML = file.name;
-                        }
-                        else {
-                            progressinfo.innerHTML = file.name + ", ...";
-                        }
-                    }
-                };
-                request.upload.onprogress = function (e) {
-                    if (progressinfo !== null && progressbar !== null) {
-                        var percent = Math.ceil(((uploadedSize + e.loaded) / totalSize) * 100);
-                        progressbar.value = (percent / 100);
-                    }
-                };
-                request.upload.onloadend = function (e) {
-                    if (hasConcurrencyLimit) {
-                        activeUploads--;
-                        processUploads();
-                    }
-                    if (progressinfo !== null && progressbar !== null) {
-                        uploadedSize = uploadedSize + e.total;
-                        var percent = Math.ceil((uploadedSize / totalSize) * 100);
-                        progressbar.value = (percent / 100);
-                    }
-                };
-                request.upload.onerror = function () {
-                    if (hasConcurrencyLimit) {
-                        activeUploads--;
-                        processUploads();
-                    }
-                    if (progressinfo !== null && progressbar !== null) {
-                        if (files.length === 1) {
-                            progressinfo.innerHTML = file.name + ' Error: ' + request.statusText;
-                        }
-                        else {
-                            progressinfo.innerHTML = ' Error: ' + request.statusText;
-                        }
-                    }
-                };
-                if (hasConcurrencyLimit) {
-                    uploadQueue.push({ data, request });
-                    processUploads();
-                }
-                else {
-                    request.send(data);
-                }
-            }
-            if (i === files.length - 1) {
-                fileinput.value = '';
-            }
-        }
-        function processUploads() {
-            if (uploadQueue.length === 0 || activeUploads >= maxConcurrentUploads) {
-                return;
-            }
-            while (activeUploads < maxConcurrentUploads && uploadQueue.length > 0) {
-                activeUploads++;
-                let { data, request } = uploadQueue.shift();
-                request.send(data);
-            }
-        }
+        const uploadFiles = Array.from(fileinput.files).map(file => {
+            const uploadFile = () => {
+                const chunkSize = chunksize * (1024 * 1024);
+                const totalParts = Math.ceil(file.size / chunkSize);
+                let partCount = 0;
+                let activeUploads = 0;
+                const uploadPart = (partCount) => {
+                    const start = partCount * chunkSize;
+                    const end = Math.min(start + chunkSize, file.size);
+                    const chunk = file.slice(start, end);
+                    return new Promise((resolve, reject) => {
+                        let formdata = new FormData();
+                        formdata.append('__RequestVerificationToken', antiforgerytoken);
+                        formdata.append('folder', folder);
+                        formdata.append('formfile', chunk, file.name);
+                        var credentials = 'same-origin';
+                        var headers = new Headers();
+                        headers.append('PartCount', partCount + 1);
+                        headers.append('TotalParts', totalParts);
+                        if (jwt !== "") {
+                            headers.append('Authorization', 'Bearer ' + jwt);
+                            credentials = 'include';
+                        }
+                        return fetch(posturl, {
+                            method: 'POST',
+                            headers: headers,
+                            credentials: credentials,
+                            body: formdata
+                        })
+                        .then(response => {
+                            if (!response.ok) {
+                                if (progressinfo !== null) {
+                                    progressinfo.innerHTML = 'Error: ' + response.statusText;
+                                }
+                                throw new Error('Failed');
+                            }
+                            return;
+                        })
+                        .then(data => {
+                            if (progressbar !== null) {
+                                uploadSize += chunk.size;
+                                var percent = Math.ceil((uploadSize / totalSize) * 100);
+                                progressbar.value = (percent / 100);
+                            }
+                            resolve(data);
+                        })
+                        .catch(error => {
+                            reject(error);
+                        });
+                    });
+                };
+                return new Promise((resolve, reject) => {
+                    function processNextUpload() {
+                        if (partCount >= totalParts) {
+                            if (activeUploads === 0) resolve(); // Done uploading all parts
+                            return;
+                        }
+                        while (activeUploads < maxConcurrentUploads && partCount < totalParts) {
+                            uploadPart(partCount)
+                                .then(() => {
+                                    activeUploads--;
+                                    processNextUpload();
+                                })
+                                .catch(reject);
+                            activeUploads++;
+                            partCount++;
+                        }
+                    }
+                    processNextUpload();
+                });
+            };
+            return uploadFile();
+        });
+        try {
+            await Promise.all(uploadFiles);
+        } catch (error) {
+            success = false;
+        }
+        fileinput.value = '';
+        return success;
     },
     refreshBrowser: function (verify, wait) {
         async function attemptReload (verify) {
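The new uploadFiles keeps at most five chunk requests in flight per file: processNextUpload starts parts until the window is full, and each completed part frees a slot and starts the next. For C# readers, a rough analogue of that sliding window sketched with SemaphoreSlim (illustrative only, not part of the commit):

    using System;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;

    public static class UploadWindow
    {
        // Caps in-flight parts, like activeUploads < maxConcurrentUploads in the JS.
        public static async Task<bool> UploadPartsAsync(Func<int, Task> uploadPart, int totalParts, int maxConcurrentUploads = 5)
        {
            var gate = new SemaphoreSlim(maxConcurrentUploads);
            var tasks = Enumerable.Range(0, totalParts).Select(async part =>
            {
                await gate.WaitAsync();         // wait for a free slot in the window
                try { await uploadPart(part); }
                finally { gate.Release(); }     // a completed part admits the next one
            }).ToList();

            try { await Task.WhenAll(tasks); return true; } // mirrors Promise.all(uploadFiles)
            catch { return false; }                         // mirrors success = false on any failed part
        }
    }

One behavioural detail worth noting: the JS limits concurrency per file and then runs all files through Promise.all, so two files being uploaded together can have up to ten chunk requests in flight between them.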