Merge improved upload method with parallel requests
commit 1ad8d7918b
@@ -121,9 +121,6 @@
     private MessageType _messagetype;
     private bool _uploading = false;

-    private int _maxChunkSizeMB = 1;
-    private int _maxConcurrentUploads = 0;
-
     [Parameter]
     public string Id { get; set; } // optional - for setting the id of the FileManager component for accessibility

@@ -160,6 +157,9 @@
     [Parameter]
     public bool UploadMultiple { get; set; } = false; // optional - enable multiple file uploads - default false

+    [Parameter]
+    public int ChunkSize { get; set; } = 1; // optional - size of file chunks to upload in MB
+
     [Parameter]
     public EventCallback<int> OnUpload { get; set; } // optional - executes a method in the calling component when a file is uploaded

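Note: the new ChunkSize parameter is expressed in MB and determines how many parts each file is split into on the client. A worked example of the arithmetic (values are illustrative; the actual slicing happens in interop.js further down):

    // Sketch: how a ChunkSize in MB translates into part counts and part names.
    long fileSize = 25L * 1024 * 1024;                                  // a 25 MB file
    int chunkSizeMB = 1;                                                // ChunkSize parameter
    long chunkBytes = chunkSizeMB * 1024L * 1024;                       // 1 MB chunks
    int totalParts = (int)Math.Ceiling((double)fileSize / chunkBytes);  // 25
    // parts are then numbered "file.zip.part_001_025" ... "file.zip.part_025_025"
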
@@ -176,9 +176,6 @@
        _fileinputid = "FileInput_" + _guid;
        _progressinfoid = "ProgressInfo_" + _guid;
        _progressbarid = "ProgressBar_" + _guid;
-
-        int.TryParse(SettingService.GetSetting(PageState.Site.Settings, "MaxChunkSizeMB", "1"), out _maxChunkSizeMB);
-        int.TryParse(SettingService.GetSetting(PageState.Site.Settings, "MaxConcurrentChunkUploads", "0"), out _maxConcurrentUploads);
    }

    protected override async Task OnParametersSetAsync()
@@ -389,51 +386,8 @@
                    StateHasChanged();
                }

-                await interop.UploadFiles(posturl, folder, _guid, SiteState.AntiForgeryToken, jwt, _maxChunkSizeMB, _maxConcurrentUploads);
-
-                // uploading is asynchronous so we need to poll to determine if uploads are completed
-                var success = true;
-                int upload = 0;
-                while (upload < uploads.Length && success)
-                {
-                    success = false;
-                    var filename = uploads[upload].Split(':')[0];
-
-                    var size = Int64.Parse(uploads[upload].Split(':')[1]); // bytes
-                    var megabits = (size / 1048576.0) * 8; // binary conversion
-                    var uploadspeed = (PageState.Alias.Name.Contains("localhost")) ? 100 : 3; // 3 Mbps is FCC minimum for broadband upload
-                    var uploadtime = (megabits / uploadspeed); // seconds
-                    var maxattempts = 5; // polling (minimum timeout duration will be 5 seconds)
-                    var sleep = (int)Math.Ceiling(uploadtime / maxattempts) * 1000; // milliseconds
-
-                    int attempts = 0;
-                    while (attempts < maxattempts && !success)
-                    {
-                        attempts += 1;
-                        Thread.Sleep(sleep);
-
-                        if (Folder == Constants.PackagesFolder)
-                        {
-                            var files = await FileService.GetFilesAsync(folder);
-                            if (files != null && files.Any(item => item.Name == filename))
-                            {
-                                success = true;
-                            }
-                        }
-                        else
-                        {
-                            var file = await FileService.GetFileAsync(int.Parse(folder), filename);
-                            if (file != null)
-                            {
-                                success = true;
-                            }
-                        }
-                    }
-                    if (success)
-                    {
-                        upload++;
-                    }
-                }
+                // upload files
+                var success = await interop.UploadFiles(posturl, folder, _guid, SiteState.AntiForgeryToken, jwt, ChunkSize);

                // reset progress indicators
                if (ShowProgress)
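For context: the removed block existed because the old JS interop call returned no result, so the component had to poll the server until each uploaded file appeared. Its timeout heuristic converted the file size to megabits, assumed a 3 Mbps upload rate (100 Mbps on localhost), and split the estimated upload time across 5 polling attempts. A minimal sketch of that arithmetic for a hypothetical 100 MB file:

    // Sketch of the removed polling heuristic (no longer used by the new code path).
    long size = 100L * 1024 * 1024;                  // 100 MB in bytes
    double megabits = (size / 1048576.0) * 8;        // 800 megabits
    int uploadspeed = 3;                             // Mbps - FCC minimum for broadband upload
    double uploadtime = megabits / uploadspeed;      // ~266.7 seconds
    int maxattempts = 5;
    int sleep = (int)Math.Ceiling(uploadtime / maxattempts) * 1000;  // 54000 ms between polls

The new code path no longer needs this because the JS function now reports success directly.
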
@@ -458,7 +412,7 @@
                }
                else
                {
-                    await logger.LogInformation("File Upload Failed Or Is Still In Progress {Files}", uploads);
+                    await logger.LogInformation("File Upload Failed {Files}", uploads);
                    _message = Localizer["Error.File.Upload"];
                    _messagetype = MessageType.Error;
                }

@@ -127,7 +127,7 @@
    <value>Error Loading Files</value>
  </data>
  <data name="Error.File.Upload" xml:space="preserve">
-    <value>File Upload Failed Or Is Still In Progress</value>
+    <value>File Upload Failed</value>
  </data>
  <data name="Message.File.NotSelected" xml:space="preserve">
    <value>You Have Not Selected A File To Upload</value>

@@ -209,24 +209,23 @@ namespace Oqtane.UI
            }
        }

-        [Obsolete("This function is deprecated. Use UploadFiles with MaxChunkSize and MaxConcurrentUploads parameters instead.", false)]
        public Task UploadFiles(string posturl, string folder, string id, string antiforgerytoken, string jwt)
        {
-            return UploadFiles(posturl, folder, id, antiforgerytoken, jwt, 1, 0);
+            UploadFiles(posturl, folder, id, antiforgerytoken, jwt, 1);
+            return Task.CompletedTask;
        }

-        public Task UploadFiles(string posturl, string folder, string id, string antiforgerytoken, string jwt, int maxChunkSizeMB, int maxConcurrentUploads)
+        public ValueTask<bool> UploadFiles(string posturl, string folder, string id, string antiforgerytoken, string jwt, int chunksize)
        {
            try
            {
-                _jsRuntime.InvokeVoidAsync(
+                return _jsRuntime.InvokeAsync<bool>(
                    "Oqtane.Interop.uploadFiles",
-                    posturl, folder, id, antiforgerytoken, jwt, maxChunkSizeMB, maxConcurrentUploads);
-                return Task.CompletedTask;
+                    posturl, folder, id, antiforgerytoken, jwt, chunksize);
            }
            catch
            {
-                return Task.CompletedTask;
+                return new ValueTask<bool>(Task.FromResult(false));
            }
        }

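Because the new overload returns the ValueTask<bool> produced by InvokeAsync<bool>, the Blazor side can await the JS function's success flag instead of polling. A minimal caller sketch, with placeholder variable names that are not part of the diff:

    // Hypothetical caller: await the JS upload and react to the boolean result.
    bool success = await interop.UploadFiles(posturl, folder, id, antiforgerytoken, jwt, chunksize);
    if (!success)
    {
        // surface the "Error.File.Upload" message, as FileManager.razor does above
    }

The retained single-parameter overload keeps the old Task-returning signature for backward compatibility and simply discards the boolean result.
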
@@ -21,6 +21,7 @@ using System.Net.Http;
using Microsoft.AspNetCore.Cors;
using System.IO.Compression;
using Oqtane.Services;
+using Microsoft.Extensions.Primitives;

// ReSharper disable StringIndexOfIsCultureSpecific.1

@@ -427,7 +428,7 @@ namespace Oqtane.Controllers
        // POST api/<controller>/upload
        [EnableCors(Constants.MauiCorsPolicy)]
        [HttpPost("upload")]
-        public async Task<IActionResult> UploadFile(string folder, IFormFile formfile)
+        public async Task<IActionResult> UploadFile([FromForm] string folder, IFormFile formfile)
        {
            if (formfile == null || formfile.Length <= 0)
            {
@@ -435,13 +436,20 @@ namespace Oqtane.Controllers
            }

            // ensure filename is valid
-            string token = ".part_";
-            if (!formfile.FileName.IsPathOrFileValid() || !formfile.FileName.Contains(token) || !HasValidFileExtension(formfile.FileName.Substring(0, formfile.FileName.IndexOf(token))))
+            if (!formfile.FileName.IsPathOrFileValid() || !HasValidFileExtension(formfile.FileName))
            {
                _logger.Log(LogLevel.Error, this, LogFunction.Security, "File Name Is Invalid Or Contains Invalid Extension {File}", formfile.FileName);
                return NoContent();
            }

+            // ensure headers exist
+            if (!Request.Headers.TryGetValue("PartCount", out StringValues partCount) || !Request.Headers.TryGetValue("TotalParts", out StringValues totalParts))
+            {
+                _logger.Log(LogLevel.Error, this, LogFunction.Security, "File Upload Request Is Missing Required Headers");
+                return NoContent();
+            }
+
+            string fileName = formfile.FileName + ".part_" + int.Parse(partCount).ToString("000") + "_" + int.Parse(totalParts).ToString("000");
            string folderPath = "";

            int FolderId;
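The controller now derives the part filename from the PartCount and TotalParts request headers instead of trusting a client-supplied ".part_" suffix, so extension validation runs against the real filename. A worked example of the naming, with illustrative values:

    // Part 2 of 25 for "file.zip" becomes "file.zip.part_002_025".
    string baseName = "file.zip";   // formfile.FileName after validation
    int partCount = 2;              // from the PartCount header
    int totalParts = 25;            // from the TotalParts header
    string fileName = baseName + ".part_" + partCount.ToString("000") + "_" + totalParts.ToString("000");
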
@@ -465,12 +473,12 @@ namespace Oqtane.Controllers
            if (!string.IsNullOrEmpty(folderPath))
            {
                CreateDirectory(folderPath);
-                using (var stream = new FileStream(Path.Combine(folderPath, formfile.FileName), FileMode.Create))
+                using (var stream = new FileStream(Path.Combine(folderPath, fileName), FileMode.Create))
                {
                    await formfile.CopyToAsync(stream);
                }

-                string upload = await MergeFile(folderPath, formfile.FileName);
+                string upload = await MergeFile(folderPath, fileName);
                if (upload != "" && FolderId != -1)
                {
                    var file = CreateFile(upload, FolderId, Path.Combine(folderPath, upload));

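MergeFile itself is outside this diff; conceptually it returns the final filename once all .part_NNN_MMM pieces for a given total are on disk and have been concatenated in order. A hypothetical sketch of that idea (names and behavior assumed, not Oqtane's actual implementation):

    using System.IO;
    using System.Linq;

    // Hypothetical merge: combine "name.part_001_003" ... "name.part_003_003"
    // into "name" once every part has arrived. Not the real MergeFile.
    static string MergePartsSketch(string folderPath, string baseName, int totalParts)
    {
        var parts = Enumerable.Range(1, totalParts)
            .Select(n => Path.Combine(folderPath, $"{baseName}.part_{n:000}_{totalParts:000}"))
            .ToList();
        if (!parts.All(File.Exists)) return ""; // not all chunks have arrived yet

        using (var output = File.Create(Path.Combine(folderPath, baseName)))
        {
            foreach (var part in parts)
            {
                using (var input = File.OpenRead(part))
                {
                    input.CopyTo(output);
                }
            }
        }
        parts.ForEach(File.Delete); // remove the chunk files after merging
        return baseName;
    }
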
@@ -308,139 +308,128 @@ Oqtane.Interop = {
        }
        return files;
    },
-    uploadFiles: function (posturl, folder, id, antiforgerytoken, jwt, maxChunkSizeMB, maxConcurrentUploads) {
+    uploadFiles: async function (posturl, folder, id, antiforgerytoken, jwt, chunksize) {
+        var success = true;
        var fileinput = document.getElementById('FileInput_' + id);
        var progressinfo = document.getElementById('ProgressInfo_' + id);
        var progressbar = document.getElementById('ProgressBar_' + id);

+        var totalSize = 0;
+        for (var i = 0; i < fileinput.files.length; i++) {
+            totalSize += fileinput.files[i].size;
+        }
+        let uploadSize = 0;
+
+        if (!chunksize || chunksize < 1) {
+            chunksize = 1; // 1 MB default
+        }
+
+        let maxConcurrentUploads = 5;
+
        if (progressinfo !== null && progressbar !== null) {
-            progressinfo.setAttribute("style", "display: inline;");
-            progressinfo.innerHTML = '';
-            progressbar.setAttribute("style", "width: 100%; display: inline;");
+            progressinfo.setAttribute('style', 'display: inline;');
+            if (fileinput.files.length > 1) {
+                progressinfo.innerHTML = fileinput.files[0].name + ', ...';
+            }
+            else {
+                progressinfo.innerHTML = fileinput.files[0].name;
+            }
+            progressbar.setAttribute('style', 'width: 100%; display: inline;');
            progressbar.value = 0;
        }

-        var files = fileinput.files;
-        var totalSize = 0;
-        for (var i = 0; i < files.length; i++) {
-            totalSize = totalSize + files[i].size;
-        }
+        const uploadFiles = Array.from(fileinput.files).map(file => {
+            const uploadFile = () => {
+                const chunkSize = chunksize * (1024 * 1024);
+                const totalParts = Math.ceil(file.size / chunkSize);
+                let partCount = 0;

-        maxChunkSizeMB = Math.ceil(maxChunkSizeMB);
-        if (maxChunkSizeMB < 1) {
-            maxChunkSizeMB = 1;
-        }
-        else if (maxChunkSizeMB > 50) {
-            maxChunkSizeMB = 50;
-        }
+                let activeUploads = 0;

-        var bufferChunkSize = maxChunkSizeMB * (1024 * 1024);
-        var uploadedSize = 0;
+                const uploadPart = (partCount) => {
+                    const start = partCount * chunkSize;
+                    const end = Math.min(start + chunkSize, file.size);
+                    const chunk = file.slice(start, end);

-        maxConcurrentUploads = Math.ceil(maxConcurrentUploads);
-        var hasConcurrencyLimit = maxConcurrentUploads > 0;
-        var uploadQueue = [];
-        var activeUploads = 0;
+                    return new Promise((resolve, reject) => {
+                        let formdata = new FormData();
+                        formdata.append('__RequestVerificationToken', antiforgerytoken);
+                        formdata.append('folder', folder);
+                        formdata.append('formfile', chunk, file.name);

-        for (var i = 0; i < files.length; i++) {
-            var fileChunk = [];
-            var file = files[i];
-            var fileStreamPos = 0;
-            var endPos = bufferChunkSize;
-
-            while (fileStreamPos < file.size) {
-                fileChunk.push(file.slice(fileStreamPos, endPos));
-                fileStreamPos = endPos;
-                endPos = fileStreamPos + bufferChunkSize;
-            }
-
-            var totalParts = fileChunk.length;
-            var partCount = 0;
-
-            while (chunk = fileChunk.shift()) {
-                partCount++;
-                var fileName = file.name + ".part_" + partCount.toString().padStart(3, '0') + "_" + totalParts.toString().padStart(3, '0');
-
-                var data = new FormData();
-                data.append('__RequestVerificationToken', antiforgerytoken);
-                data.append('folder', folder);
-                data.append('formfile', chunk, fileName);
-                var request = new XMLHttpRequest();
-                request.open('POST', posturl, true);
+                        var credentials = 'same-origin';
+                        var headers = new Headers();
+                        headers.append('PartCount', partCount + 1);
+                        headers.append('TotalParts', totalParts);
                        if (jwt !== "") {
-                    request.setRequestHeader('Authorization', 'Bearer ' + jwt);
-                    request.withCredentials = true;
+                            headers.append('Authorization', 'Bearer ' + jwt);
+                            credentials = 'include';
                        }
-                request.upload.onloadstart = function (e) {
-                    if (progressinfo !== null && progressbar !== null && progressinfo.innerHTML === '') {
-                        if (files.length === 1) {
-                            progressinfo.innerHTML = file.name;
+
+                        return fetch(posturl, {
+                            method: 'POST',
+                            headers: headers,
+                            credentials: credentials,
+                            body: formdata
+                        })
+                        .then(response => {
+                            if (!response.ok) {
+                                if (progressinfo !== null) {
+                                    progressinfo.innerHTML = 'Error: ' + response.statusText;
                                }
-                        else {
-                            progressinfo.innerHTML = file.name + ", ...";
+                                throw new Error('Failed');
                            }
                        }
-                };
-                request.upload.onprogress = function (e) {
-                    if (progressinfo !== null && progressbar !== null) {
-                        var percent = Math.ceil(((uploadedSize + e.loaded) / totalSize) * 100);
+                            return;
+                        })
+                        .then(data => {
+                            if (progressbar !== null) {
+                                uploadSize += chunk.size;
+                                var percent = Math.ceil((uploadSize / totalSize) * 100);
                                progressbar.value = (percent / 100);
                            }
-                };
-                request.upload.onloadend = function (e) {
-                    if (hasConcurrencyLimit) {
-                        activeUploads--;
-                        processUploads();
-                    }
-
-                    if (progressinfo !== null && progressbar !== null) {
-                        uploadedSize = uploadedSize + e.total;
-                        var percent = Math.ceil((uploadedSize / totalSize) * 100);
-                        progressbar.value = (percent / 100);
-                    }
-                };
-                request.upload.onerror = function () {
-                    if (hasConcurrencyLimit) {
-                        activeUploads--;
-                        processUploads();
-                    }
-
-                    if (progressinfo !== null && progressbar !== null) {
-                        if (files.length === 1) {
-                            progressinfo.innerHTML = file.name + ' Error: ' + request.statusText;
-                        }
-                        else {
-                            progressinfo.innerHTML = ' Error: ' + request.statusText;
-                        }
-                    }
+                            resolve(data);
+                        })
+                        .catch(error => {
+                            reject(error);
+                        });
+                    });
                };

-                if (hasConcurrencyLimit) {
-                    uploadQueue.push({ data, request });
-                    processUploads();
-                }
-                else {
-                    request.send(data);
-                }
-            }
-
-            if (i === files.length - 1) {
-                fileinput.value = '';
-            }
-        }

-        function processUploads() {
-            if (uploadQueue.length === 0 || activeUploads >= maxConcurrentUploads) {
+                return new Promise((resolve, reject) => {
+                    function processNextUpload() {
+                        if (partCount >= totalParts) {
+                            if (activeUploads === 0) resolve(); // Done uploading all parts
                            return;
                        }

-            while (activeUploads < maxConcurrentUploads && uploadQueue.length > 0) {
-                activeUploads++;
+                        while (activeUploads < maxConcurrentUploads && partCount < totalParts) {
+                            uploadPart(partCount)
+                                .then(() => {
+                                    activeUploads--;
+                                    processNextUpload();
+                                })
+                                .catch(reject);

-                let { data, request } = uploadQueue.shift();
-                request.send(data);
+                            activeUploads++;
+                            partCount++;
                        }
                    }

+                    processNextUpload();
+                });
+            };
+
+            return uploadFile();
+        });
+
+        try {
+            await Promise.all(uploadFiles);
+        } catch (error) {
+            success = false;
+        }
+
+        fileinput.value = '';
+        return success;
    },
    refreshBrowser: function (verify, wait) {
        async function attemptReload (verify) {
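The new uploadFiles caps concurrency at five in-flight chunk requests per file: processNextUpload launches parts until the cap is reached, each completed part decrements activeUploads and recurses, and the promise resolves only once every part has finished; Promise.all then aggregates across files. The same throttling pattern expressed in C# as a minimal sketch (the delegate stands in for the per-chunk POST; none of these names come from the diff):

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Threading;
    using System.Threading.Tasks;

    // Sketch of the JS concurrency cap (maxConcurrentUploads = 5) using SemaphoreSlim.
    static async Task<bool> UploadAllPartsAsync(IReadOnlyList<byte[]> parts, Func<byte[], Task> postPartAsync)
    {
        using var gate = new SemaphoreSlim(5);   // at most 5 parts in flight
        var tasks = parts.Select(async part =>
        {
            await gate.WaitAsync();              // wait for a free slot
            try { await postPartAsync(part); }   // upload one chunk
            finally { gate.Release(); }          // free the slot for the next chunk
        }).ToList();
        try
        {
            await Task.WhenAll(tasks);           // mirrors await Promise.all(uploadFiles)
            return true;
        }
        catch
        {
            return false;                        // mirrors success = false in the JS catch
        }
    }
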