Large file streaming uploads
98  Oqtane.Server/Controllers/FileController.cs  Normal file
@@ -0,0 +1,98 @@
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using System;
using System.IO;
using System.Threading.Tasks;

namespace Oqtane.Controllers
{
    [Route("{site}/api/[controller]")]
    public class FileController : Controller
    {
        private readonly IWebHostEnvironment environment;

        public FileController(IWebHostEnvironment environment)
        {
            this.environment = environment;
        }

        // POST api/<controller>/upload
        [HttpPost("upload")]
        public async Task UploadFile(string folder, IFormFile file)
        {
            if (file.Length > 0)
            {
                // map a relative folder to a physical path under the web root
                if (!folder.Contains(":\\"))
                {
                    folder = folder.Replace("/", "\\");
                    if (folder.StartsWith("\\")) folder = folder.Substring(1);
                    folder = Path.Combine(environment.WebRootPath, folder);
                }
                if (!Directory.Exists(folder))
                {
                    Directory.CreateDirectory(folder);
                }
                using (var stream = new FileStream(Path.Combine(folder, file.FileName), FileMode.Create))
                {
                    await file.CopyToAsync(stream);
                }
                await MergeFile(folder, file.FileName);
            }
        }

        private async Task MergeFile(string folder, string filename)
        {
            // parse the filename which is in the format of filename.ext.part_x_y
            string token = ".part_";
            string parts = Path.GetExtension(filename).Replace(token, ""); // returns "x_y"
            int totalparts = int.Parse(parts.Substring(parts.IndexOf("_") + 1));
            filename = filename.Substring(0, filename.IndexOf(token)); // base filename
            string[] fileparts = Directory.GetFiles(folder, filename + token + "*"); // list of all file parts

            // if all of the file parts exist ( note that file parts can arrive out of order )
            if (fileparts.Length == totalparts)
            {
                // merge file parts
                bool success = true;
                using (var stream = new FileStream(Path.Combine(folder, filename), FileMode.Create))
                {
                    foreach (string filepart in fileparts)
                    {
                        try
                        {
                            using (FileStream chunk = new FileStream(filepart, FileMode.Open))
                            {
                                await chunk.CopyToAsync(stream);
                            }
                        }
                        catch
                        {
                            success = false;
                        }
                    }
                }

                // delete file parts
                if (success)
                {
                    foreach (string filepart in fileparts)
                    {
                        System.IO.File.Delete(filepart);
                    }
                }
            }

            // clean up file parts which are more than 2 hours old ( which can happen if a file upload failed )
            fileparts = Directory.GetFiles(folder, "*" + token + "*");
            foreach (string filepart in fileparts)
            {
                DateTime createddate = System.IO.File.GetCreationTime(filepart);
                if (createddate < DateTime.Now.AddHours(-2))
                {
                    System.IO.File.Delete(filepart);
                }
            }
        }
    }
}
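The controller above only reassembles uploads whose part names follow the filename.ext.part_X_Y convention, where X is the 1-based part number and Y is the total number of parts. As a rough illustration of that contract (not part of this commit), the sketch below uploads a local file in 1 MB parts to the {site}/api/File/upload route; the base URL, site identifier, folder, and file name are hypothetical placeholders, and the HttpClient-based client is an assumption rather than the code this commit ships.

// Illustrative client only, not part of the diff above.
using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;

class UploadClientSketch
{
    static async Task Main()
    {
        const int chunkSize = 1024 * 1024;                      // same 1 MB part size the interop script uses
        string path = "sample.zip";                             // hypothetical local file
        string postUrl = "http://localhost/1/api/File/upload";  // hypothetical {site}/api/File/upload url
        string folder = "Files";                                // hypothetical folder under wwwroot

        using var http = new HttpClient();
        using var source = new FileStream(path, FileMode.Open, FileAccess.Read);
        int totalParts = (int)Math.Ceiling((double)source.Length / chunkSize);

        for (int part = 1; part <= totalParts; part++)
        {
            // read the next part into a buffer
            byte[] buffer = new byte[Math.Min(chunkSize, source.Length - source.Position)];
            int read = 0;
            while (read < buffer.Length)
            {
                read += await source.ReadAsync(buffer, read, buffer.Length - read);
            }

            // name the part "<file>.part_<x>_<y>" so MergeFile can reassemble it
            string partName = Path.GetFileName(path) + ".part_" + part + "_" + totalParts;

            using var form = new MultipartFormDataContent();
            form.Add(new StringContent(folder), "folder");            // binds to the "folder" parameter
            form.Add(new ByteArrayContent(buffer), "file", partName); // binds to the IFormFile "file"

            var response = await http.PostAsync(postUrl, form);
            response.EnsureSuccessStatusCode();
        }
    }
}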
@@ -95,6 +95,7 @@ namespace Oqtane.Server
            services.AddScoped<IRoleService, RoleService>();
            services.AddScoped<IUserRoleService, UserRoleService>();
            services.AddScoped<ISettingService, SettingService>();
            services.AddScoped<IFileService, FileService>();

            services.AddSingleton<IHttpContextAccessor, HttpContextAccessor>();

@@ -55,5 +55,61 @@ window.interop = {

        document.body.appendChild(form);
        form.submit();
    },
    uploadFiles: function (posturl, folder, name) {
        var files = document.getElementById(name + 'FileInput').files;
        var progressinfo = document.getElementById(name + 'ProgressInfo');
        var progressbar = document.getElementById(name + 'ProgressBar');
        var filename = '';

        for (var i = 0; i < files.length; i++) {
            var FileChunk = [];
            var file = files[i];
            var MaxFileSizeMB = 1;
            var BufferChunkSize = MaxFileSizeMB * (1024 * 1024);
            var FileStreamPos = 0;
            var EndPos = BufferChunkSize;
            var Size = file.size;

            progressbar.setAttribute("style", "visibility: visible;");

            if (files.length > 1) {
                filename = file.name;
            }

            // split the file into 1MB chunks
            while (FileStreamPos < Size) {
                FileChunk.push(file.slice(FileStreamPos, EndPos));
                FileStreamPos = EndPos;
                EndPos = FileStreamPos + BufferChunkSize;
            }

            var TotalParts = FileChunk.length;
            var PartCount = 0;
            var Chunk;

            // post each chunk as <file>.part_<x>_<y> so the server can merge the parts
            while ((Chunk = FileChunk.shift())) {
                PartCount++;
                var FileName = file.name + ".part_" + PartCount + "_" + TotalParts;

                var data = new FormData();
                data.append('folder', folder);
                data.append('file', Chunk, FileName);
                var request = new XMLHttpRequest();
                request.open('POST', posturl, true);
                request.upload.onloadstart = function (e) {
                    progressbar.value = 0;
                    progressinfo.innerHTML = filename + ' 0%';
                };
                request.upload.onprogress = function (e) {
                    var percent = Math.ceil((e.loaded / e.total) * 100);
                    progressbar.value = (percent / 100);
                    progressinfo.innerHTML = filename + '[' + PartCount + '] ' + percent + '%';
                };
                request.upload.onloadend = function (e) {
                    progressbar.value = 1;
                    progressinfo.innerHTML = filename + ' 100%';
                };
                request.send(data);
            }
        }
    }
};