I'm trying to do a chunked upload for a large file, so I slice the file and send each slice in its own request.
I send the chunk, the chunk number, and the total chunk count with an XMLHttpRequest to the controller.
But in the controller method I can't access the chunk index or the total chunk count; they are always 0.
[HttpPost]
public string UploadChunks(int CHUNK_INDEX, int TOTAL_CHUNK)
{
    var chunks = Request.Body;
    string path = Path.Combine(_hostEnvironment.WebRootPath, @"file\temp");
    string newpath = Path.Combine(path, Guid.NewGuid() + ".tmp" + CHUNK_INDEX.ToString());
    using (System.IO.FileStream fs = System.IO.File.Create(newpath))
    {
        byte[] bytes = new byte[775700];
        int bytesRead;
        while ((bytesRead = chunks.Read(bytes, 0, bytes.Length)) > 0)
        {
            fs.Write(bytes, 0, bytesRead);
        }
    }
    return "success";
}
The JS function:
function upload(file) {
    var blob = file;
    var BYTES_PER_CHUNK = 775700;
    var SIZE = file.size;
    var start = 0;
    var end = BYTES_PER_CHUNK;
    var completed = 0;
    var count = SIZE % BYTES_PER_CHUNK == 0 ? SIZE / BYTES_PER_CHUNK : Math.floor(SIZE / BYTES_PER_CHUNK) + 1;
    var index = 1;
    while (start < SIZE) {
        var chunk = blob.slice(start, end);
        var data = new FormData();
        data.append("TOTAL_CHUNK", count);
        data.append("CHUNK_INDEX", index++);
        data.append("CHUNK", chunk);
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "UploadChunks", true);
        xhr.setRequestHeader("Content-Type", "multipart/form-data");
        xhr.send(data);
        /* fetch("/UploadFile/MultiUpload", {
            method: 'post',
            body: data
        }); */
        start = end;
        end = start + BYTES_PER_CHUNK;
    }
}
The parameters arrive as 0 because you set the Content-Type header yourself: a hand-written multipart/form-data header has no boundary parameter, so the model binder cannot parse the request body. Change your JS like below:
while (start < SIZE) {
    index++;
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "UploadChunks", true);
    xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
    xhr.send("TOTAL_CHUNK=" + count + "&CHUNK_INDEX=" + index);
    start = end;
    end = start + BYTES_PER_CHUNK;
}
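Alternatively, keep the FormData so the binary chunk still reaches the controller, and just drop the setRequestHeader call. When xhr.send() is given a FormData object, the browser generates the multipart/form-data header itself, including the boundary parameter that model binding needs. A minimal sketch of that variant, reusing the loop body from upload() above:

var data = new FormData();
data.append("TOTAL_CHUNK", count);
data.append("CHUNK_INDEX", index++);
data.append("CHUNK", chunk);
var xhr = new XMLHttpRequest();
xhr.open("POST", "UploadChunks", true);
// no setRequestHeader call: the browser adds
// "Content-Type: multipart/form-data; boundary=..." by itself
xhr.send(data);

With this variant the chunk itself would typically be read in the controller from Request.Form.Files rather than from Request.Body.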
I am trying to upload large binary files from a web client to a .NET 4.6.1 Framework MVC API. These files could range anywhere from 5GB to 20GB.
I have tried splitting the file into chunks to upload each chunk and merge the results at the end, but the merged file is always corrupted. If I work with small files and don't split, the binary will work correctly. However, when I split and merge the file is "corrupted". It won't load or behave as expected.
I have looked all over and haven't seen a proper solution to this, so I'm hoping someone can help me here.
I followed this https://forums.asp.net/t/1742612.aspx?How+to+upload+a+big+file+in+Mvc, but I can't get it to work and the corrected solution was never posted. I am keeping track of the order of files before merging on the server.
JavaScript (the call to uploadData initiates the upload):
function uploadComplete(file) {
    var formData = new FormData();
    formData.append('fileName', file.name);
    formData.append('completed', true);
    var xhr3 = new XMLHttpRequest();
    xhr3.open("POST", "api/CompleteUpload", true); // combine the chunks together
    xhr3.send(formData);
    return;
}

function uploadData(item) {
    var blob = item.zipFile;
    var BYTES_PER_CHUNK = 750000000; // sample chunk size
    var SIZE = blob.size;
    // upload content
    var start = 0;
    var end = BYTES_PER_CHUNK;
    var completed = 0;
    var count = SIZE % BYTES_PER_CHUNK == 0 ? SIZE / BYTES_PER_CHUNK : Math.floor(SIZE / BYTES_PER_CHUNK) + 1;
    while (start < SIZE) {
        var chunk = blob.slice(start, end);
        var xhr = new XMLHttpRequest();
        xhr.onload = function () {
            completed = completed + 1;
            if (completed === count) {
                uploadComplete(item.zipFile);
            }
        };
        xhr.open("POST", "/api/MultiUpload", true);
        xhr.setRequestHeader("contentType", false);
        xhr.setRequestHeader("processData", false);
        xhr.send(chunk);
        start = end;
        end = start + BYTES_PER_CHUNK;
    }
}
Server Controller
// global vars
public static List<string> myList = new List<string>();

[HttpPost]
[Route("CompleteUpload")]
public string CompleteUpload()
{
    var request = HttpContext.Current.Request;
    // verify all parameters were defined
    var form = request.Form;
    string fileName;
    bool completed;
    if (!string.IsNullOrEmpty(request.Form["fileName"]) &&
        !string.IsNullOrEmpty(request.Form["completed"]))
    {
        fileName = request.Form["fileName"];
        completed = bool.Parse(request.Form["completed"]);
    }
    else
    {
        return "Invalid upload request";
    }
    if (completed)
    {
        string path = HttpContext.Current.Server.MapPath("~/Data/uploads/Tamp");
        string newpath = Path.Combine(path, fileName);
        string[] filePaths = Directory.GetFiles(path);
        foreach (string item in myList)
        {
            MergeFiles(newpath, item);
        }
    }
    // Remove all items from list after request is done
    myList.Clear();
    return "success";
}
private static void MergeFiles(string file1, string file2)
{
    FileStream fs1 = null;
    FileStream fs2 = null;
    try
    {
        fs1 = System.IO.File.Open(file1, FileMode.Append);
        fs2 = System.IO.File.Open(file2, FileMode.Open);
        byte[] fs2Content = new byte[fs2.Length];
        fs2.Read(fs2Content, 0, (int)fs2.Length);
        fs1.Write(fs2Content, 0, (int)fs2.Length);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message + " : " + ex.StackTrace + " " + file2);
    }
    finally
    {
        if (fs1 != null) fs1.Close();
        if (fs2 != null)
        {
            fs2.Close();
            System.IO.File.Delete(file2);
        }
    }
}
[HttpPost]
[Route("MultiUpload")]
public string MultiUpload()
{
    try
    {
        var request = HttpContext.Current.Request;
        var chunks = request.InputStream;
        string path = HttpContext.Current.Server.MapPath("~/Data/uploads/Tamp");
        string fileName = Path.GetTempFileName();
        string newpath = Path.Combine(path, fileName);
        myList.Add(newpath);
        using (System.IO.FileStream fs = System.IO.File.Create(newpath))
        {
            byte[] bytes = new byte[77570];
            int bytesRead;
            while ((bytesRead = request.InputStream.Read(bytes, 0, bytes.Length)) > 0)
            {
                fs.Write(bytes, 0, bytesRead);
            }
        }
        return "test";
    }
    catch (Exception exception)
    {
        return exception.Message;
    }
}
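One likely source of the corruption, for what it's worth: the chunks are uploaded in parallel, so they can reach MultiUpload in any order, and the static myList records arrival order, not chunk order, before CompleteUpload merges. A hedged client-side sketch (the chunkIndex field name is illustrative) that tags each chunk with its position, so the server could name each temp file by index and merge in numeric order:

var index = 0;
while (start < SIZE) {
    var chunk = blob.slice(start, end);
    var fd = new FormData();
    fd.append("chunkIndex", index++); // tells the server where this piece belongs
    fd.append("chunk", chunk);
    var xhr = new XMLHttpRequest();
    xhr.onload = function () {
        completed = completed + 1;
        if (completed === count) {
            uploadComplete(item.zipFile);
        }
    };
    xhr.open("POST", "/api/MultiUpload", true);
    xhr.send(fd);
    start = end;
    end = start + BYTES_PER_CHUNK;
}

Note that sending FormData means the chunk arrives as a multipart file (request.Files in classic ASP.NET) rather than on request.InputStream, so MultiUpload would need to read it from there and sort the saved pieces by chunkIndex before MergeFiles runs.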
I want to get a file from the client side, parse it into a JSON object, and send it to the backend. I am able to parse the file thanks to SheetJS.
My problem is that I cannot get the files from the client side.
I am using JS and SAPUI5.
handleUploadPress: function(oEvent) {
    var oFileUploader = this.getView().byId("fileUploader");
    if (!oFileUploader.getValue().toString()) {
        MessageToast.show("Choose a xlsx file first");
        return;
    }
    var url = "/resources/test.xlsx";
    var oReq = new XMLHttpRequest();
    oReq.open("GET", url, true);
    oReq.responseType = "arraybuffer";
    oReq.onload = function(e) {
        var arraybuffer = oReq.response;
        var data = new Uint8Array(arraybuffer);
        var arr = [];
        for (var i = 0; i !== data.length; ++i) {
            arr[i] = String.fromCharCode(data[i]);
        }
        var bstr = arr.join("");
        var workbook = XLSX.read(bstr, {
            type: "binary"
        });
        var firstSheetName = workbook.SheetNames[0];
        var worksheet = workbook.Sheets[firstSheetName];
        var json = XLSX.utils.sheet_to_json(worksheet, {
            raw: true
        });
        var jsonStr = JSON.stringify(json);
        MessageBox.show("JSON String: " + jsonStr);
    };
    oReq.send();
},
The answer is:
UploadFile.view.xml
<VBox>
    <u:FileUploader id="idfileUploader"
        typeMissmatch="handleTypeMissmatch"
        change="handleValueChange"
        maximumFileSize="10"
        fileSizeExceed="handleFileSize"
        maximumFilenameLength="50"
        filenameLengthExceed="handleFileNameLength"
        multiple="false"
        width="50%"
        sameFilenameAllowed="false"
        buttonText="Browse"
        fileType="CSV"
        style="Emphasized"
        placeholder="Choose a CSV file"/>
    <Button text="Upload your file" press="onUpload" type="Emphasized"/>
</VBox>
UploadFile.controller.js
handleTypeMissmatch: function(oEvent) {
    var aFileTypes = oEvent.getSource().getFileType();
    jQuery.each(aFileTypes, function(key, value) {
        aFileTypes[key] = "*." + value;
    });
    var sSupportedFileTypes = aFileTypes.join(", ");
    MessageToast.show("The file type *." + oEvent.getParameter("fileType") +
        " is not supported. Choose one of the following types: " +
        sSupportedFileTypes);
},

handleValueChange: function(oEvent) {
    MessageToast.show("Press 'Upload File' to upload file '" + oEvent.getParameter("newValue") + "'");
},

handleFileSize: function(oEvent) {
    MessageToast.show("The file size should not exceed 10 MB.");
},

handleFileNameLength: function(oEvent) {
    MessageToast.show("The file name should not exceed 50 characters.");
},
onUpload: function(e) {
    var oResourceBundle = this.getView().getModel("i18n").getResourceBundle();
    var fU = this.getView().byId("idfileUploader");
    var domRef = fU.getFocusDomRef();
    var file = domRef.files[0];
    var reader = new FileReader();
    var params = "EmployeesJson=";
    reader.onload = function(oEvent) {
        var strCSV = oEvent.target.result;
        var arrCSV = strCSV.match(/[\w .]+(?=,?)/g);
        var noOfCols = 6;
        var headerRow = arrCSV.splice(0, noOfCols);
        var data = [];
        while (arrCSV.length > 0) {
            var obj = {};
            var row = arrCSV.splice(0, noOfCols);
            for (var i = 0; i < row.length; i++) {
                obj[headerRow[i]] = row[i].trim();
            }
            data.push(obj);
        }
        var Len = data.length;
        data.reverse();
        params += "[";
        for (var j = 0; j < Len; j++) {
            params += JSON.stringify(data.pop()) + ", ";
        }
        params = params.substring(0, params.length - 2);
        params += "]";
        // MessageBox.show(params);
        var http = new XMLHttpRequest();
        var url = oResourceBundle.getText("UploadEmployeesFile").toString();
        http.onreadystatechange = function() {
            if (http.readyState === 4 && http.status === 200) {
                var json = JSON.parse(http.responseText);
                var status = json.status.toString();
                switch (status) {
                    case "Success":
                        MessageToast.show("Data is uploaded successfully.");
                        break;
                    default:
                        MessageToast.show("Data was not uploaded.");
                }
            }
        };
        http.open("POST", url, true);
        http.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
        http.send(params);
    };
    reader.readAsBinaryString(file);
}
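If the goal is still the original xlsx-to-JSON parsing rather than CSV, the same FileReader pattern can feed SheetJS directly. A sketch, assuming the XLSX global from the question is loaded and the FileUploader's fileType is changed to allow xlsx:

onUpload: function(e) {
    var file = this.getView().byId("idfileUploader").getFocusDomRef().files[0];
    var reader = new FileReader();
    reader.onload = function(oEvent) {
        // SheetJS accepts a Uint8Array when type is "array"
        var workbook = XLSX.read(new Uint8Array(oEvent.target.result), { type: "array" });
        var worksheet = workbook.Sheets[workbook.SheetNames[0]];
        var json = XLSX.utils.sheet_to_json(worksheet, { raw: true });
        MessageBox.show("JSON String: " + JSON.stringify(json));
    };
    reader.readAsArrayBuffer(file);
}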
I'm trying to send a data-URI image using XHR and FormData(),
so I found a method to convert a data URI to a Blob:
function dataURItoBlob(dataURI) {
    // convert base64/URL-encoded data component to raw binary data held in a string
    var byteString;
    if (dataURI.split(',')[0].indexOf('base64') >= 0)
        byteString = atob(dataURI.split(',')[1]);
    else
        byteString = unescape(dataURI.split(',')[1]);
    // separate out the MIME component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];
    // write the bytes of the string to a typed array
    var ia = new Uint8Array(byteString.length);
    for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
    }
    return new Blob([ia], { type: mimeString });
}
and I send the blob variable using XHR:
var blob = dataURItoBlob(dataURL);
var data = new FormData();
data.append('photos[]', blob, "file.png");

var xhr = new XMLHttpRequest();
xhr.open('POST', '/upload', true);
xhr.upload.onprogress = function(e) {
    if (e.lengthComputable) {
        var percentComplete = (e.loaded / e.total) * 100;
        console.log(percentComplete + '% uploaded');
    }
};
xhr.onload = function() {
    alert(this.response);
    var resp = JSON.parse(this.response);
    console.log('Server got:', resp);
};
xhr.send(data);
Next, I receive the photos[] variable and get the blob object, but I can't save it as a file:
function storeBlob($blob, $fileName)
{
    $blobRE = '/^data:((\w+)\/(\w+));base64,(.*)$/';
    if (preg_match($blobRE, $blob, $m))
    {
        $imageName = (preg_match('/\.\w{2,4}$/', $fileName) ? $fileName : sprintf('%s.%s', $fileName, $m[3]));
        return file_put_contents($imageName, base64_decode($m[4]));
    }
    return false; // error
}
I'm currently implementing an upload for files. Because I have to handle huge files here and there, I've started to slice files and send them in 1 MB chunks, which works great as long as files are smaller than roughly 500 MB. After that, memory seemingly isn't freed anymore at random, and I can't figure out what I'm missing here.
Prepare chunks
var sliceCount = 0;
var sendCount = 0;
var fileID = generateUUID();
var maxChunks = 0;
var userNotified = false;

function parseFile(file)
{
    var fileSize = file.size;
    var chunkSize = 1024 * 1024; //64 * 1024; // bytes
    var offset = 0;
    var self = this; // we need a reference to the current object
    var chunkReaderBlock = null;
    var numberOfChunks = fileSize / chunkSize;
    maxChunks = Math.ceil(numberOfChunks);

    // gets called if chunk is read into memory
    var readEventHandler = function (evt)
    {
        if (evt.target.error == null) {
            offset += evt.target.result.byteLength;
            sendChunkAsBinary(evt.target.result);
        }
        else
        {
            console.log("Read error: " + evt.target.error);
            return;
        }
        if (offset >= fileSize) {
            console.log("Done reading file");
            return;
        }
        // off to the next chunk
        chunkReaderBlock(offset, chunkSize, file);
    }

    chunkReaderBlock = function (_offset, length, _file)
    {
        var r = new FileReader();
        var blob = _file.slice(_offset, length + _offset);
        sliceCount++;
        console.log("Slicecount: " + sliceCount);
        r.onload = readEventHandler;
        r.readAsArrayBuffer(blob);
        blob = null;
        r = null;
    }

    // now let's start the read with the first block
    chunkReaderBlock(offset, chunkSize, file);
}
Send Chunks
function sendChunkAsBinary(chunk)
{
    var progressbar = $("#progressbar"), bar = progressbar.find('.uk-progress-bar');

    // create XHR instance
    var xhr = new XMLHttpRequest();

    // send the file through POST
    xhr.open("POST", 'upload.php', true);

    var progressHandler = function (e)
    {
        // get percentage of how much of the current file has been sent
        var position = e.loaded || e.position;
        var total = e.total || e.totalSize;
        var percentage = Math.round((sendCount / maxChunks) * 100);

        // set bar width to keep track of progress
        bar.css("width", percentage + "%").text(percentage + "%");
    }

    // let's track upload progress
    var eventSource = xhr.upload || xhr;
    eventSource.addEventListener("progress", progressHandler);

    // state change observer - we need to know when and if the file was successfully uploaded
    xhr.onreadystatechange = function ()
    {
        if (xhr.readyState == 4)
        {
            if (xhr.status == 200)
            {
                eventSource.removeEventListener("progress", progressHandler);
                if (sendCount == maxChunks && !userNotified)
                {
                    userNotified = true;
                    notifyUserSuccess("Datei hochgeladen!"); // "File uploaded!"
                    setTimeout(function ()
                    {
                        progressbar.addClass("uk-invisible");
                        bar.css("width", "0%").text("0%");
                    }, 250);
                    updateDocList();
                }
            }
            else
            {
                notifyUser("Fehler beim hochladen der Datei!"); // "Error uploading the file!"
            }
        }
    };

    var blob;
    if (typeof window.Blob == "function") {
        blob = new Blob([chunk]);
    } else {
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(chunk);
        blob = bb.getBlob();
    }
    sendCount++;

    var formData = new FormData();
    formData.append("chunkNumber", sendCount);
    formData.append("maxChunks", maxChunks);
    formData.append("fileID", fileID);
    formData.append("chunkpart", blob);
    xhr.send(formData);

    progressbar.removeClass("uk-invisible");
    console.log("Sendcount: " + sendCount);
}
If I attach the debugger within Visual Studio 2015, it takes a bit, but soon I get an OutOfMemoryException in the send function, at exactly this line: blob = new Blob([chunk]);. The exception occurs on the same line every time.
As soon as the exception happens I get POST [...]/upload.php net::ERR_FILE_NOT_FOUND; however, I still get the chunks in my PHP file.
Here's a Timeline graph of my error.
What I don't understand: I'm not able to see memory increasing in the Task Manager (a few MB of course, but nowhere near the 16 GB of RAM I have).
So can anyone tell me where this leak comes from? What am I missing?
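One pattern worth trying, sketched below under the assumption that upload.php and the FormData fields stay as above: make the pipeline strictly sequential, reading and sending the next slice only after the server has acknowledged the previous one, so that at most one chunk's ArrayBuffer and one in-flight request exist at any time.

function uploadSequentially(file, offset) {
    var chunkSize = 1024 * 1024;
    offset = offset || 0;
    if (offset >= file.size) {
        console.log("Done uploading file");
        return;
    }
    var reader = new FileReader();
    reader.onload = function (evt) {
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "upload.php", true);
        xhr.onload = function () {
            // recurse only after the server confirmed this chunk, so the
            // previous Blob and ArrayBuffer become collectable
            uploadSequentially(file, offset + chunkSize);
        };
        var formData = new FormData();
        formData.append("chunkpart", new Blob([evt.target.result]));
        xhr.send(formData);
    };
    reader.readAsArrayBuffer(file.slice(offset, offset + chunkSize));
}

The recursion happens inside asynchronous callbacks, so the call stack does not grow with the number of chunks.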
I'm using a chunked-upload JS script I found in another post on here. When I run it on localhost, it's fast. But when I run it on my live URL, there's about 20 seconds of "Request Sent" under the timing tab of Chrome's Network panel. The "Request Sent" delay doesn't happen on localhost. I'm worried it's sending the complete file in the first chunk instead of just 1 MB.
Is there a different security measure for uploads, like Chrome running a virus scan? Is there any way I can confirm?
const BYTES_PER_CHUNK = 1024 * 1024; // 1MB chunk sizes.
var slices;
var slices2;

function sendRequest() {
    var blob = document.getElementById('fileToUpload').files[0];
    var start = 0;
    var end;
    var index = 0;
    responseCount = 0;

    // calculate the number of slices we will need
    slices = Math.ceil(blob.size / BYTES_PER_CHUNK);
    slices2 = slices;

    while (start < blob.size) {
        end = start + BYTES_PER_CHUNK;
        if (end > blob.size) {
            end = blob.size;
        }
        uploadFile(blob, index, start, end);
        start = end;
        index++;
    }
}
function uploadFile(blob, index, start, end) {
    var xhr;
    var fd;
    var chunk;

    xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        if (xhr.readyState == 4) {
            if (xhr.responseText) {
                responseCount++;
                jQuery('#uploadMessage').text("Uploading Progress: " + Math.ceil((responseCount / slices2) * 100) + "%");
            }
            slices--;
            // if we have finished all slices
            if (slices == 0) {
                mergeFile(blob);
            }
        }
    };

    if (blob.webkitSlice) {
        chunk = blob.webkitSlice(start, end);
    } else if (blob.mozSlice) {
        chunk = blob.mozSlice(start, end);
    } else {
        chunk = blob.slice(start, end);
    }

    fd = new FormData();
    fd.append("file", chunk);
    fd.append("name", blob.name);
    fd.append("index", index);

    xhr.open("POST", "/index.php?chunkupload=upload", true);
    xhr.send(fd);
}
function mergeFile(blob) {
    jQuery('#uploadMessage').text("Closing Upload");
    var xhr;
    var fd;

    xhr = new XMLHttpRequest();
    fd = new FormData();
    fd.append("name", blob.name);
    fd.append("index", slices2);
    fd.append("userinputname", jQuery('#userinputname').val());
    fd.append("userinputdescr", jQuery('#userinputdescr').val());
    xhr.open("POST", "/index.php?chunkmerge=upload", true);
    xhr.send(fd);
    jQuery('#uploadMessage').text("Upload Complete. Video will be published upon site approval.");
}