I'm using a chunked-upload JS script I found in another post on here. When I run it on localhost it's fast, but when I run it against my live URL, Chrome's Network panel shows about 20 seconds of "Request sent" in the timing tab. That phase doesn't show up on localhost. I'm worried the script is sending the complete file in the first chunk instead of just 1 MB.
Is there some different security measure for uploads, like Chrome running a virus scan? Is there any way I can confirm what's actually being sent?
const BYTES_PER_CHUNK = 1024 * 1024; // 1 MB chunk size
var slices;        // remaining slices, counted down as responses arrive
var slices2;       // total number of slices
var responseCount; // how many chunk responses we have received so far

function sendRequest() {
    var blob = document.getElementById('fileToUpload').files[0];
    var start = 0;
    var end;
    var index = 0;
    responseCount = 0;
    // calculate the number of slices we will need
    slices = Math.ceil(blob.size / BYTES_PER_CHUNK);
    slices2 = slices;
    while (start < blob.size) {
        end = start + BYTES_PER_CHUNK;
        if (end > blob.size) {
            end = blob.size;
        }
        uploadFile(blob, index, start, end);
        start = end;
        index++;
    }
}
function uploadFile(blob, index, start, end) {
    var fd;
    var chunk;
    var xhr = new XMLHttpRequest();
    xhr.onreadystatechange = function() {
        if (xhr.readyState == 4) {
            if (xhr.responseText) {
                responseCount++;
                jQuery('#uploadMessage').text("Uploading Progress: " + Math.ceil((responseCount / slices2) * 100) + "%");
            }
            slices--;
            // if we have finished all slices
            if (slices == 0) {
                mergeFile(blob);
            }
        }
    };
    // use the vendor-prefixed slice where it exists, otherwise the standard one
    if (blob.webkitSlice) {
        chunk = blob.webkitSlice(start, end);
    } else if (blob.mozSlice) {
        chunk = blob.mozSlice(start, end);
    } else {
        chunk = blob.slice(start, end);
    }
    fd = new FormData();
    fd.append("file", chunk);
    fd.append("name", blob.name);
    fd.append("index", index);
    xhr.open("POST", "/index.php?chunkupload=upload", true);
    xhr.send(fd);
}
function mergeFile(blob) {
    jQuery('#uploadMessage').text("Closing Upload");
    var xhr = new XMLHttpRequest();
    var fd = new FormData();
    fd.append("name", blob.name);
    fd.append("index", slices2);
    fd.append("userinputname", jQuery('#userinputname').val());
    fd.append("userinputdescr", jQuery('#userinputdescr').val());
    xhr.open("POST", "/index.php?chunkmerge=upload", true);
    xhr.send(fd);
    jQuery('#uploadMessage').text("Upload Complete. Video will be published upon site approval.");
}
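A quick way to confirm each request really carries only ~1 MB (a minimal sketch against the code above, not a definitive diagnostic): slice the selected file the same way and log the sizes before any network traffic happens, then compare them with each request's Content-Length in the Network panel.

var file = document.getElementById('fileToUpload').files[0];
for (var start = 0; start < file.size; start += BYTES_PER_CHUNK) {
    var end = Math.min(start + BYTES_PER_CHUNK, file.size);
    // each logged size should be <= 1048576 bytes
    console.log('chunk', start / BYTES_PER_CHUNK, ':', file.slice(start, end).size, 'bytes');
}

If the sizes check out, the 20 seconds of "Request sent" is more likely plain upstream bandwidth: uploads to localhost never leave the machine, so they finish almost instantly, while the live server receives every chunk over your actual connection.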
Related
I'm trying to do a chunked upload for a large file, so I slice the file and send each slice in its own request.
I send the chunk, the chunk number, and the total chunk count with each XMLHttpRequest to the controller.
But in the controller method I can't access the chunk index or the total chunk count; they are always 0.
[HttpPost]
public string UploadChunks(int CHUNK_INDEX, int TOTAL_CHUNK)
{
    var chunks = Request.Body;
    string path = Path.Combine(_hostEnvironment.WebRootPath, @"file\temp");
    string newpath = Path.Combine(path, Guid.NewGuid() + ".tmp" + CHUNK_INDEX.ToString());
    using (System.IO.FileStream fs = System.IO.File.Create(newpath))
    {
        byte[] bytes = new byte[775700];
        int bytesRead;
        while ((bytesRead = chunks.Read(bytes, 0, bytes.Length)) > 0)
        {
            fs.Write(bytes, 0, bytesRead);
        }
    }
    return "success";
}
The JS function:
function upload(file) {
    var blob = file;
    var BYTES_PER_CHUNK = 775700;
    var SIZE = file.size;
    var start = 0;
    var end = BYTES_PER_CHUNK;
    var completed = 0;
    var count = SIZE % BYTES_PER_CHUNK == 0 ? SIZE / BYTES_PER_CHUNK : Math.floor(SIZE / BYTES_PER_CHUNK) + 1;
    var index = 1;
    while (start < SIZE) {
        var chunk = blob.slice(start, end);
        var data = new FormData();
        data.append("TOTAL_CHUNK", count);
        data.append("CHUNK_INDEX", index++);
        data.append("CHUNK", chunk);
        var xhr = new XMLHttpRequest();
        xhr.open("POST", "UploadChunks", true);
        xhr.setRequestHeader("Content-Type", "multipart/form-data");
        xhr.send(data);
        /* fetch("/UploadFile/MultiUpload", {
            method: 'post',
            body: data
        });*/
        start = end;
        end = start + BYTES_PER_CHUNK;
    }
}
The problem is the manually set Content-Type header: a multipart/form-data request needs a boundary parameter, which the browser only generates when you let it set the header itself. With the boundary missing, model binding can't parse the form fields, so CHUNK_INDEX and TOTAL_CHUNK always arrive as 0. If you only need the two integers, you can send them urlencoded instead:

while (start < SIZE) {
    index++;
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "UploadChunks", true);
    xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
    xhr.send("TOTAL_CHUNK=" + count + "&CHUNK_INDEX=" + index);
}
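If you still need the file bytes in the same request, a sketch (assuming the controller binds the chunk as an IFormFile parameter, which is an assumption, not something from the question) is to keep the FormData but drop the setRequestHeader call entirely, so the browser generates the multipart boundary itself:

while (start < SIZE) {
    var chunk = blob.slice(start, end);
    var data = new FormData();
    data.append("TOTAL_CHUNK", count);
    data.append("CHUNK_INDEX", index++);
    data.append("CHUNK", chunk);
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "UploadChunks", true);
    // no Content-Type header here: the browser sets
    // "multipart/form-data; boundary=..." for FormData bodies
    xhr.send(data);
    start = end;
    end = start + BYTES_PER_CHUNK;
}

The controller signature would then become something like UploadChunks(int CHUNK_INDEX, int TOTAL_CHUNK, IFormFile CHUNK) instead of reading Request.Body directly.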
I am working on a function that writes data to a remote server in chunks using a 3rd-party API. Through some help on Stack Overflow I was able to accomplish this, and it is now working as expected. The problem is that I can only get a single 16 KB chunk to write, as I need to advance the pos of where the next bytes are written.
The initial write starts at 0 easily enough. Due to my unfamiliarity with this, though, I am unsure whether the next pos should just be 16, or what. If it helps, the API call writeFileChunk() takes 3 parameters: filepath (str), pos (int64), and data (base64-encoded string).
reader.onload = function(evt)
{
    // Get SERVER_ID from URL
    var server_id = getUrlParameter('id');
    $("#upload_status").text('Uploading File...');
    $("#upload_progress").progressbar('value', 0);
    var chunkSize = 16<<10;
    var buffer = evt.target.result;
    var fileSize = buffer.byteLength;
    var segments = Math.ceil(fileSize / chunkSize); // How many segments do we need to divide into for upload
    var count = 0;

    // start the file upload
    (function upload()
    {
        var segSize = Math.min(chunkSize, fileSize - count * chunkSize);
        if (segSize > 0)
        {
            $("#upload_progress").progressbar('value', (count / segments));
            var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize); // get a chunk
            var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));

            // Send Chunk data to server
            $.ajax({
                type: "POST",
                url: "filemanagerHandler.php",
                data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': 0, 'chunk': chunkEncoded },
                dataType: 'json',
                success: function(data)
                {
                    console.log(data);
                    setTimeout(upload, 100);
                },
                error: function(XMLHttpRequest, textStatus, errorThrown)
                {
                    alert("Status: " + textStatus);
                    alert("Error: " + errorThrown);
                    alert("Message: " + XMLHttpRequest.responseText);
                }
            });
        }
        else
        {
            $("#upload_status").text('Finished!');
            $("#upload_progress").progressbar('value', 100);
            getDirectoryListing(curDirectory);
        }
    })()
};
The current position for the file on the client side is represented by this line, or more specifically by the second argument at the pre-increment step:
var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize);
In this case it advances (count++) before you can reuse it, so if you need the actual position (below as pos) you can extract it by simply rewriting the line into:
var pos = count++ * chunkSize; // here chunkSize = 16kb
var chunk = new Uint8Array(buffer, pos, segSize);
Each position update then advances by 16 KB, as that is the chunk size. Progress is calculated as pos / fileSize * 100. This of course assumes you use the unencoded buffer size.
The only special case is the last chunk, but when there are no more chunks left to read the position should equal the file length (fileSize), so it should be pretty straightforward.
When the ajax call returns, the server should be at the same position unless something went wrong (connection dropped, write access changed, disk full, etc.).
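Put together, a minimal sketch of the change inside the upload() IIFE from the question (only the pos handling differs; everything around it is assumed unchanged):

var pos = count++ * chunkSize;                    // byte offset of this chunk
var chunk = new Uint8Array(buffer, pos, segSize); // get a chunk
var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
$.ajax({
    type: "POST",
    url: "filemanagerHandler.php",
    // pass the real offset instead of the hard-coded 0
    data: { 'action': 'writeFileChunk', 'server_id': server_id,
            'filepath': filepath, 'pos': pos, 'chunk': chunkEncoded },
    dataType: 'json',
    success: function(data) { setTimeout(upload, 100); }
});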
You can use the FileReader API to read the chunks and send them to your remote server.
HTML
<input type="file" id="files" name="file" /> Read bytes:
<span class="readBytesButtons">
<button>Read entire file in chunks</button>
</span>
Javascript
// Post data to your server.
function postChunk(obj) {
    var url = "https://your.remote.server";
    return new Promise(function(resolve, reject) {
        var xhr = new XMLHttpRequest();
        xhr.open('post', url, true);
        xhr.responseType = 'json';
        xhr.onload = function() {
            var status = xhr.status;
            if (status == 200) {
                resolve(xhr.response);
            } else {
                reject(status);
            }
        };
        var params = "";
        // check that obj has the proper keys and create the url parameters;
        // the chunk is encoded so the binary string survives the urlencoded body
        if (obj.hasOwnProperty('action') && obj.hasOwnProperty('server_id') && obj.hasOwnProperty('filepath') && obj.hasOwnProperty('pos') && obj.hasOwnProperty('chunk')) {
            params += "action=" + obj['action'] + "&server_id=" + obj['server_id'] + "&filepath=" + obj['filepath'] + "&pos=" + obj['pos'] + "&chunk=" + encodeURIComponent(obj['chunk']);
        }
        if (params.length > 0) {
            // a string body defaults to text/plain, so set the form encoding explicitly
            xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
            xhr.send(params);
        } else {
            alert('Error');
        }
    });
}
// add chunk to "obj" object and post it to server
function addChunk(reader, obj, divID) {
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) { // DONE == 2
            obj.chunk = evt.target.result;
            console.log(obj);
            document.getElementById(divID).textContent +=
                ['Sending bytes: ', obj.pos * 16000, ' - ', obj.pos * 16000 + obj.chunk.length, '\n'].join('');
            // post data to server
            postChunk(obj).then(function(data) {
                if (data !== "" && data !== null && typeof data !== "undefined") {
                    // chunk was sent successfully
                    document.getElementById(divID).textContent +=
                        ['Sent bytes: ', obj.pos * 16000, ' - ', obj.pos * 16000 + obj.chunk.length, '\n'].join('');
                } else {
                    alert('Error! Empty response');
                }
            }, function(status) {
                alert('Resolve Error');
            });
        }
    };
}
// read and send Chunk
function readChunk() {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return;
    }
    var file = files[0];
    var size = parseInt(file.size);
    var chunkSize = 16000;
    var chunks = Math.ceil(size / chunkSize);
    var start, stop = 0;
    var blob = [];
    for (var i = 0; i < chunks; i++) {
        start = i * chunkSize;
        // slice()'s end argument is exclusive, so no "-1" here,
        // otherwise the last byte of every chunk would be dropped
        stop = Math.min((i + 1) * chunkSize, size);
        var reader = new FileReader();
        blob = file.slice(start, stop);
        reader.readAsBinaryString(blob);
        var obj = { action: 'writeFileChunk', server_id: 'sid', filepath: 'path', pos: i, chunk: "" };
        var div = document.createElement('div');
        div.id = "bytes" + i;
        document.body.appendChild(div);
        addChunk(reader, obj, div.id);
    }
}
// Check for the various File API support.
if (window.File && window.FileReader && window.FileList && window.Blob) {
    console.log('Great success! All the File APIs are supported.');
} else {
    alert('The File APIs are not fully supported in this browser.');
}

document.querySelector('.readBytesButtons').addEventListener('click', function(evt) {
    if (evt.target.tagName.toLowerCase() == 'button') {
        readChunk();
    }
}, false);
You can check this example in this Fiddle
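One design caveat with the loop above (an observation, not part of the original answer): all chunks are read and POSTed concurrently, so the requests can complete out of order. Since each request carries its own pos, the server can still write every piece to the right offset, but if the backend requires strictly sequential writes, the promises can be chained instead. A sketch, assuming the postChunk() above and an array of the prepared obj chunks:

// send the chunk objects one at a time, each POST starting
// only after the previous one has resolved
function postSequentially(objs) {
    return objs.reduce(function(prev, obj) {
        return prev.then(function() { return postChunk(obj); });
    }, Promise.resolve());
}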
I'm currently implementing an upload for files. Because I have to handle huge files here and there, I've started to slice files and send them in 1 MB chunks, which works great as long as files are <~500 MB. After that, it seems memory is no longer freed, seemingly at random, and I can't figure out what I'm missing here.
Prepare chunks
var sliceCount = 0;
var sendCount = 0;
var fileID = generateUUID();
var maxChunks = 0;
var userNotified = false;

function parseFile(file)
{
    var fileSize = file.size;
    var chunkSize = 1024 * 1024; //64 * 1024; // bytes
    var offset = 0;
    var self = this; // we need a reference to the current object
    var chunkReaderBlock = null;
    var numberOfChunks = fileSize / chunkSize;
    maxChunks = Math.ceil(numberOfChunks);

    // gets called if chunk is read into memory
    var readEventHandler = function (evt)
    {
        if (evt.target.error == null) {
            offset += evt.target.result.byteLength;
            sendChunkAsBinary(evt.target.result);
        }
        else
        {
            console.log("Read error: " + evt.target.error);
            return;
        }
        if (offset >= fileSize) {
            console.log("Done reading file");
            return;
        }
        // off to the next chunk
        chunkReaderBlock(offset, chunkSize, file);
    }

    chunkReaderBlock = function (_offset, length, _file)
    {
        var r = new FileReader();
        var blob = _file.slice(_offset, length + _offset);
        sliceCount++;
        console.log("Slicecount: " + sliceCount);
        r.onload = readEventHandler;
        r.readAsArrayBuffer(blob);
        blob = null;
        r = null;
    }

    // now let's start the read with the first block
    chunkReaderBlock(offset, chunkSize, file);
}
Send Chunks
function sendChunkAsBinary(chunk)
{
    var progressbar = $("#progressbar"), bar = progressbar.find('.uk-progress-bar');

    // create XHR instance
    var xhr = new XMLHttpRequest();

    // send the file through POST
    xhr.open("POST", 'upload.php', true);

    var progressHandler = function (e)
    {
        // get percentage of how much of the current file has been sent
        var position = e.loaded || e.position;
        var total = e.total || e.totalSize;
        var percentage = Math.round((sendCount / maxChunks) * 100);

        // set bar width to keep track of progress
        bar.css("width", percentage + "%").text(percentage + "%");
    }

    // let's track upload progress
    var eventSource = xhr.upload || xhr;
    eventSource.addEventListener("progress", progressHandler);

    // state change observer - we need to know when and if the file was successfully uploaded
    xhr.onreadystatechange = function ()
    {
        if (xhr.readyState == 4)
        {
            if (xhr.status == 200)
            {
                eventSource.removeEventListener("progress", progressHandler);
                if (sendCount == maxChunks && !userNotified)
                {
                    userNotified = true;
                    notifyUserSuccess("Datei hochgeladen!");
                    setTimeout(function ()
                    {
                        progressbar.addClass("uk-invisible");
                        bar.css("width", "0%").text("0%");
                    }, 250);
                    updateDocList();
                }
            }
            else
            {
                notifyUser("Fehler beim hochladen der Datei!");
            }
        }
    };

    var blob;
    if (typeof window.Blob == "function") {
        blob = new Blob([chunk]);
    } else {
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(chunk);
        blob = bb.getBlob();
    }
    sendCount++;

    var formData = new FormData();
    formData.append("chunkNumber", sendCount);
    formData.append("maxChunks", maxChunks);
    formData.append("fileID", fileID);
    formData.append("chunkpart", blob);
    xhr.send(formData);

    progressbar.removeClass("uk-invisible");
    console.log("Sendcount: " + sendCount);
}
If I attach the debugger in Visual Studio 2015, it takes a while, but soon I get an OutOfMemoryException in the send function, at exactly this line: blob = new Blob([chunk]);. It's the same line every time the exception occurs.
As soon as the exception happens, I get POST [...]/upload.php net::ERR_FILE_NOT_FOUND, yet the chunks still arrive in my PHP file.
Here's a Timeline graph of my error.
What I don't understand: I'm not able to see memory increasing in the Task Manager (a few MB of course, but nowhere close to the 16 GB of RAM I have).
So can anyone tell me where this leak comes from? What am I missing?
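One thing worth checking (an observation about the code above, not a confirmed diagnosis): readEventHandler hands each chunk to sendChunkAsBinary() and immediately starts reading the next slice, so the reader can race far ahead of the network, and hundreds of in-flight requests, each holding its own Blob and FormData, pile up at once. A sketch of a fully serialized pipeline that keeps exactly one chunk alive at a time (the function and parameter names here are hypothetical, not the originals):

function uploadSequentially(file, chunkSize, url) {
    var offset = 0;
    function sendNext() {
        if (offset >= file.size) { console.log("Done"); return; }
        var reader = new FileReader();
        reader.onload = function (evt) {
            var fd = new FormData();
            fd.append("chunkpart", new Blob([evt.target.result]));
            var xhr = new XMLHttpRequest();
            xhr.open("POST", url, true);
            xhr.onload = function () {
                offset += evt.target.result.byteLength;
                sendNext(); // only now slice and read the next chunk
            };
            xhr.send(fd);
        };
        reader.readAsArrayBuffer(file.slice(offset, offset + chunkSize));
    }
    sendNext();
}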
I'm using Trix (https://github.com/basecamp/trix) as a text editor in my app and allow file uploads through it. I want to add a 5 MB max file size to my uploads, but am a little lost on how to go about it. How would you implement it?
(function() {
    var createStorageKey, host, uploadAttachment;

    document.addEventListener("trix-attachment-add", function(event) {
        var attachment;
        attachment = event.attachment;
        if (attachment.file) {
            return uploadAttachment(attachment);
        }
    });

    host = "https://my.cloudfront.net/";

    uploadAttachment = function(attachment) {
        var file, form, key, xhr;
        file = attachment.file;
        key = createStorageKey(file);
        form = new FormData;
        form.append("key", key);
        form.append("Content-Type", file.type);
        form.append("file", file);
        xhr = new XMLHttpRequest;
        xhr.open("POST", host, true);
        xhr.upload.onprogress = function(event) {
            var progress;
            progress = event.loaded / event.total * 100;
            return attachment.setUploadProgress(progress);
        };
        xhr.onload = function() {
            var href, url;
            if (xhr.status === 204) {
                url = href = host + key;
                return attachment.setAttributes({
                    url: url,
                    href: href
                });
            }
        };
        return xhr.send(form);
    };

    createStorageKey = function(file) {
        var date, day, time;
        date = new Date();
        day = date.toISOString().slice(0, 10);
        time = date.getTime();
        return "tmp/" + day + "/" + time + "-" + file.name;
    };
}).call(this);
You should do it client side AND server side. For the client side, just add a condition like so:

file = attachment.file;
if (file.size == 0) {
    attachment.remove();
    alert("The file you submitted looks empty.");
    return;
} else if (file.size / (1024 * 1024) > 5) { // bytes -> MB
    attachment.remove();
    alert("Your file seems too big for uploading.");
    return;
}
Also, you can look at this Gist I wrote, showing a full implementation: https://gist.github.com/pmhoudry/a0dc6905872a41a316135d42a5537ddb
You should do it on the server side and return an exception if the file size exceeds 5 MB. You can also validate it client-side through event.file; it has a size attribute, so you can get the file size from there.

if (event.file.size / (1024 * 1024) > 5) { // size is in bytes
    console.log('do your logic here');
}
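Putting both suggestions together, a minimal sketch of the client-side check wired into the trix-attachment-add handler from the question (the 5 MB constant and the message text are illustrative):

document.addEventListener("trix-attachment-add", function(event) {
    var attachment = event.attachment;
    var file = attachment.file;
    if (!file) return;
    if (file.size > 5 * 1024 * 1024) { // 5 MB in bytes
        attachment.remove();           // reject oversized files before uploading
        alert("Files may be at most 5 MB.");
        return;
    }
    uploadAttachment(attachment);      // proceed with the existing upload path
});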
I am trying to upload some large files to the server using XMLHttpRequest and file.slice.
I've managed to do this with the help of documentation and various other links.
Since uploading a large file is a lengthy job, I would like to provide the user with a progress bar.
After some more reading, I came across an example that, theoretically, does exactly what I need.
By taking the sample code and adapting it to my needs, I reached:
var upload =
{
    blobs: [],
    pageName: '',
    bytesPerChunk: 20 * 1024 * 1024,
    currentChunk: 0,
    loaded: 0,
    total: 0,
    file: null,
    fileName: "",
    uploadChunk: function (blob, fileName, fileType) {
        var xhr = new XMLHttpRequest();
        xhr.onreadystatechange = function () {
            if (xhr.readyState == 4) {
                if (xhr.responseText) {
                    // alert(xhr.responseText);
                }
            }
        };
        xhr.addEventListener("load", function (evt) {
            $("#dvProgressPrcent").html("100%");
            $get('dvProgress').style.width = '100%';
        }, false);
        xhr.addEventListener("progress", function (evt) {
            if (evt.lengthComputable) {
                var progress = Math.ceil(((upload.loaded + evt.loaded) / upload.total) * 100);
                $("#dvProgressPrcent").html(progress + "%");
                $get('dvProgress').style.width = progress + '%';
            }
        }, false);
        xhr.upload.addEventListener("progress", function (evt) {
            if (evt.lengthComputable) {
                var progress = Math.ceil(((upload.loaded + evt.loaded) / upload.total) * 100);
                $("#dvProgressPrcent").html(progress + "%");
                $get('dvProgress').style.width = progress + '%';
            }
        }, false);
        xhr.open('POST', upload.pageName, false);
        xhr.setRequestHeader("Content-Type", "multipart/form-data");
        xhr.setRequestHeader("X-File-Name", fileName);
        xhr.setRequestHeader("X-File-Type", fileType);
        xhr.send(blob);
    },
    upload: function (file) {
        var start = 0;
        var end = 0;
        var size = file.size;
        var date = new Date();
        upload.fileName = date.format("dd.MM.yyyy_HH.mm.ss") + "_" + file.name;
        upload.loaded = 0;
        upload.total = file.size;
        while (start < size) {
            end = start + upload.bytesPerChunk;
            if (end > size) {
                end = size;
            }
            var blob = file.slice(start, end);
            upload.uploadChunk(blob, upload.fileName, file.type);
            start = end;
            upload.loaded += start;
        }
        return upload.fileName;
    }
};
The call looks like this (without the validations):
upload.upload(document.getElementById("upload").files[0]);
My problem is that the progress event never triggers.
I've tried xhr.addEventListener and xhr.upload.addEventListener (each on its own and both at once) for the progress event, but it never fires. The onreadystatechange and load events trigger just fine.
I would greatly appreciate help with what I am doing wrong.
Update
After many attempts I've managed to simulate progress, but I've run into another problem: Chrome's UI is not updating during the upload.
The code looks like this now:
var upload =
{
    pageName: '',
    bytesPerChunk: 20 * 1024 * 1024,
    loaded: 0,
    total: 0,
    file: null,
    fileName: "",
    uploadFile: function () {
        var size = upload.file.size;
        if (upload.loaded > size) return;
        var end = upload.loaded + upload.bytesPerChunk;
        if (end > size) { end = size; }
        var blob = upload.file.slice(upload.loaded, end);
        var xhr = new XMLHttpRequest();
        xhr.open('POST', upload.pageName, false);
        xhr.setRequestHeader("Content-Type", "multipart/form-data");
        xhr.setRequestHeader("X-File-Name", upload.fileName);
        xhr.setRequestHeader("X-File-Type", upload.file.type);
        xhr.send(blob);
        upload.loaded += upload.bytesPerChunk;
        setTimeout(upload.updateProgress, 100);
        setTimeout(upload.uploadFile, 100);
    },
    upload: function (file) {
        upload.file = file;
        var date = new Date();
        upload.fileName = date.format("dd.MM.yyyy_HH.mm.ss") + "_" + file.name;
        upload.loaded = 0;
        upload.total = file.size;
        setTimeout(upload.uploadFile, 100);
        return upload.fileName;
    },
    updateProgress: function () {
        var progress = Math.ceil(((upload.loaded) / upload.total) * 100);
        if (progress > 100) progress = 100;
        $("#dvProgressPrcent").html(progress + "%");
        $get('dvProgress').style.width = progress + '%';
    }
};
Update 2
I've managed to fix it and simulate a progress bar that works in Chrome too; I've updated the previous code sample with the one that works.
You can make the bar refresh more often by reducing the size of the chunk uploaded at a time.
Thank you for your help.
As stated in https://stackoverflow.com/a/3694435/460368, you could do :
if (xhr.upload)
    xhr.upload.onprogress = upload.updateProgress;
and
updateProgress: function updateProgress(evt)
{
    if (evt.lengthComputable) {
        var progress = Math.ceil(((upload.loaded + evt.loaded) / upload.total) * 100);
        $("#dvProgressPrcent").html(progress + "%");
        $get('dvProgress').style.width = progress + '%';
    }
}
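Worth adding (an inference from the code above, not from the linked answer): xhr.open(..., false) makes every request synchronous, and synchronous XHR blocks the UI thread and does not dispatch progress events, which is why neither listener ever fired and why Chrome's UI froze. A sketch of the asynchronous alternative against the upload object from the question, chaining the next chunk from the load handler instead of a timer:

uploadChunk: function (blob, fileName, fileType) {
    var xhr = new XMLHttpRequest();
    xhr.upload.onprogress = upload.updateProgress; // only fires for async requests
    xhr.onload = function () {
        upload.loaded += blob.size;
        if (upload.loaded < upload.total) {
            upload.uploadFile(); // send the next chunk once this one completes
        }
    };
    xhr.open('POST', upload.pageName, true); // true = asynchronous
    xhr.setRequestHeader("X-File-Name", fileName);
    xhr.setRequestHeader("X-File-Type", fileType);
    xhr.send(blob);
}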
Here is my solution:

var loopGallery = 0; // index of the file currently being uploaded

function addImages(id) {
    var files = $("#files").prop("files");
    var file = files[loopGallery];
    var cList = files.length;
    var fd = new FormData();
    fd.append("file", file);
    fd.append("galerie", id);
    var xhr = new XMLHttpRequest();
    xhr.open("POST", "moduls/galerie/uploadimages.php", true);
    xhr.upload.onprogress = function(e) {
        var percentComplete = Math.ceil((e.loaded / e.total) * 100);
        $("#progress").css("display", "");
        $("#progressText").text((loopGallery + 1) + " z " + cList);
        $("#progressBar").css("width", percentComplete + "%");
    };
    xhr.onload = function() {
        if (this.status == 200) {
            $("#progressObsah").load("moduls/galerie/showimages.php?ids=" + id);
            if ((loopGallery + 1) == cList) {
                loopGallery = 0;
            } else {
                $("#progressBar").css("width", "0%");
                loopGallery++;
                addImages(id); // upload the next file once this one has finished
            }
        }
    }
    if (cList > 0) {
        xhr.send(fd);
    }
}