Empty files uploaded in Android Native browser - javascript

I'm creating a website for mobile phones that resizes photos and uploads them.
$('#ImgPreview canvas').each(function(pIndex) {
    vFormData.append(pIndex, canvasToJpegBlob($(this)[0]), vIssueId +'-attachment0'+ pIndex +'.jpg');
});
$.ajax({
    url: '/api/ob/issuefileupload',
    data: vFormData,
    processData: false,
    contentType: false,
    type: 'POST'
}).done(function(pData) {
    window.location = '/issue?id='+ vIssueId;
}).fail(function(pJqXHR) {
    alert(My.Strings.UploadFailed);
});
This works in Chrome for Android and in Safari on iOS, but in the native Android browser the uploaded files have a content-length of 0 and the name Blob plus a UID. When the file is added to the FormData, its size also seems rather large (about 900 KB as opposed to about 50 KB in Chrome).
The canvasToJpegBlob function:
function canvasToJpegBlob(pCanvas) {
    var vMimeType = "image/jpeg",
        vDataURI,
        vByteString,
        vBlob,
        vArrayBuffer,
        vUint8Array, i,
        vBlobBuilder;
    vDataURI = pCanvas.toDataURL(vMimeType, 0.8);
    vByteString = atob(vDataURI.split(',')[1]);
    vArrayBuffer = new ArrayBuffer(vByteString.length);
    vUint8Array = new Uint8Array(vArrayBuffer);
    for (i = 0; i < vByteString.length; i++) {
        vUint8Array[i] = vByteString.charCodeAt(i);
    }
    try {
        vBlob = new Blob([vUint8Array.buffer], {type: vMimeType});
    } catch(e) {
        window.BlobBuilder = window.BlobBuilder ||
                             window.WebKitBlobBuilder ||
                             window.MozBlobBuilder ||
                             window.MSBlobBuilder;
        if (e.name === 'TypeError' && window.BlobBuilder) {
            vBlobBuilder = new BlobBuilder();
            vBlobBuilder.append(vUint8Array.buffer);
            vBlob = vBlobBuilder.getBlob(vMimeType);
        } else if (e.name === 'InvalidStateError') {
            vBlob = new Blob([vUint8Array.buffer], {type: vMimeType});
        } else {
            alert(My.Strings.UnsupportedFile);
        }
    }
    return vBlob;
}
Is there any way to get this working in the native Android browser?

I also ran into this problem and needed to come up with a more generic solution as in some cases I won't have control over the server-side code.
Eventually I reached a solution that is almost completely transparent. The approach was to polyfill the broken FormData with a blob that appends data in the necessary format for multipart/form-data. It overrides XHR's send() with a version that reads the blob into a buffer that gets sent in the request.
Here's the main code:
var
    // Android native browser uploads blobs as 0 bytes, so we need a test for that
    needsFormDataShim = (function () {
        var bCheck = ~navigator.userAgent.indexOf('Android')
            && ~navigator.vendor.indexOf('Google')
            && !~navigator.userAgent.indexOf('Chrome');
        return bCheck && navigator.userAgent.match(/AppleWebKit\/(\d+)/).pop() <= 534;
    })(),
    // Test for constructing blobs using new Blob()
    blobConstruct = !!(function () {
        try { return new Blob(); } catch (e) {}
    })(),
    // Fallback to BlobBuilder (deprecated)
    XBlob = blobConstruct ? window.Blob : function (parts, opts) {
        var bb = new (window.BlobBuilder || window.WebKitBlobBuilder || window.MSBlobBuilder);
        parts.forEach(function (p) {
            bb.append(p);
        });
        return bb.getBlob(opts ? opts.type : undefined);
    };
function FormDataShim () {
    var
        // Store a reference to this
        o = this,
        // Data to be sent
        parts = [],
        // Boundary parameter for separating the multipart values
        boundary = Array(21).join('-') + (+new Date() * (1e16*Math.random())).toString(36),
        // Store the current XHR send method so we can safely override it
        oldSend = XMLHttpRequest.prototype.send;
    this.append = function (name, value, filename) {
        parts.push('--' + boundary + '\nContent-Disposition: form-data; name="' + name + '"');
        if (value instanceof Blob) {
            parts.push('; filename="'+ (filename || 'blob') +'"\nContent-Type: ' + value.type + '\n\n');
            parts.push(value);
        }
        else {
            parts.push('\n\n' + value);
        }
        parts.push('\n');
    };
    // Override XHR send()
    XMLHttpRequest.prototype.send = function (val) {
        var fr,
            data,
            oXHR = this;
        if (val === o) {
            // Append the final boundary string
            parts.push('--' + boundary + '--');
            // Create the blob
            data = new XBlob(parts);
            // Set up and read the blob into an array to be sent
            fr = new FileReader();
            fr.onload = function () { oldSend.call(oXHR, fr.result); };
            fr.onerror = function (err) { throw err; };
            fr.readAsArrayBuffer(data);
            // Set the multipart content type and boundary
            this.setRequestHeader('Content-Type', 'multipart/form-data; boundary=' + boundary);
            XMLHttpRequest.prototype.send = oldSend;
        }
        else {
            oldSend.call(this, val);
        }
    };
}
And just use it like so, calling fd.append(name, value) as normal afterwards:
var fd = needsFormDataShim ? new FormDataShim() : new FormData();
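Wired into the question's upload code, that might look like this (a sketch reusing the selector, endpoint, and canvasToJpegBlob helper from the question; untested against the shim):
var vFormData = needsFormDataShim ? new FormDataShim() : new FormData();
$('#ImgPreview canvas').each(function (pIndex) {
    // append blobs and filenames exactly as with a native FormData
    vFormData.append(pIndex, canvasToJpegBlob(this), vIssueId + '-attachment0' + pIndex + '.jpg');
});
// jQuery hands the shim object straight to xhr.send(), which triggers the override
$.ajax({ url: '/api/ob/issuefileupload', data: vFormData, processData: false, contentType: false, type: 'POST' });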

How about drawing the image onto a canvas, using a transformation matrix to scale it to the size you want, and then sending it to the server using canvas.toDataURL? Check out this question.
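A minimal sketch of that idea (the 640-pixel target width, the img variable, and the photo.jpg source are arbitrary examples, not from the question):
var img = new Image();
img.onload = function () {
    var canvas = document.createElement('canvas');
    var scale = 640 / img.width; // arbitrary target width of 640px
    canvas.width = img.width * scale;
    canvas.height = img.height * scale;
    var ctx = canvas.getContext('2d');
    // scale the drawing matrix so the full image fits the smaller canvas
    ctx.scale(scale, scale);
    ctx.drawImage(img, 0, 0);
    // a plain data-URL string survives where blob uploads fail
    var dataUrl = canvas.toDataURL('image/jpeg', 0.8);
};
img.src = 'photo.jpg'; // hypothetical source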

I used this to fix the problem:
// don't use a blob; simply use key/value pairs
var form = new FormData();
// get the content type and raw data from the data URL
form.append('content_type', type);
form.append('content', raw);
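Assuming the image comes from a canvas as in the question (pCanvas is the question's canvas element), type and raw can be pulled out of the data URL like this; the server then has to base64-decode the content field itself:
var dataUrl = pCanvas.toDataURL('image/jpeg', 0.8);
// "data:image/jpeg;base64,<data>" splits into header and payload at the comma
var parts = dataUrl.split(',');
var type = parts[0].split(':')[1].split(';')[0]; // "image/jpeg"
var raw = parts[1];                              // base64-encoded image bytes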

Related

Get Byte Position during Upload Loop

I am working on a function that will write data to a remote server in chunks using a 3rd-party API. Through some help on Stack Overflow I was able to accomplish this, and it is now working as expected. The problem is that I can only get a single 16 KB chunk to write, as I need to advance the pos of where the next bytes are written.
The initial write starts at 0 easily enough. Due to my unfamiliarity with this, though, I am unsure whether the next pos should just be 16 or what. If it helps, the API call writeFileChunk() takes 3 parameters: filepath (str), pos (int64), and data (a base64-encoded string).
reader.onload = function(evt)
{
    // Get SERVER_ID from URL
    var server_id = getUrlParameter('id');
    $("#upload_status").text('Uploading File...');
    $("#upload_progress").progressbar('value', 0);
    var chunkSize = 16<<10;
    var buffer = evt.target.result;
    var fileSize = buffer.byteLength;
    var segments = Math.ceil(fileSize / chunkSize); // How many segments do we need to divide into for upload
    var count = 0;
    // start the file upload
    (function upload()
    {
        var segSize = Math.min(chunkSize, fileSize - count * chunkSize);
        if (segSize > 0)
        {
            $("#upload_progress").progressbar('value', (count / segments));
            var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize); // get a chunk
            var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
            // Send chunk data to server
            $.ajax({
                type: "POST",
                url: "filemanagerHandler.php",
                data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': 0, 'chunk': chunkEncoded },
                dataType: 'json',
                success: function(data)
                {
                    console.log(data);
                    setTimeout(upload, 100);
                },
                error: function(XMLHttpRequest, textStatus, errorThrown)
                {
                    alert("Status: " + textStatus); alert("Error: " + errorThrown); alert("Message: " + XMLHttpRequest.responseText);
                }
            });
        }
        else
        {
            $("#upload_status").text('Finished!');
            $("#upload_progress").progressbar('value', 100);
            getDirectoryListing(curDirectory);
        }
    })();
};
The current position in the file on the client side is represented by this line, or more specifically by the second argument at the pre-increment step:
var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize);
In this case, though, it advances (count++) before you can reuse it, so if you need the actual position (below as pos) you can extract it by simply rewriting the line into:
var pos = count++ * chunkSize; // here chunkSize = 16 KB
var chunk = new Uint8Array(buffer, pos, segSize);
Here each position update advances by 16 KB, as that is the chunk size. Progress is then calculated as pos / fileSize * 100. This of course assumes the unencoded buffer size is used.
The only special case is the last chunk, but when there are no more chunks left to read, the position should equal the file length (fileSize), so it is pretty straightforward.
When the ajax call returns, the server should be at the same position unless something went wrong (connection, write-access change, disk full, etc.).
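Applied to the question's code, the hard-coded 'pos': 0 in the ajax data would then be replaced by the computed position (a sketch reusing the question's handler URL and field names; the surrounding upload() loop stays the same):
var pos = count++ * chunkSize;
var chunk = new Uint8Array(buffer, pos, segSize);
var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
$.ajax({
    type: "POST",
    url: "filemanagerHandler.php",
    // pass the real byte offset instead of the hard-coded 0
    data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': pos, 'chunk': chunkEncoded },
    dataType: 'json'
});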
You can use the FileReader API to read the chunks and send them to your remote server.
HTML
<input type="file" id="files" name="file" /> Read bytes:
<span class="readBytesButtons">
    <button>Read entire file in chunks</button>
</span>
JavaScript
// Post data to your server.
function postChunk(obj) {
    var url = "https://your.remote.server";
    return new Promise(function(resolve, reject) {
        var xhr = new XMLHttpRequest();
        xhr.open('post', url, true);
        xhr.responseType = 'json';
        xhr.onload = function() {
            var status = xhr.status;
            if (status == 200) {
                resolve(xhr.response);
            } else {
                reject(status);
            }
        };
        var params = "";
        // check that obj has the proper keys and create the url parameters
        if (obj.hasOwnProperty('action') && obj.hasOwnProperty('server_id') && obj.hasOwnProperty('filepath') && obj.hasOwnProperty('pos') && obj.hasOwnProperty('chunk')) {
            params += "action=" + obj['action'] + "&server_id=" + obj['server_id'] + "&filepath=" + obj['filepath'] + "&pos=" + obj['pos'] + "&chunk=" + encodeURIComponent(obj['chunk']);
        }
        if (params.length > 0) {
            // the body is form-encoded, so say so; otherwise PHP will not populate $_POST
            xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
            xhr.send(params);
        } else {
            alert('Error');
        }
    });
}
// add chunk to "obj" object and post it to server
function addChunk(reader, obj, divID) {
    reader.onloadend = function(evt) {
        if (evt.target.readyState == FileReader.DONE) { // DONE == 2
            obj.chunk = evt.target.result;
            console.log(obj);
            document.getElementById(divID).textContent +=
                ['Sending bytes: ', obj.pos * 16000, ' - ', obj.pos * 16000 + obj.chunk.length, '\n'].join('');
            // post data to server
            postChunk(obj).then(function(data) {
                if (data !== "" && data !== null && typeof data !== "undefined") {
                    // chunk was sent successfully
                    document.getElementById(divID).textContent +=
                        ['Sent bytes: ', obj.pos * 16000, ' - ', obj.pos * 16000 + obj.chunk.length, '\n'].join('');
                } else {
                    alert('Error! Empty response');
                }
            }, function(status) {
                alert('Resolve Error');
            });
        }
    };
}
// read and send chunk
function readChunk() {
    var files = document.getElementById('files').files;
    if (!files.length) {
        alert('Please select a file!');
        return;
    }
    var file = files[0];
    var size = parseInt(file.size);
    var chunkSize = 16000;
    var chunks = Math.ceil(size / chunkSize);
    var start, stop;
    var blob = [];
    for (var i = 0; i < chunks; i++) {
        start = i * chunkSize;
        // slice()'s end argument is exclusive, so no -1 here or each chunk would lose a byte
        stop = Math.min((i + 1) * chunkSize, size);
        var reader = new FileReader();
        blob = file.slice(start, stop);
        reader.readAsBinaryString(blob);
        var obj = {action: 'writeFileChunk', server_id: 'sid', filepath: 'path', pos: i, chunk: ""};
        var div = document.createElement('div');
        div.id = "bytes" + i;
        document.body.appendChild(div);
        addChunk(reader, obj, div.id);
    }
}
// Check for the various File API support.
if (window.File && window.FileReader && window.FileList && window.Blob) {
    console.log('Great success! All the File APIs are supported.');
} else {
    alert('The File APIs are not fully supported in this browser.');
}
document.querySelector('.readBytesButtons').addEventListener('click', function(evt) {
    if (evt.target.tagName.toLowerCase() == 'button') {
        readChunk();
    }
}, false);
You can check this example in this Fiddle

Multiple XMLHttpRequest.send or eventlisteners memory leak?

I'm currently implementing a file upload. Because I have to handle huge files here and there, I've started to slice files and send them in 1 MB chunks, which works great as long as files are below roughly 500 MB. After that, memory seemingly isn't freed anymore, at random, and I can't figure out what I'm missing here.
Prepare chunks
var sliceCount = 0;
var sendCount = 0;
var fileID = generateUUID();
var maxChunks = 0;
var userNotified = false;
function parseFile(file)
{
    var fileSize = file.size;
    var chunkSize = 1024 * 1024; //64 * 1024; // bytes
    var offset = 0;
    var self = this; // we need a reference to the current object
    var chunkReaderBlock = null;
    var numberOfChunks = fileSize / chunkSize;
    maxChunks = Math.ceil(numberOfChunks);
    // gets called if chunk is read into memory
    var readEventHandler = function (evt)
    {
        if (evt.target.error == null) {
            offset += evt.target.result.byteLength;
            sendChunkAsBinary(evt.target.result);
        }
        else
        {
            console.log("Read error: " + evt.target.error);
            return;
        }
        if (offset >= fileSize) {
            console.log("Done reading file");
            return;
        }
        // off to the next chunk
        chunkReaderBlock(offset, chunkSize, file);
    }
    chunkReaderBlock = function (_offset, length, _file)
    {
        var r = new FileReader();
        var blob = _file.slice(_offset, length + _offset);
        sliceCount++;
        console.log("Slicecount: " + sliceCount);
        r.onload = readEventHandler;
        r.readAsArrayBuffer(blob);
        blob = null;
        r = null;
    }
    // now let's start the read with the first block
    chunkReaderBlock(offset, chunkSize, file);
}
Send Chunks
function sendChunkAsBinary(chunk)
{
    var progressbar = $("#progressbar"), bar = progressbar.find('.uk-progress-bar');
    // create XHR instance
    var xhr = new XMLHttpRequest();
    // send the file through POST
    xhr.open("POST", 'upload.php', true);
    var progressHandler = function (e)
    {
        // get percentage of how much of the current file has been sent
        var position = e.loaded || e.position;
        var total = e.total || e.totalSize;
        var percentage = Math.round((sendCount / maxChunks) * 100);
        // set bar width to keep track of progress
        bar.css("width", percentage + "%").text(percentage + "%");
    }
    // let's track upload progress
    var eventSource = xhr.upload || xhr;
    eventSource.addEventListener("progress", progressHandler);
    // state change observer - we need to know when and if the file was successfully uploaded
    xhr.onreadystatechange = function ()
    {
        if (xhr.readyState == 4)
        {
            if (xhr.status == 200)
            {
                eventSource.removeEventListener("progress", progressHandler);
                if (sendCount == maxChunks && !userNotified)
                {
                    userNotified = true;
                    notifyUserSuccess("File uploaded!");
                    setTimeout(function ()
                    {
                        progressbar.addClass("uk-invisible");
                        bar.css("width", "0%").text("0%");
                    }, 250);
                    updateDocList();
                }
            }
            else
            {
                notifyUser("Error while uploading the file!");
            }
        }
    };
    var blob;
    if (typeof window.Blob == "function") {
        blob = new Blob([chunk]);
    } else {
        var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
        bb.append(chunk);
        blob = bb.getBlob();
    }
    sendCount++;
    var formData = new FormData();
    formData.append("chunkNumber", sendCount);
    formData.append("maxChunks", maxChunks);
    formData.append("fileID", fileID);
    formData.append("chunkpart", blob);
    xhr.send(formData);
    progressbar.removeClass("uk-invisible");
    console.log("Sendcount: " + sendCount);
}
If I attach the debugger in Visual Studio 2015, it takes a bit, but soon I get an OutOfMemoryException in the send function, at exactly this line: blob = new Blob([chunk]);. It is always the same line where the exception occurs.
As soon as the exception happens I get POST [...]/upload.php net::ERR_FILE_NOT_FOUND, yet I still receive the chunks in my PHP file.
Here's a timeline graph of my error.
What I don't understand: I'm not able to see memory increasing inside the Task Manager (a few MB of course, but nowhere near the 16 GB of RAM I've got).
So can anyone tell me where this leak comes from? What am I missing?

Add a file size limit to my XMLHttpRequest file upload

I'm using Trix (https://github.com/basecamp/trix) as a text editor in my app and allow file uploads through it. I want to add a 5 MB max file size to my uploads, but am a little lost as to how to go about it. How would you implement it?
(function() {
    var createStorageKey, host, uploadAttachment;
    document.addEventListener("trix-attachment-add", function(event) {
        var attachment;
        attachment = event.attachment;
        if (attachment.file) {
            return uploadAttachment(attachment);
        }
    });
    host = "https://my.cloudfront.net/";
    uploadAttachment = function(attachment) {
        var file, form, key, xhr;
        file = attachment.file;
        key = createStorageKey(file);
        form = new FormData;
        form.append("key", key);
        form.append("Content-Type", file.type);
        form.append("file", file);
        xhr = new XMLHttpRequest;
        xhr.open("POST", host, true);
        xhr.upload.onprogress = function(event) {
            var progress;
            progress = event.loaded / event.total * 100;
            return attachment.setUploadProgress(progress);
        };
        xhr.onload = function() {
            var href, url;
            if (xhr.status === 204) {
                url = href = host + key;
                return attachment.setAttributes({
                    url: url,
                    href: href
                });
            }
        };
        return xhr.send(form);
    };
    createStorageKey = function(file) {
        var date, day, time;
        date = new Date();
        day = date.toISOString().slice(0, 10);
        time = date.getTime();
        return "tmp/" + day + "/" + time + "-" + file.name;
    };
}).call(this);
You should do it client side AND server side. For the client side, just add one condition like so:
file = attachment.file;
if (file.size == 0) {
    attachment.remove();
    alert("The file you submitted looks empty.");
    return;
} else if (file.size / (1024 * 1024) > 5) { // 5 MB limit
    attachment.remove();
    alert("Your file seems too big for uploading.");
    return;
}
You can also look at this Gist I wrote, showing a full implementation: https://gist.github.com/pmhoudry/a0dc6905872a41a316135d42a5537ddb
You should do it on the server side and return an error if the file size exceeds 5 MB. You can also validate it on the client side through event.file; it has a size attribute, so you can get the file size from there.
if ((event.file.size / (1024 * 1024)) > 5) { // larger than 5 MB
    console.log('do your logic here');
}
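What the server-side check looks like depends entirely on your backend; as a purely hypothetical illustration (the /upload route and the Express stack are assumptions, not from the original setup), a Node/Express endpoint could reject oversized bodies up front like this:
// hypothetical Express endpoint; mirrors the 5 MB client-side limit
var express = require('express');
var app = express();
app.post('/upload', function (req, res) {
    var size = parseInt(req.headers['content-length'] || '0', 10);
    if (size > 5 * 1024 * 1024) {
        // 413 Payload Too Large
        return res.status(413).send('File exceeds the 5 MB limit');
    }
    // ... store the file ...
    res.sendStatus(204);
});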

Length of uploaded couchDB attachment always 0 Bytes

So... I'm new to all this stuff. I'm developing an app for Android with AngularJS and the Ionic Framework, and I'm trying to upload an audio file I recorded with the Cordova capture plugin, like this:
// gets called from scope
$scope.captureAudio = function() {
    var options = { limit: 1, duration: 10 };
    $cordovaCapture.captureAudio(options).then(function(audioData) {
        uploadFile(documentID, audioData);
    }, function(err) {
        console.log('error code: ' + err);
    });
};
var uploadFile = function (document, file) {
    var baseUrl = 'urltomydatabase';
    var name = encodeURIComponent('test.3gpp'),
        type = file[0].type,
        fileReader = new FileReader(),
        putRequest = new XMLHttpRequest();
    $http.get(baseUrl + encodeURIComponent(document))
        .success(function (data) {
            putRequest.open('PUT', baseUrl + encodeURIComponent(document) + '/' + name + '?rev=' + data._rev, true);
            putRequest.setRequestHeader('Content-Type', type);
            fileReader.readAsArrayBuffer(file[0]);
            fileReader.onload = function (readerEvent) {
                putRequest.send(readerEvent);
            };
            putRequest.onreadystatechange = function (response) {
                if (putRequest.readyState == 4) {
                    //success - be happy
                }
            };
        })
        .error(function () {
            // failure
        });
};
How the file looks in the console.log: (screenshot omitted)
Playing the recorded file on the device works fine.
But every time the upload finishes, the uploaded attachment inside the document has a length of 0 in CouchDB.
How the created file looks in the database after the upload: (screenshot omitted)
What am I doing wrong?
EDIT: I just found out that when I upload an image passed from this function as a blob, it works well:
function upload(imageURL) {
    var image = new Image();
    var onload = function () {
        var canvas = document.createElement("canvas");
        canvas.width = this.width;
        canvas.height = this.height;
        var ctx = canvas.getContext("2d");
        ctx.drawImage(this, 0, 0);
        canvas.toBlob(function (blob) {
            uploadFile(documentID, blob);
        });
    };
    image.onload = onload;
    image.src = imageURL;
}
So maybe the solution is creating a blob from the audio file? But every time I try, my blob has a size of 0 bytes even before uploading it, and I can't find a good explanation anywhere of how to convert a MediaFile object to a blob...
It looks like your code does not send the content of your file as a multipart attachment. To see what is really sent to CouchDB, capture the traffic with Wireshark (https://www.wireshark.org/) or similar.
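One detail worth checking in the question's code (an observation, not a confirmed fix): putRequest.send(readerEvent) passes the FileReader load event object rather than the bytes it read. Sending the reader's result instead would look like this:
fileReader.onload = function (readerEvent) {
    // send the ArrayBuffer that was read, not the event object wrapping it
    putRequest.send(readerEvent.target.result);
};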
This thread brought me to the solution; PouchDB simplifies it. Now my upload function looks like this and can handle every file format:
// e.g. capture audio
$scope.captureAudio = function () {
    var options = {limit: 1, duration: 10};
    $cordovaCapture.captureAudio(options).then(function (audioData) {
        uploadFile(documentID, audioData, 'audio');
    }, function (err) {
        console.log('error code: ' + err);
    });
};
var uploadFile = function (id, file, mediatype) {
    var fileName = makeID();
    if (mediatype == 'image') var name = encodeURIComponent(fileName + '.jpg');
    if (mediatype == 'audio') var name = encodeURIComponent(fileName + '.3gpp');
    if (mediatype == 'video') var name = encodeURIComponent(fileName + '.3gp');
    db.get(id).then(function (doc) {
        var path = file.fullPath;
        window.resolveLocalFileSystemURL(path, function (fileEntry) {
            return fileEntry.file(function (data) {
                var reader = new FileReader();
                reader.onloadend = function (e) {
                    var blob = b64toBlobAlt(e.target.result, file.type);
                    if (blob) {
                        db.putAttachment(id, name, doc._rev, blob, file.type).then(function () {
                            if (mediatype == 'video' || mediatype == 'image') getMedia();
                            if (mediatype == 'audio') $scope.audios.push(source);
                        });
                    }
                };
                return reader.readAsDataURL(data);
            });
        });
    });
};
// creating the blob from the base64 string
function b64toBlobAlt(dataURI, contentType) {
    var ab, byteString, i, ia;
    byteString = atob(dataURI.split(',')[1]);
    ab = new ArrayBuffer(byteString.length);
    ia = new Uint8Array(ab);
    i = 0;
    while (i < byteString.length) {
        ia[i] = byteString.charCodeAt(i);
        i++;
    }
    return new Blob([ab], {
        type: contentType
    });
}

Safari iOS 6 - ajax request blob image

I have a function that mainly downloads images into a blob object, and it works fine on Chrome, Firefox, and iOS 7+, but not on iOS 6...
downloadImage: function( url ) {
    var that = this;
    return new Ember.RSVP.Promise(function( resolve, reject ) {
        var xhr = new XMLHttpRequest();
        xhr.open('GET', url);
        xhr.onreadystatechange = function() {
            if (this.readyState === this.DONE) {
                that.chart.incrementProgress();
                if (this.status === 200) {
                    var blob = this.response;
                    resolve( that.imageStore.writeImage( that, url, blob ) );
                }
                else {
                    resolve();
                }
            }
        };
        xhr.responseType = 'blob';
        xhr.send();
    });
}
In the iOS 6 console debugger, when I inspect my blob object, it seems to be a string with very strange characters in it... I'm not sure if that is normal or if my request doesn't work properly on this version of iOS.
After that I need to convert it to base64, so I use FileReader for that, like this:
this.writeImage = function( controller, url, blob ) {
    var that = this;
    return new Ember.RSVP.Promise(function( resolve ) {
        var reader = new window.FileReader();
        reader.readAsDataURL(blob);
        reader.onloadend = function() {
            var base64 = reader.result;
            var object = { id: url, key: url, base64: base64 };
            //controller.store.update('image', object).save();
            controller.store.findQuery('image', { key: url })
                .then(function( result ) {
                    var record = result.content[0];
                    record._data.base64 = base64;
                    record.save().then( resolve );
                })
                .catch(function() {
                    controller.store.createRecord('image', object).save().then( resolve );
                });
        };
    });
};
Don't pay attention to the Promise wrapper and the other arguments; the blob is the same as the one in the downloadImage function.
And for some mysterious reason, reader.onloadend is never triggered, because the readyState in reader always stays at 0.
Should I do something particular for iOS 6, or is my code wrong?
[edit]: It's as if the onloadend callback is never triggered??
[edit2]: After further investigation, it seems that the response from the ajax request is a string instead of a blob... and my responseType is set to "" as well?
I have found a workaround for now: I convert my binary string into a blob like this:
function binaryStringToBlob( byteCharacters, contentType ) {
    var sliceSize = 1024;
    var bytesLength = byteCharacters.length;
    var slicesCount = Math.ceil(bytesLength / sliceSize);
    var byteArrays = new Array(slicesCount);
    for (var sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
        var begin = sliceIndex * sliceSize;
        var end = Math.min(begin + sliceSize, bytesLength);
        var bytes = new Array(end - begin);
        for (var offset = begin, i = 0; offset < end; ++i, ++offset) {
            bytes[i] = byteCharacters[offset].charCodeAt(0);
        }
        byteArrays[sliceIndex] = new Uint8Array(bytes);
    }
    return new Blob(byteArrays, { type: contentType });
}
You just need to get the content type, and there you go!
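A sketch of how this could tie into the failing request (hedged: it assumes the old WebKit build ignores responseType = 'blob' and hands back the body as a byte-per-character string in responseText):
var xhr = new XMLHttpRequest();
xhr.open('GET', url);
// ask the engine for an unparsed byte-per-character string
xhr.overrideMimeType('text/plain; charset=x-user-defined');
xhr.onload = function () {
    var contentType = xhr.getResponseHeader('Content-Type');
    var blob = binaryStringToBlob(xhr.responseText, contentType);
    // the blob can now go through FileReader.readAsDataURL as before
};
xhr.send();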
