So, I have a script that captures video from the webcam and then creates a GIF with:
var base64data;
var img = document.createElement('img');
var reader = new window.FileReader();
reader.readAsDataURL(blob);
reader.onloadend = function() {
    base64data = reader.result;
    img.src = base64data;
};
With this script, I take the blob object from the webcam, encode it into base64, and put the data into my img.src.
After that, I need to send this data to PHP with jQuery AJAX.
The base64data is very big, around 2,000,000 characters, so the AJAX request takes a very long time (20-50 s).
I just send the data like this:
var gif = $('.generated-img').attr('src').replace('data:image/gif;base64,', '');
gif = gif.match(/.{1,500000}/g);
$.ajax({
    type: "POST",
    url: "/webcam/",
    data: {image_gif: gif, crop_x: x, crop_y: y, crop_w: w, crop_h: h, gif: true},
    success: function () {
        parent.$.fancybox.close();
    }
});
I need to break the data into several chunks of 500,000 characters each.
But it's so slow. How can I optimize this? I just need to get this data into my PHP script to do some processing.
I don't see exactly why you need to split it up; the idea of using AJAX - where A stands for asynchronous - is precisely that you can do big slow things in the background.
But rather than using a regular expression to do the split, I would just break it up using substr, e.g. something like this (not tested, so there may be an off-by-one error here and there):
var chunkSize = 500000;
var chunkId = 0;
var isLastChunk = false;
for (var i = 0; i < gif.length; i += chunkSize) {
    var chunk;
    if (gif.length > i + chunkSize) {
        chunk = gif.substr(i, chunkSize); // a full-size chunk
    } else {
        chunk = gif.substr(i); // the final, possibly shorter chunk
        isLastChunk = true;
    }
    $.ajax({
        type: "POST",
        url: "/webcam/",
        data: {
            chunk: chunk, chunkId: chunkId, isLastChunk: isLastChunk,
            crop_x: x, crop_y: y, crop_w: w, crop_h: h, gif: true
        },
        success: function () {
            // Chunk handled successfully
        }
    });
    chunkId++;
}
In /webcam/ you can then check chunkId and isLastChunk to reassemble the chunks in order.
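One caveat with the loop above: all the requests fire concurrently, so the chunks may reach the server out of order. Below is a minimal sketch (reusing the gif string, crop variables, and /webcam/ endpoint from the question, so treat those names as givens) that sends each chunk only after the previous one has succeeded:

// Sketch: sequential chunk upload; each request starts only after the
// previous one succeeds, so chunks arrive at the server in order.
var chunkSize = 500000;
function sendChunk(offset, chunkId) {
    var isLastChunk = offset + chunkSize >= gif.length;
    $.ajax({
        type: "POST",
        url: "/webcam/",
        data: {
            chunk: gif.substr(offset, chunkSize), chunkId: chunkId,
            isLastChunk: isLastChunk,
            crop_x: x, crop_y: y, crop_w: w, crop_h: h, gif: true
        },
        success: function () {
            if (!isLastChunk) {
                sendChunk(offset + chunkSize, chunkId + 1);
            }
        }
    });
}
sendChunk(0, 0);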
I am using JavaScript and the Flask framework.
I would like to retrieve, in Flask (Python), the bytes of one or more files that the user has chosen on my HTML page. To do this, when the user has chosen the files and clicked the send button, a function is triggered that uses the FileReader API to retrieve the base64 content of the selected file(s).
After that, I would like to send the base64 data to Flask with AJAX.
But here is my problem: when I get the base64 string in Python and compare it with the one in JavaScript, the number of characters is exactly the same, but some characters are different, as you can see in the screenshots below.
[screenshot: sizes of the character strings]
And when I decode the base64 string, I get different bytes:
[screenshot: the bytes variables]
This makes me think the problem lies with the AJAX request.
Python code:
import base64
from flask import request

@app.route('/test', methods=['POST'])
def test():
    if request.method == "POST":
        files = eval(request.form.get("files"))  # note: json.loads would be safer than eval
        python_data = files[0].get("data")
        javascript_data = ""  # copy from the Chrome console
        len_python_data = len(python_data)
        len_javascript_data = len(javascript_data)
        base64_bytes_javascript = base64.b64decode(javascript_data)
        base64_bytes_python = base64.b64decode(python_data)
JavaScript code:
let array_files = [];
let files_input = document.getElementById("files_input");
let files = files_input.files;
let reader = new FileReader();

function readFile(index) {
    if (index >= files.length) {
        let data = "files=" + JSON.stringify(array_files);
        $.ajax({
            url: '/test',
            type: 'POST',
            data: data,
            success: function (msg) {
                console.log(msg);
            }
        });
        return;
    }
    let file = files[index];
    reader.name = file.name;
    reader.onload = function() {
        let file_info = {};
        // get file content
        let bin = this.result;
        console.log(bin);
        let data = bin.split(";")[1].replace("base64,", "");
        file_info.name = reader.name;
        file_info.data = data;
        array_files.push(file_info);
        readFile(index + 1);
    };
    reader.readAsDataURL(file);
}
readFile(0);
The problem is solved.
In the base64 string, the "+" characters were replaced by spaces when the data was sent via AJAX.
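That happens because the query string is built by hand ("files=" + JSON.stringify(...)) without URL-encoding, so the form parser decodes every "+" as a space. A minimal sketch of the fix, reusing the /test endpoint and array_files from above: either encode the value explicitly, or pass an object and let jQuery encode it.

// Option 1: URL-encode the hand-built string
let data = "files=" + encodeURIComponent(JSON.stringify(array_files));

// Option 2: pass an object; jQuery URL-encodes each value itself
$.ajax({
    url: '/test',
    type: 'POST',
    data: { files: JSON.stringify(array_files) },
    success: function (msg) {
        console.log(msg);
    }
});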
I have a file uploaded by a user, and I'd like to achieve the following.
Divide the file into smaller chunks of about a megabyte each.
Upload each chunk, and wait for it to finish before starting to upload the next chunk.
Get a success or failure report for every chunk.
Re-upload the failed chunks.
Get progress in percentages.
Here's some rough JavaScript. I'm literally lost. Got some code online and tried modifying it.
$.chunky = function(file, name){
    var loaded = 0;
    var step = 1048576; // 1024*1024
    var total = file.size;
    var start = 0;
    var reader = new FileReader();

    reader.onload = function(e){
        var d = {file: reader.result};
        $.ajax({
            url: "../record/c/index.php",
            type: "POST",
            data: d
        }).done(function(r){
            $('.record_reply_g').html(r);
            loaded += step;
            $('.upload_progress').html((loaded/total) * 100);
            if (loaded <= total) {
                blob = file.slice(loaded, loaded + step);
                reader.readAsBinaryString(blob);
            } else {
                loaded = total;
            }
        });
    };

    var blob = file.slice(start, step);
    reader.readAsBinaryString(blob);
}
How can I achieve the above? Please explain what's happening if there's a viable solution.
You are not doing anything to handle the failure of a chunk upload.
$.chunky = function(file, name){
    var loaded = 0;
    var step = 1048576; // 1024*1024, size of one chunk
    var total = file.size; // total size of the file
    var start = 0; // starting position
    var reader = new FileReader();

    var blob = file.slice(start, step); // the first chunk, of step size
    reader.readAsBinaryString(blob); // read that chunk; onload is invoked once it has been read

    reader.onload = function(e){
        var d = {file: reader.result};
        $.ajax({
            url: "../record/c/index.php",
            type: "POST",
            data: d // d is the chunk obtained by readAsBinaryString(...)
        }).done(function(r){ // if d was uploaded successfully ->
            $('.record_reply_g').html(r); // update the status in the HTML view
            loaded += step; // advance loaded, which is the start position of the next chunk
            $('.upload_progress').html((loaded/total) * 100);
            if (loaded <= total) { // if the file is not completely uploaded yet
                blob = file.slice(loaded, loaded + step); // get the next chunk
                reader.readAsBinaryString(blob); // read it, which triggers onload again; this recurses until the whole file is uploaded
            } else { // the file is uploaded completely
                loaded = total; // clamp loaded, which can be used to show status
            }
        });
    };
}
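For completeness, a hypothetical usage sketch (the input id below is an assumption, not from the original):

// Hypothetical wiring: start the chunked upload when a file is picked.
document.getElementById('file_input').addEventListener('change', function () {
    var file = this.files[0];
    if (file) {
        $.chunky(file, file.name);
    }
});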
EDIT
To upload a failed chunk again, you can do the following:
var totalFailures = 0;
reader.onload = function(e) {
    ....
    }).done(function(r){
        totalFailures = 0;
        ....
    }).fail(function(r){ // if the upload failed
        if ((totalFailures++) < 3) { // retry the same chunk up to 3 times on failure
            reader.readAsBinaryString(blob);
        } else { // the chunk has failed a 4th time
            // show a message to the user that the upload has failed
        }
    });
I've modified afzalex's answer to use readAsArrayBuffer(), and upload the chunk as a file.
var max_chunk_size = 1048576; // assumed chunk size (1 MB); not defined in the original
var loaded = 0;
var reader = new FileReader();
var blob = file.slice(loaded, max_chunk_size);
reader.readAsArrayBuffer(blob);
reader.onload = function(e) {
    var fd = new FormData();
    fd.append('filedata', new File([reader.result], 'filechunk'));
    fd.append('loaded', loaded);
    $.ajax(url, {
        type: "POST",
        contentType: false,
        data: fd,
        processData: false
    }).done(function(r) {
        loaded += max_chunk_size;
        if (loaded < file.size) {
            blob = file.slice(loaded, loaded + max_chunk_size);
            reader.readAsArrayBuffer(blob);
        }
    });
};
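Note that FormData.append() also accepts a Blob directly, so in current browsers the FileReader round-trip can be skipped entirely. A minimal sketch under the same file, url, and max_chunk_size names as above:

// Sketch: append the Blob slice directly; no FileReader needed.
function uploadChunk(offset) {
    var fd = new FormData();
    fd.append('filedata', file.slice(offset, offset + max_chunk_size), 'filechunk');
    fd.append('loaded', offset);
    $.ajax(url, {
        type: "POST",
        contentType: false,
        data: fd,
        processData: false
    }).done(function () {
        var next = offset + max_chunk_size;
        if (next < file.size) {
            uploadChunk(next);
        }
    });
}
uploadChunk(0);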
Stuck in this situation:
I upload several input fields and images using AJAX. The images are encoded to base64 strings with the FileReader API.
Everything seems to work except sending encoded images.
My code (simplified):
var groups = {},
    objects = {};
objects["name"] = {};
objects["group"] = {};
objects["image"] = {};
var objectCount = $(".layout-object").length;
$(".layout-object").each(function(i, v) {
    var k = objectCount - i;
    objects["name"][i] = $(v).val();
    objects["group"][i] = $("#set-object-dropdown-" + k).val();
    // get an image
    var file = document.getElementById('image-' + k).files[0];
    var reader = new FileReader();
    reader.onload = function(event) {
        objects["image"][i] = event.target.result; // get image as base64
    };
    reader.readAsDataURL(file);
});
$(".layout-group").each(function(i, v) {
    groups[i] = $(v).val();
});
// prepare object for ajax request...
var data = {
    name: $("#name").val(),
    groups: groups,
    objects: objects
};
// console.log shows all the data correctly in place; in objects there is a property "image"...
console.log(data);
// sending ajax POST request
var posting = $.post(location.href, { data: data });
posting.done(function(response){
    console.log(response);
});
Issue: in the AJAX request's form data, the property "image" is missing, even though the data object is correct just before posting.
Maybe there is a problem with the readAsDataURL function and its onload event?
readAsDataURL is asynchronous, so $.post fires before any of the onload handlers has stored an image. Fire the AJAX request from the last onload instead; something like this:
reader.onload = function(event) {
    objects["image"][i] = event.target.result; // get image as base64
    if (k == 1) { // when the last object is iterated and its image is set:
        fireAjax();
    }
};
function fireAjax(){
    // sending ajax POST request
    var posting = $.post(location.href, { data: data });
    posting.done(function(response){
        console.log(response);
    });
}
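Note that k == 1 assumes the read that was started last also finishes last, which FileReader does not guarantee. A safer sketch (reusing objectCount, objects, and fireAjax from above) counts completed reads instead:

// Sketch: fire once every image has been stored, regardless of the
// order in which the onload events arrive.
var loadedCount = 0;
reader.onload = function(event) {
    objects["image"][i] = event.target.result;
    loadedCount++;
    if (loadedCount === objectCount) {
        fireAjax();
    }
};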
I have to create an image uploader for a future project (no Flash, IE10+, FF7+, etc.) that does image resizing/converting/cropping on the client side, not on the server.
So I made a JavaScript interface where the user can 'upload' their files, which get resized/cropped directly in the browser without ever contacting the server. The performance is OK, not great, but it works.
The end result is an array of canvas elements. The user can edit/crop the images after they have been resized, so I keep them as canvases instead of converting them to JPEG (which would worsen the initial performance).
Now this works fine, but I don't know the best way to actually upload the finished canvas elements to the server (using an ASP.NET 4 generic handler on the server).
I have tried creating a JSON object from all elements, containing the data URL of each canvas.
The problem is, with 10-40 pictures, the browser starts freezing when creating the data URLs, especially for images larger than 2 megabytes.
//images = array of UploadImage
for (var i = 0; i < images.length; i++) {
    var data = document.getElementById('cv_' + i).toDataURL('image/jpeg'); // 'image/jpg' is not a valid MIME type and silently falls back to PNG
    images[i].data = data.substr(data.indexOf('base64') + 7);
}
Converting them to a JSON object (I am using json2.js) also usually crashes my browser (FF7).
My object
var UploadImage = function (pFileName, pName, pDescription) {
    this.FileName = pFileName;
    this.Name = pName;
    this.Description = pDescription;
    this.data = null;
};
The upload routine
//images = array of UploadImage
for (var i = 0; i < images.length; i++) {
    var data = document.getElementById('cv_' + i).toDataURL('image/jpeg');
    images[i].data = data.substr(data.indexOf('base64') + 7);
}
var xhr, provider;
xhr = jQuery.ajaxSettings.xhr();
if (xhr.upload) {
    xhr.upload.addEventListener('progress', function (e) {
        console.log(Math.round((e.loaded * 100) / e.total) + '% done');
    }, false);
}
provider = function () {
    return xhr;
};
var ddd = JSON.stringify(images); // usually crashes here
$.ajax({
    type: 'POST',
    url: 'upload.ashx',
    xhr: provider,
    dataType: 'json',
    success: function (data) {
        alert('ajax success: data = ' + data);
    },
    error: function () {
        alert('ajax error');
    },
    data: ddd
});
What would be the best way to send the canvas elements to the server?
Should I send them all at once or one by one?
Uploading the files one by one is better. It requires less memory, and as soon as one file is ready, its upload can start instead of waiting until all the files have been prepared.
Use FormData to send the files. It lets you upload them in binary form instead of base64-encoded.
var formData = new FormData();
In Firefox, use canvas.mozGetAsFile('image.jpg') instead of canvas.toDataURL(); this avoids an unnecessary conversion from base64 back to binary.
var file = canvas.mozGetAsFile('image.jpg');
formData.append('file', file); // append() requires a field name
In Chrome, convert the base64 data into a blob (see the dataURItoBlob function in the answer below).
After playing around with a few things, I managed to figure this out myself.
First of all, this will convert a dataURI to a Blob:
//added for quick reference
function dataURItoBlob(dataURI) {
    // convert the base64/URL-encoded data component to raw binary data held in a string
    var byteString;
    if (dataURI.split(',')[0].indexOf('base64') >= 0)
        byteString = atob(dataURI.split(',')[1]);
    else
        byteString = unescape(dataURI.split(',')[1]);

    // separate out the mime component
    var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0];

    // write the bytes of the string to a typed array
    var ia = new Uint8Array(byteString.length);
    for (var i = 0; i < byteString.length; i++) {
        ia[i] = byteString.charCodeAt(i);
    }

    return new Blob([ia], {type: mimeString});
}
(The function above comes from this question.)
var blob = dataURItoBlob(canvas.toDataURL('image/jpeg'));
formData.append('file', blob); // append() requires a field name
Then send the formData object. I'm not sure how to do it in jQuery, but with a plain xhr object it goes like so:
var xhr = new XMLHttpRequest();
xhr.open('POST', 'upload.ashx', false); // false = synchronous; pass true for an asynchronous request
xhr.send(formData);
On the server you can get the files from the Files collection:
context.Request.Files[0].SaveAs(...);
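For the jQuery part the author was unsure about: pass the FormData object as data and disable jQuery's processing, e.g. this sketch against the same upload.ashx endpoint:

// processData and contentType must be false so jQuery passes the
// FormData through untouched and the browser sets the multipart boundary.
$.ajax({
    type: 'POST',
    url: 'upload.ashx',
    data: formData,
    processData: false,
    contentType: false,
    success: function (response) {
        console.log('upload finished', response);
    }
});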
Admittedly, there are similar questions lying around on Stack Overflow, but it seems none quite meet my requirements.
Here is what I'm looking to do:
Upload an entire form of data, one piece of which is a single file
Work with Codeigniter's file upload library
Up until here, all is well. The data gets into my database as I need it. But I'd also like to submit my form via an AJAX post:
Using the native HTML5 File API, not flash or an iframe solution
Preferably interfacing with the low-level .ajax() jQuery method
I think I could imagine how to do this by auto-uploading the file when the field's value changes using pure JavaScript, but I'd rather do it all in one fell swoop on form submit in jQuery. I'm thinking it's not possible to do via query strings, as I need to pass the entire file object, but I'm a little lost on what to do at this point.
Can this be achieved?
It's not too hard. Firstly, take a look at the FileReader interface.
So, when the form is submitted, catch the submission process and run:
var file = document.getElementById('fileBox').files[0]; // files[0] = 1st file
var reader = new FileReader();
reader.readAsText(file, 'UTF-8');
reader.onload = shipOff;
//reader.onloadstart = ...
//reader.onprogress = ... <-- Allows you to update a progress bar.
//reader.onabort = ...
//reader.onerror = ...
//reader.onloadend = ...

function shipOff(event) {
    var result = event.target.result;
    var fileName = document.getElementById('fileBox').files[0].name; // should be 'picture.jpg'
    $.post('/myscript.php', { data: result, name: fileName }, continueSubmission);
}
Then, on the server side (i.e. myscript.php):
$data = $_POST['data'];
$fileName = $_POST['name'];
$serverFile = time().$fileName; // prepend a timestamp to prevent overwriting
$fp = fopen('/uploads/'.$serverFile, 'w');
fwrite($fp, $data);
fclose($fp);
$returnData = array("serverFile" => $serverFile);
echo json_encode($returnData);
Or something like it. I may be mistaken (and if I am, please, correct me), but this should store the file as something like 1287916771myPicture.jpg in /uploads/ on your server, and respond with a JSON variable (to a continueSubmission() function) containing the fileName on the server.
Check out fwrite() and jQuery.post().
The page above details how to use readAsBinaryString(), readAsDataURL(), and readAsArrayBuffer() for your other needs (e.g. images, videos, etc.).
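One caveat: readAsText() will mangle binary files such as images, because the bytes get decoded as UTF-8. A binary-safe variant of the sketch above (same fileBox element and /myscript.php endpoint) reads the file as a data URL and ships the base64 payload instead, to be decoded server-side (e.g. with base64_decode() in PHP):

// Binary-safe variant: readAsDataURL() yields "data:<mime>;base64,<payload>",
// which survives transport as plain text.
var reader = new FileReader();
reader.onload = function (event) {
    var base64 = event.target.result.split(',')[1]; // strip the "data:...;base64," prefix
    var fileName = document.getElementById('fileBox').files[0].name;
    $.post('/myscript.php', { data: base64, name: fileName }, continueSubmission);
};
reader.readAsDataURL(document.getElementById('fileBox').files[0]);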
With jQuery (and without FormData API) you can use something like this:
function readFile(file){
    var loader = new FileReader();
    var def = $.Deferred(), promise = def.promise();
    //--- provide classic deferred interface
    loader.onload = function (e) { def.resolve(e.target.result); };
    loader.onprogress = loader.onloadstart = function (e) { def.notify(e); };
    loader.onerror = loader.onabort = function (e) { def.reject(e); };
    promise.abort = function () { return loader.abort.apply(loader, arguments); };
    loader.readAsBinaryString(file);
    return promise;
}
function upload(url, data){
    var def = $.Deferred(), promise = def.promise();
    var mul = buildMultipart(data);
    var req = $.ajax({
        url: url,
        data: mul.data,
        processData: false,
        type: "post",
        async: true,
        contentType: "multipart/form-data; boundary=" + mul.bound,
        xhr: function() {
            var xhr = jQuery.ajaxSettings.xhr();
            if (xhr.upload) {
                xhr.upload.addEventListener('progress', function(event) {
                    var percent = 0;
                    var position = event.loaded || event.position; /* event.position is deprecated */
                    var total = event.total;
                    if (event.lengthComputable) {
                        percent = Math.ceil(position / total * 100);
                        def.notify(percent);
                    }
                }, false);
            }
            return xhr;
        }
    });
    req.done(function(){ def.resolve.apply(def, arguments); })
       .fail(function(){ def.reject.apply(def, arguments); });
    promise.abort = function(){ return req.abort.apply(req, arguments); };
    return promise;
}
var buildMultipart = function(data){
    var key, crunks = [], bound = false;
    while (!bound) {
        bound = $.md5 ? $.md5(new Date().valueOf()) : (new Date().valueOf());
        for (key in data) if (~data[key].indexOf(bound)) { bound = false; continue; }
    }
    for (var key = 0, l = data.length; key < l; key++){
        if (typeof(data[key].value) !== "string") {
            crunks.push("--" + bound + "\r\n" +
                "Content-Disposition: form-data; name=\"" + data[key].name + "\"; filename=\"" + data[key].value[1] + "\"\r\n" +
                "Content-Type: application/octet-stream\r\n" +
                "Content-Transfer-Encoding: binary\r\n\r\n" +
                data[key].value[0]);
        } else {
            crunks.push("--" + bound + "\r\n" +
                "Content-Disposition: form-data; name=\"" + data[key].name + "\"\r\n\r\n" +
                data[key].value);
        }
    }
    return {
        bound: bound,
        data: crunks.join("\r\n") + "\r\n--" + bound + "--"
    };
};
//----------
//---------- On submit form:
var form = $("form");
var $file = form.find("#file");
readFile($file[0].files[0]).done(function(fileData){
    var formData = form.find(":input:not('#file')").serializeArray();
    formData.push({ name: "file", value: [fileData, $file[0].files[0].name] }); // push, so buildMultipart's numeric loop sees the file entry
    upload(form.attr("action"), formData).done(function(){ alert("successfully uploaded!"); });
});
With the FormData API you just have to add all the fields of your form to a FormData object and send it via $.ajax({ url: url, data: formData, processData: false, contentType: false, type: "POST" }).
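A minimal sketch of that FormData route (the form selector is an assumption):

// FormData(formElement) picks up all named fields, including file
// inputs, in binary form; no manual multipart building required.
var form = $("form")[0];
var formData = new FormData(form);
$.ajax({
    url: form.action,
    type: "POST",
    data: formData,
    processData: false, // leave the FormData object untouched
    contentType: false  // let the browser set the multipart boundary
}).done(function () {
    alert("successfully uploaded!");
});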