Multifile JavaScript sending wrong index order (also repeating index) - javascript

I'm trying to send multiple files to dropbox using JavaScript and Ruby/Rails. First I have to decode an image with qr code and send it to my server in Ruby, then send it to dropbox. The problem is, when I'm trying to decode a file and send it, it is not going in the right order. I'm aware that this is probably because of the onload function of the reader. But how can I solve this and why this is the way it is?
PS: My QR code has to be a number (a code from the DB); that's why I'm using 'isnum'.
PS2: I tried 10 files, and the console.log that prints 'j' produced this order: 1, 7, 7, 4, 6, 6, 6, 6, 3, and 0. How can I keep these indexes from repeating?
// Decodes the QR code contained in `file` (the j-th selected file) and,
// when the decoded value is numeric, synchronously POSTs it together with
// the file to /docs/baixaponto, accumulating results in global arrays.
//
// NOTE(review): `qrcode` is a single shared decoder object and
// `qrcode.callback` is reassigned inside every reader's onload, so when
// several files are decoded concurrently the callbacks clobber each other —
// that is why the logged `j` values repeat and arrive out of order.
// Installing the callback before calling decode and processing files
// strictly one at a time would fix it.
function setupReader(file, j){
var reader = new FileReader();
reader.onload = function (e) {
// Start decoding the data-URL; the library reports the result through
// the (global) qrcode.callback assigned on the next line.
qrcode.decode(e.target.result);
qrcode.callback = function(a){
console.log("callback", j);
// The QR payload must be purely numeric (a code from the DB).
var isnum = /^\d+$/.test(a);
var form_data = new FormData();
count_send++; // global: how many files have been processed so far
if (isnum){
form_data.append("code", a);
form_data.append("file", file);
// console.log(form_data);
var request = new XMLHttpRequest();
// Deliberately synchronous POST so the outcome is known before the
// bookkeeping below runs.
request.open('POST', '/docs/baixaponto', /* async = */ false);
request.setRequestHeader('X-CSRF-Token', $('meta[name="csrf-token"]').attr('content'));
request.send(form_data);
// NOTE(review): `returnedStatus` and `returnedStringFinal` are not defined
// in this snippet — presumably globals set elsewhere; `request.status`
// would normally be checked here. TODO confirm.
if (returnedStatus != 200){
error_files_dropbox_baixa_filesimgs.push(e.target.result);
e.target.result = "";
error_files_dropbox_baixa_arquivos.push( returnedStringFinal );
error_files_dropbox_baixa.push( file );
error_files_dropbox_baixacount++;
}
else{
success_files_filesimgs.push(e.target.result);
e.target.result = "";
success_files_arquivos.push( returnedStringFinal );
success_files.push( file );
success_filescount++;
}
}
else {
// Decode produced a non-numeric payload: record it as a decode failure.
// NOTE(review): `imgcoded64` is not defined in this snippet — possibly
// e.target.result was intended. TODO confirm.
error_files_decode_filesimgs.push(imgcoded64);
e.target.result = "";
error_files_decode.push( file );
error_files_decodecount++;
}
// Once every selected file has been processed (`count` is set globally
// by readqrcode()), show the summary modal after a short delay.
if ( count_send == count ){
//function that opens a modal with file results
setTimeout(showdetalhedfiles, 2000);
}
};
}
reader.readAsDataURL(file);
}
The other function is:
// Hands every file chosen in the #myfiles input, together with its index,
// to setupReader(). `f` and `count` are intentionally left global —
// setupReader's completion check reads `count`.
function readqrcode(){
  f = document.getElementById('myfiles');
  count = f.files.length;
  if (!(f.files && f.files[0])) {
    alert("Nenhum arquivo selecionado");
    return;
  }
  var idx = 0;
  while (idx < count) {
    setupReader(f.files[idx], idx);
    idx++;
  }
}

Related

Merge Mutilple pdf blobs into one

Okay so I'm converting html into pdfs, the pdf returned from my backend I convert to a new Blob with type: 'application/pdf', this all works fine. I now want to merge multiple Blobs into one. I'm using the following function to do so.
// Concatenates an array of Blobs into a single Blob of the given MIME type
// and passes the result to `callback`.
//
// Fixes vs. the original implementation:
//  - The original copied data through a Uint16Array, which (a) allocated
//    `byteLength` *elements* (i.e. twice the needed bytes), (b) advanced the
//    write offset in bytes while TypedArray.set() expects an offset in
//    elements, and (c) truncated the final byte of any odd-length buffer.
//    Any of these corrupts the output (hence "only the last pdf shows").
//    Byte-wise Uint8Array copying is lossless.
//  - Blob.prototype.arrayBuffer() replaces the manual sequential FileReader
//    chain; Promise.all preserves the input order.
//
// @param {Blob[]} blobs - blobs to join, in order
// @param {string} type - MIME type for the resulting Blob
// @param {function(Blob)} callback - receives the concatenated Blob
function ConcatenateBlobs(blobs, type, callback) {
  Promise.all(blobs.map(function(blob) {
    return blob.arrayBuffer();
  })).then(function(buffers) {
    var total = buffers.reduce(function(sum, buffer) {
      return sum + buffer.byteLength;
    }, 0);
    var joined = new Uint8Array(total);
    var offset = 0; // byte offset into `joined`
    buffers.forEach(function(buffer) {
      joined.set(new Uint8Array(buffer), offset);
      offset += buffer.byteLength;
    });
    callback(new Blob([joined.buffer], {
      type: type
    }));
  });
}
But for some reason I am only getting the last pdf in the array to show as the result.

Javascript file reader does not get stored in array

I am trying to upload multiple images, so I read that I can generate a temporary URL and send them with AJAX.
The idea is to push the URL created with FileReader into an array and then send it with AJAX, but the URLs are not pushed properly. When I inspect the result I get what looks like an empty array:
But if I click the arrow I can see the url's inside
But them seems Inaccessible.
This is my code:
// Collects a data-URL for every selected file.
// NOTE: FileReader.onload fires asynchronously, so `arr` is still empty when
// console.log runs at the bottom — this is the bug the question is about,
// reproduced here unchanged.
$('form').on('submit',function(e) {
  e.preventDefault();
  var input = document.getElementById("myFile");
  var chosen = input.files;
  var fd = new FormData();
  var arr = [];
  if (FileReader && chosen && chosen.length) {
    // NB: `i` is an implicit global here, exactly as in the original.
    for (i = 0; i < chosen.length; i++) {
      (function(file) {
        var name = file.name;
        var fr = new FileReader();
        fr.onload = function () {
          arr.push(fr.result);
        };
        fr.readAsDataURL(file);
      })(chosen[i]);
    }
    console.log(arr); // runs before any onload has fired
  }
});
The final idea is to convert the array to a string with JSON.stringify(arr) and then parse it in PHP with json_decode($_POST['arr']).
Of course this is not working, because JSON.stringify(arr) produces an empty result.
Maybe the following simple solution works for you? I placed your console.log() and your ajax call into the fr.onload() method but fire it only, after your results array has been filled up with all values:
// Same collection logic, but the log / AJAX call is deferred until every
// reader has delivered its result (arr.length equals the file count).
$('form').on('submit',function(e) {
  e.preventDefault();
  var chosen = document.getElementById("myFile").files;
  var fd = new FormData();
  var arr = [];
  if (!(FileReader && chosen && chosen.length)) {
    return;
  }
  Array.prototype.forEach.call(chosen, function(file) {
    var name = file.name;
    var fr = new FileReader();
    fr.onload = function () {
      arr.push(fr.result);
      // Fire only once, after the last pending reader finishes.
      if (arr.length == chosen.length) {
        console.log(arr);
        // place your ajax call here!
      }
    };
    fr.readAsDataURL(file);
  });
});

Get Byte Position during Upload Loop

I am working on a function that will write data to a remote server in chunks using a 3rd party API. Through some help on Stack Overflow I was able to accomplish this, where it is now working as expected. The problem is that I can only get a single 16kb chunk to write as I will need to advance the pos of where the next bytes are written to.
The initial write starts at 0 easily enough. Due to my unfamiliarity with this though, I am unsure if the next pos should just be 16 or what. If it helps, the API call writeFileChunk() takes 3 parameters, filepath (str), pos (int64), and data (base64 encoded string).
// Uploads the file (already read into an ArrayBuffer by the enclosing
// FileReader) to the server in sequential 16 KiB chunks.
// Fix vs. the original: the ajax payload always sent 'pos': 0, so every
// chunk overwrote the first 16 KiB server-side. The byte offset must be
// captured *before* `count` is advanced and sent as `pos`.
reader.onload = function(evt)
{
  // Get SERVER_ID from URL
  var server_id = getUrlParameter('id');
  $("#upload_status").text('Uploading File...');
  $("#upload_progress").progressbar('value', 0);
  var chunkSize = 16<<10; // 16 KiB per chunk
  var buffer = evt.target.result;
  var fileSize = buffer.byteLength;
  var segments = Math.ceil(fileSize / chunkSize); // How many segments do we need to divide into for upload
  var count = 0;
  // start the file upload
  (function upload()
  {
    var segSize = Math.min(chunkSize, fileSize - count * chunkSize);
    if (segSize > 0)
    {
      $("#upload_progress").progressbar('value', (count / segments));
      // Byte position of this chunk, taken before count is incremented —
      // this is where the server must write the chunk.
      var pos = count++ * chunkSize;
      var chunk = new Uint8Array(buffer, pos, segSize); // get a chunk
      var chunkEncoded = btoa(String.fromCharCode.apply(null, chunk));
      // Send Chunk data to server
      $.ajax({
        type: "POST",
        url: "filemanagerHandler.php",
        data: { 'action': 'writeFileChunk', 'server_id': server_id, 'filepath': filepath, 'pos': pos, 'chunk': chunkEncoded },
        dataType: 'json',
        success: function(data)
        {
          console.log(data);
          setTimeout(upload, 100); // next chunk only after this one succeeds
        },
        error: function(XMLHttpRequest, textStatus, errorThrown)
        {
          alert("Status: " + textStatus); alert("Error: " + errorThrown); alert("Message: " + XMLHttpRequest.responseText);
        }
      });
    }
    else
    {
      $("#upload_status").text('Finished!');
      $("#upload_progress").progressbar('value', 100);
      getDirectoryListing(curDirectory);
    }
  })()
};
The current position for the file on client side would be represented by this line, or more specifically the second argument at the pre-incremental step:
var chunk = new Uint8Array(buffer, count++ * chunkSize, segSize);
though, in this case it advances (count++) before you can reuse it so if you need the actual position (below as pos) you can extract it by simply rewriting the line into:
var pos = count++ * chunkSize; // here chunkSize = 16kb
var chunk = new Uint8Array(buffer, pos, segSize);
Here each position update will increment 16kb as that is the chunk-size. For progress then it is calculated pos / fileSize * 100. This of course assuming using the unencoded buffer size.
The only special case is the last chunk, but when there are no more chunks left to read the position should be equal to the file length (fileSize) so it should be pretty straight-forward.
When the ajax call return the server should have the same position unless something went wrong (connection, write access change, disk full etc.).
You can use Filereader API to read the chunks and send it to your remote server.
HTML
<input type="file" id="files" name="file" /> Read bytes:
<span class="readBytesButtons">
<button>Read entire file in chuncks</button>
</span>
Javascript
// POSTs one chunk descriptor to the remote server.
// Resolves with the parsed JSON response, rejects with the HTTP status code.
//
// Fixes vs. the original:
//  - hasOwnProperty(action) / obj[action] used bare identifiers, which throws
//    a ReferenceError; the keys must be string literals.
//  - Parameter values are now encodeURIComponent-escaped (the base64/binary
//    chunk data would otherwise corrupt the query string).
//  - A Content-Type header is set so the server parses the urlencoded body.
function postChunk(obj) {
  var url = "https://your.remote.server";
  return new Promise(function(resolve, reject) {
    var xhr = new XMLHttpRequest();
    xhr.open('post', url, true);
    xhr.responseType = 'json';
    xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
    xhr.onload = function() {
      var status = xhr.status;
      if (status == 200) {
        resolve(xhr.response);
      } else {
        reject(status);
      }
    };
    var params = "";
    // check that obj has the proper keys and create the url parameters
    var keys = ['action', 'server_id', 'filepath', 'pos', 'chunk'];
    var hasAll = keys.every(function(key) {
      return obj.hasOwnProperty(key);
    });
    if (hasAll) {
      params = keys.map(function(key) {
        return key + "=" + encodeURIComponent(obj[key]);
      }).join("&");
    }
    if (params.length > 0) {
      xhr.send(params);
    } else {
      alert('Error');
    }
  });
}
// Attaches an onloadend handler to `reader` that stores the read chunk on
// `obj` and posts it to the server, logging the byte range into div `divID`.
// Fix vs. the original: the displayed range used
// (pos*16000)+(pos+1)*chunk.length as the end offset, which is wrong for
// every chunk after the first; the correct range is
// [pos*chunkSize, pos*chunkSize + chunk.length).
function addChunk(reader,obj,divID) {
  reader.onloadend = function(evt) {
    if (evt.target.readyState == FileReader.DONE) { // DONE == 2
      obj.chunk = evt.target.result;
      console.log(obj);
      var start = obj.pos * 16000; // 16000 = chunkSize used by readChunk()
      var end = start + obj.chunk.length;
      document.getElementById(divID).textContent +=
        ['Sending bytes: ', start, ' - ', end, '\n'].join('');
      // post data to server
      postChunk(obj).then(function(data) {
        if (data !== "" && data !== null && typeof data !== "undefined") {
          // chunk was sent successfully
          document.getElementById(divID).textContent +=
            ['Sent bytes: ', start, ' - ', end, '\n'].join('');
        } else {
          alert('Error! Empty response');
        }
      }, function(status) {
        alert('Resolve Error');
      });
    }
  };
}
// Splits the selected file into 16 kB chunks and dispatches each one via
// addChunk(). Fixes vs. the original:
//  - Blob.slice's end argument is EXCLUSIVE, so the `-1` in the original
//    stop calculation silently dropped the last byte of every chunk except
//    the final one.
//  - The loop variable was an implicit global (`for(i=0;...)`).
//  - parseInt is called with an explicit radix.
function readChunk() {
  var files = document.getElementById('files').files;
  if (!files.length) {
    alert('Please select a file!');
    return;
  }
  var file = files[0];
  var size = parseInt(file.size, 10);
  var chunkSize = 16000;
  var chunks = Math.ceil(size / chunkSize);
  for (var i = 0; i < chunks; i++) {
    var start = i * chunkSize;
    // End offset is exclusive; clamp the final chunk to the file size.
    var stop = Math.min((i + 1) * chunkSize, size);
    var reader = new FileReader();
    var blob = file.slice(start, stop);
    reader.readAsBinaryString(blob);
    var obj = {action: 'writeFileChunk', server_id: 'sid', filepath: 'path', pos: i, chunk: ""};
    var div = document.createElement('div');
    div.id = "bytes" + i;
    document.body.appendChild(div);
    addChunk(reader, obj, div.id);
  }
}
// Feature-detect the HTML5 File APIs before wiring up the UI.
if (!(window.File && window.FileReader && window.FileList && window.Blob)) {
  alert('The File APIs are not fully supported in this browser.');
} else {
  console.log(' Great success! All the File APIs are supported.');
}
// One delegated listener for the whole button group.
document.querySelector('.readBytesButtons').addEventListener('click', function(evt) {
  var tag = evt.target.tagName.toLowerCase();
  if (tag === 'button') {
    readChunk();
  }
}, false);
You can check this example in this Fiddle

Multiple XMLHttpRequest.send or eventlisteners memory leak?

I'm currently implementing an upload for files. Because I have to handle huge files here and there, I've started to slice files and send them in 1 MB chunks, which works great as long as files are <~500 MB; after that it seems that memory isn't freed anymore and I can't figure out what I'm missing here.
Prepare chunks
// --- upload-wide state shared between parseFile() and sendChunkAsBinary() ---
var sliceCount = 0;   // chunks sliced off the file so far (debug counter)
var sendCount = 0;    // chunks handed to XMLHttpRequest so far
var fileID = generateUUID(); // presumably one ID per upload so the server can group chunks — TODO confirm
var maxChunks = 0;    // total chunk count, computed in parseFile()
var userNotified = false; // guards the one-time success notification
// Reads `file` in 1 MiB chunks and streams each chunk to the server via
// sendChunkAsBinary(). Chunks are read strictly sequentially: the next slice
// is only requested after the previous one's onload has fired.
// Minor cleanup vs. the original: the unused `var self = this;` local and
// the redundant `numberOfChunks` temporary were removed.
function parseFile(file)
{
  var fileSize = file.size;
  var chunkSize = 1024 * 1024;//64 * 1024; // bytes
  var offset = 0; // byte offset of the next chunk to read
  var chunkReaderBlock = null;
  maxChunks = Math.ceil(fileSize / chunkSize); // shared with sendChunkAsBinary()

  // gets called once a chunk has been read into memory
  var readEventHandler = function (evt)
  {
    if (evt.target.error == null) {
      offset += evt.target.result.byteLength;
      sendChunkAsBinary(evt.target.result);
    }
    else
    {
      console.log("Read error: " + evt.target.error);
      return;
    }
    if (offset >= fileSize) {
      console.log("Done reading file");
      return;
    }
    // off to the next chunk
    chunkReaderBlock(offset, chunkSize, file);
  }

  // Slices [_offset, _offset + length) off the file and reads it as an
  // ArrayBuffer; completion is reported through readEventHandler.
  chunkReaderBlock = function (_offset, length, _file)
  {
    var r = new FileReader();
    var blob = _file.slice(_offset, length + _offset);
    sliceCount++;
    console.log("Slicecount: " + sliceCount);
    r.onload = readEventHandler;
    r.readAsArrayBuffer(blob);
    blob = null; // drop local references promptly to help the GC
    r = null;
  }

  // now let's start the read with the first block
  chunkReaderBlock(offset, chunkSize, file);
}
Send Chunks
// Wraps `chunk` (an ArrayBuffer) in a Blob and POSTs it to upload.php as
// multipart form data, updating the UIkit progress bar as it goes.
// Uses the upload-wide globals sendCount/maxChunks/fileID/userNotified.
function sendChunkAsBinary(chunk)
{
var progressbar = $("#progressbar"), bar = progressbar.find('.uk-progress-bar');
// create XHR instance
var xhr = new XMLHttpRequest();
// send the file through POST
xhr.open("POST", 'upload.php', true);
var progressHandler = function (e)
{
// get percentage of how much of the current file has been sent
// NOTE(review): `position` and `total` are computed but never used;
// the percentage is derived from the chunk counters instead.
var position = e.loaded || e.position;
var total = e.total || e.totalSize;
var percentage = Math.round((sendCount / maxChunks) * 100);
// set bar width to keep track of progress
bar.css("width", percentage + "%").text(percentage + "%");
}
// let's track upload progress
var eventSource = xhr.upload || xhr;
eventSource.addEventListener("progress", progressHandler);
// state change observer - we need to know when and if the file was successfully uploaded
// NOTE(review): the progress listener is only removed on HTTP 200. On any
// non-200 response the listener — and, through its closure, `bar`,
// `progressbar` and this xhr — stays referenced; combined with one live XHR
// per chunk this looks like a plausible source of the memory growth being
// debugged. Removing the listener in all readyState==4 paths is worth trying.
xhr.onreadystatechange = function ()
{
if (xhr.readyState == 4)
{
if (xhr.status == 200)
{
eventSource.removeEventListener("progress", progressHandler);
if (sendCount == maxChunks && !userNotified)
{
// Last chunk acknowledged: notify once, reset the bar, refresh the list.
userNotified = true;
notifyUserSuccess("Datei hochgeladen!");
setTimeout(function ()
{
progressbar.addClass("uk-invisible");
bar.css("width", "0%").text("0%");
}, 250);
updateDocList();
}
}
else
{
notifyUser("Fehler beim hochladen der Datei!");
}
}
};
// Wrap the raw ArrayBuffer in a Blob, with a legacy BlobBuilder fallback.
var blob;
if (typeof window.Blob == "function") {
blob = new Blob([chunk]);
} else {
var bb = new (window.MozBlobBuilder || window.WebKitBlobBuilder || window.BlobBuilder)();
bb.append(chunk);
blob = bb.getBlob();
}
sendCount++; // advanced before append so chunkNumber is 1-based
var formData = new FormData();
formData.append("chunkNumber", sendCount);
formData.append("maxChunks", maxChunks);
formData.append("fileID", fileID);
formData.append("chunkpart", blob);
xhr.send(formData);
progressbar.removeClass("uk-invisible");
console.log("Sendcount: " + sendCount);
}
If I attach the debugger within Visual Studio 2015, it takes a bit, but soon I get an OutOfMemoryException in the send function at exactly this line: blob = new Blob([chunk]);. It's always the same line where the exception occurs.
As soon as the Exception happens I get POST [...]/upload.php net::ERR_FILE_NOT_FOUND however I still got the chunks in my php-file.
Here's a Timeline-graph of my error
What I dont understand, I'm not able to see increasing memory inside the Task-Manager (a few mb of course but not close to 16gb ram I got).
So can anyone tell me where this leak comes from? What am I missing?

read a text file and parse using comma separator and store it to array of objects?

I am trying to read a file and save the comma-separated values to objects in JavaScript. I tried it like this:
Here I am reading the text file into the variable out, splitting it, and saving it to objects.
Is this the correct way to assign values to objects?
<script type="text/javascript">
// Template describing the shape of one parsed pattern record.
// Fix vs. the original: object-literal members must use `key: value`, not
// `key = value` — the `i=0` entries inside firstPtIndx/patternPointName
// were a SyntaxError.
var patternDetails = [
  {
    patternIndex: 0,
    patternLineCount: 0,
    patternPointCount: 0,
    firstPtIndx: [
      { i: 0 }
    ],
    patternPointName: [
      { i: 0 }
    ],
    patternPoint: [
      { X: 0, Y: 0, Z: 0 }
    ],
    secondPtIndx: [
      { i: 0 }
    ],
  }
]
// NOTE(review): this reassignment immediately discards the template above —
// patternDetails becomes an empty array, so patternDetails[0] is undefined
// by the time displayContents() runs. Either drop this line or push fresh
// records into the array instead.
patternDetails=new Array();// Is this object creation correct ?
// Returns true and initialises the global `reader` when the browser supports
// the HTML5 File APIs; otherwise warns the user and returns false.
function checkFileAPI() {
  var supported = !!(window.File && window.FileReader && window.FileList && window.Blob);
  if (!supported) {
    alert('The File APIs are not fully supported by your browser. Fallback required.');
    return false;
  }
  reader = new FileReader(); // intentionally global: readText() uses it
  return true;
}
/**
 * Reads the file selected in `filePath` (an <input type="file">) as text,
 * splits it into lines and forwards them to displayContents().
 * Always returns true. (The original's commented-out ActiveX / IE 6-8
 * fallback has been removed; it was dead code.)
 */
function readText(filePath) {
  var output = ""; // placeholder for text output
  var hasFile = filePath.files && filePath.files[0];
  if (hasFile) {
    // `reader` is the global FileReader created by checkFileAPI().
    reader.onload = function (e) {
      output = e.target.result;
      displayContents(output.split("\n"));
    };
    reader.readAsText(filePath.files[0]);
  }
  return true;
}
/**
 * display content using a basic HTML replacement
 *
 * Parses the 5th line of the loaded file (comma separated) into
 * patternDetails, draws the first stored point via DRAW_SP() and dumps the
 * raw values into the #info element.
 *
 * NOTE(review): as written this cannot run successfully —
 *  - patternDetails is reset to an empty array above, so patternDetails[i]
 *    is undefined here and the first assignment throws;
 *  - `new patternIndex(...)`, `new patternLineCount(...)`,
 *    `new patternPointName(...)`, `new patternPoint(...)` invoke constructor
 *    functions that are never defined anywhere in this script — plain
 *    assignments such as `patternDetails[i].patternIndex = Number(ver[++z])`
 *    appear to be what was intended;
 *  - `new firstPtIndx[b](...)` indexes a non-existent array of constructors
 *    (and is reused for secondPtIndx, likely a copy-paste slip).
 */
function displayContents(txt)
{
var ver =txt[4].split(","); // 5th line of the file holds the CSV payload
var i=0; // only the first pattern record is ever filled
var z=0; // running column index into ver; pre-incremented so ver[0] is skipped
patternDetails[i].patternIndex=new patternIndex(ver[++z]);// can i do this . is this is correct wary?
patternDetails[i].patternLineCount=new patternLineCount(ver[++z]);
patternDetails[i].patternPointCount=new patternPointCount(ver[++z]);
// One iteration per line: first point (index, name, X, Y, Z) then second point.
for(var b=0;b<patternDetails[i].patternLineCount;b++)
{
patternDetails[i].firstPtIndx[b]=new firstPtIndx[b](ver[++z]);
patternDetails[i].patternPointName[patternDetails[i].firstPtIndx[b]]= new patternPointName(ver[++z]);
patternDetails[i].patternPoint[patternDetails[i].firstPtIndx[b]].X= new patternPoint(ver[++z]);
patternDetails[i].patternPoint[patternDetails[i].firstPtIndx[b]].Y= new patternPoint(ver[++z]);
patternDetails[i].patternPoint[patternDetails[i].firstPtIndx[b]].Z= new patternPoint(ver[++z]);
patternDetails[i].secondPtIndx[b]=new firstPtIndx[b](ver[++z]);
patternDetails[i].patternPointName[patternDetails[i].secondPtIndx[b]]= new patternPointName(ver[++z]);
patternDetails[i].patternPoint[patternDetails[i].secondPtIndx[b]].X= new patternPoint(ver[++z]);
patternDetails[i].patternPoint[patternDetails[i].secondPtIndx[b]].Y= new patternPoint(ver[++z]);
patternDetails[i].patternPoint[patternDetails[i].secondPtIndx[b]].Z= new patternPoint(ver[++z]);
}
// Draw the first stored point.
var p= patternDetails[0].patternPoint[patternDetails[0].firstPtIndx[0]].X;
var q= patternDetails[0].patternPoint[patternDetails[0].firstPtIndx[0]].Y;
var r= patternDetails[0].patternPoint[patternDetails[0].firstPtIndx[0]].Z;
DRAW_SP(p,q,r);
var el = document.getElementById('info');
el.innerHTML = ver; //display output in DOM
}
The TLR file that I am reading is:
//TLR:Format:Vishama Creations:v1.0
//Pattern_no,Pattern_NumLines,FrstPointIndx,PointName,aX,aY,aZ,bX,bY,bZ,SecondPointIndex,PointName,aX,aY,aZ,bX,bY,bZ
2
L
,0,23,24,0,f:p0,0.008,0.235,0.051,1,f:p1,0.008,0.147,0.085,10,f:p10,0.060,0.053,0.109,11,f:p11,0.108,0.050,0.094,12,f:p12,-0.045,0.053,0.111,13,f:p13,-0.091,0.049,0.090,14,f:p14,0.063,-0.049,0.095,15,f:p15,0.081,-0.050,0.082,15,f:p15,0.081,-0.050,0.082,16,f:p16,0.081,-0.050,0.082,17,f:p17,-0.048,-0.049,0.096,18,f:p18,-0.063,-0.050,0.084,18,f:p18,-0.063,-0.050,0.084,19,f:p19,-0.063,-0.050,0.084,1,f:p1,0.008,0.147,0.085,2,f:p2,0.008,0.051,0.102,1,f:p1,0.008,0.147,0.085,4,f:p4,-0.049,0.156,0.108,1,f:p1,0.008,0.147,0.085,7,f:p7,0.065,0.156,0.107,2,f:p2,0.008,0.051,0.102,10,f:p10,0.060,0.053,0.109,2,f:p2,0.008,0.051,0.102,12,f:p12,-0.045,0.053,0.111,2,f:p2,0.008,0.051,0.102,3,f:p3,0.008,-0.049,0.106,3,f:p3,0.008,-0.049,0.106,14,f:p14,0.063,-0.049,0.095,3,f:p3,0.008,-0.049,0.106,17,f:p17,-0.048,-0.049,0.096,4,f:p4,-0.049,0.156,0.108,21,f:p21,-0.055,0.181,0.084,4,f:p4,-0.049,0.156,0.108,5,f:p5,-0.105,0.151,0.095,5,f:p5,-0.105,0.151,0.095,20,f:p20,-0.103,0.165,0.082,5,f:p5,-0.105,0.151,0.095,6,f:p6,-0.118,0.146,0.082,7,f:p7,0.065,0.156,0.107,22,f:p22,0.071,0.179,0.083,7,f:p7,0.065,0.156,0.107,8,f:p8,0.120,0.151,0.094,8,f:p8,0.120,0.151,0.094,23,f:p23,0.119,0.163,0.083,8,f:p8,0.120,0.151,0.094,9,f:p9,0.133,0.147,0.084

Categories