I'm trying to upload multiple attachments.
First I get the attachments from the user interface, then I convert them into JSON, and then I need to make a server call.
For this I'm using FileReader.
// showing ajax loader
component.set("v.showLoadingSpinner", true);
// getting attached files
var files = component.find("fileId").get("v.files");
var details = {}; // JS object that needs to be sent to the server
details.files = [];
for (var i = 0; i < files.length; i++) {
    (function(file) {
        var name = file.name;
        var reader = new FileReader();
        reader.fName = file.name;
        reader.fType = file.type;
        reader.i = i;
        reader.onload = function(e) {
            var fileContents = reader.result;
            var base64 = 'base64,';
            var dataStart = fileContents.indexOf(base64) + base64.length;
            fileContents = fileContents.substring(dataStart);
            var startPosition = 0;
            var endPosition = Math.min(fileContents.length, startPosition + 750000);
            var getchunk = fileContents.substring(startPosition, endPosition);
            var fDetails = {};
            fDetails.fileName = reader.fName;
            fDetails.base64Data = encodeURIComponent(getchunk);
            fDetails.contentType = reader.fType;
            details.files.push(fDetails);
        };
        reader.readAsDataURL(file);
    })(files[i]);
}
// I want to make a server call here with the data in the "details" object.
console.log(details);
But I'm not getting any data in the console.log above.
Please help me achieve this.
You can use promises:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
https://davidwalsh.name/promises
https://developers.google.com/web/fundamentals/primers/promises
jQuery also provides the $.when() function:
https://api.jquery.com/jquery.when/
And with promisejs you can do something like this:
function readJSON(filename) {
    return new Promise(function (fulfill, reject) {
        readFile(filename, 'utf8').done(function (res) {
            try {
                fulfill(JSON.parse(res));
            } catch (ex) {
                reject(ex);
            }
        }, reject);
    });
}
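One way to apply this to the code in the question is to wrap each FileReader in a Promise and wait for all of them with Promise.all before making the server call. This is only a sketch reusing the names from the question (component, the fileId input, the 750000-character chunk), not tested Aura controller code:

function readFileAsBase64(file) {
    // Wrap FileReader in a Promise so all files can be awaited with Promise.all
    return new Promise(function (resolve, reject) {
        var reader = new FileReader();
        reader.onload = function () {
            var result = reader.result;
            var marker = 'base64,';
            var base64Data = result.substring(result.indexOf(marker) + marker.length);
            resolve({
                fileName: file.name,
                contentType: file.type,
                // same 750000-character cap as in the question
                base64Data: encodeURIComponent(base64Data.substring(0, 750000))
            });
        };
        reader.onerror = reject;
        reader.readAsDataURL(file);
    });
}

var files = component.find("fileId").get("v.files");
Promise.all(Array.prototype.map.call(files, readFileAsBase64))
    .then(function (fileDetails) {
        var details = { files: fileDetails };
        console.log(details); // the array is populated here, after every read finished
        // make the server call with "details" here
    })
    .catch(function (err) {
        console.error("Reading a file failed", err);
    });

In an Aura controller you would typically wrap the .then callback in $A.getCallback before touching the component or enqueuing the Apex action.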
I stored some JPEG files (exactly 350, the same files, same size, 336.14 MB in total) as Blobs in IndexedDB. The transaction took around 1 second to complete. Then I read all the data from IndexedDB into an array and stored it in IndexedDB again. This time it takes around 15 seconds, and I observe this behavior consistently. Is anything wrong here? I used performance.now() to measure the time difference.
Files: 350
Size of each: 937 KB
Browser: Chrome and Chromium Edge
// Open
var dbOpen = indexedDB.open(INDEXED_DB_NAME, INDEXED_DB_VERSION);
dbOpen.onupgradeneeded = function (e) {
    console.log("onupgradeneeded");
    var store = e.currentTarget.result.createObjectStore(
        IMAGE_DATA_STORE, { autoIncrement: true });
};
dbOpen.onsuccess = function (e) {
    image_data_db = dbOpen.result;
    console.log("indexed DB opened");
};
// Initial write
var inputFiles = document.getElementById('inputFiles');
for (var i = 0; i < inputFiles.files.length; i++) {
    let file = inputFiles.files[i];
    var b = new Blob([file], { type: file.type });
    fileblobs.push(b);
}
StoreIdb(fileblobs); // <-- first write
// StoreIdb()
t0 = performance.now();
var trx = image_data_db.transaction(IMAGE_DATA_STORE, 'readwrite');
var imagestore = trx.objectStore(IMAGE_DATA_STORE);
for (i = 0; i < fileblobs.length; i++) {
    request = imagestore.add(fileblobs[i]);
    request.onsuccess = function (e) {
        console.log('added');
    };
    request.onerror = function (e) {
        console.error("Request Error", this.error);
    };
}
trx.onabort = function (e) {
    console.error("Exception:", this.error, this.error.name);
};
trx.oncomplete = function (e) {
    console.log('completed');
    t1 = performance.now();
    timetaken = t1 - t0;
};
// Read
var objectStore = image_data_db.transaction(IMAGE_DATA_STORE).objectStore(IMAGE_DATA_STORE);
objectStore.openCursor().onsuccess = function (e) {
    var cursor = e.target.result;
    if (cursor) {
        blobArray.push(cursor.value); // each stored value is the Blob itself
        cursor.continue();
    } else {
        // completed
    }
};
// blobArray is used for the second write
I figured it out. The first time, it was storing Blobs backed by the File instances.
I changed the File-backed Blob to one built from an ArrayBuffer, just to make sure the data type is the same in both cases. Now both writes take the same time.
for (var i = 0; i < inputFiles.files.length; i++) {
    let file = inputFiles.files[i];
    file.arrayBuffer().then((arrayBuffer) => {
        let blob = new Blob([new Uint8Array(arrayBuffer)], { type: file.type });
        blobs.push(blob);
        if (blobs.length == inputFiles.files.length) {
            callback(blobs);
        }
    });
}
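As a side note, the manual length check can also be replaced with Promise.all, since File.arrayBuffer() already returns a promise. A small sketch reusing the blobs/callback names from the snippet above:

// Collect all files as ArrayBuffer-backed Blobs, then hand them to the callback once
Promise.all(
    Array.prototype.map.call(inputFiles.files, function (file) {
        return file.arrayBuffer().then(function (arrayBuffer) {
            return new Blob([new Uint8Array(arrayBuffer)], { type: file.type });
        });
    })
).then(function (blobs) {
    callback(blobs); // same callback as in the loop version above
});

Unlike the counter-based check, Promise.all also keeps the blobs in the original file order.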
I am trying to pass the uploaded file's value to an Apex method, but I'm facing some issues in the JavaScript logic.
I added an alert after reader.onload = function(e), but I never get that alert when I hit this JavaScript function.
JavaScript code:
function SponsorshipLetter() {
    var files = document.getElementById('fileUpload');
    var appId = getCookie('apex__app');
    var fileName = 'Passport';
    var reader = new FileReader();
    reader.file = files[0];
    reader.onload = function(e) {
        alert('Hello 1' + document.getElementById('fileUpload').value);
        var att = new sforce.SObject("Attachment");
        att = fileName;
        att.ContentType = this.file.type;
        var binary = "";
        var bytes = new Uint8Array(e.target.result);
        var length = bytes.byteLength;
        for (var i = 0; i < length; i++) {
            binary += String.fromCharCode(bytes[i]);
        }
        att.Body = (new sforce.Base64Binary(binary)).toString();
        alert('attt');
        PAP_Finances.sponsorFileUpload(att.Name, att.Body, att.ContentType, appId,
            function(result, event) {
                return 'Success';
            });
    }
    reader.readAsDataURL(e.target.files[0]);
}
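No answer is shown for this one, but a likely reason the onload alert never fires is that reader.readAsDataURL(e.target.files[0]) references an e that does not exist in that scope (the handler's event parameter), so the function throws before the read ever starts. Below is only a sketch of the probable intent, reusing the names from the question; it reads the file as an ArrayBuffer, since the handler builds a Uint8Array from e.target.result, and assigns the file name to att.Name instead of overwriting the record variable:

function SponsorshipLetter() {
    var input = document.getElementById('fileUpload');
    var file = input.files[0];               // the File object, not the input element
    var appId = getCookie('apex__app');
    var fileName = 'Passport';
    var reader = new FileReader();
    reader.onload = function (e) {
        var att = new sforce.SObject("Attachment");
        att.Name = fileName;                 // set the Name field rather than replacing att
        att.ContentType = file.type;
        var bytes = new Uint8Array(e.target.result);
        var binary = "";
        for (var i = 0; i < bytes.byteLength; i++) {
            binary += String.fromCharCode(bytes[i]);
        }
        att.Body = (new sforce.Base64Binary(binary)).toString();
        PAP_Finances.sponsorFileUpload(att.Name, att.Body, att.ContentType, appId,
            function (result, event) {
                console.log('Upload result', result, event);
            });
    };
    reader.readAsArrayBuffer(file);          // ArrayBuffer matches the Uint8Array usage above
}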
I am trying to upload multiple images. I read that I can generate a temporary URL for each file and send them with ajax.
The idea is to push the URLs created with FileReader into an array and then send them with ajax, but the URLs are not pushed properly. When I inspect the result I get what looks like an empty array, yet if I expand it in the console I can see the URLs inside; they just seem inaccessible.
This is my code:
$('form').on('submit', function(e) {
    e.preventDefault();
    var filesToUpload = document.getElementById("myFile");
    var files = filesToUpload.files;
    var fd = new FormData();
    var arr = [];
    if (FileReader && files && files.length) {
        for (i = 0; i < files.length; i++) {
            (function(file) {
                var name = file.name;
                var fr = new FileReader();
                fr.onload = function () {
                    arr.push(fr.result);
                }
                fr.readAsDataURL(file);
            })(files[i]);
        }
        console.log(arr);
    }
});
The final idea is to convert the array to a string with JSON.stringify(arr) and then parse it in PHP with json_decode($_POST['arr']).
Of course this is not working, because JSON.stringify(arr) comes out empty.
Maybe the following simple solution works for you? I placed your console.log() and your ajax call inside the fr.onload() handler, but they only fire once your results array has been filled with all values:
$('form').on('submit', function(e) {
    e.preventDefault();
    var filesToUpload = document.getElementById("myFile");
    var files = filesToUpload.files;
    var fd = new FormData();
    var arr = [];
    if (FileReader && files && files.length) {
        for (var i = 0; i < files.length; i++) {
            (function(file) {
                var name = file.name;
                var fr = new FileReader();
                fr.onload = function () {
                    arr.push(fr.result);
                    if (arr.length == files.length) {
                        console.log(arr);
                        // place your ajax call here!
                    }
                }
                fr.readAsDataURL(file);
            })(files[i]);
        }
    }
});
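In case it helps, the "// place your ajax call here!" part could then look roughly like this; upload.php and the arr parameter name are only assumptions matching the json_decode($_POST['arr']) idea from the question:

// Send the collected data URLs as a JSON string; PHP can decode it with json_decode($_POST['arr'])
$.post('upload.php', { arr: JSON.stringify(arr) }, function (response) {
    console.log('Server response:', response);
});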
I have tried to read an Excel file by following this tutorial:
http://code.psjinx.com/xlsx.js/
But I failed to read the Excel file because of an "undefined" error in the highlighted line below. I tried it in IE11.
var reader = new FileReader();
reader.onload = function(e) {
    var data = e.target.result;
    var workbook = XLSX.read(data, {
        type: 'binary'
    });
    obj.sheets = XLSXReader.utils.parseWorkbook(workbook, readCells, toJSON);
    handler(obj);
}
reader.readAsBinaryString(file); // <-- the highlighted line that fails
The following answer describes how to load an xlsx file from the server. For uploading a file the code is different.
OPTION 1: This is the procedure used in the Alasql library (see the files 15utility.js and 84from.js for an example):
readBinaryFile(filename, true, function(data) {
    var workbook = X.read(data, { type: 'binary' }); // X is the XLSX (SheetJS) library object
    // do what you need with the parsed xlsx
});

// Binary reading procedure
// path - path to the file
// asy - true = async / false = sync
var readBinaryFile = utils.loadBinaryFile = function(path, asy, success, error) {
    if (typeof exports == 'object') {
        // For Node.js
        var fs = require('fs');
        var data = fs.readFileSync(path);
        var arr = new Array();
        for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
        success(arr.join(""));
    } else {
        // For the browser
        var xhr = new XMLHttpRequest();
        xhr.open("GET", path, asy); // async
        xhr.responseType = "arraybuffer";
        xhr.onload = function() {
            var data = new Uint8Array(xhr.response);
            var arr = new Array();
            for (var i = 0; i != data.length; ++i) arr[i] = String.fromCharCode(data[i]);
            success(arr.join(""));
        };
        xhr.send();
    }
};
OPTION 2: You can use the Alasql library itself, which is probably the easier option:
alasql('SELECT * FROM XLSX("myfile.xlsx",{headers:true,sheetid:"Sheet2",range:"A1:D100"})',
    [], function(data) {
        console.log(data);
    });
See the example here (simple Excel reading demo) or here (d3.js demo from Excel).
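For the upload case mentioned at the top of this answer (reading a file the user picked locally instead of fetching it from the server), here is a minimal sketch using the same FileReader and XLSX.read(..., {type: 'binary'}) combination as the question; the fileInput element id is just a placeholder:

// Read a user-selected .xlsx file in the browser and parse it with the XLSX (SheetJS) library
document.getElementById('fileInput').addEventListener('change', function (e) {
    var file = e.target.files[0];            // the File the user picked
    var reader = new FileReader();
    reader.onload = function (evt) {
        var workbook = XLSX.read(evt.target.result, { type: 'binary' });
        console.log(workbook.SheetNames);    // list the sheets that were parsed
    };
    reader.readAsBinaryString(file);
});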
I have the following two methods that upload an image to a remote servlet. For some reason the second parameter, which is the ArrayBuffer, is not written to the POST request, and I am trying to figure out why this is happening. Could someone assist me with this?
setupBinaryMessage = function(metadata) {
    log(metadata);
    var msglen = metadata.length;
    var localcanvas = document.getElementById("image");
    var fullBuffer;
    var myArray;
    var count = 0;
    if (localcanvas) {
        var localcontext = localcanvas.getContext('2d');
        // The following lines convert the image data to binary
        var imagedata = localcontext.getImageData(0, 0, localcanvas.width, localcanvas.height);
        var canvaspixelarray = imagedata.data;
        var canvaspixellen = canvaspixelarray.length;
        var msghead = msglen + "";
        var fbuflen = msglen + canvaspixellen + msghead.length;
        myArray = new ArrayBuffer(fbuflen);
        fullBuffer = new Uint8Array(myArray);
        for (var i = 0; i < msghead.length; i++) {
            fullBuffer[i] = msghead.charCodeAt(i);
        }
        for (var i = msglen + msghead.length; i < fbuflen; i++) {
            fullBuffer[i] = canvaspixelarray[count];
            count++;
        }
        return myArray;
    } else {
        return null;
    }
};
uploadImageWithPost = function() {
    var message = JSON.stringify(this.data);
    var fullBuffer = this.setupBinaryMessage(message);
    var formdata = { command: "post", imagedata: fullBuffer };
    alert(jQuery.isPlainObject(formdata));
    var imgPostRequest = $.post("http://localhost:8080/RestClient/RestClientPOST", fullBuffer, function(response) {
        response = response.trim();
        console.log(response);
        if (response == "SERVER_READY") {
            alert(response);
            try {
            } catch (error) {
                alert("Web Socket Error " + error.message);
            }
        } else {
            alert("SERVER ERROR");
        }
    }.bind(this));
};
Alright, after some help from a guru I figured out the issue. Apparently sending the ArrayBuffer itself this way is obsolete; the real solution is to post the unsigned byte buffer (the Uint8Array view) as it is. But even for that I need to set the XHR response type to arraybuffer, and I cannot use jQuery's $.post; I have to
use a plain XMLHttpRequest instead.
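A rough sketch of what that might look like with a plain XMLHttpRequest, sending the unsigned byte view directly; the endpoint is copied from the question and fullBuffer is assumed to be the ArrayBuffer returned by setupBinaryMessage, so treat this as an illustration rather than the exact final code:

var xhr = new XMLHttpRequest();
xhr.open("POST", "http://localhost:8080/RestClient/RestClientPOST", true);
xhr.responseType = "arraybuffer";                       // expect binary back, as described above
xhr.setRequestHeader("Content-Type", "application/octet-stream");
xhr.onload = function () {
    if (xhr.status === 200) {
        console.log("Upload finished, response bytes:", xhr.response.byteLength);
    } else {
        console.error("SERVER ERROR", xhr.status);
    }
};
xhr.send(new Uint8Array(fullBuffer));                   // post the unsigned byte buffer as-is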