How can I make .push work in FileReader.onload()? - javascript

I have the following code in which I'm trying to read a file using FileReader and put its contents in an array. Only after all the data has been pushed do I want to continue. Here's what I have currently:
const confirm = () => {
var reader = new FileReader();
let images = [];
reader.onload = function(e) {
images.push(e.target.result);
};
reader.readAsDataURL(formValues.images[0].file);
console.log('images base 64');
console.log(images); // this prints the empty array.
};
I want to continue on only after images have been updated with the file contents. How can I do that?
-- edit --
I want to in fact add multiple files to the array, so I tried the following.
var reader = new FileReader();
let images = [];
reader.onload = function(e) {
images.push(e.target.result);
console.log('images base 64');
console.log(images);
};
for (let i = 0; i < formValues.images.length; i++) {
reader.readAsDataURL(formValues.images[i].file);
}
But this gives the error "InvalidStateError: The object is in an invalid state."

You are trying to use the result before it has loaded. Move the console.log(images) inside the onload function.
const confirm = () => {
var reader = new FileReader();
let images = [];
reader.onload = function(e) {
images.push(e.target.result);
console.log(images);
};
reader.readAsDataURL(formValues.images[0].file);
console.log('images base 64');
};

This works for me; try it like this:
this.file64 = [];
reader.onloadend = (e) => {
    var result = reader.result;
    console.log(result);
    this.file64.push(result);
};
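Regarding the edit: the InvalidStateError comes from calling readAsDataURL again on a FileReader that is still busy with the previous read. One way around it is to create a reader per file, wrap each read in a promise, and wait for all of them; a rough sketch:
const readAsDataURL = (file) =>
    new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = () => resolve(reader.result);
        reader.onerror = () => reject(reader.error);
        reader.readAsDataURL(file);
    });
const confirm = async () => {
    // one reader per file, so no read starts while another is still pending
    const images = await Promise.all(
        formValues.images.map((image) => readAsDataURL(image.file))
    );
    console.log('images base 64');
    console.log(images); // populated now
};
Anything that depends on images can then go after the await.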

Related

In node.js, how to write binary data from fetch into a file?

Let's say I want to save an image to disk.
var path = require('path');
var webdriver = require("selenium-webdriver");
var chrome = require('selenium-webdriver/chrome');
var By = webdriver.By;
var until = webdriver.until
const fs = require("fs");
var o = new chrome.Options();
o.addArguments("--disable-web-security");
var browser = new webdriver.Builder().forBrowser('chrome').setChromeOptions(o).build();
fetch("https://upload.wikimedia.org/wikipedia/commons/9/9d/Blue_Flower.png");
This doesn't work, fetch is not defined.
I then tried adding fetch into browser.executeScript
var promise1 = browser.executeScript(`return fetch("https://upload.wikimedia.org/wikipedia/commons/9/9d/Blue_Flower.png");`)
promise1.then((x) => x.arrayBuffer()).then((buffer) => fs.writeFile("flower.png", buffer, () => console.log("done")))
This doesn't work. It turns out that when executeScript returns an object, all functions in the object are replaced with {}.
I then tried returning the buffer from executeScript.
var promise1 = browser.executeScript(`return fetch("https://upload.wikimedia.org/wikipedia/commons/9/9d/Blue_Flower.png").then((x) => x.arrayBuffer());`)
promise1.then((buffer) => fs.writeFile("flower.png", buffer, () => console.log("done")))
Still doesn't work. It turns out that buffer becomes {};
Is there some way I can access the arrayBuffer without it turning into {} after it comes out of executeScript?
const res = await fetch(url);
const fileWriteStream = fs.createWriteStream('/directory/path.ext');
res.body.pipe(fileWriteStream);
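The pipe call above assumes fetch comes from node-fetch, where res.body is a Node stream. With the fetch built into Node 18+, res.body is a WHATWG stream that has no .pipe method; a rough sketch that simply buffers the whole response instead:
const fs = require('fs');
async function saveImage(url, path) {
    const res = await fetch(url);
    // arrayBuffer() resolves once the full body has been downloaded
    const buffer = Buffer.from(await res.arrayBuffer());
    await fs.promises.writeFile(path, buffer);
}
saveImage('https://upload.wikimedia.org/wikipedia/commons/9/9d/Blue_Flower.png', 'flower.png')
    .then(() => console.log('done'));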
I managed to do it by using base64. I used code from this answer:
var promise1 = browser.executeScript(`return fetch("https://upload.wikimedia.org/wikipedia/commons/9/9d/Blue_Flower.png").then((x) => x.arrayBuffer()).then(function(buffer){
var blob = new Blob([buffer])
var reader = new FileReader();
reader.readAsDataURL(blob);
return reader
}).then(function(reader){
return new Promise(function(x, y){
this.addEventListener("loadend", function(){
x(this);
}.bind(this))
}.bind(reader));
}).then(function(reader){
return reader.result.slice(reader.result.indexOf(",")+1);
})
`)
promise1.then(function(b64data){
let buff = Buffer.from(b64data, 'base64');
fs.writeFileSync("flower.png", buff);
console.log("done");
})
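If you would rather avoid the base64 detour, a plain array of numbers should also survive executeScript's serialization, so the script can resolve to Array.from(new Uint8Array(buffer)) and Node can rebuild the bytes with Buffer.from. A sketch of that idea (untested here):
var promise2 = browser.executeScript(`
    return fetch("https://upload.wikimedia.org/wikipedia/commons/9/9d/Blue_Flower.png")
        .then((x) => x.arrayBuffer())
        .then((buffer) => Array.from(new Uint8Array(buffer))); // plain number array survives the JSON wire format
`);
promise2.then((bytes) => {
    fs.writeFileSync("flower.png", Buffer.from(bytes));
    console.log("done");
});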

Store contents of fileReader in a variable

I'm trying to do something that I feel should be relatively straightforward.
I just want to read a text files content and store it in a variable.
Here is my code:
readToCSV(file) {
// console.log(file);
let cat = "";
var reader = new FileReader();
reader.onloadend = function (event) {
if (event.target.readyState == FileReader.DONE) {
var data = event.target.result;
}
console.log("data:", data)
cat = data;
};
reader.readAsText(file)
console.log("cat:",cat);
};
I've tried just about everything and keep getting undefined outside the function. Do I just have to nest any further processing inside the onloadend function? That seems silly.
Ok, so I found a workaround and wanted to post it for anyone else who is trying to just get fileContents into a variable (seriously, why should that be so difficult?).
I ended up wrapping the reader in a promise and storing that.
Anyways, new code as follows:
async handleFileUpload(e){
console.log("e",e)
await this.setState({file:e[0]})
console.log("state: ",this.state.file)
const fileContents = await this.readToText(this.state.file)
console.log("fc:",fileContents);
//await this.readToCSV(fileContents)
}
async readToText(file) {
const temporaryFileReader = new FileReader();
return new Promise((resolve, reject) => {
temporaryFileReader.onerror = () => {
temporaryFileReader.abort();
reject(new DOMException("Problem parsing input file."));
};
temporaryFileReader.onload = () => {
resolve(temporaryFileReader.result);
};
temporaryFileReader.readAsText(file);
});
};
I just want to read a text files content and store it in a variable.
The way you're currently doing it, cat will be an empty string because of the async nature of the FileReader.
I would do it with a callback.
var cat = '';
const reader = new FileReader();
reader.readAsText(document.querySelector('input[type="file"]').files[0]);
reader.onload = () => storeResults(reader.result);
function storeResults(result) {
cat = result;
}
This way you get the job done and don't have to nest further processing directly within onloadend.
Hope that helps!
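If the browsers you target support it, File also inherits text() from Blob, which already returns a promise, so for plain text you can skip FileReader entirely. A minimal sketch using the same file input:
async function readCat() {
    const file = document.querySelector('input[type="file"]').files[0];
    const cat = await file.text(); // File inherits text() from Blob
    console.log("cat:", cat);
    return cat;
}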

Ionic read file from system and put it in canvas

I'm trying to read a file stored in the file system (Android) and then put the content into a canvas using the pdfjs library, so I can render it in a view. I did the same with the system PDF viewer and it worked, but I later need to do some painting and manipulation on it, so it can't be displayed in an external viewer; it must be inside my app.
Rendering of my PDF works fine, since I have tested it in live reload mode.
Below is the reading code:
readFile( pathToFile ){
this.file.resolveLocalFilesystemUrl( pathToFile).then((fileEntry: any) => {
fileEntry.file( (file) => {
var reader = new FileReader();
reader.onloadend = (event) => {
const x = event.target as any;
// let sliced = x._result.slice(x._result.indexOf(',') + 1, x._result.length);
console.log('item', x)
console.log('item', x.result)
console.log('buffer',new Uint8Array(x.result))
// console.log('64', new Uint8Array(x._result));
// const bytes = this.base64ToUint8Array(sliced)
this.renderPDF(x.result, this.container.nativeElement, 1)
};
reader.readAsArrayBuffer(file);
});
});
}
Here are the logs; as you can see, 'pdf1' is the last log, so the promise from getDocument never resolves:
renderPDF(url, canvasContainer, scale) {
console.log('pdf1')
this.pdfCreator.disableWorker = true;
this.pdfCreator
.getDocument(url)
.then((doc) => {
this.doc = doc;
console.log('pdf2')
this.renderPages(canvasContainer, scale);
})
.catch(err => console.log(err))
}
I have spent two days on it without success...
I think something went wrong with the URL.
Could you please try this:
readFile( pathToFile ){
this.file.resolveLocalFilesystemUrl( pathToFile).then((fileEntry: any) => {
fileEntry.file( (file) => {
var reader = new FileReader();
reader.onloadend = (event) => {
const x = event.target as any;
// let sliced = x._result.slice(x._result.indexOf(',') + 1, x._result.length);
console.log('item', x)
console.log('item', x.result)
console.log('buffer',new Uint8Array(x.result))
// console.log('64', new Uint8Array(x._result));
// const bytes = this.base64ToUint8Array(sliced)
var blob = new Blob([new Uint8Array(x.result)], {type: 'application/pdf'});
var url = URL.createObjectURL(blob);
this.renderPDF(url, this.container.nativeElement, 1)
};
reader.readAsArrayBuffer(file);
});
});
}
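Assuming pdfCreator wraps pdf.js's getDocument, another option is to skip the object URL and hand the bytes to getDocument directly through its data parameter; a sketch of just the onloadend handler:
reader.onloadend = () => {
    // readAsArrayBuffer leaves an ArrayBuffer in reader.result
    const bytes = new Uint8Array(reader.result);
    // pdf.js getDocument also accepts raw bytes via the data option
    this.renderPDF({ data: bytes }, this.container.nativeElement, 1);
};
reader.readAsArrayBuffer(file);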

Reading multiple files synchronously in Javascript using FileReader

I have a for loop iterating over the number of files
I have to read the first line of each file and add it, let's say, to a Map with the file name as the key and the first line of that file as the value.
I am using FileReader to read the file but it is asynchronous.
When I open a stream to read the file the loop gets incremented before I am done with reading the file and adding my desired entry to the map.
I need a synchronous operation, i.e. read the first line, add it to the Map, and only then increment the loop and proceed with the next file.
for (var i = 0; i < files.length; i++){
var file = files[i];
var reader = new FileReader();
reader.onload = function(progressEvent){
var lines = progressEvent.target.result.split('\n');
firstLine = lines[0];
alert('FirstLine'+firstLine);
//add to Map here
}
reader.readAsText(file);
}
How to modify the code so as to achieve the above mentioned functionality.
You can wrap each read in a promise and use reduce to wait for the results in the order you created them (the reads themselves still start right away).
The code below shows how it could be done this way, and you can take a look at this simple JSFiddle that demos the idea.
//create a function that returns a promise
function readFileAndAddToMap(file){
return new Promise(function(resolve, reject){
var reader = new FileReader();
reader.onload = function(progressEvent){
var lines = progressEvent.target.result.split('\n');
firstLine = lines[0];
console.log('FirstLine'+firstLine);
//add to Map here
resolve();
}
reader.onerror = function(error){
reject(error);
}
reader.readAsText(file);
});
}
//create an array to hold your promises
var promises = [];
for (var i = 0; i < files.length; i++){
//push to the array
promises.push(readFileAndAddToMap(files[i]));
}
//use reduce to create a chain in the order of the promise array
promises.reduce(function(cur, next) {
return cur.then(function() { return next; });
}, Promise.resolve()).then(function() {
//all files read and executed!
}).catch(function(error){
//handle potential error
});
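If the order in which the entries are added doesn't actually matter, Promise.all is a simpler way to find out when every file has been handled; a sketch reusing the same readFileAndAddToMap:
Promise.all(
    Array.from(files).map(function(file) {
        return readFileAndAddToMap(file);
    })
).then(function() {
    //all files read and executed!
}).catch(function(error) {
    //handle potential error
});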
I was facing the same issue; what I did was remove the for loop and use a recursive function instead. That way, I was able to control the sequence of the FileReader calls.
Below I tried to modify your code based on my logic. Feel free to ask any questions in the comments if you need more clarity.
attachmentI = { i: files.length };
function UploadMe() {
    attachmentI.i--;
    if (attachmentI.i > -1) {
        var file = files[attachmentI.i];
        var reader = new FileReader();
        reader.onload = function(progressEvent) {
            var lines = progressEvent.target.result.split('\n');
            firstLine = lines[0];
            alert('FirstLine' + firstLine);
            //add to Map here
            UploadMe();
        }
        reader.readAsText(file);
    }
}

Looping through files for FileReader, output always contains last value from loop

I'm using FileReader API to read files on local.
<input type="file" id="filesx" name="filesx[]" onchange="readmultifiles(this.files)" multiple="" />
<script>
function readmultifiles(files) {
var ret = "";
var ul = document.querySelector("#bag>ul");
while (ul.hasChildNodes()) {
ul.removeChild(ul.firstChild);
}
for (var i = 0; i < files.length; i++) //for multiple files
{
var f = files[i];
var name = files[i].name;
alert(name);
var reader = new FileReader();
reader.onload = function(e) {
// get file content
var text = e.target.result;
var li = document.createElement("li");
li.innerHTML = name + "____" + text;
ul.appendChild(li);
}
reader.readAsText(f,"UTF-8");
}
}
</script>
If input includes 2 files:
file1 ---- "content1"
file2 ---- "content2"
I get this output:
file2__content1
file2__content2
How to fix code to display:
file1__content1
file2__content2
The problem is you're running the loop now, but the callbacks you are setting are run later (when the events fire). By the time they run, the loop is over and name holds whatever the last value was. So it will always show "file2" in your case for the name.
The solution is to put the file name inside a closure with the rest. One way to do this is create an immediately-invoked function expression (IIFE) and pass the file in as a parameter to that function:
for (var i = 0; i < files.length; i++) { //for multiple files
(function(file) {
var name = file.name;
var reader = new FileReader();
reader.onload = function(e) {
// get file content
var text = e.target.result;
var li = document.createElement("li");
li.innerHTML = name + "____" + text;
ul.appendChild(li);
}
reader.readAsText(file, "UTF-8");
})(files[i]);
}
Alternately, you can define a named function and call it as normal:
function setupReader(file) {
var name = file.name;
var reader = new FileReader();
reader.onload = function(e) {
// get file content
var text = e.target.result;
var li = document.createElement("li");
li.innerHTML = name + "____" + text;
ul.appendChild(li);
}
reader.readAsText(file, "UTF-8");
}
for (var i = 0; i < files.length; i++) {
setupReader(files[i]);
}
Instead of using var, use let, so the declared variable is scoped to a single loop iteration.
for (let i = 0; i < files.length; i++) //for multiple files
{
let f = files[i];
let name = files[i].name;
alert(name);
let reader = new FileReader();
reader.onload = function(e) {
// get file content
let text = e.target.result;
let li = document.createElement("li");
li.innerHTML = name + "____" + text;
ul.appendChild(li);
}
reader.readAsText(f,"UTF-8");
}
Edit: Just use let instead of var in the loop. That fixes the issue the OP had (but let was only introduced in 2015).
Old answer (An interesting workaround):
While it is not exactly robust or future-proof, it is worth mentioning that this can also be achieved by adding a property to the FileReader object:
var reader = new FileReader();
reader._NAME = files[i].name; // create _NAME property that contains filename.
Then access it through e within the onload callback function:
li.innerHTML = e.target._NAME + "____" + text;
Why this works:
Even though the reader variable is replaced multiple times during the loop (like i), each new FileReader object is unique and remains in memory. By storing additional data on the reader object, that data is kept with it and is accessible inside reader.onload via the e.target event argument.
This explains why your output is:
file2__content1file2__content2
and not:
file1__content1file2__content2
The content is displayed correctly because e.target.result is a property within the FileReader object itself. Had FileReader contained a filename property by default, it could have been used and this whole mess avoided entirely.
A word of caution
This is called extending host objects (if I understand the difference between host and native objects correctly). FileReader is the host object being extended in this situation. Many professional developers believe doing this is bad practice and/or evil. Collisions may occur if _NAME ever becomes a real property in the future. This behavior isn't documented in any specification, so it could break in the future, and it may not work in older browsers.
Personally, I have not encountered any issues by adding additional properties to host objects. Assuming the property name is unique enough, browsers don't disable it, and future browsers don't change these objects too much, it should work fine.
Here are some articles that explain this quite well:
http://kendsnyder.com/extending-host-objects-evil-extending-native-objects-not-evil-but-risky/
http://perfectionkills.com/whats-wrong-with-extending-the-dom/
And an article on the problem itself:
http://tobyho.com/2011/11/02/callbacks-in-loops/
You can make a promise/callback for reading the file in the loop.
Promise-
fileBase64(file) {
return new Promise((resolve, reject) => {
let reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = function() {
resolve(reader.result);
};
reader.onerror = function(error) {
reject(error);
};
});
}
I am calling this function on onClick
onClick = async () => {
for (var i = 0; i < this.state.company_bank_statement_array.length; i++) {
let file = document.getElementById(
this.state.company_bank_statement_array[i]
);
let fileData = await this.fileBase64(file.files[0]);
this.state.bankStatements.push({
data: fileData,
filename: file.files[0].name,
});
}
};
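One caveat about the loop above: pushing onto this.state directly bypasses setState, so React will not re-render. A safer sketch under the same assumptions collects the results locally and commits them once:
onClick = async () => {
    const bankStatements = [];
    for (let i = 0; i < this.state.company_bank_statement_array.length; i++) {
        const file = document.getElementById(this.state.company_bank_statement_array[i]);
        const fileData = await this.fileBase64(file.files[0]);
        bankStatements.push({ data: fileData, filename: file.files[0].name });
    }
    // a single state update after all files have been read
    this.setState({ bankStatements });
};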
I had the same problem and solved it by using Array.from:
let files = e.target.files || e.dataTransfer.files;
Array.from(files).forEach(file => {
// do whatever
})
I think the best way to solve this problem is by recursively calling a function that reads the blob file. In my case I solved the problem with the following snippet; maybe it's a little complicated, but it worked in every scenario I tried.
Notice that I didn't pass the array and index as arguments; you need to access them through the object they belong to.
//Initialize blobs
var foo = new Blob(["Lorem ipsum dolor sit amet, consectetur adipiscing elit."], {
type: 'text/plain'
});
var bar = new Blob(["Sed tristique ipsum vitae consequat aliquet"], {
type: 'text/plain'
});
//Initialize array and index
var arrayOfBlobs = [foo, bar];
var arrayIndex = 0;
function fileRead () {
var me = this;
if (this.arrayIndex < this.arrayOfBlobs.length) {
var reader = new FileReader();
function bindedOnload(event) {
console.log("bindedOnload called");
console.log("reader results: ", event.target.result);
this.arrayIndex++; //Incrument the index
this.fileRead(); //Recursive call
}
//By Binding the onload event to the local scope we
//can have access to all local vars and functions
reader.onload = bindedOnload.bind(me);
reader.readAsText(this.arrayOfBlobs[arrayIndex]);
} else {
//This will executed when finishing reading all files
console.log("Finished");
}
}
//Call the fileRead for the first time
fileRead();
