FileReader is slow and not setting state in React - javascript

When converting a .pdf file to Base64, the result needs to go into my state, but for some reason my conversion is slower than my setState, so the value that gets set is always empty.
My code
async transformBase64(inputFile) {
  return new Promise((resolve, reject) => {
    var fileReader = new FileReader();
    fileReader.readAsDataURL(inputFile)
    if (fileReader.result != undefined) {
      resolve(this.setState({ Base64: fileReader.result }), () => {});
    } else {
      reject("Err")
    }
  });
}
What can I do to solve my problem?

It looks like you're not setting up an onload callback on your fileReader. This is needed to signal back to your application that the file data has loaded and is ready, since the FileReader API is asynchronous.
Consider applying the following changes to your code to resolve the problem:
async transformBase64(inputFile) {
  return new Promise((resolve, reject) => {
    var fileReader = new FileReader();
    // If an error occurs, reject the promise
    fileReader.onerror = () => {
      reject("Err")
    }
    // Define an onload handler that's called once the file has loaded
    fileReader.onload = () => {
      // File data is available, so proceed to call setState
      if (fileReader.result != undefined) {
        resolve(this.setState({ Base64: fileReader.result }, () => {}));
      } else {
        reject("Err")
      }
    }
    fileReader.readAsDataURL(inputFile)
  });
}
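If it helps, here is a possible variant (a sketch, not part of the answer above): resolve the promise with the Base64 string itself and call setState only after awaiting it, so the promise value doesn't depend on React. The handleFile caller is a hypothetical name.
transformBase64(inputFile) {
  return new Promise((resolve, reject) => {
    const fileReader = new FileReader();
    // reject with the reader's error object rather than a plain string
    fileReader.onerror = () => reject(fileReader.error);
    // resolve with the data URL string once reading is complete
    fileReader.onload = () => resolve(fileReader.result);
    fileReader.readAsDataURL(inputFile);
  });
}

async handleFile(inputFile) {
  const Base64 = await this.transformBase64(inputFile);
  this.setState({ Base64 });
}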

Related

FileReader onload is not being fired in a web worker

I have the below function that converts PDFs to images; the function runs inside a web worker.
For some reason the fileReader.onload is not being fired. The filePdf is correct and is in the right format. Any idea?
const processFile = async (filePdf, post) => {
  let PDFJS
  if (!PDFJS) {
    PDFJS = await import('pdfjs-dist/build/pdf.js')
  }
  if (!filePdf) return
  const fileReader = new FileReader()
  console.log(filePdf)
  let pages
  try {
    fileReader.onload = async () => {
      const pdf = await PDFJS.getDocument(fileReader.result).promise
      pages = await pdfToImageMap(pdf)
    }
  } catch (e) {
    console.log({e})
  }
  fileReader.readAsArrayBuffer(filePdf)
  return post({type: 'done'})
}
filePdf: (screenshot of the logged File object omitted)
Try changing your logic.
At the moment you only schedule the onload handler, which will work, so the try block succeeds. Then you return your post function. In other words, you start the file reader but don't wait for it to load before returning with the post call.
Instead, wait for the fileReader to load by awaiting a Promise wrapped around the load handler, and inside the Promise call fileReader.readAsArrayBuffer(filePdf) so that onload actually fires. Inside the onload handler, use your try/catch block around the PDFJS calls.
Also, don't throw away values stored in variables. If you need the pages value, use it and return it somehow; otherwise don't store it at all.
Try the snippet below and see if it works.
const processFile = async (filePdf, post) => {
  const PDFJS = await import('pdfjs-dist/build/pdf.js')
  if (!filePdf) return
  console.log(filePdf)
  const fileReader = new FileReader()
  const pages = await new Promise(resolve => {
    fileReader.onload = async () => {
      try {
        const pdf = await PDFJS.getDocument(fileReader.result).promise
        const pages = await pdfToImageMap(pdf)
        resolve(pages)
      } catch (e) {
        console.log({e})
      }
    }
    fileReader.readAsArrayBuffer(filePdf)
  })
  return post({type: 'done', pages})
}

Create a promise with a dynamic timeout

I have created a Promise in order to get the duration of a file once it has finished synthesising.
I believe the solution is really inefficient, since I set a timeout regardless of when the task finishes, so I will probably just waste time each time I call the method:
polly.synthesizeSpeech(params, function (err, data) {
  if (err)
    console.log(err, err.stack);
  else {
    var uInt8Array = new Uint8Array(data.AudioStream);
    var arrayBuffer = uInt8Array.buffer;
    var blob = new Blob([arrayBuffer]);
    var urlAudioFile = URL.createObjectURL(blob);
    var audio = new Audio(urlAudioFile);
    audio.type = 'audio/wav';
    getAudioFileDurationAsync(audio);
  };
});

function getAudioFileDurationAsync(audio) {
  let promise = new Promise(function (resolve, reject) {
    setTimeout(() => {
      resolve("done!")
    }, 3000);
  });
  promise.then(
    result => {
      console.log(audio.duration);
    },
    error => console.log(error) // doesn't run
  );
};
Obviously, after 3000ms I get the duration of the file, but I would like to do it as soon as the file has finished synthesising. How could I do it?
From the documentation, it seems possible to get the duration once the audio data has loaded:
var audioElement = new Audio('car_horn.wav');
audioElement.addEventListener('loadeddata', () => {
  let duration = audioElement.duration;
  // The duration variable now holds the duration (in seconds) of the audio clip
})
Hope it helps. Is that working for you?
Basically you just need to wrap the code you want to be notified about in a Promise. If you have a callback function, just like in your example, all you have to do is resolve from within that callback.
const audioFileDuration = (params) => new Promise((resolve, reject) => {
  polly.synthesizeSpeech(params, function(err, data) {
    if (err) {
      reject(err);
      return;
    }
    var uInt8Array = new Uint8Array(data.AudioStream);
    var arrayBuffer = uInt8Array.buffer;
    var blob = new Blob([arrayBuffer]);
    var urlAudioFile = URL.createObjectURL(blob);
    var audio = new Audio(urlAudioFile);
    audio.type = 'audio/wav';
    // the duration is only known once the metadata has loaded
    audio.addEventListener('loadedmetadata', () => resolve(audio.duration));
    audio.addEventListener('error', reject);
  });
});
audioFileDuration(params).then(duration => console.log(duration))
setTimeout acts as the maximum duration [TTL] you want to wait for the function.
You can try resolving the promise from two flows:
when the 3000ms timeout is reached
as soon as you get the duration of the file after it has finished synthesising
Whichever of the two flows completes first resolves the promise, and your code can proceed without waiting for the second resolve.
Make sure you clear the timeout if the second flow (getting the duration) finishes first.
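For illustration, a minimal sketch of that race (the getDurationWithTimeout name and the loadedmetadata listener are assumptions for this example, not from the question's code): resolve with the duration as soon as the metadata is available, reject if the TTL expires first, and clear the timer when the duration wins.
function getDurationWithTimeout(audio, ttlMs = 3000) {
  let timer;
  // flow 1: resolve as soon as the duration is known
  const duration = new Promise(resolve => {
    audio.addEventListener('loadedmetadata', () => {
      clearTimeout(timer); // the duration won, cancel the TTL timer
      resolve(audio.duration);
    }, { once: true });
  });
  // flow 2: give up after the TTL
  const timeout = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error("timed out waiting for duration")), ttlMs);
  });
  // whichever settles first decides the outcome
  return Promise.race([duration, timeout]);
}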

JavaScript Onload property

I have read some posts regarding the onload property. My understanding is that it is not a method, not an event, not an event listener or trigger. It is merely an empty pointer to a function waiting to be assigned. Here is a shortened script: it assigns an image manipulation function on an event and then renders the image to the web page. Instead of assigning the function to onload, which is the pointer to the function, why can't I just execute the function directly? Am I making any sense here? Thanks
var reader = new FileReader();
reader.onload = function(e) {
  var img = document.createElement("img");
  img.src = e.target.result;
  var canvas = document.createElement("canvas");
  ......
  dataurl = canvas.toDataURL(file.type);
  document.getElementById('output').src = dataurl;
}
reader.readAsDataURL(file);
According to MDN:
The FileReader.onload property contains an event handler executed when
the load event is fired, when content read with readAsArrayBuffer,
readAsBinaryString, readAsDataURL or readAsText is available.
So it's executed after reading the content is done, which you trigger by calling reader.readAsDataURL(file).
onload is a field of the FileReader object that holds a reference to the function you want to execute when the desired file has loaded. Essentially, it's the callback that gets called when the load event is triggered. Why use this pattern instead of executing the function directly? Because loading files is an asynchronous task (started with readAsDataURL, readAsBinaryString, readAsArrayBuffer or readAsText).
You can't call the function directly because files load asynchronously. You can convert the FileReader code to return a promise, though, and then use async/await to make the code look like you're calling it directly:
function readFileAsDataURL(file) {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = (e) => resolve(e.target.result);
    reader.onerror = reject;
    reader.readAsDataURL(file);
  });
}

function loadImage(url) {
  return new Promise((resolve, reject) => {
    const img = new Image();
    img.onload = () => resolve(img);
    img.onerror = reject;
    img.src = url;
  });
}

async function foo() {
  const result = await readFileAsDataURL(file);
  const img = await loadImage(result);
  // you can now use img
  ......
}
So in essence, onload is an event handler (or a reference to the event handler) for FileReader that is called to further process the data once FileReader is done loading? Is my interpretation correct?

Why doesn't awaiting a promise wait for the promise to resolve?

I am trying to learn to use async/await properly, but I am kind of confused about it.
In the snippets, I am trying to build an array of objects containing the info I need about the files I am uploading in the component. The problem is that the objects in this.fileInfo do not wait for the promise to return the encoded images, producing this output when I console.log this.fileInfo:
As you can see, the key image is a ZoneAwarePromise whose value is undefined. Can you please help me fix this?
Function build()
async build(e) {
  let files = e.target.files;
  this.j = Array.from(files);
  this.fileInfo = await this.j.reduce((acc, cur) => [
    ...acc, {
      name: cur.name.replace(/^.*\\/, ""),
      sizeunit: this.getSize(cur.size),
      extention: cur.name.split(/\.(?=[^\.]+$)/).slice(-1).pop().toString(),
      image: this.previewFile(cur)
    }
  ], [])
  console.log(await this.fileInfo);
}
Promise
async previewFile(file) {
  const reader = new FileReader();
  reader.readAsDataURL(file);
  reader.onload = () => {
    return new Promise((res) => {
      res(reader.result)
    }).then(res => res);
  };
}
You are not awaiting anything in this function: async previewFile(file).
Perhaps you assume that returning a new Promise somewhere in your code will make it behave as a Promise. In this particular case it won't work, because the return happens inside a delegate (onload) that is not executed within the scope of your previewFile() function.
You can drop the async modifier, because you can return a Promise instead:
previewFileAsync(file) {
  // the async modifier keyword is not necessary,
  // because we don't need to await anything.
  return new Promise((res) => {
    const reader = new FileReader();
    reader.readAsDataURL(file);
    reader.onload = () => res(reader.result);
  });
}
When you call this, you can await it inside your loop:
async buildAsync(e) {
  let files = e.target.files;
  for (let i = 0; i < files.length; i++) {
    const file = files[i];
    const preview = await previewFileAsync(file);
    // Do something with preview here...
  }
}
Of course, you can execute a range of promises to allow for some sort of concurrency, but this will help get you started.
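For instance, here is a minimal sketch of that concurrent variant (buildAllAsync is a hypothetical name), assuming the same previewFileAsync helper from above:
async buildAllAsync(e) {
  const files = Array.from(e.target.files);
  // start all reads at once and wait for all of them to finish
  const previews = await Promise.all(files.map(file => previewFileAsync(file)));
  // previews[i] corresponds to files[i]
  return previews;
}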
I added the Async suffix to your method so a caller knows that this can be awaited. It does not do anything special, but it helps clarify your code. You can use whatever suffix you think is right. I'm just used to the Async suffix.
Edit
Stackblitz example of async logic

Javascript Promises with FileReader()

I have the following HTML Code:
<input type='file' multiple>
And Here's my JS Code:
var inputFiles = document.getElementsByTagName("input")[0];
inputFiles.onchange = function(){
  var fr = new FileReader();
  for(var i = 0; i < inputFiles.files.length; i++){
    fr.onload = function(){
      console.log(i) // Prints "0, 3, 2, 1" in case of 4 chosen files
    }
  }
  fr.readAsDataURL(inputFiles.files[i]);
}
So my question is, how can I make this loop synchronous? That is, first wait for the file to finish loading, then move on to the next file. Someone told me to use JS Promises, but I can't make it work. Here's what I'm trying:
var inputFiles = document.getElementsByTagName("input")[0];
inputFiles.onchange = function(){
  for(var i = 0; i < inputFiles.files.length; i++){
    var fr = new FileReader();
    var test = new Promise(function(resolve, reject){
      console.log(i) // Prints 0, 1, 2, 3 just as expected
      resolve(fr.readAsDataURL(inputFiles.files[i]));
    });
    test.then(function(){
      fr.onload = function(){
        console.log(i); // Prints only 3
      }
    });
  };
}
Thanks in advance...
We modified Mido's answer to get it to work as follows:
function readFile(file){
  return new Promise((resolve, reject) => {
    var fr = new FileReader();
    fr.onload = () => {
      resolve(fr.result)
    };
    fr.onerror = reject;
    fr.readAsText(file.blob);
  });
}
If you want to do it sequentially (not synchronously) using Promises, you could do something like:
var inputFiles = document.getElementsByTagName("input")[0];
inputFiles.onchange = function(){
  var promise = Promise.resolve();
  // build the chain so each read starts after the previous one completes
  Array.from(inputFiles.files).forEach(file => {
    promise = promise.then(() => pFileReader(file));
  });
  promise.then(() => console.log('all done...'));
}

function pFileReader(file){
  return new Promise((resolve, reject) => {
    var fr = new FileReader();
    fr.onload = resolve; // CHANGE to whatever function you want which would eventually call resolve
    fr.onerror = reject;
    fr.readAsDataURL(file);
  });
}
Preface: This answer originally written in 2015 shows wrapping FileReader in a promise. That's still a perfectly valid way to do the readAsDataURL operation the question asked about, but if you were going to use readAsText or readAsArrayBuffer (in general, new code shouldn't use the older readAsBinaryString), you'd want to use the File object's built-in promise-based methods text or arrayBuffer instead (or possibly stream if you want to do inline processing of the data as it flowed through), all of which are inherited from Blob.
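For reference, a small sketch of those built-in promise-based methods (readWithBlobMethods is just an illustrative name). File inherits text() and arrayBuffer() from Blob, so no FileReader is needed for text or binary reads; there is no built-in equivalent for data: URLs, which is why the readAsDataURL wrapper below is still useful.
async function readWithBlobMethods(file) {
  const text = await file.text();          // file contents as a string
  const buffer = await file.arrayBuffer(); // file contents as an ArrayBuffer
  return { text, buffer };
}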
The nature of FileReader is that you cannot make its operation synchronous.
I suspect you don't really need or want it to be synchronous, just that you want to get the resulting URLs correctly. The person suggesting using promises was probably right, but not because promises make the process synchronous (they don't), but because they provide standardized semantics for dealing with asynchronous operations (whether in parallel or in series):
Using promises, you'd start with a promise wrapper for readAsDataURL (I'm using ES2015+ here, but you can convert it to ES5 with a promise library instead):
function readAsDataURL(file) {
  return new Promise((resolve, reject) => {
    const fr = new FileReader();
    fr.onerror = reject;
    fr.onload = () => {
      resolve(fr.result);
    }
    fr.readAsDataURL(file);
  });
}
Then you'd use the promise-based operations I describe in this answer to read those in parallel:
Promise.all(Array.prototype.map.call(inputFiles.files, readAsDataURL))
  .then(urls => {
    // ...use `urls` (an array) here...
  })
  .catch(error => {
    // ...handle/report error...
  });
...or in series:
let p = Promise.resolve();
for (const file of inputFiles.files) {
  p = p.then(() => readAsDataURL(file).then(url => {
    // ...use `url` here...
  }));
}
p.catch(error => {
  // ...handle/report error...
});
Inside an ES2017 async function, you could use await. It doesn't do much for the parallel version:
// Inside an `async` function
try {
  const urls = await Promise.all(Array.prototype.map.call(inputFiles.files, readAsDataURL));
} catch (error) {
  // ...handle/report error...
}
...but it makes the series version simpler and clearer:
// Inside an `async` function
try {
  for (const file of inputFiles.files) {
    const url = await readAsDataURL(file);
    // ...use `url` here...
  }
} catch (error) {
  // ...handle/report error...
}
Without promises, you'd do this by keeping track of how many outstanding operations you have so you know when you're done:
const inputFiles = document.getElementsByTagName("input")[0];
inputFiles.onchange = () => {
  const data = []; // The results
  let pending = 0; // How many outstanding operations we have
  // Schedule reading all the files (this finishes before the first onload
  // callback is allowed to be executed). Note that the use of `let` in the
  // `for` loop is important, `var` would not work correctly.
  for (let index = 0; index < inputFiles.files.length; ++index) {
    const file = inputFiles.files[index];
    // Read this file, remember it in `data` using the same index
    // as the file entry
    const fr = new FileReader();
    fr.onload = () => {
      data[index] = fr.result;
      --pending;
      if (pending == 0) {
        // All requests are complete, you're done
      }
    }
    fr.readAsDataURL(file);
    ++pending;
  }
};
Or if you want for some reason to read the files sequentially (but still asynchronously), you can do that by scheduling the next call only when the previous one is complete:
// Note: This assumes there is at least one file, if that
// assumption isn't valid, you'll need to add an up-front check
var inputFiles = document.getElementsByTagName("input")[0];
inputFiles.onchange = () => {
  let index = 0;
  readNext();
  function readNext() {
    const file = inputFiles.files[index++];
    const fr = new FileReader();
    fr.onload = () => {
      // use fr.result here
      if (index < inputFiles.files.length) {
        // More to do, start loading the next one
        readNext();
      }
    }
    fr.readAsDataURL(file);
  }
};
I upgraded Jens Lincke's answer by adding a working example and introducing async/await syntax:
function readFile(file) {
  return new Promise((resolve, reject) => {
    let fr = new FileReader();
    fr.onload = x => resolve(fr.result);
    fr.onerror = reject;
    fr.readAsDataURL(file) // or readAsText(file) to get raw content
  })
}
async function load(e) {
  for (let [i, f] of [...e.target.files].entries()) {
    msg.innerHTML += `<h1>File ${i}: ${f.name}</h1>`;
    let p = document.createElement("pre");
    p.innerText += await readFile(f);
    msg.appendChild(p);
  }
}
<input type="file" onchange="load(event)" multiple />
<div id="msg"></div>
Promisified FileReader
/**
 * Promisified FileReader
 * More info https://developer.mozilla.org/en-US/docs/Web/API/FileReader
 * @param {*} file
 * @param {*} method: readAsArrayBuffer, readAsBinaryString, readAsDataURL, readAsText
 */
export const readFile = (file = {}, method = 'readAsText') => {
  const reader = new FileReader()
  return new Promise((resolve, reject) => {
    reader[method](file)
    reader.onload = () => {
      resolve(reader)
    }
    reader.onerror = (error) => reject(error)
  })
}
Usage
const file = new File(["foo"], "foo.txt", {
  type: "text/plain",
});

// Text
const resp1 = await readFile(file)
console.log(resp1.result)

// DataURL
const resp2 = await readFile(file, 'readAsDataURL')
console.log(resp2.result)
Using promises can make it much more elegant:
// opens a file dialog, waits till the user selects a file and returns the data URL of the uploaded file
async function pick() {
  var filepicker = document.createElement("input");
  filepicker.setAttribute("type", "file");
  filepicker.click();
  return new Promise((resolve, reject) => {
    filepicker.addEventListener("change", e => {
      var reader = new FileReader();
      reader.addEventListener('load', file => resolve(file.target.result));
      reader.addEventListener('error', reject);
      reader.readAsDataURL(e.target.files[0]);
    });
  });
}

// Only call this function on a user event
window.onclick = async function() {
  var file = await pick();
  console.log(file);
}
Here is another modification to Jens' answer (piggybacking off Mido's answer) to additionally check the file size:
function readFileBase64(file, max_size){
  const max_size_bytes = max_size * 1048576;
  return new Promise((resolve, reject) => {
    if (file.size > max_size_bytes) {
      console.log("file is too big at " + (file.size / 1048576) + "MB");
      reject("file exceeds max size of " + max_size + "MB");
    }
    else {
      var fr = new FileReader();
      fr.onload = () => {
        const data = fr.result;
        resolve(data)
      };
      fr.onerror = reject;
      fr.readAsDataURL(file);
    }
  });
}
We can use a callback function to get the reader.result:
function myDisplay(some) {
  document.getElementById('demo').innerHTML = some;
}

function read(file, callback) {
  const reader = new FileReader();
  reader.onload = () => {
    callback(reader.result);
  }
  reader.readAsText(file);
}
// When you pass a function as an argument, remember not to use parenthesis.
read(this.files[0], myDisplay);
