Wait for all promises in a map function - javascript

I want to wait for all my pictures to be read inside a map function. I tried this:
let buffer = [];
// Folder of the dataset.
const rootFolder = './dataset';
console.log("Entering in folder dataset");
fs.readdirSync(rootFolder);
// For each folder
const files = fs.readdirSync(rootFolder).map(dirName => {
  if (fs.lstatSync(path.join(rootFolder, dirName)).isDirectory()) {
    console.log(`Entering in folder ${path.join(rootFolder, dirName)}`);
    // For each file
    fs.readdirSync(path.join(rootFolder, dirName)).map(picture => {
      if (fs.lstatSync(path.join(rootFolder, dirName, picture)).isFile()) {
        if (picture.startsWith("norm")) {
          return fileToTensor(path.join(rootFolder, dirName, picture)).then((img) => {
            buffer.push(img);
          }).catch((error) => { console.log(error) });
        }
      }
    });
  }
});
Promise.all(files);
console.log(buffer);

async function fileToTensor(path) {
  return await sharp(path)
    .removeAlpha()
    .raw()
    .toBuffer({ resolveWithObject: true });
}
But my buffer is still empty...
I know promises exist, but I don't know how to include them in a map(map()).
Thank you :)

I would refactor the above code to this:
let files = [];
// Loop over each dir.
fs.readdirSync(rootFolder).forEach(dirName => {
  // If it's a directory, proceed.
  if (fs.lstatSync(path.join(rootFolder, dirName)).isDirectory()) {
    console.log(`Entering in folder ${path.join(rootFolder, dirName)}`);
    fs.readdirSync(path.join(rootFolder, dirName)).forEach(picture => {
      if (fs.lstatSync(path.join(rootFolder, dirName, picture)).isFile()) {
        // If lstatSync says it's a file and it starts with "norm"
        if (picture.startsWith("norm")) {
          // Push a new promise to the array.
          files.push(new Promise((resolve, reject) => {
            fileToTensor(path.join(rootFolder, dirName, picture)).then((img) => {
              buffer.push(img);
              resolve();
            }).catch((error) => { console.log(error); reject(error); });
          }));
        }
      }
    });
  }
});
// Wait for all promises to settle.
Promise.all(files).then(() => {
  // Then do whatever you need to do.
  console.log(buffer);
}).catch((errors) => {
  console.log('one or more errors occurred', errors);
});
Basically, here is what I did:
Removed .map, since it's not necessary in this context; besides, not all of your code paths returned a value, so some of the resulting array entries would have been undefined.
Pushed each needed item to the files array, which is a Promise[].
Called Promise.all on the files array. Each resolved promise will push the result to the buffer array. I would've handled it in a different way, but still, this is the fastest I could think of.
Registered a callback on Promise.all, so that buffer will be defined.
As a side note, there are a lot of third-party libraries that help you avoid nested loops and promises while walking the file system. I've just posted this to try to give something that could actually work from the existing code, although a full refactor would be smarter here, and a preliminary analysis of the available Node libraries would also help to make the code easier to read and maintain.

First of all, a few pieces of advice:
Don't use arrow functions for anything you can't fit on a single line (they aren't intended for that, and it wrecks readability).
Check that each callback you pass to .map() actually returns something (the first one doesn't: it seems you missed a return before the inner fs.readdirSync(...)).
Try to name all functions (except arrow functions where they're the right choice). That way, not only could I refer to them more easily in the previous point, but stack traces would also be much more readable and useful (traceable).
That being said, you are reading directories (and subdirectories) synchronously, only to return promises at the end (I understand that fileToTensor() is expected to return a promise). It may not have a major impact on the overall execution time, since the actual file processing is presumably much more expensive, BUT it is a bad pattern, because you are blocking the event loop during the tree scan (so if your code is for a server that needs to serve other requests, you are pulling performance down a bit).
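For reference, here is a minimal non-blocking sketch of the same scan using fs.promises instead of the sync calls (my assumption; fileToTensor is the question's helper, and the "norm" filter is kept as-is):

const fsp = require('fs').promises;
const path = require('path');

async function scanDataset(rootFolder) {
  const pending = [];
  for (const dirName of await fsp.readdir(rootFolder)) {
    const dirPath = path.join(rootFolder, dirName);
    if (!(await fsp.lstat(dirPath)).isDirectory()) continue;
    for (const picture of await fsp.readdir(dirPath)) {
      const picPath = path.join(dirPath, picture);
      if (picture.startsWith('norm') && (await fsp.lstat(picPath)).isFile()) {
        pending.push(fileToTensor(picPath)); // start the work, keep the promise
      }
    }
  }
  return Promise.all(pending); // resolves to the array of decoded images
}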
Finally, as others have already said, there are libraries, such as glob, that ease that task.
On the other hand, if you want to do it yourself (as a learning exercise), I implemented my own library for the same task before I knew about glob, and it could serve you as a simpler example.
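To illustrate the glob route, a sketch (assuming glob v9+, whose main export is promise-based, plus the question's fileToTensor helper):

const { glob } = require('glob');

async function loadDataset(rootFolder) {
  // Match every file starting with "norm" one directory level below rootFolder.
  const paths = await glob(`${rootFolder}/*/norm*`, { nodir: true });
  return Promise.all(paths.map(p => fileToTensor(p)));
}

loadDataset('./dataset')
  .then(buffer => console.log(`Loaded ${buffer.length} images`))
  .catch(console.error);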

Hey, I've updated your code a bit; please go through it once. It might be helpful :)
let fsReadDir = Util.promisify(Fs.readdir);
let fsStat = Util.promisify(Fs.stat);
let picturePromises = [];
let directories = await fsReadDir(rootFolder);
for (let dirIndex = 0; dirIndex < directories.length; dirIndex++) {
  let dirName = directories[dirIndex];
  let stat = await fsStat(path.join(rootFolder, dirName));
  if (stat.isDirectory()) {
    let pictures = await fsReadDir(path.join(rootFolder, dirName));
    for (let picIndex = 0; picIndex < pictures.length; picIndex++) {
      let picture = pictures[picIndex];
      let picStat = await fsStat(path.join(rootFolder, dirName, picture));
      if (picStat.isFile()) {
        if (picture.startsWith("norm")) {
          let pTensor = fileToTensor(path.join(rootFolder, dirName, picture)).then((img) => {
            buffer.push(img);
          }).catch((error) => { console.log(error) });
          picturePromises.push(pTensor);
        }
      }
    }
  }
}
await Promise.all(picturePromises);
console.log(buffer);

async function fileToTensor(path) {
  return await sharp(path)
    .removeAlpha()
    .raw()
    .toBuffer({ resolveWithObject: true });
}

Javascript for loop does not wait for fetch request to complete and moves onto next iteration

I have the below code in JavaScript, in which an asynchronous task is performed:
async function fetchData(id) {
  for (let i = 1; ; ++i) {
    let res = await fetch(`https://some-api/v1/products/${id}/data?page=${i}`, { /* headers here */ });
    res = await res.json();
    if (res.length == 0) break;
    else { /* do some work here and continue to the next iteration */ }
  }
}

async function callApi() {
  var arr = [/* list of ids here to pass to the api one by one, almost 100 ids */];
  await Promise.all(arr.map(async (e) => {
    await fetchData(e);
  }));
}

callApi();
The above code looks fine to me, except that it doesn't work as expected. Ideally, the for loop should not proceed to the next iteration until the current id's call has completed (until the break condition is satisfied for that id). Instead, I am getting totally different results: the API calls happen in random order, because the loop does not wait for each iteration to complete. My hard requirement is that it must not move on to the next id until the current one is finished.
await seems to have no effect here. Please guide me on how I can achieve this; I am running out of ideas.
Thank you!
Your arr.map(...) starts all of the fetchData calls at once rather than awaiting each one before the next, so I'd turn this into an explicit for loop to be sure it waits:
async function callApi() {
  const arr = [...];
  for (let i = 0; i < arr.length; i++) {
    await fetchData(arr[i]);
  }
}
or alternatively use a for...of:
async function callApi() {
  const arr = [...];
  for (let a of arr) {
    await fetchData(a);
  }
}
The fetchData function also looks like it could use some improvements to its error handling, but since you shortened your code quite a bit, I'm assuming there is something like that going on there too, and your issue is actually with the callApi() code instead, as the fetch and await look good to me there.
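For completeness, one way that error handling could look (a sketch; the headers object and per-page work are elided exactly as in the question):

async function fetchData(id) {
  for (let i = 1; ; ++i) {
    try {
      const res = await fetch(`https://some-api/v1/products/${id}/data?page=${i}`, { /* headers here */ });
      if (!res.ok) throw new Error(`HTTP ${res.status}`);
      const json = await res.json();
      if (json.length == 0) break;
      // do some work here, then continue to the next iteration
    } catch (err) {
      console.error(`id ${id}, page ${i} failed:`, err);
      break; // or retry, depending on the requirement
    }
  }
}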
You should decide whether to use promises or async/await; don't mix them.
With promises you can always use funky abstractions, but with a simple recursive approach you can do something like this:
function fetchData(hds, id, page = 1, pages = []) {
  return fetch(`https://some-api/v1/products/${id}/data?page=${page}`, hds)
    .then(r => r.ok ? r.json() : Promise.reject({ status: r.status, pages }))
    .then(j => {
      pages.push(doSomethingWith(j));
      return fetchData(hds, id, page + 1, pages);
    })
    .catch(e => (console.log(e.status), e.pages));
}
So we use recursion to fetch indefinitely, until the API says enough and r.ok is false.
At the callApi side you can use reduce since we have an ids array.
const ids = [/* ids array */],
      hds = { /* headers object */ };

function callApi(ids) {
  return ids.reduce((p, id) => p.then(_ => fetchData(hds, id))
                                .then(pages => process(pages)),
                    Promise.resolve(null))
            .catch(e => console.log(e));
}
So now both the id and page requests run asynchronously, but each one is fired only once the previous one has finished, such as:
(id=1, page=1) then (id=1, page=2) then (id=1, page=3) then (process 3 pages of id=1) then
(id=2, page=1) then (id=2, page=2) then (process 2 pages of id=2), etc...
While I love promises, you can also implement the same functionality with the async/await abstraction. I believe the idea behind the invention of async/await is to mimic synchronous imperative code. But keep in mind that it's an abstraction over an abstraction, and I urge you to learn promises by heart before even attempting to use async/await. The general rule is to never mix both in the same code.
Accordingly, the above code could be written as follows using async/await:
async function fetchData(hds, id) {
  let page = 1,
      pages = [],
      res;
  while (true) {
    res = await fetch(`https://some-api/v1/products/${id}/data?page=${page++}`, hds);
    if (res.ok) pages.push(await res.json());
    else return pages;
  }
}
Then the callApi function can be implemented in a similar fashion
const ids = [/* ids array */],
      hds = { /* headers object */ };

async function callApi(ids) {
  let pages;
  for (let i = 0; i < ids.length; i++) {
    try {
      pages = await fetchData(hds, ids[i]);
      await process(pages); // no need for await if the process function is sync
    }
    catch (e) {
      console.log(e);
    }
  }
}

How to wait until multiple files are processed before calling finished function js

The following function runs after a drag and drop operation of multiple files.
function getFilesInfo(ev) {
  for (let i = 0; i < ev.dataTransfer.items.length; i++) {
    if (ev.dataTransfer.items[i].kind === 'file') {
      let file = ev.dataTransfer.items[i].getAsFile();
      // getFileInfo adds string to DOM element
      // Note the promise usage ...
      file.arrayBuffer().then((data) => getFileInfo(file.name, data));
    }
  }
}
I can't figure out how to call a function after all of the promises in this function finish.
Basically I want something like this, sequentially:
getFilesInfo(ev);
//getFileInfo(<file1>);
//getFileInfo(<file2>);
//getFileInfo(<file3>);
// etc.
//run only after all getFileInfo() calls have finished
processResults();
The tricky part is that reading the files generates a promise for each file, which resolves once the file has been read into memory (as part of the arrayBuffer() call). I can't figure out how to delay processResults, because getFilesInfo returns after all of the read calls have been triggered, not (from what I can tell) after the getFileInfo calls have finished.
It seems like perhaps I could somehow add all of the arrayBuffer calls to an array and then do some promise chaining (maybe?), but that seems awkward, and I'm not even sure how I would do that.
You can use Promise.all to wait for an array of promises to finish:
async function getFilesInfo(ev) {
  // create list of jobs
  const jobs = [];
  for (const item of ev.dataTransfer.items) {
    if (item.kind === 'file') {
      let file = item.getAsFile();
      jobs.push(file.arrayBuffer().then(data => {
        getFileInfo(file.name, data);
      }));
    }
  }
  // wait for all promises to fulfill
  await Promise.all(jobs);
}
https://developer.mozilla.org/fr/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
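As a usage sketch (the dropZone element and event wiring are assumptions, not part of the question):

dropZone.addEventListener('drop', async (ev) => {
  ev.preventDefault();
  await getFilesInfo(ev); // resolves only after every file has been read
  processResults();       // now safe: all getFileInfo calls have finished
});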
You could do it that way:
function getFilesInfo(ev) {
  // DataTransferItemList is not a real array, so spread it into one first.
  return [...ev.dataTransfer.items].filter(item => item.kind === 'file').map(item => {
    let file = item.getAsFile();
    return file.arrayBuffer().then((data) => getFileInfo(file.name, data));
  });
}

Promise.all(getFilesInfo(ev)).then(_ => {
  processResults();
});

// or with async/await
(async () => {
  await Promise.all(getFilesInfo(ev));
  processResults();
})()
async function getFilesInfo(ev) {
  // Spread the DataTransferItemList so .map is available.
  await Promise.all([...ev.dataTransfer.items].map(async (i) => {
    const file = i.getAsFile();
    const data = await file.arrayBuffer();
    return getFileInfo(file.name, data);
  }));
}

await getFilesInfo(ev); // waits until all the promises are resolved
processResults();
Let me know if that helps.
The conceptual hurdle I was running into was that I was thinking of the then function as returning the results, not promises. Also, many of the examples I've seen with Promise.all are usually just concatenating explicit calls, not building an array in a loop.
As suggested by Bergi, I simply added the calls to an array, and then passed that array into Promise.all
function getFilesInfo(ev) {
  // create list of jobs
  let jobs = [];
  for (const item of ev.dataTransfer.items) {
    if (item.kind === 'file') {
      let file = item.getAsFile();
      jobs.push(file.arrayBuffer().then(data => {
        getFileInfo(file.name, data);
      }));
    }
  }
  return jobs;
}

// The call in the parent:
let jobs = getFilesInfo(ev);
Promise.all(jobs).then(processResults);

Recursion with an API, using Vanilla JS

I'm playing with the Rick and Morty API, and I want to get all of the universe's characters into an array so I don't have to make more API calls for the rest of my code.
The endpoint https://rickandmortyapi.com/api/character/ returns the results in pages, so
I have to use recursion to get all the data in one API call.
I can get it to spit out results into HTML but I can't seem to get a complete array of JSON objects.
I'm using some ideas from
Axios recursion for paginating an api with a cursor
I translated the concept for my problem, and I have it posted on my Codepen
This is the code:
async function populatePeople(info, universePeople) { // Retrieve the data from the API
  let allPeople = []
  let check = ''
  try {
    return await axios.get(info)
      .then((res) => {
        // here the current page results is in res.data.results
        for (let i = 0; i < res.data.results.length; i++) {
          item.textContent = JSON.stringify(res.data.results[i])
          allPeople.push(res.data.results[i])
        }
        if (res.data.info.next) {
          check = res.data.info.next
          return allPeople.push(populatePeople(res.data.info.next, allPeople))
        }
      })
  } catch (error) {
    console.log(`Error: ${error}`)
  } finally {
    return allPeople
  }
}

populatePeople(allCharacters)
  .then(data => console.log(`Final data length: ${data.length}`))
Some sharp eyes and brains would be helpful.
It's probably something really simple and I'm just missing it.
The following line has problems:
return allPeople.push(populatePeople(res.data.info.next, allPeople))
Here you push a promise object into allPeople, and as .push() returns a number, you are returning a number, not allPeople.
Using a for loop to push individual items from one array to another is really a verbose way of copying an array. The loop is only needed for the HTML part.
Also, you are mixing .then() with await, which is making things complex. Just use await only. When using await, there is no need for recursion any more. Just replace the if with a loop:
while (info) {
  ....
  info = res.data.info.next;
}
You never assign anything to universePeople. You can drop this parameter.
Instead of the plain for loop, you can use the for...of syntax.
As from res you only use the data property, use a variable for that property only.
So taking all that together, you get this:
async function populatePeople(info) {
  let allPeople = [];
  try {
    while (info) {
      let {data} = await axios.get(info);
      for (let content of data.results) {
        const item = document.createElement('li');
        item.textContent = JSON.stringify(content);
        denizens.append(item);
      }
      allPeople.push(...data.results);
      info = data.info.next;
    }
  } catch (error) {
    console.log(`Error: ${error}`)
  } finally {
    section.append(denizens);
    return allPeople;
  }
}
Here is a working example of a recursive function:
async function getAllCharectersRecursively(URL, results) {
  try {
    const {data} = await axios.get(URL);
    // concat current page results
    results = results.concat(data.results)
    if (data.info.next) {
      // if there is a next page, call recursively
      return await getAllCharectersRecursively(data.info.next, results)
    }
    else {
      // at the last page there is no next page, so return the collected results
      return results
    }
  }
  catch (e) {
    console.log(e)
  }
}

async function main() {
  let results = await getAllCharectersRecursively("https://rickandmortyapi.com/api/character/", [])
  console.log(results.length)
}

main()
I hesitate to offer another answer because Trincot's analysis and answer is spot-on.
But I think a recursive answer here can be quite elegant. And as the question was tagged with "recursion", it seems worth presenting.
// Dummy axios so this snippet is self-contained:
const axios = {get: (url) => fetch(url).then(r => r.json())}

const populatePeople = async (url) => {
  const {info: {next}, results} = await axios.get(url)
  return [...results, ...(next ? await populatePeople(next) : [])]
}

populatePeople('https://rickandmortyapi.com/api/character/')
  // or wrap in an `async` main, or wait for global async...
  .then(people => console.log(people.map(p => p.name)))
  .catch(console.warn)
This is only concerned with fetching the data. Adding it to your DOM should be a separate step, and it shouldn't be difficult.
Update: Explanation
A comment indicated that this is hard to parse. There are two things that I imagine might be tricky here:
First is the object destructuring in {info: {next}, results} = <...>. This is just a nice way to avoid using intermediate variables to calculate the ones we actually want to use.
The second is the spread syntax in return [...results, ...<more>]. This is a simpler way to build an array than using .concat or .push. (There's a similar feature for objects.)
Here's another version doing the same thing, but with some intermediate variables and an array concatenation instead. It does the same thing:
const populatePeople = async (url) => {
  const response = await axios.get(url)
  const next = response.info && response.info.next
  const results = response.results || []
  const subsequents = next ? await populatePeople(next) : []
  return results.concat(subsequents)
}
I prefer the original version. But perhaps you would find this one more clear.

How to use Promise.all in react js ES6

What I want to do is upload files to the server, then get the URL of each uploaded file and preview it. There can be more than one file. For that purpose I have written the following code:
let filesURL = [];
let promises = [];
if (this.state.files_to_upload.length > 0) {
  for (let i = 0; i < this.state.files_to_upload.length; i++) {
    promises.push(this.uploadFilesOnServer(this.state.files_to_upload[i]))
  }
  Promise.all(promises).then(function (result) {
    console.log(result);
    result.map((file) => {
      filesURL.push(file);
    });
  });
  console.log(filesURL);
}
const uploadedFilesURL = filesURL;
console.log(uploadedFilesURL);
console.log(filesURL); gives me the values returned by Promise.all, and I want to use those values only after Promise.all completes properly. But I am facing the problem that the line console.log(uploadedFilesURL); executes first, irrespective of Promise.all, and gives me undefined values. I think I am not using promises correctly; can anyone please help me?
The uploadFilesOnServer code is:
uploadFilesOnServer(file)
{
  let files = [];
  let file_id = '';
  const image = file;
  getImageUrl().then((response) => {
    const data = new FormData();
    data.append('file-0', image);
    const {upload_url} = JSON.parse(response);
    console.log(upload_url);
    updateProfileImage(upload_url, data).then((response2) => {
      const data2 = JSON.parse(response2);
      file_id = data2;
      console.log(file_id);
      files.push(file_id);
      console.log(files);
    });
  });
  return files;
}
No, promises are asynchronous and, as such, don't work the way you think. If you want to execute something after a promise has completed, you must put it in the promise's then callback. Here is an example based on your code:
uploadFilesOnServer(file) {
  let files = [];
  let file_id = '';
  const promise = getImageUrl()
    .then((imageUrlResponse) => {
      const data = new FormData();
      data.append('file-0', file);
      const { upload_url } = JSON.parse(imageUrlResponse);
      console.log(upload_url);
      return updateProfileImage(upload_url, data);
    })
    .then((updateImageResponse) => {
      file_id = JSON.parse(updateImageResponse);
      console.log(file_id);
      files.push(file_id);
      console.log(files);
      return files;
    });
  return promise;
}

let filesPromise = Promise.resolve([]);
if (this.state.files_to_upload.length > 0) {
  const promises = this.state.files_to_upload.map((file) => {
    return this.uploadFilesOnServer(file);
  });
  filesPromise = Promise.all(promises).then((results) => {
    console.log(results);
    return [].concat(...results);
  });
}
// This is your final console.log (console.log(uploadedFilesURL);):
filesPromise.then((filesUrl) => console.log(filesUrl));
A good book to read about ES6 in general, and promises in particular, is Understanding ECMAScript 6 by Nicholas C. Zakas.
Edit:
Here is a simple explanation of the example code:
uploadFilesOnServer is a function that takes a file, uploads it, and returns the file URL when the upload completes at some point in the future, in the form of a promise. The promise calls its then callback when it gets the URL.
By using the map function, we create a list of URL promises: the results of executing uploadFilesOnServer on each file in the list.
The Promise.all method waits for all the promises in the list to complete, joins the list of URL results, and creates a promise whose result is the list of URLs. We need this because there is no guarantee that all of the promises complete at once, and we want to gather all the results in one callback for convenience.
We get the URLs from the then callback.
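To make that flow concrete, here is a tiny self-contained illustration (fakeUpload is a stand-in for the real upload, not part of the original code):

// Stand-in upload: resolves with a fake URL after a delay.
const fakeUpload = (name, ms) =>
  new Promise((resolve) => setTimeout(() => resolve(`${name}-url`), ms));

// Deliberately make the first upload the slowest.
const uploads = ['a.png', 'b.png', 'c.png'].map((f, i) => fakeUpload(f, (3 - i) * 100));

Promise.all(uploads).then((urls) => {
  // The results keep the input order, regardless of completion order:
  console.log(urls); // ['a.png-url', 'b.png-url', 'c.png-url']
});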
You have to do this in the .then part of your Promise.all():
Promise.all(promises)
  .then(function (result) {
    console.log(result);
    result.map((file) => {
      filesURL.push(file);
    });
    return true; // return from here to go to the next promise down
  })
  .then(() => {
    console.log(filesURL);
    const uploadedFilesURL = filesURL;
    console.log(uploadedFilesURL);
  })
This is the way async code works. You cannot expect your console.log(filesURL); to work correctly if it is called synchronously after the async call that fetches the files from the server.
Regarding your code, there are several problems:
1. uploadFilesOnServer must return a Promise, as it is async. Therefore:
uploadFilesOnServer(file)
{
  let files = [];
  let file_id = '';
  const image = file;
  return getImageUrl().then((response) => {
    const data = new FormData();
    data.append('file-0', image);
    const {upload_url} = JSON.parse(response);
    console.log(upload_url);
    // Return the inner promise so the chain waits for the upload to finish.
    return updateProfileImage(upload_url, data).then((response2) => {
      const data2 = JSON.parse(response2);
      file_id = data2;
      console.log(file_id);
      files.push(file_id);
      console.log(files);
      return files;
    });
  });
}
2. Inside your main function body, you can access the results of the Promise.all execution only in its respective then handler.
As a side note, I would recommend using the ES7 async/await features with a transpiler like Babel or TypeScript. This will greatly reduce the nesting and complications of writing such async code.
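For instance, a minimal async/await sketch of the same flow (assuming getImageUrl and updateProfileImage return promises, as in the question):

async function uploadFilesOnServer(file) {
  const response = await getImageUrl();
  const { upload_url } = JSON.parse(response);
  const data = new FormData();
  data.append('file-0', file);
  const response2 = await updateProfileImage(upload_url, data);
  return JSON.parse(response2); // the uploaded file's id
}

async function uploadAll(files) {
  // Upload every file in parallel and collect the results.
  return Promise.all(files.map((f) => uploadFilesOnServer(f)));
}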

Can multiple fs.write calls appending to the same file guarantee the order of execution?

Assume we have such a program:
// Imagine string1 to string1000 are very long strings, which will take a while to be written to the file system.
var arr = ["string1", ..., "string1000"];
for (let i = 0; i < 1000; i++) {
  fs.write("./same/path/file.txt", arr[i], { flag: "a" });
}
My question is: will string1 through string1000 be guaranteed to be appended to the file in that order?
Since fs.write is an async function, I am not sure how each call to fs.write() is really executed. I assume the call for each string is put somewhere (in another thread, or a call stack?) and, once the previous call is done, the next call can be executed.
I'm not really sure if my understanding is accurate.
Edit 1
As noted in the comments and answers, fs.write is not safe for multiple writes to the same file without waiting for the callback. But what about write streams?
If I use the following code, will it guarantee the order of writing?
// Imagine string1 to string1000 are very long strings, which will take a while to be written to the file system.
var arr = ["string1", ..., "string1000"];
var fileStream = fs.createWriteStream("./same/path/file.txt", { "flags": "a+" });
for (let i = 0; i < 1000; i++) {
  fileStream.write(arr[i]);
}
fileStream.on("error", () => { /* do something */ });
fileStream.on("finish", () => { /* do something */ });
fileStream.end();
Any comments or corrections will be helpful! Thanks!
The docs say that
Note that it is unsafe to use fs.write multiple times on the same file without waiting for the callback. For this scenario, fs.createWriteStream is strongly recommended.
Using a stream works because streams inherently guarantee that the order of strings being written to them is the same order that is read out of them.
var stream = fs.createWriteStream("./same/path/file.txt");
stream.on('error', console.error);
arr.forEach((str) => {
  stream.write(str + '\n');
});
stream.end();
Another way to still use individual writes, but make sure things happen in order, is to use promises to maintain the sequential logic (fs.appendFile is used below, since fs.write expects a file descriptor rather than a path):
function writeToFilePromise(str) {
  return new Promise((resolve, reject) => {
    // fs.appendFile takes a path and opens it in append mode for us.
    fs.appendFile("./same/path/file.txt", str, (err) => {
      if (err) return reject(err);
      resolve();
    });
  });
}
// For every string,
// write it to the file,
// then write the next one once that one is finished, and so on:
arr.reduce((chain, str) => {
  return chain
    .then(() => writeToFilePromise(str));
}, Promise.resolve());
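On newer Node versions (10+), the same sequential logic can also be written with fs.promises and a plain loop, which is a shorter equivalent of the reduce chain above:

const fsp = require('fs').promises;

async function appendAllInOrder(strings) {
  for (const str of strings) {
    // Each append starts only after the previous one has finished.
    await fsp.appendFile('./same/path/file.txt', str);
  }
}

appendAllInOrder(arr).catch(console.error);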
You can synchronize access to the file using read/write locking for Node; please see the following example, and you can read the documentation:
var ReadWriteLock = require('rwlock');
var lock = new ReadWriteLock();

lock.writeLock(function (release) {
  fs.appendFile(fileName, addToFile, function (err) {
    if (err)
      console.log("write error"); // logging error message
    else
      console.log("write ok");
    release(); // unlock
  });
});
I had the same problem and wrote an NPM package to solve it for my project. It works by buffering the data in an array and waiting until the event loop turns over, to concatenate and write the data in a single call to fs.appendFile:
const SeqAppend = require('seqappend');
const writeLog = SeqAppend('log1.txt');
writeLog('Several...');
writeLog('...logged...');
writeLog('.......events');
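If you'd rather not pull in a package, the core idea can be sketched in a few lines by chaining every append onto a single promise (a hypothetical makeSeqAppend helper, not the package's actual implementation):

const fsp = require('fs').promises;

function makeSeqAppend(file) {
  let chain = Promise.resolve();
  return function write(str) {
    // Queue this append behind all previous ones.
    chain = chain.then(() => fsp.appendFile(file, str));
    return chain; // callers may await an individual write if needed
  };
}

const writeLog = makeSeqAppend('log1.txt');
writeLog('Several...');
writeLog('...logged...');
writeLog('.......events');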
