Cant write to files inside of .then()? - javascript

I had a question about some code that i had working earlier, but now decides it doesnt want to work at all. Basically, I have a promise that returns me an array of data. I'll explain whats happening at the bottom of the code.
function mkdir(path){
return new Promise(function(resolve,reject){
fs.mkdir(path, { recursive: true },function(err){
if(err){console.log(err)}
})
return resolve()
})
}
function writeFile(file,content){
return new Promise(function(resolve,reject){
fs.writeFile(file, content, function(err){
if(err){console.log(err)}
return resolve()
})
})
}
function rawData(socket){
var mintDataArray = []
return new Promise(function(){
for (let i = 1; i < 13; i+= 1) {
getdat(i).then(function(r){
//mintDataArray.push(r)
for(o in r){
var dataurls = []
dataurls.push(r[o].discord_url,r[o].magic_eden_url,r[o].twitter_url)
//socket.emit('twitterFollowers',r[o])
const ProjectData = {
"mintDate": 0 || "",
"name":"",
"stock":0,
"links":[],
"mintTime": 0 || "",
"PricePlusDescription":0 || ""
}
if(r[o].mintDate != null){
ProjectData.mintDate = moment.utc(r[o].mintDate).format("MMMM Do")
}else{
ProjectData.mintDate = "No date specified yet"
}
ProjectData.name = r[o].name
ProjectData.stock = r[o].supply
ProjectData.links.push(dataurls)
ProjectData.PricePlusDescription = r[o].price
mintDataArray.push(ProjectData)
}
}).then(function(socket){
//CollectionSorter(mintDataArray)
for(i in mintDataArray){
var data = mintDataArray[i]
//console.log(data) // <----- This prints out the data that needs to be written to files.
var MintDateFolder = __dirname + "/UpcomingCollections/" +data.mintDate
if(!fs.existsSync(MintDateFolder)){
console.log('huh?')
mkdir(__dirname + '/UpcomingCollections/'+data.mintDate)
}
writeFile(MintDateFolder +"/"+data.name,JSON.stringify(data))
}
})
}
//socket.emit('twitterFollowers',mintDataArray)
})
}
So what the code is supposed to do, is check to see if that directory first exists in general. If it doesnt, then create the new directory. Then after that, its supposed to write files to it (not just that directory, but to other directories as well). It doesnt create the directory if it doesnt exists and it doesnt even write to it if i manually create the directory, however it does write to the other directories. I'm really not sure with this one because I had this working earlier, where it was creating the directory if it didn't exist. so i'm not sure what i messed up on.
I recently made mkdir and writefile functions and i thought they were the issue, because when i had this working I was just using fs.mkdir and fs.writefile. However, i went and tried again without those functions and i was still having the same troubles. I thought about making another promise to check if the directory existed but I already have quite a few nested promises.
read() function:
function read(i){
return new Promise(function(resolve,reject){
var r = https.request(options, function (res) {
var data = []
res.on('data', function (d) {
data.push(d)
}).on('end', function () {
var NFTTokenData = []
console.log(`STATUS: ${res.statusCode}`);
var info = Buffer.concat(data)
zlib.gunzip(info,function(err,buf){
var NFTData = []
var x = buf.toString()
var dat = JSON.parse(x)
var collectionList = dat.pageProps.__APOLLO_STATE__
for(keys in collectionList){
if(collectionList[keys].__typename.includes('Nft')){
collections.push(collectionList[keys])
resolve(collections)
}
}
})
})
})
r.end()
})
}
FINAL SOLUTION
function Project(fromApi) {
return {
mintDate: moment.utc(fromApi.mintDate).format("MMMM Do"),
name: fromApi.name,
imageLink:fromApi.project_image_link,
stock: fromApi.supply,
links: [
fromApi.discord_url,
fromApi.magic_eden_url,
fromApi.twitter_url
],
price: fromApi.price
}
}
function write(project, dir) {
const dest = resolve(dir, join(project.mintDate, project.name))
console.log(dest)
return stat(dirname(dest))
.catch(_ => mkdir(dirname(dest), {recursive: true}))
.then(_ => writeFile(dest, JSON.stringify(project)))
.then(_ => project)
}
function rawData(socket){
return new Promise(function(resolve,reject){
[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17].map(i =>
read(i).then(function(r){
var data = []
for(i in r){
if(r[i].__typename == "Nft"){
data.push(r[i])
}
}
data.map(item => {
var project = Project(item)
write(project,"./UpcomingCollections")
})
})
)
})
}
This is a modification of the answer down below!

You can do anything inside of .then – its only role is to sequence functions.
Let's talk about some other issues in your code -
function mkdir(path){
return new Promise(function(resolve,reject){
fs.mkdir(path, { recursive: true },function(err){
if(err){console.log(err)} // ❌ reject(err); do not log
})
return resolve() // ❌ resolve() outside of mkdir callback
})
}
function writeFile(file,content){
return new Promise(function(resolve,reject){
fs.writeFile(file, content, function(err){
if(err){console.log(err)} // ❌ reject(err); do not log
return resolve() // ⚠️ "return" not needed
})
})
}
Did you find it tedious to implement wrappers for each fs function in Node? You will be happy to know the fs/promises module already provides Promise-based APIs for each -
import { mkdir, writeFile } from "fs/promises" // ✅
// mkdir(path[, options])
// Returns: <Promise> Upon success, fulfills with undefined if recursive is false, or the first directory path created if recursive is true.
// writeFile(file, data[, options])
// Returns: <Promise> Fulfills with undefined upon success.
Next we'll look over the other issues in the main program -
function rawData(socket){
var mintDataArray = [] // ⚠️ state "outside" of Promise context
return new Promise(function(){
for (let i = 1; i < 13; i+= 1) {
getdat(i).then(function(r){
for(o in r){ // ⚠️ global "o"; don't use for..in, use for..of instead
// ,,,
mintDataArray.push(Project) // ⚠️ attempting to send state "out" of Promise
}
// ⚠️ missing "return"
// implicitly returning "undefined"
}).then(function(socket){ // ⚠️ function parameter (socket) receives resolved Promise
for(i in mintDataArray){ // ⚠️ global "i", for..in again, accessing external state
var data = mintDataArray[i]
var MintDateFolder = __dirname + "/UpcomingCollections/" +data.mintDate // ⚠️ use "path" module
if(!fs.existsSync(MintDateFolder)){ // ⚠️ async methods until this point, why Sync here?
console.log('huh?')
mkdir(__dirname + '/UpcomingCollections/'+data.mintDate) // ⚠️ asynchronous
}
writeFile(MintDateFolder +"/"+data.name,JSON.stringify(data)) // ⚠️ attempts write before mkdir completes
}
})
}
socket.emit('twitterFollowers',mintDataArray) // ⚠️ accessing external state
})
}
read and transform data
It's quite a bit of work, but don't worry. By breaking big problems down into small ones, we can work smarter, not harder. We'll start by renaming getdat to read -
read(i).then(r => {
const mintData = [] // state *inside* promise context
for (const v of r) {
// for all v of r, add project data to mintData
mintData.push({
mintDate: v.mintDate,
name: v.name,
stock: v.supply,
links: [
v.discord_url,
v.magic_eden_url,
v.twitter_url
],
price: v.price
})
}
return mintData // "return" resolves the promise
}).then(...)
Already our .then function is getting big. There's quite a bit of code for extracting and constructing the project data, so make that its own function, Project -
function Project(fromApi) {
// add date logic, or more
return {
mintDate: fromApi.mintDate,
name: fromApi.name,
stock: fromApi.supply,
links: [
fromApi.discord_url,
fromApi.magic_eden_url,
fromApi.twitter_url
],
price: fromApi.price
}
}
read(i).then(r => {
const mintData = []
for (const v of r) {
mintData.push(Project(v)) // ✅
}
return mintData
}).then(...)
Which is the same thing as -
read(i)
.then(r => r.map(Project)) // ✨ no for loop needed!
.then(...)
write
Let's check back in with your code and see our progress -
function rawData(socket){
// var mintDataArray = [] // ✅ remove external state
return new Promise(function(){
for (let i = 1; i < 13; i+= 1) {
read(i)
.then(r => r.map(Project)) // ✨
.then(function(socket) {
// ⚠️ remember, "socket" gets the resolved promise
// since `read` resolves an array of Projects, we should rename it
})
}
// ⚠️ we'll come back to this
// socket.emit('twitterFollowers',mintDataArray)
})
}
We continue with the .then(function(socket){ ... }) handler. There's a good amount of code here for creating the path, making a directory, and writing JSON to a file. Let's make that its own function called write -
import { stat, mkdir, writeFile } from "fs/promises"
import { join, resolve, dirname } from "path" // 🔍 unrelated to Promise "resolve"
function write(project, dir) {
const dest = resolve(dir, join(project.mintDate, project.name)) // ✅ sanitary path handling
return stat(dirname(dest))
.catch(_ => mkdir(dirname(dest), {recursive: true})) // create dir if not exists
.then(_ => writeFile(dest, JSON.stringify(project))) // write project JSON data
.then(_ => project) // return project
}
Our rawData function is cleaning up nicely, but we still have an outstanding issue of running async operations inside two separate loops -
function rawData(socket){
return new Promise(function(){
for (let i = 1; i < 13; i+= 1) {
read(i) //❓ how to resolve promise for each read?
.then(r => r.map(Project))
.then(projects => { // ✅ "projects", not "socket"
for (const p of projects) {
write(p, "./UpcomingCollections") //❓ how to resole promise for each write?
}
// ❓ what should we return?
})
}
// socket.emit('twitterFollowers',mintDataArray)
})
}
promises in a loop
Promise has another function we must become familiar with. Promise.all takes an array of promises and resolves only when all promises have resolved -
function myfunc(x) {
return Promise.resolve(x * 100)
}
Promise.all([myfunc(1), myfunc(2), myfunc(3)]).then(console.log)
// Promise{ [ 100, 200, 300 ] }
Promise.all([1,2,3].map(n => myfunc(n))).then(console.log)
// Promise{ [ 100, 200, 300 ] }
We can use Promise.all to clean up the two loops in rawData. And look at that, we can sequence the data directly into the socket -
function rawData(socket){
return Promise.all(
[1,2,3,4,5,6,7,8,9,10,11,12].map(i =>
read(i).then(r => r.map(Project))
)
)
.then(projects => {
Promise.all(projects.map(p => write(p, "./UpcomingCollections"))) // ✨
})
.then(projects => socket.emit("twitterFollowers", projects)) // ✨
}
all together now
We can see a lot of pain points are eliminated by using Promise-based code in an effective, idiomatic way. Until this point we have not addressed the issue of error handling, but now there's nothing left to say. Because we used Promises correctly, any errors will bubble up and the caller can .catch them and respond appropriately -
import { stat, mkdir, writeFile } from "fs/promises"
import { join, resolve, dirname } from "path"
function Project(fromApi) {
return {
mintDate: fromApi.mintDate,
name: fromApi.name,
stock: fromApi.supply,
links: [
fromApi.discord_url,
fromApi.magic_eden_url,
fromApi.twitter_url
],
price: fromApi.price
}
}
function write(project, dir) {
const dest = resolve(dir, join(project.mintDate, project.name))
return stat(dirname(dest))
.catch(_ => mkdir(dirname(dest), {recursive: true}))
.then(_ => writeFile(dest, JSON.stringify(project)))
.then(_ => project)
}
function rawData(socket){
return Promise.all(
[1,2,3,4,5,6,7,8,9,10,11,12].map(i =>
read(i).then(r => r.map(Project))
)
)
.then(projects => {
Promise.all(projects.map(p => write(p, "./UpcomingCollections")))
})
.then(projects => socket.emit("twitterFollowers", projects))
}
async..await
Modern JavaScript provides async..await syntax allowing us to blur the lines between synchronous and asynchronous code. This allows us to remove many .then calls, flattens our code, reduces cognitive load, and shares asynchronous values in the same scope -
async function rawData(socket) {
const r = await Promise.all([1,2,3,4,5,6,7,8,9,10,11,12].map(read))
const projects = await Promise.all(
r.flatMap(Project).map(p => write(p, "./UpcomingCollections"))
)
return socket.emit("twitterfollowers", projects)
}
Some of the complications of this program stems from the use of nested data. Promise.all is efficient and runs the promises in parallel, but keeping the data nested in the array makes it a bit harder to work with. To show that async..await truly blurs the line between sync and async, we will bring back the two for..of loops and call await in the loop. This results in a serial order processing but readability is terrific. Maybe your use-case isn't hyper demanding and so this style is completely adequate -
async function rawData(socket) {
for (const i of [1,2,3,4,5,6,7,8,9,10,11,12]) {
const result = await read(i)
for (const r of result) {
const project = Project(r)
await write(project, "./UpcomingCollections")
socket.emit("twitterFollowers", project)
}
}
}

Mulan's answer was correct, I just needed to change the for loop in my other function, to a .map. After that, everything worked perfectly. I had to do some modification to the rawData function as seen below.
function Project(fromApi) {
return {
mintDate: moment.utc(fromApi.mintDate).format("MMMM Do"),
name: fromApi.name,
imageLink:fromApi.project_image_link,
stock: fromApi.supply,
links: [
fromApi.discord_url,
fromApi.magic_eden_url,
fromApi.twitter_url
],
price: fromApi.price
}
}
function write(project, dir) {
const dest = resolve(dir, join(project.mintDate, project.name))
console.log(dest)
return stat(dirname(dest))
.catch(_ => mkdir(dirname(dest), {recursive: true}))
.then(_ => writeFile(dest, JSON.stringify(project)))
.then(_ => project)
}
function rawData(socket){
return new Promise(function(resolve,reject){
[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17].map(i =>
read(i).then(function(r){
var data = []
for(i in r){
if(r[i].__typename == "Nft"){
data.push(r[i])
}
}
data.map(item => {
var project = Project(item)
write(project,"./UpcomingCollections")
})
})
)
})
}

Related

Wrap a resultset callback function with a generator/iterator

I'm working on converting a legacy callback-based API into an async library. But I just can't wrap my head around getting a "resultset" to work as a generator (Node 10.x).
The original API works like this:
api.prepare((err, rs) => {
rs.fetchRows(
(err, row) => {
// this callback is called as many times as rows exist
console.log("here's a row:", row);
},
() => {
console.log("we're done, data exausted");
}
);
});
But here is how I want to use it:
const wrapped = new ApiWrapper(api);
const rs = await wrapped.prepare({});
for (let row of rs.rows()) {
console.log("here's a row:", row);
}
let row;
while(row = await rs.next()) {
console.log("here's a row:", row);
}
I thought I had it under control with generators, but it looks like you cannot use yield inside a callback. It actually seems logical if you think about.
class ApiWrapper {
constructor(api) {
this.api = api;
}
prepare() {
return new Promise((resolve, reject) => {
this.api.prepare((err, rs) => {
if (err) {
reject(err);
} else {
resolve(rs);
}
});
});
}
*rows() {
this.api.fetchRows((err, row) => {
if (err) {
throw err;
} else {
yield row; // nope, not allowed here
}
});
}
next() { ... }
}
So what alternatives do I have?
Important: I don't want to store anything in an array then iterate that, we're talking giga-loads of row data here.
Edit
I'm able to simulate the behavior I want using stream.Readable but it warns me that it's an experimental feature. Here's a simplified array-based version of the issue I'm trying to solve using stream:
const stream = require('stream');
function gen(){
const s = new stream.Readable({
objectMode: true,
read(){
[11, 22, 33].forEach(row => {
this.push({ value: row });
});
this.push(null)
}
});
return s;
}
for await (let row of gen()) {
console.log(row);
}
// { value: 11 }
// { value: 22 }
// { value: 33 }
(node:97157) ExperimentalWarning: Readable[Symbol.asyncIterator] is an experimental feature. This feature could change at any time
I finally realized I needed something similar to Go's channels that were async/await compatible. Basically the answer is to synchronize an async iterator and a callback, making them wait for each other as next() iterations are consumed.
The best (Node) native solution I found was to use a stream as an iterator, which is supported in Node 10.x but tagged experimental. I also tried to implement it with the p-defer NPM module, but that turned out to be more involved than I expected. Finally ran across the https://www.npmjs.com/package/#nodeguy/channel module, which was exactly what I needed:
const Channel = require('#nodeguy/channel');
class ApiWrapper {
// ...
rows() {
const channel = new Channel();
const iter = {
[Symbol.asyncIterator]() {
return this;
},
async next() {
const val = await channel.shift();
if (val === undefined) {
return { done: true };
} else {
return { done: false, value: val };
}
}
};
this.api.fetchRows(async (err, row) => {
await channel.push(row);
}).then(() => channel.close());
return iter;
}
}
// then later
for await (let row of rs.rows()) {
console.log(row)
}
Note how each iterating function core, next() and rows(), have a await that will throttle how much data can be pushed across the channel, otherwise the producing callback could end up pushing data uncontrollably into the channel queue. The idea is that the callback should wait for data to be consumed by the iterator next() before pushing more.
Here's a more self-contained example:
const Channel = require('#nodeguy/channel');
function iterating() {
const channel = Channel();
const iter = {
[Symbol.asyncIterator]() {
return this;
},
async next() {
console.log('next');
const val = await channel.shift();
if (val === undefined) {
return { done: true };
} else {
return { done: false, value: val };
}
}
};
[11, 22, 33].forEach(async it => {
await channel.push(it);
console.log('pushed', it);
});
console.log('returned');
return iter;
}
(async function main() {
for await (let it of iterating()) {
console.log('got', it);
}
})();
/*
returned
next
pushed 11
got 11
next
pushed 22
got 22
next
pushed 33
got 33
next
*/
Like I said, Streams and/or Promises can be used to implement this, but the Channel module solves some of the complexity that make it more intuitive.
The original question has two nested callback taking async functions
api.prepare((err,res) => ...)
rs.fetchRows((err,res) => ...)
The first one runs the callback only once so just promisifying it as follows is sufficient.
function promisfied(f){
return new Promise((v,x) => f(x,v));
}
However the second function will invoke it's callback multiple times and we wish to generate an async iterable from this function such that we can consume it in a for await of loop.
This is also possible by employing async generators as follows;
async function* rowEmitterGenerator(rs){
let _v, // previous resolve
_x, // previous reject
_row = new Promise((v,x) => (_v = v, _x = x));
rs.fetchRows((err, row) => ( err ? _x(err) : _v(row)
, _row = new Promise((v,x) => (_v = v, _x = x))
));
while(true){
try {
yield _row;
}
catch(e){
console.log(e);
}
}
}
Then putting all together in a top level await context;
const rows = await promisified(api.prepare),
rowEmitter = rowEmitterGenerator(rows);
for await (let row of rowEmitter){
console.log(`Received row: ${row}`);
// do something with the row
}

javascript promise after foreach loop with multiple mongoose find

I'm trying to have a loop with some db calls, and once their all done ill send the result. - Using a promise, but if i have my promise after the callback it dosent work.
let notuser = [];
let promise = new Promise((resolve, reject) => {
users.forEach((x) => {
User.find({
/* query here */
}, function(err, results) {
if(err) throw err
if(results.length) {
notuser.push(x);
/* resolve(notuser) works here - but were not done yet*/
}
})
});
resolve(notuser); /*not giving me the array */
}).then((notuser) => {
return res.json(notuser)
})
how can i handle this ?
Below is a function called findManyUsers which does what you're looking for. Mongo find will return a promise to you, so just collect those promises in a loop and run them together with Promise.all(). So you can see it in action, I've added a mock User class with a promise-returning find method...
// User class pretends to be the mongo user. The find() method
// returns a promise to 'find" a user with a given id
class User {
static find(id) {
return new Promise(r => {
setTimeout(() => r({ id: `user-${id}` }), 500);
});
}
}
// return a promise to find all of the users with the given ids
async function findManyUsers(ids) {
let promises = ids.map(id => User.find(id));
return Promise.all(promises);
}
findManyUsers(['A', 'B', 'C']).then(result => console.log(result));
I suggest you take a look at async it's a great library for this sort of things and more, I really think you should get used to implement it.
I would solve your problem using the following
const async = require('async')
let notuser = [];
async.forEach(users, (user, callback)=>{
User.find({}, (err, results) => {
if (err) callback(err)
if(results.length) {
notUser.push(x)
callback(null)
}
})
}, (err) => {
err ? throw err : return(notuser)
})
However, if you don't want to use a 3rd party library, you are better off using promise.all and await for it to finish.
EDIT: Remember to install async using npm or yarn something similar to yarn add async -- npm install async
I used #danh solution for the basis of fixing in my scenario (so credit goes there), but thought my code may be relevant to someone else, looking to use standard mongoose without async. I want to gets a summary of how many reports for a certain status and return the last 5 for each, combined into one response.
const { Report } = require('../../models/report');
const Workspace = require('../../models/workspace');
// GET request to return page of items from users report
module.exports = (req, res, next) => {
const workspaceId = req.params.workspaceId || req.workspaceId;
let summary = [];
// returns a mongoose like promise
function addStatusSummary(status) {
let totalItems;
let $regex = `^${status}$`;
let query = {
$and: [{ workspace: workspaceId }, { status: { $regex, $options: 'i' } }],
};
return Report.find(query)
.countDocuments()
.then((numberOfItems) => {
totalItems = numberOfItems;
return Report.find(query)
.sort({ updatedAt: -1 })
.skip(0)
.limit(5);
})
.then((reports) => {
const items = reports.map((r) => r.displayForMember());
summary.push({
status,
items,
totalItems,
});
})
.catch((err) => {
if (!err.statusCode) {
err.statusCode = 500;
}
next(err);
});
}
Workspace.findById(workspaceId)
.then((workspace) => {
let promises = workspace.custom.statusList.map((status) =>
addStatusSummary(status)
);
return Promise.all(promises);
})
.then(() => {
res.status(200).json({
summary,
});
})
.catch((err) => {
if (!err.statusCode) {
err.statusCode = 500;
}
next(err);
});
};

Javascript Promise.all() method to fire after all errors and success – surprised that finally() doesnt do this [duplicate]

Let's say I have a set of Promises that are making network requests, of which one will fail:
// http://does-not-exist will throw a TypeError
var arr = [ fetch('index.html'), fetch('http://does-not-exist') ]
Promise.all(arr)
.then(res => console.log('success', res))
.catch(err => console.log('error', err)) // This is executed
Let's say I want to wait until all of these have finished, regardless of if one has failed. There might be a network error for a resource that I can live without, but which if I can get, I want before I proceed. I want to handle network failures gracefully.
Since Promise.all doesn't leave any room for this, what is the recommended pattern for handling this, without using a promises library?
Update, you probably want to use the built-in native Promise.allSettled:
Promise.allSettled([promise]).then(([result]) => {
//reach here regardless
// {status: "fulfilled", value: 33}
});
As a fun fact, this answer below was prior art in adding that method to the language :]
Sure, you just need a reflect:
const reflect = p => p.then(v => ({v, status: "fulfilled" }),
e => ({e, status: "rejected" }));
reflect(promise).then((v) => {
console.log(v.status);
});
Or with ES5:
function reflect(promise){
return promise.then(function(v){ return {v:v, status: "fulfilled" }},
function(e){ return {e:e, status: "rejected" }});
}
reflect(promise).then(function(v){
console.log(v.status);
});
Or in your example:
var arr = [ fetch('index.html'), fetch('http://does-not-exist') ]
Promise.all(arr.map(reflect)).then(function(results){
var success = results.filter(x => x.status === "fulfilled");
});
Similar answer, but more idiomatic for ES6 perhaps:
const a = Promise.resolve(1);
const b = Promise.reject(new Error(2));
const c = Promise.resolve(3);
Promise.all([a, b, c].map(p => p.catch(e => e)))
.then(results => console.log(results)) // 1,Error: 2,3
.catch(e => console.log(e));
const console = { log: msg => div.innerHTML += msg + "<br>"};
<div id="div"></div>
Depending on the type(s) of values returned, errors can often be distinguished easily enough (e.g. use undefined for "don't care", typeof for plain non-object values, result.message, result.toString().startsWith("Error:") etc.)
Benjamin's answer offers a great abstraction for solving this issue, but I was hoping for a less abstracted solution. The explicit way to to resolve this issue is to simply call .catch on the internal promises, and return the error from their callback.
let a = new Promise((res, rej) => res('Resolved!')),
b = new Promise((res, rej) => rej('Rejected!')),
c = a.catch(e => { console.log('"a" failed.'); return e; }),
d = b.catch(e => { console.log('"b" failed.'); return e; });
Promise.all([c, d])
.then(result => console.log('Then', result)) // Then ["Resolved!", "Rejected!"]
.catch(err => console.log('Catch', err));
Promise.all([a.catch(e => e), b.catch(e => e)])
.then(result => console.log('Then', result)) // Then ["Resolved!", "Rejected!"]
.catch(err => console.log('Catch', err));
Taking this one step further, you could write a generic catch handler that looks like this:
const catchHandler = error => ({ payload: error, resolved: false });
then you can do
> Promise.all([a, b].map(promise => promise.catch(catchHandler))
.then(results => console.log(results))
.catch(() => console.log('Promise.all failed'))
< [ 'Resolved!', { payload: Promise, resolved: false } ]
The problem with this is that the caught values will have a different interface than the non-caught values, so to clean this up you might do something like:
const successHandler = result => ({ payload: result, resolved: true });
So now you can do this:
> Promise.all([a, b].map(result => result.then(successHandler).catch(catchHandler))
.then(results => console.log(results.filter(result => result.resolved))
.catch(() => console.log('Promise.all failed'))
< [ 'Resolved!' ]
Then to keep it DRY, you get to Benjamin's answer:
const reflect = promise => promise
.then(successHandler)
.catch(catchHander)
where it now looks like
> Promise.all([a, b].map(result => result.then(successHandler).catch(catchHandler))
.then(results => console.log(results.filter(result => result.resolved))
.catch(() => console.log('Promise.all failed'))
< [ 'Resolved!' ]
The benefits of the second solution are that its abstracted and DRY. The downside is you have more code, and you have to remember to reflect all your promises to make things consistent.
I would characterize my solution as explicit and KISS, but indeed less robust. The interface doesn't guarantee that you know exactly whether the promise succeeded or failed.
For example you might have this:
const a = Promise.resolve(new Error('Not beaking, just bad'));
const b = Promise.reject(new Error('This actually didnt work'));
This won't get caught by a.catch, so
> Promise.all([a, b].map(promise => promise.catch(e => e))
.then(results => console.log(results))
< [ Error, Error ]
There's no way to tell which one was fatal and which was wasn't. If that's important then you're going to want to enforce and interface that tracks whether it was successful or not (which reflect does).
If you just want to handle errors gracefully, then you can just treat errors as undefined values:
> Promise.all([a.catch(() => undefined), b.catch(() => undefined)])
.then((results) => console.log('Known values: ', results.filter(x => typeof x !== 'undefined')))
< [ 'Resolved!' ]
In my case, I don't need to know the error or how it failed--I just care whether I have the value or not. I'll let the function that generates the promise worry about logging the specific error.
const apiMethod = () => fetch()
.catch(error => {
console.log(error.message);
throw error;
});
That way, the rest of the application can ignore its error if it wants, and treat it as an undefined value if it wants.
I want my high level functions to fail safely and not worry about the details on why its dependencies failed, and I also prefer KISS to DRY when I have to make that tradeoff--which is ultimately why I opted to not use reflect.
There is a finished proposal for a function which can accomplish this natively, in vanilla Javascript: Promise.allSettled, which has made it to stage 4, is officialized in ES2020, and is implemented in all modern environments. It is very similar to the reflect function in this other answer. Here's an example, from the proposal page. Before, you would have had to do:
function reflect(promise) {
return promise.then(
(v) => {
return { status: 'fulfilled', value: v };
},
(error) => {
return { status: 'rejected', reason: error };
}
);
}
const promises = [ fetch('index.html'), fetch('https://does-not-exist/') ];
const results = await Promise.all(promises.map(reflect));
const successfulPromises = results.filter(p => p.status === 'fulfilled');
Using Promise.allSettled instead, the above will be equivalent to:
const promises = [ fetch('index.html'), fetch('https://does-not-exist/') ];
const results = await Promise.allSettled(promises);
const successfulPromises = results.filter(p => p.status === 'fulfilled');
Those using modern environments will be able to use this method without any libraries. In those, the following snippet should run without problems:
Promise.allSettled([
Promise.resolve('a'),
Promise.reject('b')
])
.then(console.log);
Output:
[
{
"status": "fulfilled",
"value": "a"
},
{
"status": "rejected",
"reason": "b"
}
]
For older browsers, there is a spec-compliant polyfill here.
I really like Benjamin's answer, and how he basically turns all promises into always-resolving-but-sometimes-with-error-as-a-result ones. :)
Here's my attempt at your request just in case you were looking for alternatives. This method simply treats errors as valid results, and is coded similar to Promise.all otherwise:
Promise.settle = function(promises) {
var results = [];
var done = promises.length;
return new Promise(function(resolve) {
function tryResolve(i, v) {
results[i] = v;
done = done - 1;
if (done == 0)
resolve(results);
}
for (var i=0; i<promises.length; i++)
promises[i].then(tryResolve.bind(null, i), tryResolve.bind(null, i));
if (done == 0)
resolve(results);
});
}
var err;
Promise.all([
promiseOne().catch(function(error) { err = error;}),
promiseTwo().catch(function(error) { err = error;})
]).then(function() {
if (err) {
throw err;
}
});
The Promise.all will swallow any rejected promise and store the error in a variable, so it will return when all of the promises have resolved. Then you can re-throw the error out, or do whatever. In this way, I guess you would get out the last rejection instead of the first one.
I had the same problem and have solved it in the following way:
const fetch = (url) => {
return node-fetch(url)
.then(result => result.json())
.catch((e) => {
return new Promise((resolve) => setTimeout(() => resolve(fetch(url)), timeout));
});
};
tasks = [fetch(url1), fetch(url2) ....];
Promise.all(tasks).then(......)
In that case Promise.all will wait for every Promise will come into resolved or rejected state.
And having this solution we are "stopping catch execution" in a non-blocking way. In fact, we're not stopping anything, we just returning back the Promise in a pending state which returns another Promise when it's resolved after the timeout.
This should be consistent with how Q does it:
if(!Promise.allSettled) {
Promise.allSettled = function (promises) {
return Promise.all(promises.map(p => Promise.resolve(p).then(v => ({
state: 'fulfilled',
value: v,
}), r => ({
state: 'rejected',
reason: r,
}))));
};
}
Instead of rejecting, resolve it with a object.
You could do something like this when you are implementing promise
const promise = arg => {
return new Promise((resolve, reject) => {
setTimeout(() => {
try{
if(arg != 2)
return resolve({success: true, data: arg});
else
throw new Error(arg)
}catch(e){
return resolve({success: false, error: e, data: arg})
}
}, 1000);
})
}
Promise.all([1,2,3,4,5].map(e => promise(e))).then(d => console.log(d))
Benjamin Gruenbaum answer is of course great,. But I can also see were Nathan Hagen point of view with the level of abstraction seem vague. Having short object properties like e & v don't help either, but of course that could be changed.
In Javascript there is standard Error object, called Error,. Ideally you always throw an instance / descendant of this. The advantage is that you can do instanceof Error, and you know something is an error.
So using this idea, here is my take on the problem.
Basically catch the error, if the error is not of type Error, wrap the error inside an Error object. The resulting array will have either resolved values, or Error objects you can check on.
The instanceof inside the catch, is in case you use some external library that maybe did reject("error"), instead of reject(new Error("error")).
Of course you could have promises were you resolve an error, but in that case it would most likely make sense to treat as an error anyway, like the last example shows.
Another advantage of doing it this, array destructing is kept simple.
const [value1, value2] = PromiseAllCatch(promises);
if (!(value1 instanceof Error)) console.log(value1);
Instead of
const [{v: value1, e: error1}, {v: value2, e: error2}] = Promise.all(reflect..
if (!error1) { console.log(value1); }
You could argue that the !error1 check is simpler than an instanceof, but your also having to destruct both v & e.
function PromiseAllCatch(promises) {
return Promise.all(promises.map(async m => {
try {
return await m;
} catch(e) {
if (e instanceof Error) return e;
return new Error(e);
}
}));
}
async function test() {
const ret = await PromiseAllCatch([
(async () => "this is fine")(),
(async () => {throw new Error("oops")})(),
(async () => "this is ok")(),
(async () => {throw "Still an error";})(),
(async () => new Error("resolved Error"))(),
]);
console.log(ret);
console.log(ret.map(r =>
r instanceof Error ? "error" : "ok"
).join(" : "));
}
test();
I think the following offers a slightly different approach... compare fn_fast_fail() with fn_slow_fail()... though the latter doesn't fail as such... you can check if one or both of a and b is an instance of Error and throw that Error if you want it to reach the catch block (e.g. if (b instanceof Error) { throw b; }) . See the jsfiddle.
var p1 = new Promise((resolve, reject) => {
setTimeout(() => resolve('p1_delayed_resolvement'), 2000);
});
var p2 = new Promise((resolve, reject) => {
reject(new Error('p2_immediate_rejection'));
});
var fn_fast_fail = async function () {
try {
var [a, b] = await Promise.all([p1, p2]);
console.log(a); // "p1_delayed_resolvement"
console.log(b); // "Error: p2_immediate_rejection"
} catch (err) {
console.log('ERROR:', err);
}
}
var fn_slow_fail = async function () {
try {
var [a, b] = await Promise.all([
p1.catch(error => { return error }),
p2.catch(error => { return error })
]);
console.log(a); // "p1_delayed_resolvement"
console.log(b); // "Error: p2_immediate_rejection"
} catch (err) {
// we don't reach here unless you throw the error from the `try` block
console.log('ERROR:', err);
}
}
fn_fast_fail(); // fails immediately
fn_slow_fail(); // waits for delayed promise to resolve
I just wanted a polyfill that exactly replicated ES2020 behaviour since I'm locked into node versions a lot earlier than 12.9 (when Promise.allSettled appeared), unfortunately. So for what it's worth, this is my version:
const settle = (promise) => (promise instanceof Promise) ?
promise.then(val => ({ value: val, status: "fulfilled" }),
err => ({ reason: err, status: "rejected" })) :
{ value: promise, status: 'fulfilled' };
const allSettled = async (parr) => Promise.all(parr.map(settle));
This handles a mixed array of promise and non-promise values, as does the ES version. It hands back the same array of { status, value/reason } objects as the native version.
Here's my custom settledPromiseAll()
const settledPromiseAll = function(promisesArray) {
var savedError;
const saveFirstError = function(error) {
if (!savedError) savedError = error;
};
const handleErrors = function(value) {
return Promise.resolve(value).catch(saveFirstError);
};
const allSettled = Promise.all(promisesArray.map(handleErrors));
return allSettled.then(function(resolvedPromises) {
if (savedError) throw savedError;
return resolvedPromises;
});
};
Compared to Promise.all
If all promises are resolved, it performs exactly as the standard one.
If one of more promises are rejected, it returns the first one rejected much the same as the standard one but unlike it waits for all promises to resolve/reject.
For the brave we could change Promise.all():
(function() {
var stdAll = Promise.all;
Promise.all = function(values, wait) {
if(!wait)
return stdAll.call(Promise, values);
return settledPromiseAll(values);
}
})();
CAREFUL. In general we never change built-ins, as it might break other unrelated JS libraries or clash with future changes to JS standards.
My settledPromiseall is backward compatible with Promise.all and extends its functionality.
People who are developing standards -- why not include this to a new Promise standard?
I recently built a library that allows what you need. it executes promises in parallel, and if one fails, the process continues, at the end it returns an array with all the results, including errors.
https://www.npmjs.com/package/promise-ax
I hope and it is helpful for someone.
const { createPromise } = require('promise-ax');
const promiseAx = createPromise();
const promise1 = Promise.resolve(4);
const promise2 = new Promise((resolve, reject) => setTimeout(reject, 100, new Error("error")));
const promise3 = Promise.reject("error");
const promise4 = promiseAx.resolve(8);
const promise5 = promiseAx.reject("errorAx");
const asyncOperation = (time) => {
return new Promise((resolve, reject) => {
if (time < 0) {
reject("reject");
}
setTimeout(() => {
resolve(time);
}, time);
});
};
const promisesToMake = [promise1, promise2, promise3, promise4, promise5, asyncOperation(100)];
promiseAx.allSettled(promisesToMake).then((results) => results.forEach((result) => console.log(result)));
// Salida esperada:
// 4
// Error: error
// error
// 8
// errorAx
// 100
I would do:
var err = [fetch('index.html').then((success) => { return Promise.resolve(success); }).catch((e) => { return Promise.resolve(e); }),
fetch('http://does-not-exist').then((success) => { return Promise.resolve(success); }).catch((e) => { return Promise.resolve(e); })];
Promise.all(err)
.then(function (res) { console.log('success', res) })
.catch(function (err) { console.log('error', err) }) //never executed
I've been using following codes since ES5.
Promise.wait = function(promiseQueue){
if( !Array.isArray(promiseQueue) ){
return Promise.reject('Given parameter is not an array!');
}
if( promiseQueue.length === 0 ){
return Promise.resolve([]);
}
return new Promise((resolve, reject) =>{
let _pQueue=[], _rQueue=[], _readyCount=false;
promiseQueue.forEach((_promise, idx) =>{
// Create a status info object
_rQueue.push({rejected:false, seq:idx, result:null});
_pQueue.push(Promise.resolve(_promise));
});
_pQueue.forEach((_promise, idx)=>{
let item = _rQueue[idx];
_promise.then(
(result)=>{
item.resolved = true;
item.result = result;
},
(error)=>{
item.resolved = false;
item.result = error;
}
).then(()=>{
_readyCount++;
if ( _rQueue.length === _readyCount ) {
let result = true;
_rQueue.forEach((item)=>{result=result&&item.resolved;});
(result?resolve:reject)(_rQueue);
}
});
});
});
};
The usage signature is just like Promise.all. The major difference is that Promise.wait will wait for all the promises to finish their jobs.
I know that this question has a lot of answers, and I'm sure must (if not all) are correct.
However it was very hard for me to understand the logic/flow of these answers.
So I looked at the Original Implementation on Promise.all(), and I tried to imitate that logic - with the exception of not stopping the execution if one Promise failed.
public promiseExecuteAll(promisesList: Promise<any>[] = []): Promise<{ data: any, isSuccess: boolean }[]>
{
let promise: Promise<{ data: any, isSuccess: boolean }[]>;
if (promisesList.length)
{
const result: { data: any, isSuccess: boolean }[] = [];
let count: number = 0;
promise = new Promise<{ data: any, isSuccess: boolean }[]>((resolve, reject) =>
{
promisesList.forEach((currentPromise: Promise<any>, index: number) =>
{
currentPromise.then(
(data) => // Success
{
result[index] = { data, isSuccess: true };
if (promisesList.length <= ++count) { resolve(result); }
},
(data) => // Error
{
result[index] = { data, isSuccess: false };
if (promisesList.length <= ++count) { resolve(result); }
});
});
});
}
else
{
promise = Promise.resolve([]);
}
return promise;
}
Explanation:
- Loop over the input promisesList and execute each Promise.
- No matter if the Promise resolved or rejected: save the Promise's result in a result array according to the index. Save also the resolve/reject status (isSuccess).
- Once all Promises completed, return one Promise with the result of all others.
Example of use:
const p1 = Promise.resolve("OK");
const p2 = Promise.reject(new Error(":-("));
const p3 = Promise.resolve(1000);
promiseExecuteAll([p1, p2, p3]).then((data) => {
data.forEach(value => console.log(`${ value.isSuccess ? 'Resolve' : 'Reject' } >> ${ value.data }`));
});
/* Output:
Resolve >> OK
Reject >> :-(
Resolve >> 1000
*/
You can execute your logic sequentially via synchronous executor nsynjs. It will pause on each promise, wait for resolution/rejection, and either assign resolve's result to data property, or throw an exception (for handling that you will need try/catch block). Here is an example:
function synchronousCode() {
function myFetch(url) {
try {
return window.fetch(url).data;
}
catch (e) {
return {status: 'failed:'+e};
};
};
var arr=[
myFetch("https://ajax.googleapis.com/ajax/libs/jquery/2.0.0/jquery.min.js"),
myFetch("https://ajax.googleapis.com/ajax/libs/jquery/2.0.0/NONEXISTANT.js"),
myFetch("https://ajax.NONEXISTANT123.com/ajax/libs/jquery/2.0.0/NONEXISTANT.js")
];
console.log('array is ready:',arr[0].status,arr[1].status,arr[2].status);
};
nsynjs.run(synchronousCode,{},function(){
console.log('done');
});
<script src="https://rawgit.com/amaksr/nsynjs/master/nsynjs.js"></script>
Promise.all with using modern async/await approach
const promise1 = //...
const promise2 = //...
const data = await Promise.all([promise1, promise2])
const dataFromPromise1 = data[0]
const dataFromPromise2 = data[1]
I don't know which promise library you are using, but most have something like allSettled.
Edit: Ok since you want to use plain ES6 without external libraries, there is no such method.
In other words: You have to loop over your promises manually and resolve a new combined promise as soon as all promises are settled.

How to make run nested asynchronous methods synchronously?

How do I wrap this routine inside a Promise so that I only resolve when I get all the data?
var accounts = [];
getAccounts(userId, accs => {
accs.forEach(acc => {
getAccountTx(acc.id, tx => {
accounts.push({
'id': acc.id,
'tx': tx
});
});
})
});
EDIT: Any issues if I do it like this?
function getAccountsAllAtOnce() {
var accounts = [];
var required = 0;
var done = 0;
getAccounts(userId, accs => {
required = accs.length;
accs.forEach(acc => {
getAccountTx(acc.id, tx => {
accounts.push({
'id': acc.id,
'tx': tx
});
done = done + 1;
});
})
});
while(done < required) {
// wait
}
return accounts;
}
Let's put this routine into a separate function, so it is easier to re-use it later. This function should return a promise, which will be resolved with array of accounts (also I'll modify your code as small as possible):
function getAccountsWithTx(userId) {
return new Promise((resolve, reject) => {
var accounts = [];
getAccounts(userId, accs => {
accs.forEach(acc => {
getAccountTx(acc.id, tx => {
accounts.push({
'id': acc.id,
'tx': tx
});
// resolve after we fetched all accounts
if (accs.length === accounts.length) {
resolve(accounts);
}
});
});
});
});
}
The single difference is just returning a promise and resolving after all accounts were fetched. However, callbacks tend your codebase to have this "callback hell" style, when you have a lot of nested callbacks, and it makes it hard to reason about it. You can workaround it using good discipline, but you can simplify it greatly switching to returning promises from all async functions. For example your func will look like the following:
function getAccountsWithTx(userId) {
getAccounts(userId)
.then(accs => {
const transformTx = acc => getAccountTx(acc.id)
.then(tx => ({ tx, id: acc.id }));
return Promise.all(accs.map(transformTx));
});
}
Both of them are absolutely equivalent, and there are plently of libraries to "promisify" your current callback-style functions (for example, bluebird or even native Node util.promisify). Also, with new async/await syntax it becomes even easier, because it allows to think in sync flow:
async function getAccountsWithTx(userId) {
const accs = await getUserAccounts(userId);
const transformTx = async (acc) => {
const tx = getAccountTx(acc.id);
return { tx, id: acc.id };
};
return Promise.all(accs.map(transformTx));
}
As you can see, we eliminate any nesting! It makes reasoning about code much easier, because you can read code as it will be actually executed. However, all these three options are equivalent, so it is up to you, what makes the most sense in your project and environment.
I'd split every step into its own function, and return a promise or promise array from each one. For example, getAccounts becomes:
function getAccountsAndReturnPromise(userId) {
return new Promise((resolve, reject) => {
getAccounts(userId, accounts => {
return resolve(accounts);
});
});
};
And getAccountTx resolves to an array of { id, tx } objects:
function getAccountTransactionsAndReturnPromise(accountId) {
return new Promise((resolve, reject) => {
getAccountTx(account.id, (transactions) => {
var accountWithTransactions = {
id: account.id,
transactions
};
return resolve(accountWithTransactions);
});
});
};
Then you can use Promise.all() and map() to resolve the last step to an array of values in the format you desire:
function getDataForUser(userId) {
return getAccountsAndReturnPromise(userId)
.then(accounts=>{
var accountTransactionPromises = accounts.map(account =>
getAccountTransactionsAndReturnPromise(account.id)
);
return Promise.all(accountTransactionPromises);
})
.then(allAccountsWithTransactions => {
return allAccountsWithTransactions.map(account =>{
return {
id: account.id,
tx: tx
}
});
});
}

Unable to get promises to work, subsequent promises not being called

I'm trying to extend some existing code with additional promises, but they are a new topic for me at the moment and i'm obviously missing something. This is running as part of a build scrip for npm.
All i am currently trying to make happen is for the final then to be called after the pack operation has happened for each architecture. I have tried wrapping it in a
return new Promise
But at the moment i am not returning anything from that function so i'm not sure what i should include in the resolve call at the end. If i just call the resolve with a true nothing happens, and wrapping it in a promise seems to cause the function to not actually run, and no errors are caught anywhere?
I'm guessing i am going about this completely wrong, all i want to achieve is to run another function once the previous one has completed?
Here's the code as it stands with the additional .then that i can't get to be called.
function build(cfg) {
return new Promise((resolve, reject) => {
webpack(cfg, (err, stats) => {
if (err) return reject(err);
resolve(stats);
});
});
}
function startPack() {
console.log('start pack...');
build(electronCfg)
.then(() => build(cfg))
.then(() => del('release'))
.then(paths => {
if (shouldBuildAll) {
// build for all platforms
const archs = ['ia32', 'x64'];
const platforms = ['linux', 'win32', 'darwin'];
platforms.forEach(plat => {
archs.forEach(arch => {
pack(plat, arch, log(plat, arch));
});
});
} else {
// build for current platform only
pack(os.platform(), os.arch(), log(os.platform(), os.arch()));
}
})
.then(() => {
console.log('then!');
})
.catch(err => {
console.error(err);
});
}
function pack(plat, arch, cb) {
// there is no darwin ia32 electron
if (plat === 'darwin' && arch === 'ia32') return;
const iconObj = {
icon: DEFAULT_OPTS.icon + (() => {
let extension = '.png';
if (plat === 'darwin') {
extension = '.icns';
} else if (plat === 'win32') {
extension = '.ico';
}
return extension;
})()
};
const opts = Object.assign({}, DEFAULT_OPTS, iconObj, {
platform: plat,
arch,
prune: true,
'app-version': pkg.version || DEFAULT_OPTS.version,
out: `release/${plat}-${arch}`,
'osx-sign': true
});
packager(opts, cb);
}
You didn't say what log is, but if it's a plain logging function, then it looks like you're passing in undefined (the result from calling log(...)) as the cb argument to pack. Perhaps you meant:
pack(plat, arch, () => log(plat, arch));
In any case, this won't do anything to wait for packing to finish. I don't know why you're not seeing any console output, but if you're looking for this output to happen after all the packing has finished, then you need to wrap packager in a promise. Something like:
var pack = (plat, arch) => new Promise(resolve => {
// ...
packager(opts, resolve);
});
And then use Promise.all instead of forEach to do all the packaging (in parallel if that's OK):
.then(paths => {
if (!shouldBuildAll) {
return pack(os.platform(), os.arch());
}
return Promise.all(['linux', 'win32', 'darwin'].map(plat =>
Promise.all(['ia32', 'x64'].map(arch => pack(plat, arch))));
})
.then(() => console.log('then!'))
.catch(err => console.error(err));

Categories