Return when first promise resolves - javascript

Goal
I have a bunch of file names in an array, and I would like to read the contents of the first of those files that exists. They're config files, so the order must be deterministic, which rules out .race(). The version I have below maps over each file in order, tries to load it, and, if it loads successfully, calls resolve.
Problems
Here are a couple of issues with this implementation:
Calling resolve(...) doesn't actually exit the loop, so the program opens every file in the list, even when it doesn't need to.
The rejection condition (the final .then() that is required to reject when we don't receive any files) feels like a hack. However, without it, the promise is never rejected.
The resolution code seems suspiciously like a promise anti-pattern.
Are there any better ways to structure this? I could probably do it with a single Promise.filter call, but I don't want to query every file if I don't need to.
Thanks
Code
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));
var _ = require('lodash');
new Promise((resolve, reject) => {
    // Resolve with the first of the files below that exists
    return Promise.mapSeries(
        ['./file_that_doesntexist.json', '../file_that_might_exist.json', './file_that_doesnt_exist_either.json', '../file_that_exists.json']
        , (filename) => fs.readFileAsync(filename, 'utf-8')
            .then(file => {
                resolve([filename, file]);
                return true;
            })
            .catch(_.stubFalse)
    )
    .then(files => { // this is required to reject when we don't receive any files
        if (!files.some(x => x))
            reject('did not receive any files');
    });
})
.then(function([filename, configFile]) {
    // do something with filename and configFile
})
.catch(function(err) {
    console.error(err)
})

This can be achieved by recursion but also by building a catch chain using Array#reduce():
var paths = ['./file_that_doesntexist.json', '../file_that_might_exist.json', './file_that_doesnt_exist_either.json', '../file_that_exists.json'];

// Resolve with the first of the files below that exists
paths.reduce(function(promise, path) {
    return promise.catch(function(error) {
        return fs.readFileAsync(path, 'utf-8').then(file => [path, file]);
    });
}, Promise.reject())
.then(function([filename, configFile]) {
    // do something with filename and configFile
})
.catch(function(err) {
    console.error('did not receive any files', err);
});
The catch chain ensures that every time fs.readFileAsync(path, 'utf-8') fails, the next path is tried.
The first successful fs.readFileAsync(path, 'utf-8') will drop through to .then(function([filename, configFile]) {...}).
Total failure will drop through to .catch(function(err) {...}).
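For comparison, here's a minimal async/await sketch of the same sequential fallback, assuming the same bluebird-promisified fs as above (readFirstExisting is just an illustrative name):

// Try each path in order; return the first one that reads successfully.
async function readFirstExisting(paths) {
    for (const path of paths) {
        try {
            return [path, await fs.readFileAsync(path, 'utf-8')];
        } catch (err) {
            // this file doesn't exist (or isn't readable); try the next one
        }
    }
    throw new Error('did not receive any files');
}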

If you want sequential iteration, just use a recursive approach:
var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));

function readFirstOf(filenames) {
    if (!filenames.length)
        return Promise.reject(new Error('did not receive any files'));
    return fs.readFileAsync(filenames[0], 'utf-8')
        .then(file =>
            [filenames[0], file]
        , err =>
            readFirstOf(filenames.slice(1))
        );
}

readFirstOf(['./file_that_doesntexist.json', '../file_that_might_exist.json', './file_that_doesnt_exist_either.json', '../file_that_exists.json'])
    .then(function([filename, configFile]) {
        // do something with filename and configFile
    })
    .catch(function(err) {
        console.error(err)
    })
If you want to try to read them all in parallel and then select the first successful one in the list, you can use Promise.map + .reflect() and then just filter the results (e.g. via _.find).
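A rough sketch of that parallel variant, assuming bluebird's Promise.map and .reflect() (each inspection object exposes isFulfilled() and value()), with paths being the array of candidate file names:

Promise.map(paths, (path) => fs.readFileAsync(path, 'utf-8').reflect())
    .then((inspections) => {
        // pick the first fulfilled read, in list order
        const i = inspections.findIndex((insp) => insp.isFulfilled());
        if (i < 0)
            throw new Error('did not receive any files');
        return [paths[i], inspections[i].value()];
    })
    .then(([filename, configFile]) => {
        // do something with filename and configFile
    })
    .catch((err) => console.error(err));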

There is a hackish approach to solve this problem neatly. You may invert the promises, like so:
var invert = pr => pr.then(v => Promise.reject(v), x => Promise.resolve(x));
which in fact comes in handy when used with Promise.all() to get the first resolving promise while ignoring the rejected ones. When inverted, all rejected (now resolved) promises go unnoticed, while the first resolving (now rejecting) one gets caught at the .catch() stage of Promise.all(). Cool..!
Watch this:
var invert = pr => pr.then(v => Promise.reject(v), x => Promise.resolve(x)),
    promises = [Promise.reject("No such file"),
                Promise.reject("No such file either"),
                Promise.resolve("This is the first existing files content"),
                Promise.reject("Yet another missing file"),
                Promise.resolve("Another file content here..!")];

Promise.all(promises.map(pr => invert(pr)))
       .catch(v => console.log(`First successfully resolving promise is: ${v}`));
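For what it's worth, this inversion trick is essentially what the built-in Promise.any() does nowadays: it fulfills with the first fulfilled promise and rejects (with an AggregateError) only if all of them reject:

Promise.any(promises)
       .then(v => console.log(`First successfully resolving promise is: ${v}`))
       .catch(err => console.error("all of them rejected", err));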

Related

Node.js return a promise in recursive loop

I am having trouble getting a loop to return nested promises in the correct order.
I am trying to recursively loop through a directory with subdirectories to get files and copy them to a new directory. This functionality is happening inside a Promise chain since other processes need to complete in order after the files are copied.
The following code works but when I console.log out a string after the .then() it is logging before the promise has resolved.
The code is:
let newDirectory = /// helper function that returns promise of mkdirSync(exampleDir)

newDirectory.then(function(result){
    getAllFiles('/TestDir').then((result) => {
        console.log(result);
    });
    console.log('this should fire after the "result" from above');
    //// rest of promises in promise chain
});
The recursive function I'm calling, "getAllFiles", goes through "TestDir" and its subfolders and copies files to "ExampleDir":
const getAllFiles = function(dirPath, arrayOfFiles) {
    return new Promise((resolve, reject) => {
        var promises = [];
        files = fs.readdirSync(dirPath)
        arrayOfFiles = arrayOfFiles || []
        files.forEach(function(file) {
            if (fs.statSync(dirPath + "/" + file).isDirectory()) {
                arrayOfFiles = resolve(getAllFiles(dirPath + "/" + file, arrayOfFiles));
            } else {
                promises.push(helper.copyFile(path.join(dirPath, "/", file), `/TestDir/${file}`))
            }
        })
        Promise.all(promises).then(function(result){
            resolve(result);
            console.log('done with Promises');
        }).catch((error) => {
            reject(error);
        });
    });
}
The helper that copies the files returns a promise after the file has been copied
exports.copyFile = function(file, destDir){
    return new Promise(function(resolve, reject){
        fs.copyFile(file, destDir, (err) => {
            if (err) reject(err);
            else resolve('Successfully copied');
        });
    });
}
This actually seems to work, but I am concerned it won't work with a large set of data, since the
console.log('this should fire after the "result" from above');
fires before the other logs. The console looks like:
this should fire after the "result" from above
done with Promises /// however many files are copied
[ 'Successfully copied' ] //// with a length of however many files are copied
done with Promises //// this is fired once
Is this a log to be expected, or should all of the promises resolve and be logged before the "'this should fire after the "result" from above'" line is logged?
Is this a log to be expected, or should all of the promises resolve and be logged before the "'this should fire after the "result" from above'" line is logged?
Yes, it is to be expected.
You are writing your code as if it were synchronous, which is not how Promises work.
What is actually happening in the code snippet below is that after the newDirectory Promise resolves, both getAllFiles and console.log('this should fire after the "result" from above'); are executed immediately. That is, the console.log will not wait for getAllFiles to resolve before executing.
let newDirectory = /// helper function that returns promise of mkdirSync(exampleDir)

newDirectory.then(function(result){
    getAllFiles('/TestDir').then((result) => {
        console.log(result);
    });
    console.log('this should fire after the "result" from above');
    //// rest of promises in promise chain
});
So if you wanted to change the order of the console.log to ensure that it is executed after the getAllFiles Promise resolves, you could rewrite as follows.
newDirectory.then(function(result){
    getAllFiles('/TestDir').then((result) => {
        console.log(result);
        // or you could add it here
    }).then(() => {
        console.log('this should fire after the "result" from above');
    });
});
});
You should also notice that I am saying when the Promise resolves, not when the function has finished executing. There is a very important distinction.
If we take your example above yet again and say we wanted to perform some other action after all the files had been copied:
newDirectory.then(function(result){
    getAllFiles('/TestDir').then((result) => {
        ...
    });
}).then(() => {
    console.log('doing some other task');
});
In the above example, the newDirectory Promise would resolve, then getAllFiles would be invoked, and before getAllFiles has finished executing, the final console.log would be logged. This is an important principle of Promises: if you wish them to behave sequentially, you need to chain them, i.e. you need to return the promise through all the chained then functions. So to fix the above problem, we need to return the promise that is resolved from the getAllFiles function, as follows:
newDirectory.then(function(result){
    return getAllFiles('/TestDir').then((result) => {
        ...
    });
}).then(() => {
    console.log('doing some other task');
});
If you want to console log after a Promise, you'll have to do it in a .then() like
Promise.all(promises).then(function(result) {
    resolve(result);
    console.log('done with Promises');
})
.then(() => console.log('this should fire after the "result" from above'))
.catch((error) => reject(error));
This is because Promises are non-blocking and anything after it won't wait for it to finish before executing.
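As a side note, async/await makes this ordering explicit. A sketch of the same flow, assuming newDirectory and getAllFiles behave as in the question:

newDirectory.then(async function(result) {
    const files = await getAllFiles('/TestDir');
    console.log(files);
    // only runs once getAllFiles has resolved
    console.log('this should fire after the "result" from above');
});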
fs/promises and fs.Dirent
Here's an efficient, non-blocking ls program using Node's fast fs.Dirent objects and the fs/promises module. This approach lets you skip wasteful fs.exists or fs.stat calls on every path.
Using async and await, we can avoid having to think much about how to specifically wire up the Promises -
// main.js
import { readdir } from "fs/promises"
import { join } from "path"

async function* ls (path = ".")
{ yield path
  for (const dirent of await readdir(path, { withFileTypes: true }))
    if (dirent.isDirectory())
      yield* ls(join(path, dirent.name))
    else
      yield join(path, dirent.name)
}

async function* empty () {}

async function toArray (iter = empty())
{ let r = []
  for await (const x of iter)
    r.push(x)
  return r
}

toArray(ls(".")).then(console.log, console.error)
Let's get some sample files so we can see ls working -
$ yarn add immutable # (just some example package)
$ node main.js
[
'.',
'main.js',
'node_modules',
'node_modules/.yarn-integrity',
'node_modules/immutable',
'node_modules/immutable/LICENSE',
'node_modules/immutable/README.md',
'node_modules/immutable/contrib',
'node_modules/immutable/contrib/cursor',
'node_modules/immutable/contrib/cursor/README.md',
'node_modules/immutable/contrib/cursor/__tests__',
'node_modules/immutable/contrib/cursor/__tests__/Cursor.ts.skip',
'node_modules/immutable/contrib/cursor/index.d.ts',
'node_modules/immutable/contrib/cursor/index.js',
'node_modules/immutable/dist',
'node_modules/immutable/dist/immutable-nonambient.d.ts',
'node_modules/immutable/dist/immutable.d.ts',
'node_modules/immutable/dist/immutable.es.js',
'node_modules/immutable/dist/immutable.js',
'node_modules/immutable/dist/immutable.js.flow',
'node_modules/immutable/dist/immutable.min.js',
'node_modules/immutable/package.json',
'package.json',
'yarn.lock'
]
See this related Q&A for a dir program that recursively lists directories, a search program for finding files, and more.
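Since ls is an async generator, you can also consume it lazily with for await ... of, inside an async function or at the top level of an ES module, without collecting everything into an array first:

// print each path as soon as it is yielded
async function main ()
{ for await (const path of ls("."))
    console.log(path)
}

main()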

How to execute promises in order?

I can't make my code run in order. I need the connection test to come first, and then the functions must also resolve in order, so they can build a text string that will be sent in a tweet with an NPM package. (This is not my real code; it is a summarized example.)
I've tried many things and my brain is on fire
// Test DB connection
db.authenticate()
    .then(() => {
        const server = http.createServer(app)
        server.listen(config.port, () => {
            console.log(`http://localhost:${config.port}`)
        })
        reload(app)
    })
    .catch(err => {
        console.log(`Error: ${err}`)
    })

// Functions
resumen.man = (numRoom) => {
    const registries = Registries.findOne({})
        .then((registries) => {
            return registries.name + ' is good.'
        })
}

resumen.man1 = (numRoom) => {
    const registries = Registries.findOne({})
        .then((registries) => {
            return registries.name + ' is bad.'
        })
}

resumen.man2 = (numRoom) => {
    const registries = Registries.findOne({})
        .then((registries) => {
            return registries.name + ' is big.'
        })
}

// Execute resumen.man(1) first and save text in $varStringMultiLine ?
// Execute resumen.man1(1) later and save text in the same $varStringMultiLine ?
// Execute resumen.man2(1) last and save text in the same $varStringMultiLine ?
sendTweet($varStringMultiLine)
Thanx.
As commented by @Barmar and @some, you could chain the promises with .then or use async / await. I would recommend the latter, since .then-chaining gets unwieldy fast.
This is a really good explanation for async / await: https://javascript.info/async-await
Basically, you can use
await db.authenticate();
to halt execution so the next line doesn't run before the promise is resolved. However, so as not to freeze the whole program, this itself needs to be done inside an async function, which in turn returns a promise.
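For illustration, here is a hedged sketch of the whole flow with async/await. Note that each resumen.* function must return its promise (the versions above never return anything, so they always produce undefined), and sendTweet is assumed to return a promise as well:

async function main(numRoom) {
    // connection test comes first
    await db.authenticate();
    // then build the string strictly in order
    let varStringMultiLine = '';
    varStringMultiLine += await resumen.man(numRoom) + '\n';
    varStringMultiLine += await resumen.man1(numRoom) + '\n';
    varStringMultiLine += await resumen.man2(numRoom);
    // finally send the tweet
    await sendTweet(varStringMultiLine);
}

main(1).catch(err => console.log(`Error: ${err}`));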

Wait all promises in a map function

I want to wait for all my pictures to be read inside a map function.
I tried this
let buffer = [];
// Folder of the dataset.
const rootFolder = './dataset'

console.log("Entering in folder dataset");
fs.readdirSync(rootFolder);

// For each folder
const files = fs.readdirSync(rootFolder).map(dirName => {
    if (fs.lstatSync(path.join(rootFolder, dirName)).isDirectory()) {
        console.log(`Entering in folder ${path.join(rootFolder, dirName)}`);
        // For each file
        fs.readdirSync(path.join(rootFolder, dirName)).map(picture => {
            if (fs.lstatSync(path.join(rootFolder, dirName, picture)).isFile()) {
                if (picture.startsWith("norm")) {
                    return fileToTensor(path.join(rootFolder, dirName, picture)).then((img) => {
                        buffer.push(img);
                    }).catch((error) => { console.log(error) });
                }
            }
        });
    }
});
Promise.all(files);
console.log(buffer);

async function fileToTensor(path) {
    return await sharp(path)
        .removeAlpha()
        .raw()
        .toBuffer({ resolveWithObject: true });
}
But my buffer is still empty...
I know promises exist, but I don't know how I can include them in map(map()).
Thank you :)
I would refactor the above code to this:
let files = [];

// Loop over each dir.
fs.readdirSync(rootFolder).forEach(dirName => {
    // If it's a directory, proceed.
    if (fs.lstatSync(path.join(rootFolder, dirName)).isDirectory()) {
        console.log(`Entering in folder ${path.join(rootFolder, dirName)}`);
        fs.readdirSync(path.join(rootFolder, dirName)).forEach(picture => {
            if (fs.lstatSync(path.join(rootFolder, dirName, picture)).isFile()) {
                // If lstatSync says it's a file and it starts with "norm"
                if (picture.startsWith("norm")) {
                    // Push a new promise to the array.
                    files.push(new Promise((resolve, reject) => {
                        fileToTensor(path.join(rootFolder, dirName, picture)).then((img) => {
                            buffer.push(img);
                            resolve();
                        }).catch((error) => { console.log(error); reject(error); });
                    }));
                }
            }
        });
    }
});

// Resolve all promises.
Promise.all(files).then(() => {
    // Then do whatever you need to do.
    console.log(buffer);
}).catch((errors) => {
    console.log('one or more errors occurred', errors);
});
Basically, here is what I did:
Removed .map, since it's not necessary in this context. Also, in your case, not all code paths returned a result, so not every callback actually produced one.
Pushed each needed item to the files array, which is a Promise[].
Called Promise.all on the files array. Each resolved promise pushes its result into the buffer array. I would've handled it a different way, but still, this is the fastest I could think of.
Registered a callback on Promise.all, so that buffer will be defined.
As a side note, there are a lot of third-party libraries that help you avoid nested loops and promises while walking the file system. I've just posted this to give you something that could actually work from the existing code, although a full refactor would be wise here, and a preliminary analysis of the available node libraries would also help make the code easier to read and to maintain.
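An alternative sketch worth considering: if you let Promise.all collect the values instead of pushing into a shared buffer, the extra new Promise wrapper disappears entirely (same fileToTensor and rootFolder as in the question):

const tensorPromises = [];
fs.readdirSync(rootFolder).forEach(dirName => {
    const dirPath = path.join(rootFolder, dirName);
    if (!fs.lstatSync(dirPath).isDirectory()) return;
    fs.readdirSync(dirPath).forEach(picture => {
        const picPath = path.join(dirPath, picture);
        if (fs.lstatSync(picPath).isFile() && picture.startsWith("norm")) {
            // fileToTensor already returns a promise; collect it as-is
            tensorPromises.push(fileToTensor(picPath));
        }
    });
});

// Promise.all resolves with the results in the same order as the promises.
Promise.all(tensorPromises)
    .then(buffer => console.log(buffer))
    .catch(error => console.log('one or more errors occurred', error));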
First of all, a few pieces of advice:
DON'T use arrow functions for anything you cannot put on a single line (they aren't intended for that, and it wrecks readability).
Check that each callback you pass to .map() actually returns something (the first one doesn't: it seems you missed a return before the inner fs.readdirSync(...)).
Better yet, try to name all functions (except arrow functions in the cases where they're a good choice). That way, not only could I name them to identify them better in the previous point, but stack traces would also be much more readable and useful (traceable).
That being said, you are reading directories (and subdirectories) synchronously only to finally return promises (I understand that fileToTensor() is expected to return a promise). It may not have a major impact on the overall execution time, since I suppose the actual file processing is much more expensive, BUT this is a bad pattern, because you are blocking the event loop during the tree scan (so, if your code runs in a server that needs to serve other requests, you are dragging performance down a bit...).
Finally, as others have already said, there are libraries, such as glob, that ease this task; see the sketch below.
On the other hand, if you want to do it by yourself (as a learning exercise), I implemented my own library for the same task before knowing about glob, which could serve as a simpler example.
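For example, a sketch with the glob package (v7-style callback API; the pattern assumes the "norm" pictures sit one directory level below rootFolder, as in the question):

const glob = require('glob');

glob(rootFolder + '/*/norm*', (err, paths) => {
    if (err) return console.log(err);
    Promise.all(paths.map(p => fileToTensor(p)))
        .then(buffer => console.log(buffer))
        .catch(error => console.log(error));
});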
Hey, I've updated your code a bit; please go through it once. It might be helpful :)
const Util = require('util');
const Fs = require('fs');

let fsReadDir = Util.promisify(Fs.readdir);
let fsStat = Util.promisify(Fs.stat);
let picturePromises = [];

let directories = await fsReadDir(rootFolder);
for (let dirIndex = 0; dirIndex < directories.length; dirIndex++) {
    let dirName = directories[dirIndex];
    let stat = await fsStat(path.join(rootFolder, dirName));
    if (stat.isDirectory()) {
        let pictures = await fsReadDir(path.join(rootFolder, dirName));
        for (let picIndex = 0; picIndex < pictures.length; picIndex++) {
            let picture = pictures[picIndex];
            let stat = await fsStat(path.join(rootFolder, dirName, picture));
            if (stat.isFile()) {
                if (picture.startsWith("norm")) {
                    let pTensor = fileToTensor(path.join(rootFolder, dirName, picture)).then((img) => {
                        buffer.push(img);
                    }).catch((error) => { console.log(error) });
                    picturePromises.push(pTensor);
                }
            }
        }
    }
}
await Promise.all(picturePromises);
console.log(buffer);

async function fileToTensor(path) {
    return await sharp(path)
        .removeAlpha()
        .raw()
        .toBuffer({ resolveWithObject: true });
}

Merged gulp tasks never fire `end` event

I've got a gulp task that loops through a folder looking for sub folders and outputs a JavaScript file based upon the contents of each folder. Below is a more visual example.
src
  assets
    scripts
      critical
        loadCSS.init.js
      legacy
        flexibility.init.js
        picturefill.init.js
      modern
        connectivity.js
        layzr.init.js
        menu_list.js
        scroll-hint.init.js
        slideout.init.js
        swiper.init.js
      service-worker
        service-worker.js
becomes:
dev
  assets
    scripts
      critical.js
      legacy.js
      modern.js
      service-worker.js
This is achieved by reading the contents of the src/assets/scripts directory, then running a loop against each folder (critical, legacy, modern, service-worker) and sending the contents of each folder to a Gulp tasks which get merged together with merge-stream.
All this works great, except that once the tasks are merged back together, I want to trigger a notification if the compilation succeeded. If I try to pipe anything to the merged streams, it doesn't work. It just returns the merged streams, and never continues on.
If I un-promisify my PROCESS_SCRIPTS function and don't use merge-stream (i.e. only processing one manually specified folder), it works fine, so I'm at a loss as to what's going on.
Here's my full task:
module.exports = {
    scripts(gulp, plugins, ran_tasks, on_error) {
        // task-specific plugins
        const ESLINT = require("gulp-eslint");
        const WEBPACK = require("webpack-stream");

        // process scripts
        const PROCESS_SCRIPTS = (js_directory, destination_file_name = "modern.js", compare_file_name = "modern.js", source = [global.settings.paths.src + "/assets/scripts/*.js"]) => {
            return new Promise((resolve, reject) => {
                const WEBPACK_CONFIG = {
                    mode: "development",
                };

                // update webpack config for the current target destination and file name
                WEBPACK_CONFIG.mode = plugins.argv.dist ? "production" : WEBPACK_CONFIG.mode;
                WEBPACK_CONFIG.output = {
                    filename: destination_file_name
                };

                const TASK = gulp.src(source)
                    // prevent breaking on error
                    .pipe(plugins.plumber({errorHandler: on_error}))
                    // check if source is newer than destination
                    .pipe(plugins.newer(js_directory + "/" + compare_file_name))
                    // lint all scripts
                    .pipe(ESLINT())
                    // print lint errors
                    .pipe(ESLINT.format())
                    // run webpack
                    .pipe(WEBPACK(WEBPACK_CONFIG))
                    // generate a hash and add it to the file name
                    .pipe(plugins.hash({template: "<%= name %>.<%= hash %><%= ext %>"}))
                    // output scripts to compiled directory
                    .pipe(gulp.dest(js_directory))
                    // generate a hash manifest
                    .pipe(plugins.hash.manifest(".hashmanifest-scripts", {
                        deleteOld: true,
                        sourceDir: js_directory
                    }))
                    // output hash manifest in root
                    .pipe(gulp.dest("."))
                    // reject after errors
                    .on("error", () => {
                        reject(TASK);
                    })
                    // return the task after completion
                    .on("end", () => {
                        resolve(TASK);
                    });
            });
        };

        // scripts task, lints, concatenates, & compresses JS
        return new Promise((resolve) => {
            // set JS directory
            const JS_DIRECTORY = plugins.argv.dist ? global.settings.paths.dist + "/assets/scripts" : global.settings.paths.dev + "/assets/scripts";

            // set the source directory
            const SOURCE_DIRECTORY = global.settings.paths.src + "/assets/scripts";

            // set up an empty merged stream
            const MERGED_STREAMS = plugins.merge();

            // get the script source folder list
            const SCRIPT_FOLDERS = plugins.fs.readdirSync(SOURCE_DIRECTORY);

            // get the script destination file list
            const SCRIPT_FILES = plugins.fs.existsSync(JS_DIRECTORY) ? plugins.fs.readdirSync(JS_DIRECTORY) : false;

            // process all the script folders
            const PROCESS_SCRIPT_FOLDERS = () => {
                return Promise.resolve().then(() => {
                    // shift to the next folder
                    const FOLDER_NAME = SCRIPT_FOLDERS.shift();

                    // find the existing destination script file name
                    const FILE_NAME = SCRIPT_FILES ? SCRIPT_FILES.find((name) => {
                        return name.match(new RegExp(FOLDER_NAME + ".[a-z0-9]{8}.js"));
                    }) : FOLDER_NAME + ".js";

                    // process all scripts, update the stream
                    return PROCESS_SCRIPTS(JS_DIRECTORY, FOLDER_NAME + ".js", FILE_NAME, SOURCE_DIRECTORY + "/" + FOLDER_NAME + "/**/*").then((processed) => {
                        MERGED_STREAMS.add(processed);
                    });
                }).then(() => SCRIPT_FOLDERS.length > 0 ? PROCESS_SCRIPT_FOLDERS() : resolve());
            };

            PROCESS_SCRIPT_FOLDERS().then(() => {
                // wrap up
                return MERGED_STREAMS
                    // prevent breaking on error
                    .pipe(plugins.plumber({
                        errorHandler: on_error,
                    }))
                    // notify that task is complete, if not part of default or watch
                    .pipe(plugins.gulpif(gulp.seq.indexOf("scripts") > gulp.seq.indexOf("default"), plugins.notify({
                        title: "Success!",
                        message: "Scripts task complete!",
                        onLast: true,
                    })))
                    // push task to ran_tasks array
                    .on("data", () => {
                        if (ran_tasks.indexOf("scripts") < 0) {
                            ran_tasks.push("scripts");
                        }
                    })
                    // resolve the promise on end
                    .on("end", () => {
                        resolve();
                    });
            });
        });
    }
};
Also visible on my GitHub: https://github.com/JacobDB/new-site/blob/master/gulp-tasks/scripts.js
EDIT: I've tried a few things, I'll detail them here.
console.log("hello world") never fires after MERGED_STREAMS.on("data"), MERGED_STREAMS.on("error"), or MERGED_STREAMS.on("end").
Moving const MERGED_STREAMS = plugins.merge(); to a module-level variable (i.e. just after const WEBPACK = require("webpack-stream")) does not change the outcome.
Doing #2 and then using MERGED_STREAMS.add(gulp.src(source) ...) instead of adding the stream after the promise completes does not change the outcome, except when leaving in .pipe(gulp.dest(".")), which is required to output a .hashmanifest, and which always marks the task as ran.
Disabling webpack, hash, or eslint, in any combination does not make a difference.
Changing PROCESS_SCRIPTS from returning a promise to returning the stream, then processing each folder as an individual variable, then merging them manually does appear to correctly trigger the task as ran, but webpack can only be run once, so it only outputs one file – critical.hash.js. Note: I haven't tested this method in conjunction with disabling hash, which may be causing it to be marked as correctly ran if .hashmanifest is always being output.
Splitting the linting step and the webpack step into separate tasks kind of causes the task to be correctly marked as ran, but only if the lint task is not a promise, which results in unexpected end of stream errors in the console.
EDIT 2: Updated with a revised version of my task based on @Louis's advice.
There are many problems with the code above. One major issue that makes the code hard to follow and debug is that you use new Promise where you don't need it. Generally, if you have new Promise and the logic inside the promise executor will resolve or reject depending on the result of another promise, then you don't need to use new Promise.
Sometimes people have code like this:
function foo() {
    const a = getValueSynchronously();
    const b = getOtherValueSynchronously();
    return doSomethingAsynchronous(a, b).then(x => x.someMethod());
}
Suppose that doSomethingAsynchronous returns a promise. The problem with the foo function above is that if getValueSynchronously and getOtherValueSynchronously fail, then foo will raise an exception, but if doSomethingAsynchronous fails, then it will reject a promise. So code that uses foo has to handle synchronous exceptions and asynchronous rejections if it wants to handle all possible failures. Sometimes people feel they can fix the issue by causing all failures to be promise rejections:
function foo() {
    return new Promise((resolve, reject) => {
        const a = getValueSynchronously();
        const b = getOtherValueSynchronously();
        doSomethingAsynchronous(a, b).then(x => x.someMethod()).then(resolve, reject);
    });
}
In the code above, if getValueSynchronously or getOtherValueSynchronously fail, that's a promise rejection. But the problem with the code above is that it is easy to get it wrong. You can forget to call reject everywhere it is needed. (As a matter of fact, you do have this error in your code. You have nested promises whose rejections won't be propagated up. They are just lost, which means that if an error occurs your code will simply stop, without you knowing why.) Or you may be tempted to call resolve way down in a nested function, which makes the logic hard to follow.
You can just as well do:
function foo() {
    return Promise.resolve().then(() => {
        const a = getValueSynchronously();
        const b = getOtherValueSynchronously();
        return doSomethingAsynchronous(a, b);
    }).then(x => x.someMethod());
}
You can use Promise.resolve() to enter the promisified world (hmm... "the promised land?"). In the code above, you do not have to remember to call reject. If the code inside the callback to .then fails for any reason, you get a rejected promise.
I also noticed in a number of places you return a value from the executor function you pass to new Promise. Your code would behave exactly the same if you did not use return there. To illustrate, this code:
function foo() {
    return new Promise((resolve, reject) => {
        return doSomethingAsynchronous().then(resolve, reject);
    });
}
behaves exactly the same way as this code:
function foo() {
    return new Promise((resolve, reject) => {
        doSomethingAsynchronous().then(resolve, reject);
    });
}
The value returned from the executor is ignored. End of story. If you think the values you return from your executors are doing something, then that's incorrect.
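A quick demonstration of that last point:

// The executor's return value is ignored: p still resolves with 1.
const p = new Promise((resolve) => {
    resolve(1);
    return 42;
});
p.then(v => console.log(v)); // logs 1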

Node.js: array loop and function, not understanding how callbacks and promises work

I'm new to the concepts of callbacks and promises. I'm trying to read the contents of a directory; it correctly returns the addresses, but this code only prints console.log(value) and the "getting info..." console.log inside the getAccount function. It never prints the balance; the API gets closed and the process completes. I don't understand this, because the address is being passed and the first console.log prints inside the function, but nothing happens after that. When I remove fs.readdir and files.forEach and pass a single value to getAccount, it works perfectly fine. There are no errors; it's a runtime problem, and I'm guessing it's related to callbacks.
'use strict';
const RippleAPI = require('ripple-lib').RippleAPI;
const testFolder = '/home/ripple/.keystore';
const fs = require('fs');
const api = new RippleAPI({server: 'wss://s1.ripple.com'});

function getAccount(address) {
    var balance = 0.0;
    console.log("getting info for :" + address);
    return api.getAccountInfo(address).then(info => {
        var balance = info.xrpBalance;
        console.log("balance of " + address + " is " + balance);
        return balance;
    });
}
api.connect().then(() => {
    fs.readdir(testFolder, function(err, files) {
        // console.log("Total no of wallets : " + files.length);
        // for (var i = 3000, len = 3200; i < len; i++) {
        //     var ad = files[i];
        //     console.log(ad + "\t" + typeof(ad));
        //     if (!xwallets.includes(ad))
        files.forEach(function(value) {
            getAccount(value).then(balance => { console.log(balance); });
            console.log(value);
        });
        // console.log("running for index : " + i + " filedata :" + files[i]);
        // }
    });
    // console.log(balance);
}).then(() => {
    return api.disconnect();
}).then(() => {
    console.log("done and disconnected");
}).catch(console.error);
You really don't want to mix asynchronous operations that return a promise with those that take a direct callback. It's hard to write good robust code when mixing like that. So, since you are already using with promises in a number of operations, it's best to take the remaining async operations and "promisify" them so you can use them with promises. Then, you can take advantage of all the clean error propagation and chaining of promises.
Then, secondly, you have to make sure all your asynchronous operations in a loop are properly chained to the parent promise. To do that here, we will accumulate the promises from getAccount() in the loop into an array of promises. Then, after the loop, we can tell when they're all done using Promise.all() and then return that promise from readdir() so that they will all be properly chained to the parent promise.
This will then make sure everything inside the loop finishes before the parent resolves and before you call api.disconnect().
Here's what that would look like:
'use strict';
const RippleAPI = require('ripple-lib').RippleAPI;
const testFolder = '/home/ripple/.keystore';
const fs = require('fs');
const util = require('util');
const api = new RippleAPI({server: 'wss://s1.ripple.com'});

function getAccount(address) {
    console.log("getting info for :" + address);
    return api.getAccountInfo(address).then(info => {
        var balance = info.xrpBalance;
        console.log("balance of " + address + " is " + balance);
        return balance;
    });
}

// promisify readdir
const readdir = util.promisify(fs.readdir);

api.connect().then(() => {
    return readdir(testFolder).then(function(files) {
        const promises = [];
        files.forEach(function(file) {
            console.log(file);
            promises.push(getAccount(file).then(balance => {
                console.log(balance);
                // make balance be the resolved value for this promise
                return balance;
            }));
        });
        // make sure we are waiting for all these promises to be done
        // by chaining them into the parent promise
        return Promise.all(promises);
    });
}).then(balances => {
    // process balances array here
    console.log(balances);
    return api.disconnect();
}, err => {
    // disconnect, even in the error case, but preserve the error
    console.error(err);
    return api.disconnect().then(() => { throw err; });
}).then(() => {
    console.log("done and disconnected");
});
Summary of changes made:
Promisify fs.readdir() so we can use promises with it
Change how we call readdir() to use promises
Collect getAccount() promises inside of .forEach() into an array of promises
Add return Promise.all(promises) so that we wait for all those promises to be done and so that it is chained into the parent promise
Make balance be the resolved value of each promise in the .forEach() loop, even after logging its value
Make sure we're calling api.disconnect() even in the error cases
After logging error, preserve the error value as the reject reason
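For comparison, here is a hedged async/await sketch of the same flow (same getAccount and promisified readdir as above):

async function run() {
    await api.connect();
    try {
        const files = await readdir(testFolder);
        // start all the lookups, then wait for every balance
        const balances = await Promise.all(files.map(file => getAccount(file)));
        console.log(balances);
    } finally {
        // disconnect in both the success and error cases
        await api.disconnect();
        console.log("done and disconnected");
    }
}

run().catch(console.error);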
