Executing asynchronous calls in a synchronous manner - javascript

I've been trying to wrap my head around this issue for the last few hours but can't figure it out. I guess I still have to get used to the functional programming style ;)
I wrote a recursive function that traverses through a directory structure and does things to certain files. This functions uses the asynchronous IO methods. Now I want to perform some action when this whole traversing is done.
How would I make sure that this action is performed after all parse calls have been performed but still use the asynchronous IO functions?
var fs = require('fs'),
path = require('path');
// Recursively walks `dir` with the asynchronous fs API.
// NOTE(review): this is the question's code — it has no completion
// signal (no callback, promise, or counter), so a caller cannot know
// when the whole traversal has finished. That is exactly what the
// question asks how to fix.
function parse(dir) {
fs.readdir(dir, function (err, files) {
if (err) {
console.error(err);
} else {
// f = filename, p = path
// `each` builds the fs.stat callback for one directory entry,
// closing over that entry's name and full path.
var each = function (f, p) {
return function (err, stats) {
if (err) {
console.error(err);
} else {
if (stats.isDirectory()) {
// Recurse into subdirectories; nothing reports back when done.
parse(p);
} else if (stats.isFile()) {
// do some stuff
}
}
};
};
var i;
for (i = 0; i < files.length; i++) {
var f = files[i];
var p = path.join(dir, f);
fs.stat(p, each(f, p));
}
}
});
}
parse('.');
// do some stuff here when async parse completely finished

Look for Step module. It can chain asynchronous functions calls and pass results from one to another.

You could use the async module. Its auto function is awesome. Say you have function A(), function B() and function C(), where both function B() and function C() depend on function A() and use the value it returns. Using the async module's auto function you can make sure that function B and function C execute only after function A's execution is complete.
Ref : https://github.com/caolan/async
// Corrected async.auto usage. The original snippet did not parse (it
// had an extra trailing `});`) and its tasks took no callback, so they
// could never signal completion. Each task receives a callback (and,
// for dependent tasks in async v2+, a `results` object first) and must
// call callback(err, value) when done.
async.auto({
  A: function (callback) {
    // code here — finish with callback(null, resultOfA)
    callback(null);
  },
  B: ['A', function (results, callback) {
    // results.A holds the value produced by function A
    callback(null);
  }],
  C: ['A', function (results, callback) {
    // code here
    callback(null);
  }],
  D: ['B', 'C', function (results, callback) {
    // code here
    callback(null);
  }]
}, function (err, results) {
  // `results` is an object keyed by task name (A, B, C, D) containing
  // each task's result — note: an object, not an array.
  // If any task reports an error, `err` is set and tasks that have not
  // started yet will not be run.
});
In the above example, functionB and functionC will start once function A's execution has completed. Thus functionB and functionC run simultaneously with respect to each other.
functionB: ['A',functionB(){//code here }]
In the above line we receive the value returned by functionA through the 'A' dependency.
and functionD will be executed only when functionB and functionC execution will be completed .
If an error occurs in any function, execution of the remaining functions is terminated and the final function below is executed, where you can write your success and failure logic.
function (err, results) {}
On successful execution of all functions, "results" will contain the results of all the functions defined in async.auto.
function (err, results) {}

Take a look at modification of your original code which does what you want without async helper libs.
var fs = require('fs'),
path = require('path');
// Stand-in for the real per-file work: log the file's name, then
// report completion through the supplied callback.
function do_stuff(name, done) {
  console.log(name);
  done();
}
// Recursively walks `dir` and invokes cb exactly once: with the first
// error encountered, or with no arguments once every entry in the
// subtree has been processed.
//
// Fixes over the original answer:
//  * an EMPTY directory now fires cb — previously n started at 0, the
//    loop never ran, and the callback was never invoked;
//  * cb can no longer fire more than once when several entries fail;
//  * files are handed to do_stuff as `p` — the original passed `p+f`,
//    but p = path.join(dir, f) already contains the filename, so the
//    name was duplicated;
//  * entries that are neither a regular file nor a directory (FIFOs,
//    sockets, ...) are counted as handled instead of stalling forever.
function parse(dir, cb) {
  fs.readdir(dir, function (err, files) {
    if (err) {
      return cb(err);
    }
    var pending = files.length;
    var done = false;               // guards against double invocation
    // finish(err): report the first error immediately, otherwise
    // decrement the counter and complete when it reaches zero.
    var finish = function (err) {
      if (done) return;
      if (err) {
        done = true;
        return cb(err);
      }
      if (--pending === 0) {
        done = true;
        cb();
      }
    };
    if (pending === 0) {            // empty directory: nothing to wait for
      done = true;
      return cb();
    }
    files.forEach(function (f) {
      var p = path.join(dir, f);
      fs.stat(p, function (err, stats) {
        if (err) return finish(err);
        if (stats.isDirectory()) {
          parse(p, finish);         // subtree completion decrements us
        } else if (stats.isFile()) {
          do_stuff(p, finish);      // p is already the full path
        } else {
          finish();                 // other entry kinds: just count them
        }
      });
    });
  });
}
// Start the traversal at the current directory; the callback fires
// once every file and subdirectory has been processed.
parse('.', function () {
  // do some stuff here when async parse completely finished
  console.log('done!!!');
});

Something like this would work -- basic change to your code is the loop turned into a recursive call that consumes a list until it is done. That makes it possible to add an outer callback (where you can do some processing after the parsing is done).
var fs = require('fs'),
path = require('path');
// List the entries of `dir` and delegate them to handleFiles for
// sequential processing; a readdir failure is forwarded straight to cb.
function parse(dir, cb) {
  fs.readdir(dir, function (err, entries) {
    if (err) {
      cb(err);
      return;
    }
    handleFiles(dir, entries, cb);
  });
}
// Process directory entries strictly one at a time (serial traversal).
// Fixes over the original answer:
//  * no longer mutates the caller's `files` array (the original
//    consumed it with shift());
//  * entries that are neither a regular file nor a directory (FIFOs,
//    sockets, ...) no longer stall the chain silently — they are
//    skipped and processing continues;
//  * the falsy-name check (`if (file)`) is replaced by an index bound,
//    so an empty-string entry cannot end the walk early.
// cb is called with the first error, or with no arguments when every
// entry has been handled.
function handleFiles(dir, files, cb) {
  var step = function (i) {
    if (i >= files.length) {
      cb();                        // all entries processed
      return;
    }
    var p = path.join(dir, files[i]);
    fs.stat(p, function (err, stats) {
      if (err) {
        cb(err);
        return;
      }
      if (stats.isDirectory()) {
        // Descend first, then continue with the next sibling.
        parse(p, function (err) {
          if (err) cb(err);
          else step(i + 1);
        });
      } else {
        if (stats.isFile()) console.log(p);
        step(i + 1);               // non-file/non-dir entries are skipped
      }
    });
  };
  step(0);
}
// Walk the current directory; the callback runs once the entire tree
// has been handled, or as soon as the first error occurs.
parse('.', function (err) {
  if (err) {
    console.error(err);
  } else {
    console.log('do something else');
  }
});

See following solution, it uses deferred module:
var fs = require('fs')
, join = require('path').join
, promisify = require('deferred').promisify
, readdir = promisify(fs.readdir), stat = promisify(fs.stat);
// Promise-returning traversal built on the `deferred` package: resolves
// once every stat/recursion below `dir` has settled.
function parse (dir) {
  return readdir(dir).map(function (f) {
    var p = join(dir, f);
    return stat(p)(function (stats) {
      if (stats.isDirectory()) {
        // BUG FIX: recurse into the entry's own path. The original
        // called parse(dir) here, which re-scanned the same directory
        // and recursed forever.
        return parse(p);
      } else {
        // do some stuff
      }
    });
  });
};
// Runs only after the whole tree has been walked: the promise returned
// by parse resolves once all of its nested promises have resolved.
parse('.').done(function (result) {
// do some stuff here when async parse completely finished
});

I've been using synchronize.js with great success. There's even a pending pull request (which works quite well) to support async functions which have multiple parameters. Far better and easier to use than node-sync imho. Added bonus that it has easy-to-understand and thorough documentation, whereas node-sync does not.
Supports two different methods for wiring up the sync: a deferred/await model (like what @Mariusz Nowak was suggesting) and a slimmer, though not-as-granular, function-target approach. The docs are pretty straightforward for each.

Recommend to use node-seq
https://github.com/substack/node-seq
installed by npm.
I'm using it, and I love it..

Look for node-sync, a simple library that allows you to call any asynchronous function in a synchronous way. The main benefit is that it uses javascript-native design - the Function.prototype.sync function - instead of heavy APIs which you'll need to learn. Also, an asynchronous function which was called synchronously through node-sync doesn't block the whole process - it blocks only the current thread!

Related

Node.js my function showing undefined dictionary value despite assigning it

I'm new to javascript so i'm sorry if this has been asked before.
I have this function which i use to read json files.
const jsonfile = require('jsonfile');
// Reads ./data/<fileName>.json and hands the parsed contents to
// `callback`. (The `fileName: string` annotation is TypeScript syntax
// inside an otherwise plain JavaScript question.)
function readData(fileName: string, callback) {
jsonfile.readFile(".\\data\\" + fileName + ".json", 'utf8', function (err, data) {
// BUG(review): on failure the error is only logged and the callback is
// still invoked as callback(null, data) with data === undefined. The
// error should be propagated instead: callback(err).
if (err) console.error(err)
callback(null, data);
})
}
Then I call it using this:
// Read JSON files
var fileList = ["guides"];
var files = {};
for (let i = 0; i < fileList.length; i++) {
// readData is asynchronous: this loop (and any code after it) finishes
// before any of these callbacks fire — which is why reading `files`
// immediately afterwards yields undefined.
readData(fileList[i], function (err, result) {
if (err) throw err;
// BUG(review): keyed by the numeric index i, but read back later as
// files['guides'] — this should be files[fileList[i]] = result;
files[i] = result;
})
}
but when I:
console.log(files['guides']);
it returns undefined. Can anyone help me fix this? Thank you very much.
Callback function of the jsonfile.readFile function is called asynchronously and your console.log statement is executed synchronously.
Asynchronous code is executed after the synchronous execution of your javascript code has ended; as a result, your console.log statement is logging files['guides'] before guides property is added in the files object. This is why you get undefined.
Following code example shows this problem in action:
// Demonstration: the setTimeout callback runs asynchronously, so the
// console.log below executes first and prints 1, not 2.
let a = 1;
setTimeout(() => {
a = 2;
}, 100);
console.log(a);
Above code snippet outputs 1 because the callback function of setTimeout is invoked asynchronously, i.e. after the console.log(a) statement has been executed and at the time of execution of console.log statement, value of a is 1.
Solution
To make sure that you log files["guides"] after all the files have been read and files object has been populated, you could return a promise from the readData function.
Following code shows how you could create a promise wrapper around readData function:
// Promise wrapper around jsonfile.readFile: resolves with the parsed
// contents of ./data/<fileName>.json, rejects with the read error.
// (BUG FIX: the original snippet was missing the `)` that closes the
// Promise constructor, so it did not parse at all.)
function readData(fileName) {
  return new Promise((resolve, reject) => {
    jsonfile.readFile(".\\data\\" + fileName + ".json", 'utf8', function (err, data) {
      if (err) reject(err);
      else resolve(data);
    });
  });
}
Now you can call the above function as:
// Start every read in parallel and wait for all of them; the first
// rejection short-circuits straight into the catch handler.
Promise.all(fileList.map((name) => readData(name)))
  .then((dataFromAllFiles) => {
    console.log(dataFromAllFiles);
  })
  .catch((error) => console.log(error));
Useful Links:
Asynchronous JavaScript
Graceful asynchronous programming with Promises
Using Promises

How to read a file (Synchronously?) in a while loop in javascript (Discord.js)?

This is possibly a duplicate question, but I can't seem to figure it out.
Essentially, I am making a code that runs in a while loop where I need to then read a file within that while loop, and it seems that the fileRead in the code just stops the while loop from getting to the end. I'm pretty newbie to javascript still, so it's probably an easy fix.
What I've tried so far is changing my jsonReader function to sync (readFileSync) and that just stopped the code before it did hardly anything. (that is now what the current code is as) I've also tried making a second function for specifically reading the files I need Synchronously and that didn't seem to work either. I'm not even sure if this has to do with synchronism
My Code:
module.exports = {
name: 'xprun',
description: "runs the xp handler",
execute(message) {
const name = message.author.username;
const server = message.guild.id;
const fs = require('fs');
// BUG(review): fs.readFileSync does NOT take a callback — it RETURNS
// the file contents. The arrow function passed below is silently
// ignored, so `cb` is never invoked anywhere in this function.
function jsonReader(filePath, cb) {
fs.readFileSync(filePath, 'utf-8', (err, fileData) => {
if (err) {
return cb && cb(err);
}
try {
const object = JSON.parse(fileData);
return cb && cb(null, object);
} catch (err) {
return cb && cb(err);
}
});
}
console.log('Starting the loop...'); //
var run = true;
var i = 0;
// BUG(review): since the callbacks above never run, `run` is never set
// to false — this synchronous loop spins forever, and by blocking the
// event loop it would also prevent any真 asynchronous I/O from ever
// completing. (See the accepted fix below, which makes jsonReader
// properly synchronous.)
while (run == true) {
i++
console.log('Running the loop...'); // Loop stops and re-runs here
// read #1
jsonReader(`./userData/rank/${server}_server/1.json`, (err, data) => {
if (err) {
console.log(err);
} else {
console.log(data.id); //
}
// read #2
jsonReader(`./userData/xp/${server}_server/${name}_xp.json`, (err, data) => {
if (err) {
console.log(err);
} else {
console.log(data.rank); //
}
console.log('The loop was completed'); //
if (i >= 5) {
run = false;
}
}); // end read #1
}); // end read #2
} // end while
console.log('The loop was ended'); //
} // end execute
} // end
As #CherryDT mentioned in the comments, readFileSync does not accept a callback. Because readFileSync is synchronous, it does not need a callback; readFile accepts a callback only because it is asynchronous, because it needs to wait until it has read the file before calling the code in the callback. The synchronous method does not need to wait in this way, so you can move the callback code out of the callback like so:
// Synchronous JSON loader: read filePath, parse it, and report the
// outcome through the optional error-first callback.
function jsonReader(filePath, cb) {
  try {
    const raw = fs.readFileSync(filePath, 'utf-8');
    const parsed = JSON.parse(raw);
    return cb && cb(null, parsed);
  } catch (err) {
    return cb && cb(err);
  }
}
The reason your loop was infinitely running was because you set run to false only within your callback cb method, but because readFileSync does not accept a callback, your callback was never being run. With the above code, your callback should now be running, and the loop should no longer run infinitely (unless there are other issues within your callbacks).

Callback problems

I am new into javascript, and currently I'm trying to learning callback to my script. This script should return reduced words in array of objects
var fs = require('fs')
var dict = ['corpus.txt','corpus1.txt','corpus2.txt'];
mapping(dict, function(error,data){
if(error) throw error
console.log(data)
})
// BUG(review): callback(null, txtObj) at the bottom runs synchronously,
// before any fs.readFile completion has fired — hence the empty array
// the question reports.
function mapping(list, callback){
var txtObj = []
list.forEach(function (val) {
readFile(val, function(error,data){
txtObj.push(data)
})
})
// BUG(review): on success readFile never invokes its `cb` parameter —
// it returns mapred(data) into the void, so the push above never runs
// even after the reads complete.
function readFile(src, cb){
fs.readFile(src,'utf8', function (error,data) {
if (error) return callback(error,null)
return mapred(data)
})
}
return callback(null,txtObj)
}
But it returns empty array. Any help would be appreciated.
Thanks!
`fs.readFile`
is an asynchronous function, before it's done and result callback is invoked, you are returning the empty txtObj array.
how to fix it ?
call return callback(null,txtObj) after fs.readFile is finished running.
Also, as you are running an asynchronous function on an array of items one by one, it might still not work the way you want; you might want to use modules like async in nodejs.
Here comes an asynchronous version using the async module. Synchronous file operations are strongly discouraged :)
var fs = require('fs')
// Driver: collect the reduced contents of every corpus file, then print
// them once mapping signals completion through its callback.
var dict = ['corpus.txt','corpus1.txt','corpus2.txt'];
mapping(dict, function(error,data){
if(error) throw error
console.log(data)
})
// Read every file in `list` in parallel via async.each and deliver the
// accumulated results through `callback` once — immediately on the
// first error, otherwise after all reads finish.
function mapping(list, callback) {
  var txtObj = [],
      async = require('async');
  async.each(list, readFile, function (err) {
    callback(err, txtObj);
  });
  // Per-file worker: push the reduced contents, then signal async.each.
  function readFile(src, cb) {
    fs.readFile(src, 'utf8', function (error, data) {
      if (error) {
        cb(error);
        return;
      }
      txtObj.push(mapred(data));
      cb(null);
    });
  }
}
EDIT: You can do this without async, but it is a little bit dirty, isn't it? Also, it's OK if you remove the self-invoking function inside the forEach; I included it so that you can access val even after the callback is done.
var fs = require('fs')
// Driver: same as above, but for the helper-free variant of mapping.
var dict = ['corpus.txt','corpus1.txt','corpus2.txt'];
mapping(dict, function(error,data){
if(error) throw error
console.log(data)
})
// Callback-counting variant of mapping — no helper library needed.
// Fixes over the original answer:
//  * the inner IIFE was missing its `{ ... }` body braces, so the
//    snippet did not parse (the wrapper is also unnecessary: forEach
//    already gives every iteration its own `val` binding);
//  * each file was pushed twice — once as mapred(data) inside readFile
//    and again as `undefined` in the outer callback;
//  * `callback` could fire several times when multiple reads failed —
//    it is now invoked exactly once;
//  * an empty input list now completes immediately.
function mapping(list, callback) {
  var txtObj = [],
      remaining = list.length,
      finished = false;
  if (remaining === 0) {
    callback(null, txtObj);       // nothing to read
    return;
  }
  list.forEach(function (val) {
    readFile(val, function (error, data) {
      if (finished) return;       // an earlier error already reported
      if (error) {
        finished = true;
        callback(error, txtObj);
        return;
      }
      txtObj.push(data);
      if (--remaining === 0) {
        finished = true;
        callback(null, txtObj);
      }
    });
  });
  // Per-file worker: deliver mapred(contents) through the callback.
  function readFile(src, cb) {
    fs.readFile(src, 'utf8', function (error, data) {
      if (error) cb(error);
      else cb(null, mapred(data));
    });
  }
}
The reason you are getting an empty array result is that you are performing the callback before the readFile function has a chance to populate the array. You are performing multiple asynchronous actions but not letting them to complete before continuing.
If there was only one async action, you would call callback() in the callback function of readFile, but as you need to perform multiple async actions before calling callback(), you should consider using fs.readFileSync().
Sometimes sync cannot be avoided.
// Synchronous fallback when async orchestration is not worth it.
// BUG FIX: the original kept looping after callback(err) inside the
// catch and then also called callback(null, txtObj) at the end, so a
// failed read made the callback fire twice. We now stop at the first
// failure and invoke the callback exactly once.
function mapping(list, callback)
{
  var txtObj = []
  for (var i = 0; i < list.length; i++) {
    try { txtObj.push(mapred(fs.readFileSync(list[i], 'utf8'))) }
    catch (err) { return callback(err) }
  }
  callback(null, txtObj)
}

Call to modules in specific order in node

I've used the following code to call two modules, but the invoke action is called before the validate file (I saw in debug). What I should do to verify that validateFile is called before appHandler.invokeAction? Should I use a promise?
// BUG(review): this CALLS validateFile immediately and stores its
// return value (undefined) in `validator`; and because validateFile is
// asynchronous, invokeAction below runs before validation has finished
// — which is exactly the ordering problem the question describes.
var validator = require('../uti/valid').validateFile();
var appHandler = require('../contr/Handler');
appHandler.invokeAction(req, res);
Update
this is the validate file code
var called = false;
var glob = require('glob'),
fs = require('fs');
module.exports = {
validateFile: function () {
glob("myfolder/*.json", function (err, files) {
var stack = [];
files.forEach(function (file) {
fs.readFile(file, 'utf8', function (err, data) { // Read each file
if (err) {
console.log("cannot read the file", err);
}
var obj = JSON.parse(data);
obj.action.forEach(function (crud) {
for (var k in crud) {
if (_inArray(crud[k].path, stack)) {
console.log("duplicate founded!" + crud[k].path);
break;
}
stack.push(crud[k].path);
}
})
});
});
});
}
};
Because glob and fs.readFile are async functions and appHandler.invokeAction is invoked during i/o from disk.
Promise is a good solution to solve this but an old school callback could do the job.
// Works once validateFile returns a promise (see the promise-based
// validator below); invokeAction then runs strictly after validation.
validator.validateFile().then(function() {
appHandler.invokeAction(req, res);
});
and for validate
var Promise = require("bluebird"), // not required if you are using iojs or running node with `--harmony`
glob = require('mz/glob'),
fs = require('mz/fs');
module.exports = {
validateFile: function () {
return glob("myfolder/*.json").then(function(files) {
return Promise.all(files.map(function(file) {
// will return an array of promises, if any of them
// is rejected, validateFile promise will be rejected
return fs.readFile(file).then(function (content) {
// throw new Error(''); if content is not valid
});
}));
})
}
};
If you want working with promise mz could help :)
As the fs.fileRead is async, you should put the code that you want to execute after validateFile to its callback.
The origin could be:
// Import the validator module itself. BUG FIX: the original line ended
// with `.validateFile()`, which ran the validation immediately at
// require time and stored its return value (undefined) in `validator`,
// so the call below crashed.
var validator = require('../uti/valid');
var appHandler = require('../contr/Handler');
// create a new function that when execute, will call appHandler.invokeAction with req and res given to its arguments.
validator.validateFile(appHandler.invokeAction.bind(null, req, res));
The validator part should be:
var called = false;
var glob = require('glob'),
fs = require('fs');
module.exports = {
validateFile: function (callback) {
glob("myfolder/*.json", function (err, files) {
var stack = [];
// Use it to decide whether all files processed or not.
var filesToLoad = files.length;
files.forEach(function (file) {
fs.readFile(file, 'utf8', function (err, data) { // Read each file
--filesToLoad;
if (err) {
console.log("cannot read the file", err);
// If the invoke action doesn't depend on the result. You may want to call it here too.
}
var obj = JSON.parse(data);
obj.action.forEach(function (crud) {
for (var k in crud) {
if (_inArray(crud[k].path, stack)) {
console.log("duplicate founded!" + crud[k].path);
break;
}
stack.push(crud[k].path);
}
})
// Only called the callback after all files processed.
if (filesToLoad === 0) {
callback();
}
});
});
});
}
};
Edit: Thanks to Bergi for the reminder that `files` is an array and the callback must be invoked only after all files are processed — hence the additional counter variable tracking how many files remain.

Elegant way to call a callback function after other callbacks returned

I have the following code in my nodejs application:
// Question's sketch (the `...` placeholder is pseudo-code): one async
// operation per file, with no way yet to know when all have finished —
// `callback` is never invoked.
function someFileOperation(callback) {
var files = ...;
files.forEach(function (file) {
doSomethingAsync(file, function (err, result) {
});
});
}
What is an elegant way to call the callback of someFileOperation() in case all doSomethingAsync() called their callback function and call it only once, when an error in doSomethingAsync() occurred?
For now I came up with something like this:
// Asker's own working attempt (`...` is pseudo-code): a countdown of
// outstanding operations, firing the outer callback exactly once.
function someFileOperation(callback) {
var files = ...;
// Number of async operations still outstanding.
var noFiles = files.length;
files.forEach(function (file) {
doSomethingAsync(file, function (err, result) {
if (err) {
callback(err);
// Swap in a no-op so later completions (or further errors) cannot
// invoke the caller's callback a second time.
callback = function () {}; // I don't want it to be called again
} else if (--noFiles <= 0) {
callback(null, true);
}
});
});
}
But I think this is a lot of overhead for such a simple task. So I am looking for a much more elegant way or maybe a little framework for these kind of problems
Use async.map or async.foreach see here: https://github.com/caolan/async#map and https://github.com/caolan/async#forEach
the async.map method takes an array of items and performs the same async call for each item in your array in parallel. If no errors are encountered, it will call a callback with a new array of results.

Categories