Meteor WrapAsync working asynchronously - javascript

I am aware of this stackoverflow answer and I have been using it to help me.
However something weird happens when I apply the code to my situation.
It seems that the wrapAsync function, called execSync in my code, runs and outputs what it is supposed to; however, it still finishes last, just as it did before I had the wrapAsync in place.
The code
// Meteor server method from the question: shells out to the `variety` schema
// analyzer via the mongo shell and tries to return the parsed category list.
// NOTE(review): Meteor.wrapAsync is misused here — an explicit callback is
// still passed to the wrapped function, so the call does not block and
// `returned_data` is produced before the callback fires (which is why
// "returning outer" logs before "waited for this" in the output below).
Meteor.methods({
'distinctSpecs'({}){
console.log("called");
var json_categories_clean = [];
// Wrap child_process.exec, binding the child_process module as `this`.
var execSync =
Meteor.wrapAsync(require("child_process").exec,
require("child_process"))
var returned_data =
execSync(
"mongo products --eval \"var collection='laptops', outputFormat='json'\" variety.js",
{ cwd:"/home/jonathan/Documents/variety-master"},
// Passing this callback defeats wrapAsync: with a callback supplied, the
// wrapped call runs asynchronously and the statements below run first.
(err, stdout, stderr) => {
if (err) {
console.error(err);
console.log(stdout);
console.log(stderr);
return;
}
console.log("waited for this");
// Extract the JSON array portion of variety's stdout (from the second
// '[' through the last ']') and parse it.
var json_categories =
JSON.parse(stdout.substring(
stdout.indexOf('[', stdout.indexOf('[')+1),
stdout.lastIndexOf(']')+1));
for (var x=0; x < json_categories.length; x++) {
json_categories_clean.push(json_categories[x]["_id"])
}
console.log("returning inner");
// NOTE(review): this return value goes to the exec callback, not to the
// Meteor method's caller.
return json_categories_clean;
});
console.log("returning outer");
return returned_data;
}
});
**The output**
called
returning outer
waited for this
returning inner

After formatting your code it's pretty clear that you are invoking wrapAsync incorrectly:
Meteor.wrapAsync(require("child_process").exec,
require("child_process"))
you probably want:
// Answer's sketch: wrap exec so the wrapped call yields its result via
// return/throw instead of taking a callback.
// NOTE(review): `a` and `b` are placeholders for the command string and the
// options object — this is illustrative pseudo-code, not runnable as written.
const exec = Npm.require("child_process").exec;
Meteor.wrapAsync(a, b, function(callback) {
exec(a, b, function(err, stdout, stderr) {
// wrapAsync requires an (err, result) callback shape; stderr is dropped.
callback(err, stdout);
});
});
The last parameter to the function you wrap needs to be a function that takes an error and a result as parameters (and nothing else).
Also, once you have the async function, you don't provide a callback anymore. You are waiting for the return instead.

Related

How to read a file (Synchronously?) in a while loop in javascript (Discord.js)?

This is possibly a duplicate question, but I can't seem to figure it out.
Essentially, I am making a code that runs in a while loop where I need to then read a file within that while loop, and it seems that the fileRead in the code just stops the while loop from getting to the end. I'm pretty newbie to javascript still, so it's probably an easy fix.
What I've tried so far is changing my jsonReader function to sync (readFileSync) and that just stopped the code before it did hardly anything. (that is now what the current code is as) I've also tried making a second function for specifically reading the files I need Synchronously and that didn't seem to work either. I'm not even sure if this has to do with synchronism
My Code:
// Discord.js command module from the question: loops and reads per-user JSON
// rank/xp files.
// NOTE(review): fs.readFileSync does NOT accept a callback — the arrow
// function passed to it below is silently ignored, so none of the jsonReader
// callbacks ever run and the while loop can never set `run = false`.
module.exports = {
name: 'xprun',
description: "runs the xp handler",
execute(message) {
const name = message.author.username;
const server = message.guild.id;
const fs = require('fs');
// Intended to read a JSON file and hand the parsed object to cb.
function jsonReader(filePath, cb) {
fs.readFileSync(filePath, 'utf-8', (err, fileData) => {
if (err) {
return cb && cb(err);
}
try {
const object = JSON.parse(fileData);
return cb && cb(null, object);
} catch (err) {
return cb && cb(err);
}
});
}
console.log('Starting the loop...'); //
var run = true;
var i = 0;
while (run == true) {
i++
console.log('Running the loop...'); // Loop stops and re-runs here
// read #1
jsonReader(`./userData/rank/${server}_server/1.json`, (err, data) => {
if (err) {
console.log(err);
} else {
console.log(data.id); //
}
// read #2
jsonReader(`./userData/xp/${server}_server/${name}_xp.json`, (err, data) => {
if (err) {
console.log(err);
} else {
console.log(data.rank); //
}
console.log('The loop was completed'); //
// NOTE(review): the only place `run` is cleared is inside a callback
// that readFileSync never invokes, hence the infinite loop.
if (i >= 5) {
run = false;
}
}); // end read #1
}); // end read #2
} // end while
console.log('The loop was ended'); //
} // end execute
} // end
As #CherryDT mentioned in the comments, readFileSync does not accept a callback. Because readFileSync is synchronous, it does not need a callback; readFile accepts a callback only because it is asynchronous, because it needs to wait until it has read the file before calling the code in the callback. The synchronous method does not need to wait in this way, so you can move the callback code out of the callback like so:
// Read a JSON file synchronously and deliver the outcome through the optional
// Node-style callback `cb`: cb(null, parsedObject) on success, or cb(error)
// if the read or the JSON parse fails. Returns undefined when no cb is given.
function jsonReader(filePath, cb) {
try {
const parsed = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
return cb && cb(null, parsed);
} catch (err) {
return cb && cb(err);
}
}
The reason your loop was infinitely running was because you set run to false only within your callback cb method, but because readFileSync does not accept a callback, your callback was never being run. With the above code, your callback should now be running, and the loop should no longer run infinitely (unless there are other issues within your callbacks).

Callback is already called using async?

Once fs.readFile loops through all files, gets the matching data, and pushes it to results, I want to call callback(results) so I can send the response to the client. I am getting an error with the below code: Error: Callback is already called. How can I resolve this issue using the async approach?
app.js
searchFileService.readFile(searchTxt, logFiles, function(lines, err) {
console.log('Logs', lines);
if (err)
return res.send();
res.json(lines);
})
readFile.js
// Question's buggy version: searches each log file for searchStr and tries to
// report matches.
// NOTE(review): the inner `callback` parameter of the async.map iterator
// shadows the outer one, and it is invoked once PER MATCHING LINE instead of
// once per file — that is what triggers "Error: Callback is already called".
// Its arguments are also in (result, err) order, while async expects
// (err, result). Finally, the `})` after the iterator closes async.map early,
// so the final function below it is dead code.
var searchStr;
var results = [];
function readFile(str,logFiles,callback){
searchStr = str;
async.map(logFiles, function(logfile, callback) {
fs.readFile('logs/dit/' + logfile.filename, 'utf8', function(err, data) {
if (err) {
// NOTE(review): not returned, so execution falls through to
// data.split below and throws on undefined `data`.
callback(null,err);
}
var lines = data.split('\n'); // get the lines
lines.forEach(function(line) { // for each line in lines
if (line.indexOf(searchStr) != -1) { // if the line contain the searchSt
results.push(line);
// Called once per match — the source of the duplicate-callback error.
callback(results,null);
}
});
});
}), function(error, result) {
results.map(result,function (result){
console.log(result);
});
};
}
Note: this answer is an extension to trincot's answer. So if this answers your question, kindly mark his as the answer!
You said: Once fs.readFile loop through all files and get the matching data and push it to results then I don't think .map is the appropriate function for this, to be honest. This is for transforming every element from an array into another which is not what you are doing.
A better method would be .eachSeries to read one file at a time.
It's a good idea to rename your second callback to something else e.g. done to not confuse yourself (and others). Calling done() is for telling that the operation on the file is completed as in we are "done" reading the file.
Lastly, be careful with your typos. The first one may have prevented you from getting into the last part.
// Answer's corrected version: read the log files one at a time with
// async.eachSeries, accumulate matching lines into `results`, and invoke
// `callback(results)` exactly once when every file has been processed.
// NOTE(review): `results` and `searchStr` live at module level, so repeated
// readFile calls accumulate matches across invocations.
var results = [];
var searchStr;
function readFile(str, logFiles, callback) {
searchStr = str;
// loop through each file
async.eachSeries(logFiles, function (logfile, done) {
// read file
fs.readFile('logs/dit/' + logfile.filename, 'utf8', function (err, data) {
if (err) {
return done(err);
}
var lines = data.split('\n'); // get the lines
lines.forEach(function(line) { // for each line in lines
if (line.indexOf(searchStr) != -1) { // if the line contain the searchSt
results.push(line);
}
});
// when you are done reading the file
done();
});
// wrong: }), function (err) {
}, function (err) {
if (err) {
console.log('error', err);
}
console.log('all done: ', results);
// wrong: results.map(result, function (result){
results.map(function (result){
console.log(result);
});
// send back results
callback(results);
});
}

Async functions in Node.js module

I'm kind of new to JavaScript/Node.js so bear with me. Also my english may not be that good.
I'm trying to write a Node.js module module.js with functions that do some long-running work. Kind of like this:
// Question's module.js: two long-running shell commands whose output is
// delivered through the `callback` parameter (err and stderr are dropped).
// NOTE(review): these functions return undefined by design — the only way to
// receive the result is to pass a callback.
var exec = require('child_process').exec;
module.exports.myFunction1 = function(callback) {
// this function runs for like 3 seconds
exec('long running shell command' ,function(err,stdout,stderr) {
callback(stdout);
})
};
module.exports.myFunction2 = function(callback) {
// this function runs for like 1 second
exec('long running shell command' ,function(err,stdout,stderr) {
callback(stdout);
})
};
Now, I also have a main.js where I invoke these functions:
// Question's main.js: both variables end up undefined because the functions
// are asynchronous and no callbacks are supplied — exec's callback will later
// call `callback(stdout)` with callback === undefined and throw.
var module = require('./module.js');
var output1 = module.myFunction1();
var output2 = module.myFunction2();
My first problem is that my functions return undefined. I understand that this is because the exec function runs asynchronously and therefore the function returns before exec has finished. This is basically what I want but how can I tell my function that it should only callback when exec has finished?
I also don't want the functions to block node.js when I invoke them in my main.js. So basically, my output of the above code would be...
Output myFunction2: Output2
Output myFunction1: Output1
...because myFunction2() finishes faster than myFunction1().
I tried many, many solutions I found online but nothing seems to work properly.
Thank you very much in advance!
--- EDIT ---
Ok, I'm having a somewhat correct solution. Right now my code looks like this:
module.js
// Question's edited module.js using Q promises plus shelljs' global exec.
// NOTE(review): shelljs exec (without a callback) is SYNCHRONOUS, so each
// deferred resolves before the function even returns — this is why Result 1
// always logs before Result 2 regardless of which command finishes faster.
// NOTE(review): `ok` is not defined anywhere visible; as written this would
// throw a ReferenceError — presumably a stand-in for a success check.
var Q = require('q');
require('shelljs/global')
module.exports = {
myFunction1: function () {
var deferred = Q.defer();
var result = exec('long running command', {silent:true}).output.toString();
if (ok) {
deferred.resolve(result);
}
else {
deferred.reject('Error');
}
return deferred.promise;
},
myFunction2: function () {
var deferred = Q.defer();
var result = exec('long running command', {silent:true}).output.toString();
if (ok) {
deferred.resolve(result);
}
else {
deferred.reject('Error');
}
return deferred.promise;
}
}
My main.js looks like this now:
// Question's edited main.js: consumes the Q promises; .fail is Q's rejection
// handler (the Q-specific spelling of .catch).
var module = require('./module');
module.myFunction1()
.then(function (result) {
console.log('Result 1: ' + result);
})
.fail(function (error) {
console.log(error)
});
module.myFunction2()
.then(function (result) {
console.log('Result 2: ' + result);
})
.fail(function (error) {
console.log(error)
});
And I get the expected output:
Result 1: Output that myFunction1() generated
Result 2: Output that myFunction2() generated
My problem now is that myFunction1() always logs before myFunction2(), even if myFunction2() finished first. Did I misunderstand something about Promises? Shouldn't myFunction2() return immediately after it finishes?
Your functions take callbacks. Those parameters are functions which are called on completion, which makes it easy to do
// Answer: consume the result inside the callback rather than expecting a
// return value.
// NOTE(review): the call site mixes `module.exports.myFunction1` (definition)
// with `module.myFunction1` (usage) — the latter only works from another file
// that did `var module = require('./module.js')`.
var exec = require('child_process').exec;
module.exports.myFunction1 = function(callback) {
// this function runs for like 3 seconds
exec('long running shell command' ,function(err,stdout,stderr) {
callback(stdout);
})
};
module.myFunction1(function(stdout){
console.log("Output myFunction1: " + stdout);
});
Using a callback, in your case, is the simplest solution but you should be aware that there are other patterns to deal with asynchronous executions. Here's a good overview. For example, a popular solution, especially interesting when you have to chain asychronous continuations, is to use promises, which allow
// Answer: the same module function exposed as a native Promise, enabling
// chaining of the two calls.
// NOTE(review): a Promise resolves with a single value — the extra `stderr`
// argument passed to resolve() is silently discarded.
var exec = require('child_process').exec;
module.exports.myFunction1 = function() {
return new Promise(function(resolve, fail){
// this function runs for like 3 seconds
exec('long running shell command' ,function(err,stdout,stderr) {
if (err) fail(err);
else resolve(stdout, stderr);
});
});
};
// Sequential chain: myFunction2 starts only after myFunction1 settles.
module.myFunction1()
.then(function(stdout){
console.log("Output myFunction1: " + stdout);
})
.then(module.myFunction2)
.then(function(stdout){
console.log("Output myFunction2: " + stdout);
})
At first, I would suggest you to handle errors (err, stderr) in your modules. As you can see, your functions takes one argument which is callback. If your asynchronous function runs, the callback function is called. So you can use it like this:
// Answer: nesting the second call in the first callback forces sequential
// execution — myFunction2 starts only after myFunction1's output arrives.
module.myFunction1(function(stdout) {
console.log("Output myFunction1: " + stdout);
module.myFunction2(function(stdout2) {
console.log("Output myFunction2: " + stdout2);
});
});
exec function also takes callback function (with first argument error err - error first callbacks). There are other options how to handle flow control of asynchronous code (e.g. library async). You can also learn about Promises which is today's alternative to error first callbacks.
Callback functions don't return values directly... what you need is to setup what will happen when value will get read. Something like this:
my_function(what_will_happen_when_my_function_will_get_finished());
exactly:
// Consume the value inside the callback instead of expecting a return value.
// (Fixed: the apostrophe in "I've" must be escaped inside a single-quoted string.)
myFunction1(function(data){console.log('hello! I\'ve finished, and received: '+data);});

Nodejs Running Functions in Series

So right now I'm trying to use Nodejs to access files in order to write them to a server and process them.
I've split it into the following steps:
Traverse directories to generate an array of all of the file paths
Put the raw text data from each of file paths in another array
Process the raw data
The first two steps are working fine, using these functions:
// Recursively collect all file paths under `dir`, calling done(err, results)
// when the whole subtree has been visited. A `pending` counter tracks how many
// directory entries are still being stat'ed/recursed at this level.
var walk = function(dir, done) {
var results = [];
fs.readdir(dir, function(err, list) {
if (err) return done(err);
var pending = list.length;
// Empty directory: nothing to wait for, report immediately.
if (!pending) return done(null, results);
list.forEach(function(file) {
file = path.resolve(dir, file);
fs.stat(file, function(err, stat) {
if (stat && stat.isDirectory()) {
walk(file, function(err, res) {
// NOTE(review): the recursive err is ignored here.
results = results.concat(res);
if (!--pending) done(null, results);
});
} else {
// NOTE(review): a failed fs.stat (stat undefined) also lands here,
// so an unreadable entry is still pushed as if it were a file.
results.push(file);
if (!--pending) done(null, results);
}
});
});
});
};
// Question's buggy version: kicks off an async fs.readFile per path, then
// calls `callback` immediately.
// NOTE(review): two defects make the callback fire before any data is ready:
// (1) `if (callback) callback()` runs right after the loop SCHEDULES the
// reads, not after they complete; (2) `var k` is shared across iterations, so
// by the time each readFile callback runs, `k` refers to the LAST key and all
// results are written to the same rawdata slot.
function processfilepaths(callback) {
// reading each file
for (var k in filepaths) { if (arrayHasOwnIndex(filepaths, k)) {
fs.readFile(filepaths[k], function (err, data) {
if (err) throw err;
rawdata[k] = data.toString().split(/ *[\t\r\n\v\f]+/g);
for (var j in rawdata[k]) { if (arrayHasOwnIndex(rawdata[k], j)) {
rawdata[k][j] = rawdata[k][j].split(/: *|: +/);
}}
});
}}
if (callback) callback();
}
// Question's invocation: processrawdata is handed in as the callback, which
// is why it always finishes before the reads scheduled above.
walk(rootdirectory, function(err, results) {
if (err) throw err;
filepaths = results.slice();
processfilepaths(processrawdata);
});
This never causes an error. Everything seems to run perfectly except that processrawdata() is always finished before processfilepaths(). What am I doing wrong?
You are having a problem with callback invocation and asynchronously calling functions. IMO I'll recommend that you use a library such as after-all to execute a callback once all your functions get executed.
Here's a example, here the function done will be called once all the functions wrapped with next are called.
// Answer's example with the after-all package: each callback wrapped with
// next() is tracked, and `done` fires only after every wrapped callback has
// been invoked — regardless of completion order (here step two's timer is
// longer, so "Step one." logs first).
var afterAll = require('after-all');
// Call `done` once all the functions
// wrapped with next() get called
next = afterAll(done);
// first execute this
setTimeout(next(function() {
console.log('Step two.');
}), 500);
// then this
setTimeout(next(function() {
console.log('Step one.');
}), 100);
function done() {
console.log("Yay we're done!");
}
I think for your problem, you can use async module for Node.js:
async.series([
function(){ ... },
function(){ ... }
]);
To answer you actual question, I need to explain how Node.js works:
Say, when you call an async operation (say mysql db query), Node.js sends "execute this query" to MySQL. Since this query will take some time (may be some milliseconds), Node.js performs the query using the MySQL async library - getting back to the event loop and doing something else there while waiting for MySQL to get back to us. Like handling that HTTP request.
So, In your case both functions are independent and executes almost in parallel.
For more information:
Async.js for use with Node.js
// The question's function quoted again by this answer for reference; the
// analysis below explains that each fs.readFile merely REGISTERS a callback,
// so the trailing `if (callback) callback()` runs before any read completes.
function processfilepaths(callback) {
// reading each file
for (var k in filepaths) { if (arrayHasOwnIndex(filepaths, k)) {
fs.readFile(filepaths[k], function (err, data) {
if (err) throw err;
rawdata[k] = data.toString().split(/ *[\t\r\n\v\f]+/g);
for (var j in rawdata[k]) { if (arrayHasOwnIndex(rawdata[k], j)) {
rawdata[k][j] = rawdata[k][j].split(/: *|: +/);
}}
});
}}
if (callback) callback();
}
Realize that you have:
for
readfile (err, callback) {... }
if ...
Node will call each readfile asynchronously, which only sets up the event and callback, then when it is done calling each readfile, it will do the if, before the callback probably even has a chance to get invoked.
You need to use either Promises, or a promise module like async to serialize it. What you would then do looks like:
async.XXXX(filepaths, processRawData,
function (err, ...) {
// function for when all are done
if (callback) callback();
}
);
Where XXXX is one of the functions from the library like series, parallel, each, etc... The only thing you also need to know is in your process raw data, async gives you a callback to call when done. Unless you really need sequential access (I don't think you do) use parallel so that you can queue up as many i/o events as possible, it should execute faster, maybe only marginally, but it'll better leverage the hardware.

Executing asynchronous calls in a synchronous manner

I've been trying to wrap my head around this issue for the last hours but can't figure it out. I guess I still have to get used to the functional programming style ;)
I wrote a recursive function that traverses through a directory structure and does things to certain files. This functions uses the asynchronous IO methods. Now I want to perform some action when this whole traversing is done.
How would I make sure that this action is performed after all parse calls have been performed but still use the asynchronous IO functions?
// Question's recursive directory traversal: stats every entry of `dir`,
// recursing into subdirectories and processing files.
// NOTE(review): there is no completion signal — nothing counts outstanding
// fs.stat/parse calls — so there is no way to know when the traversal is done,
// which is exactly what the question asks about.
var fs = require('fs'),
path = require('path');
function parse(dir) {
fs.readdir(dir, function (err, files) {
if (err) {
console.error(err);
} else {
// f = filename, p = path
// `each` builds the fs.stat callback for one entry, closing over f and p
// so the loop variables are captured per-iteration.
var each = function (f, p) {
return function (err, stats) {
if (err) {
console.error(err);
} else {
if (stats.isDirectory()) {
parse(p);
} else if (stats.isFile()) {
// do some stuff
}
}
};
};
var i;
for (i = 0; i < files.length; i++) {
var f = files[i];
var p = path.join(dir, f);
fs.stat(p, each(f, p));
}
}
});
}
parse('.');
// do some stuff here when async parse completely finished
Look for Step module. It can chain asynchronous functions calls and pass results from one to another.
You could use async module . Its auto function is awesome . If you have function A() and function B() and function C() . Both function B() and C() depend of function A() that is using value return from function A() . using async module function you could make sure that function B and C will execute only when function A execution is completed .
Ref : https://github.com/caolan/async
async.auto({
A: functionA(){//code here },
B: ['A',functionB(){//code here }],
C: ['A',functionC(){//code here }],
D: [ 'B','C',functionD(){//code here }]
}, function (err, results) {
//results is an array that contains the results of all the function defined and executed by async module
// if there is an error executing any of the function defined in the async then error will be sent to err and as soon as err will be produced execution of other function will be terminated
}
})
});
In above example functionB and functionC will execute together once function A execution will be completed . Thus functionB and functionC will be executed simultaneously
functionB: ['A',functionB(){//code here }]
In above line we are passing value return by functionA using 'A'
and functionD will be executed only when functionB and functionC execution will be completed .
if there will be error in any function , then execution of other function will be terminated and below function will be executed .where you could write your logic of success and failure .
function (err, results) {}
On succesfull execution of all function "results" will contain the result of all the functions defined in async.auto
function (err, results) {}
Take a look at modification of your original code which does what you want without async helper libs.
// Answer: same traversal, but completion is tracked with a per-directory
// countdown closure (cb_n), so the top-level callback fires exactly once when
// every file and subdirectory has been fully processed.
var fs = require('fs'),
path = require('path');
// Placeholder for the per-file work; takes a completion callback so async
// work inside it is also awaited by the counting scheme.
function do_stuff(name, cb)
{
console.log(name);
cb();
}
function parse(dir, cb) {
fs.readdir(dir, function (err, files) {
if (err) {
cb(err);
} else {
// cb_n creates a closure
// which counts its invocations and calls callback on nth
var n = files.length;
var cb_n = function(callback)
{
return function() {
--n || callback();
}
}
// inside 'each' we have exactly n cb_n(cb) calls
// when all files and dirs on current level are proccessed,
// parent cb is called
// f = filename, p = path
var each = function (f, p) {
return function (err, stats) {
if (err) {
cb(err);
} else {
if (stats.isDirectory()) {
parse(p, cb_n(cb));
} else if (stats.isFile()) {
do_stuff(p+f, cb_n(cb));
// if do_stuff does not have async
// calls inself it might be easier
// to replace line above with
// do_stuff(p+f); cb_n(cb)();
}
}
};
};
var i;
for (i = 0; i < files.length; i++) {
var f = files[i];
var p = path.join(dir, f);
fs.stat(p, each(f, p));
}
}
});
}
parse('.', function()
{
// do some stuff here when async parse completely finished
console.log('done!!!');
});
Something like this would work -- basic change to your code is the loop turned into a recursive call that consumes a list until it is done. That makes it possible to add an outer callback (where you can do some processing after the parsing is done).
// Answer: sequential variant — parse lists a directory and delegates to
// handleFiles, which consumes the listing one entry at a time and invokes
// cb(err) on failure or cb() when the whole subtree is finished.
var fs = require('fs'),
path = require('path');
function parse(dir, cb) {
fs.readdir(dir, function (err, files) {
if (err)
cb(err);
else
handleFiles(dir, files, cb);
});
}
// Consume `files` recursively: shift one entry, stat it, recurse into
// directories (then continue with the remaining entries), log plain files,
// and call cb() once the list is exhausted. NOTE: mutates the `files` array
// via shift().
function handleFiles(dir, files, cb){
var file = files.shift();
if (file){
var p = path.join(dir, file);
fs.stat(p, function(err, stats){
if (err)
cb(err);
else{
if (stats.isDirectory())
// Finish the subdirectory before moving to the next sibling.
parse(p, function(err){
if (err)
cb(err);
else
handleFiles(dir, files, cb);
});
else if (stats.isFile()){
console.log(p);
handleFiles(dir, files, cb);
}
}
})
} else {
// List exhausted: this level (and everything below it) is done.
cb();
}
}
// Entry point: the callback runs exactly once, after the entire traversal
// has completed (or on the first error).
parse('.', function(err){
if (err)
console.error(err);
else {
console.log('do something else');
}
});
See following solution, it uses deferred module:
// Promise-based traversal using the `deferred` package: readdir and stat are
// promisified, and each directory entry is mapped to either a recursive parse
// (for subdirectories) or the per-file work, yielding one promise for the
// whole tree.
var fs = require('fs')
, join = require('path').join
, promisify = require('deferred').promisify
, readdir = promisify(fs.readdir), stat = promisify(fs.stat);
// Returns a promise resolving once every entry under `dir` has been handled.
function parse (dir) {
return readdir(dir).map(function (f) {
return stat(join(dir, f))(function (stats) {
if (stats.isDirectory()) {
// BUG FIX: recurse into the subdirectory itself. The original called
// parse(dir), re-parsing the SAME directory and recursing forever.
return parse(join(dir, f));
} else {
// do some stuff
}
});
});
};
parse('.').done(function (result) {
// do some stuff here when async parse completely finished
});
I've been using syncrhonize.js with great success. There's even a pending pull request (which works quite well) to support async functions which have multiple parameters. Far better and easier to use than node-sync imho. Added bonus that it has easy-to-understand and thorough documentation, whereas node-sync does not.
Supports two different methods for wiring up the sync, a defered/await model (like what #Mariusz Nowak was suggesting) and a slimmer though not-as-granular function-target approach. The docs are pretty straightforward for each.
Recommend to use node-seq
https://github.com/substack/node-seq
installed by npm.
I'm using it, and I love it..
Look for node-sync, a simple library that allows you to call any asynchronous function in synchronous way. The main benefit is that it uses javascript-native design - Function.prototype.sync function, instead of heavy APIs which you'll need to learn. Also, asynchronous function which was called synchronously through node-sync doesn't blocks the whole process - it blocks only current thread!

Categories