node poller not exiting properly - javascript

I have a function that polls a database every x seconds. I am using the Q library, so the function returns a promise. The function will ultimately be used in a long chain of .then()s.
The function does give me the results I expect, but it continues to run for 30-40 seconds after the results are returned. I cannot figure out why it would not exit right after I return.
var _ = require('lodash');
var pg = require('pg');
var Q = require('q');

var connString = 'postgres://somedb_info';
var query = "SELECT * FROM job where jobid='somejobid123123'";

exports.run_poller = function () {
    var deferred = Q.defer();

    function exec_query(callback) {
        pg.connect(connString, function(err, client, done) {
            if(err) {
                return deferred.reject(err);
            }
            client.query(query, function(err, result) {
                done();
                if(err) {
                    return deferred.reject(err);
                }
                callback(result.rows[0]);
            });
        });
    }

    function wait_for(res) {
        if(res.status == 'COMPLETE') {
            return deferred.resolve(res);
        } else {
            setTimeout(function() {
                exec_query(wait_for);
            }, 1000);
        }
    }

    exec_query(wait_for);
    return deferred.promise;
};
Just to test this I call the function from a main.js file like so:
var poller = require('./utils/poller').run_poller;
poller().then(console.log).catch(function(err) {console.log(err,'*');});
Why doesn't main.js exit right after the data is returned? Is there a better way to achieve this?

I see that pg maintains a connection pool. I would assume that it's either taking a while to time out some shared resources or it just takes a little while to shut down.
You may be able to just call process.exit(0) in your node app to force it to exit sooner if you know you're done with all your work.
Or, you may be able to find configuration settings that affect how the connection pool works.
On this doc page, there's an example like this that might help:
//disconnect client when all queries are finished
client.on('drain', client.end.bind(client));
You should read the doc for .end() to make sure you're using it correctly, as there are some cases where it says it should not be called (though they may not apply if you're done with all activity).
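For the specific example above, one hedged option (assuming the classic pg pooling API, where pg.end() disconnects the pooled clients) is to shut the pool down once the poller's promise settles, e.g. in main.js:
var pg = require('pg');
var poller = require('./utils/poller').run_poller;

poller()
    .then(console.log)
    .catch(function(err) { console.log(err, '*'); })
    .finally(function() {
        // assumption: pg.end() asks the pooled clients to disconnect,
        // so nothing keeps the event loop alive and the process exits promptly
        pg.end();
    });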

Related

The program executes commands not in the logical order they are put in [duplicate]

Suppose you need to do some operations that depend on some temp file. Since
we're talking about Node here, those operations are obviously asynchronous.
What is the idiomatic way to wait for all operations to finish in order to
know when the temp file can be deleted?
Here is some code showing what I want to do:
do_something(tmp_file_name, function(err) {});
do_something_other(tmp_file_name, function(err) {});
fs.unlink(tmp_file_name);
But if I write it this way, the third call can be executed before the first two
get a chance to use the file. I need some way to guarantee that the first two
calls already finished (invoked their callbacks) before moving on without nesting
the calls (and making them synchronous in practice).
I thought about using event emitters on the callbacks and registering a counter
as receiver. The counter would receive the finished events and count how many
operations were still pending. When the last one finished, it would delete the
file. But there is the risk of a race condition and I'm not sure this is
usually how this stuff is done.
How do Node people solve this kind of problem?
Update:
Now I would advise having a look at:
Promises
The Promise object is used for deferred and asynchronous computations.
A Promise represents an operation that hasn't completed yet, but is
expected in the future.
A popular promises library is bluebird. I would advise having a look at why promises.
You should use promises to turn this:
fs.readFile("file.json", function (err, val) {
if (err) {
console.error("unable to read file");
}
else {
try {
val = JSON.parse(val);
console.log(val.success);
}
catch (e) {
console.error("invalid json in file");
}
}
});
Into this:
fs.readFileAsync("file.json").then(JSON.parse).then(function (val) {
    console.log(val.success);
})
.catch(SyntaxError, function (e) {
    console.error("invalid json in file");
})
.catch(function (e) {
    console.error("unable to read file");
});
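Note that fs.readFileAsync is not part of the core fs module; with bluebird it is typically generated via promisifyAll (a minimal sketch, assuming bluebird is installed):
var Promise = require('bluebird');
// adds promise-returning *Async variants (readFileAsync, writeFileAsync, ...)
var fs = Promise.promisifyAll(require('fs'));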
generators: For example via co.
Generator based control flow goodness for nodejs and the browser,
using promises, letting you write non-blocking code in a nice-ish way.
var co = require('co');

co(function *(){
    // yield any promise
    var result = yield Promise.resolve(true);
}).catch(onerror);

co(function *(){
    // resolve multiple promises in parallel
    var a = Promise.resolve(1);
    var b = Promise.resolve(2);
    var c = Promise.resolve(3);
    var res = yield [a, b, c];
    console.log(res);
    // => [1, 2, 3]
}).catch(onerror);

// errors can be try/catched
co(function *(){
    try {
        yield Promise.reject(new Error('boom'));
    } catch (err) {
        console.error(err.message); // "boom"
    }
}).catch(onerror);

function onerror(err) {
    // log any uncaught errors
    // co will not throw any errors you do not handle!!!
    // HANDLE ALL YOUR ERRORS!!!
    console.error(err.stack);
}
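Applied to the original temp-file question, a minimal sketch might look like this (assuming do_something and do_something_other keep their (file, callback) signatures from the question, and wrapping them into promise-returning functions with a small hypothetical helper first):
var co = require('co');
var fs = require('fs');

// small helper: turn an (arg, cb(err)) style function into one returning a promise
function promisify1(fn) {
    return function (arg) {
        return new Promise(function (resolve, reject) {
            fn(arg, function (err) {
                if (err) reject(err); else resolve();
            });
        });
    };
}

var do_something_p = promisify1(do_something);
var do_something_other_p = promisify1(do_something_other);

co(function *() {
    // run both operations in parallel and wait for both callbacks to fire
    yield [do_something_p(tmp_file_name), do_something_other_p(tmp_file_name)];
    // only now is it safe to remove the temp file
    yield promisify1(fs.unlink)(tmp_file_name);
}).catch(function (err) {
    console.error(err);
});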
If I understand correctly, I think you should have a look at the very good async library. You should especially have a look at async.series. Here is a copy of the snippets from its GitHub page:
async.series([
    function(callback){
        // do some stuff ...
        callback(null, 'one');
    },
    function(callback){
        // do some more stuff ...
        callback(null, 'two');
    }
],
// optional callback
function(err, results){
    // results is now equal to ['one', 'two']
});

// an example using an object instead of an array
async.series({
    one: function(callback){
        setTimeout(function(){
            callback(null, 1);
        }, 200);
    },
    two: function(callback){
        setTimeout(function(){
            callback(null, 2);
        }, 100);
    }
},
function(err, results) {
    // results is now equal to: {one: 1, two: 2}
});
As a plus this library can also run in the browser.
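Since the two operations in the question are independent, async.parallel maps onto it even more directly; a minimal sketch (keeping the question's function names):
var async = require('async');
var fs = require('fs');

async.parallel([
    function(callback){ do_something(tmp_file_name, callback); },
    function(callback){ do_something_other(tmp_file_name, callback); }
], function(err){
    if (err) return console.error(err);
    // both callbacks have fired, so the temp file can safely be removed
    fs.unlink(tmp_file_name, function(err){
        if (err) console.error(err);
    });
});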
The simplest way is to increment an integer counter when you start an async operation and then decrement the counter in the callback. Depending on the complexity, the callback could check the counter for zero and then delete the file.
A little more complex would be to maintain a list of objects, and each object would have any attributes that you need to identify the operation (it could even be the function call) as well as a status code. The callbacks would set the status code to completed.
Then you would have a loop that waits (using process.nextTick) and checks whether all tasks are completed. The advantage of this method over the counter is that it handles the case where all outstanding tasks complete before all tasks have been issued; with the plain counter technique, that situation would cause you to delete the file prematurely.
// simple countdown latch
function CDL(countdown, completion) {
    this.signal = function() {
        if(--countdown < 1) completion();
    };
}

// usage
var latch = new CDL(10, function() {
    console.log("latch.signal() was called 10 times.");
});
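Mapped onto the original question, a sketch might look like this (error handling omitted for brevity):
var fs = require('fs');

// two operations share the file, so the latch counts down from 2
var latch = new CDL(2, function() {
    fs.unlink(tmp_file_name, function(err) {
        if (err) console.error(err);
    });
});

do_something(tmp_file_name, function(err) { latch.signal(); });
do_something_other(tmp_file_name, function(err) { latch.signal(); });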
There is no "native" solution, but there are a million flow control libraries for node. You might like Step:
Step(
    function(){
        do_something(tmp_file_name, this.parallel());
        do_something_else(tmp_file_name, this.parallel());
    },
    function(err) {
        if (err) throw err;
        fs.unlink(tmp_file_name);
    }
);
Or, as Michael suggested, counters could be a simpler solution. Take a look at this semaphore mock-up. You'd use it like this:
do_something1(file, queue('myqueue'));
do_something2(file, queue('myqueue'));
queue.done('myqueue', function(){
    fs.unlink(file);
});
I'd like to offer another solution that utilizes the speed and efficiency of the programming paradigm at the very core of Node: events.
Everything you can do with Promises or modules designed to manage flow-control, like async, can be accomplished using events and a simple state-machine, which I believe offers a methodology that is, perhaps, easier to understand than other options.
For example, assume you wish to sum the lengths of multiple files in parallel:
const fs = require('fs');
const EventEmitter = require('events').EventEmitter;

// simple event-driven state machine
const sm = new EventEmitter();

// running state
let context = {
    tasks: 0,    // number of total tasks
    active: 0,   // number of active tasks
    results: []  // task results
};

const next = (result) => { // must be called when each task chain completes
    if(result) { // preserve result of task chain
        context.results.push(result);
    }
    // decrement the number of running tasks
    context.active -= 1;
    // when all tasks complete, trigger done state
    if(!context.active) {
        sm.emit('done');
    }
};

// operational states

// start state - initializes context
sm.on('start', (paths) => {
    const len = paths.length;
    console.log(`start: beginning processing of ${len} paths`);
    context.tasks = len;  // total number of tasks
    context.active = len; // number of active tasks
    sm.emit('forEachPath', paths); // go to next state
});

// start processing of each path
sm.on('forEachPath', (paths) => {
    console.log(`forEachPath: starting ${paths.length} process chains`);
    paths.forEach((path) => sm.emit('readPath', path));
});

// read contents from path
sm.on('readPath', (path) => {
    console.log(` readPath: ${path}`);
    fs.readFile(path, (err, buf) => {
        if(err) {
            sm.emit('error', err);
            return;
        }
        sm.emit('processContent', buf.toString(), path);
    });
});

// compute length of path contents
sm.on('processContent', (str, path) => {
    console.log(` processContent: ${path}`);
    next(str.length);
});

// when processing is complete
sm.on('done', () => {
    const total = context.results.reduce((sum, n) => sum + n);
    console.log(`The total of ${context.tasks} files is ${total}`);
});

// error state
sm.on('error', (err) => { throw err; });

// ======================================================
// start processing - ok, let's go
// ======================================================
sm.emit('start', ['file1', 'file2', 'file3', 'file4']);
Which will output:
start: beginning processing of 4 paths
forEachPath: starting 4 process chains
readPath: file1
readPath: file2
processContent: file1
readPath: file3
processContent: file2
processContent: file3
readPath: file4
processContent: file4
The total of 4 files is 4021
Note that the ordering of the process chain tasks is dependent upon system load.
You can envision the program flow as:
start -> forEachPath -+-> readPath1 -> processContent1 -+-> done
                      +-> readPath2 -> processContent2 -+
                      +-> readPath3 -> processContent3 -+
                      +-> readPath4 -> processContent4 -+
For reuse, it would be trivial to create a module to support the various flow-control patterns, i.e. series, parallel, batch, while, until, etc.
The simplest solution is to run the do_something* and unlink in sequence as follows:
do_something(tmp_file_name, function(err) {
    do_something_other(tmp_file_name, function(err) {
        fs.unlink(tmp_file_name);
    });
});
Unless, for performance reasons, you want to execute do_something() and do_something_other() in parallel, I suggest keeping it simple and going this way.
Wait.for https://github.com/luciotato/waitfor
using Wait.for:
var wait=require('wait.for');
...in a fiber...
wait.for(do_something,tmp_file_name);
wait.for(do_something_other,tmp_file_name);
fs.unlink(tmp_file_name);
With pure Promises it could be a bit more messy, but if you use Deferred Promises then it's not so bad:
Install:
npm install --save @bitbar/deferred-promise
Modify your code:
const DeferredPromise = require('@bitbar/deferred-promise');
const promises = [
    new DeferredPromise(),
    new DeferredPromise()
];

do_something(tmp_file_name, (err) => {
    if (err) {
        promises[0].reject(err);
    } else {
        promises[0].resolve();
    }
});

do_something_other(tmp_file_name, (err) => {
    if (err) {
        promises[1].reject(err);
    } else {
        promises[1].resolve();
    }
});

Promise.all(promises).then( () => {
    fs.unlink(tmp_file_name);
});
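For comparison, here is a sketch of the same thing with plain Promises and no extra library; the "messiness" is mostly the explicit wrapping of each callback:
const fs = require('fs');

// wrap each callback-style call in an ordinary Promise
const first = new Promise((resolve, reject) => {
    do_something(tmp_file_name, (err) => err ? reject(err) : resolve());
});
const second = new Promise((resolve, reject) => {
    do_something_other(tmp_file_name, (err) => err ? reject(err) : resolve());
});

Promise.all([first, second]).then(() => {
    fs.unlink(tmp_file_name, (err) => {
        if (err) console.error(err);
    });
}).catch(console.error);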

Timing of query with mongoose and node

I am completely desperate with this problem. I have a query array which, until now, I stored in memory. With it, I call an API, fire my requests and store the results in MongoDB... no problem. Unfortunately, I have no control over the API server, and occasional ECONN*/TCP/IP connection errors make my app crash from time to time.
To be able to resume my querying task, I wrote all my queries to MongoDB, and I want to track/record the queryState of each query, i.e. whether it has been executed or not.
My problem occurs when I try to get the queries back from Mongo into my method which fires the requests. Due to some timing/async problems (I guess), my queryArray stays undefined all the time and I can't get it solved...
server.js:
//TEST
var querymongo = require('./config/queryMongo');
var queryobject = new querymongo;
var queryArray = queryobject.results();
queryArray stays undefined all the time...
queryMongo.js:
//require mongo model + db connection
var queryDB = require('./queryDB');

//constructor
...

//mongoRequest method
this.mongoRequest = function(){
    console.log("Function mongoRequest called now!");
    return new Promise(function(resolve, reject){
        queryDB.queries.find({'SearchIndex': 'All'}, function(err, doc){
            if(err) return reject(err);
            else resolve(doc);
        });
    });
}

//resolve results
this.results = function(){
    var queryArray = [];
    this.mongoRequest().then(function(doc, err){
        console.log(doc);
        queryArray = doc;
        return queryArray;
    }).catch(function(err){
        console.log(err);
    });
}
}
module.exports = QueryMongo;
The console.log(doc) in the results method works, but it only logs the docs after all the other code has executed. What is the problem here?
I would be very thankful, as this is making me crazy!
Thanks
Hucho
So your question is why queryArray is undefined?
Because the Mongo query is asynchronous, and you're executing console.log right after, that is, before you get the results.
//resolve results
this.results = function(){
    var queryArray = [];
    this.mongoRequest().then(function(doc, err){
        //////// DO SOMETHING WITH THE RESULT HERE
    }).catch(function(err){
        console.log(err);
    });
}
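One hedged way to take that a step further is to return the promise itself and consume it where the data is needed; a sketch building on the code above:
// queryMongo.js - hand the promise back instead of assigning to a local array
this.results = function(){
    return this.mongoRequest(); // resolves with the array of docs
};

// server.js - the array only exists inside the .then() callback
var querymongo = require('./config/queryMongo');
var queryobject = new querymongo();

queryobject.results().then(function(queryArray){
    console.log(queryArray); // defined here, once Mongo has answered
}).catch(function(err){
    console.log(err);
});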

Database call in a function without callback with Node.js and Postgresql

I'm trying out the framework node.js on one of my projects.
I'm really seeing some good advantages in what they call the "event-driven, non-blocking I/O model"; however, in my project there are some moments where I don't necessarily want asynchronous calls, and I want to be able to do several operations before launching some asynchronous call.
Especially when I want to do some factorization and create some functions.
Typically I have the following case:
I know that in several parts of my program I have to check if a media item exists in my database for a given string or id.
So, as a guy who tries to stay organized, I want to create a function that I will call each time I need to check this.
However, I did not find a way to do that with node.js and pg (the npm PostgreSQL library, https://github.com/brianc/node-postgres/). Indeed, there is always a callback in the function, and the return value is null because of the callback. Here is an example below:
/*
 Function which is supposed to check if a media item exists
*/
function is_media_existing (url_or_id){
    log.debug("is_media_existing : begin of the function", {"Parameter" : url_or_id});
    pg.connect(connectionstring, function (err, client, done) {
        if (err) {
            log.warning("is_media_existing : Problem with Database connection", {
                "Parameter": url_or_id,
                "Error": err
            });
        }
        if (isNaN(url_or_id)) {
            // Case where the parameter is not a number (string)
            var query = client.query('SELECT COUNT(*) as count FROM media WHERE url = $1::string ', url_or_id);
            query.on('error', function (error) {
                log.warning("is_media_existing : Problem with Database query (connection to db passed but not query " +
                    "", {"Parameter": url_or_id, "Error": error});
            });
            return query;
        } else {
            // Case where the parameter is an int
            log.debug("is_media_existing : Type of Parameter is a string");
            var query = client.query('SELECT COUNT(*) as count FROM media WHERE id = $1::id ', url_or_id);
            query.on('error', function (error) {
                log.warning("is_media_existing : Problem with Database query (connection to db passed but not query " +
                    "", {"Parameter": url_or_id, "Error": error});
            });
            return query;
        }
    });
}

// Executing the function
var test = is_media_existing("http://random_url_existing_in_db");
// test is always null as the return is in a callback and the callback is asynchronous
// Executing the function
var test = is_media_existing("http://random_url_existing_in_db");
// test is always null as the return is in a callback and the callback is asynchronous
I have the feeling my question touches the core concepts of node.js, and perhaps my approach is wrong; I apologize in advance.
I know it's not good to wait for a response before doing something.
But what's the alternative? How can I factorize my code into functions when I need some functionalities in several part of my code?
So if there would be anyone who could explain how to do that with a best practice of programming it would be great.
Thanks
Anselme
As Cody says, you probably don't want to do a synchronous function.
The way you should handle the situation in your example is to pass in your own callback, like this:
function is_media_existing (url_or_id, callback){
and then, instead of return query;, use your callback like this:
callback(query);
or, probably better, follow the node convention for callback functions of having two parameters (err, result), so your callback would look like this:
callback(null, query);
Here is a rework of your sample
function is_media_existing (url_or_id, callback){ /* callback(err, result) */
    log.debug("is_media_existing : begin of the function", {"Parameter" : url_or_id});
    pg.connect(connectionstring, function (err, client, done) {
        if (err) {
            done(err);
            log.warning("is_media_existing : Problem with Database connection", {
                "Parameter": url_or_id,
                "Error": err
            });
            return callback(err, null);
            /* note that this return is simply used to exit the connect's callback and the return value is typically
             * not used; it is the call to callback() that returns the error value */
        }
        var qrystr;
        if (isNaN(url_or_id)) {
            log.debug("is_media_existing : Type of Parameter is a string");
            qrystr = 'SELECT COUNT(*) as count FROM media WHERE url = $1::string;';
        } else {
            qrystr = 'SELECT COUNT(*) as count FROM media WHERE id = $1::id;';
        }
        client.query(qrystr, [url_or_id], function(err, result){
            done();
            if(err){
                /* .. */
            }
            callback(err, result);
        });
    });
}
// Executing the function
var test = is_media_existing("http://random_url_existing_in_db", function(err, result){
    if(err){
    } else {
    }
});
If you end up with a hard nest of callbacks, promises are really worth looking into.
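As a rough idea of what that could look like, here is a sketch that wraps the reworked callback version above in a promise (assuming the pg result shape, where the count comes back in result.rows[0].count):
// sketch: promise-returning wrapper around the callback version above
function is_media_existing_p(url_or_id) {
    return new Promise(function (resolve, reject) {
        is_media_existing(url_or_id, function (err, result) {
            if (err) return reject(err);
            resolve(result.rows[0].count > 0);
        });
    });
}

// usage
is_media_existing_p("http://random_url_existing_in_db")
    .then(function (exists) { console.log(exists); })
    .catch(function (err) { console.error(err); });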
I don't think you really want a synchronous call. The problem with synchronous calls in node is that they stop the entire process from doing anything while the synchronous function is running, as it blocks the event loop. As an example, let's say your sync function takes 2 seconds to complete. Your server will then do nothing for 2 full seconds. Those 2 seconds include everything (accepting new connections, everything else, etc.). The reason blocking functions don't exist is because they are (very) bad. Here is an example of how your function would work in an async manner.
is_media_existing("http://random_url_existing_in_db", function(exists){
if (exists){
//do stuff
} else {
//do this other stuff
}
});
Then within is_media_existing you will need to call that callback function when your query completes.
//pseudo
function is_media_existing(url, callback){
    query('select COUNT(*) as count FROM media WHERE id = $1::id', [url], function(err, result){
        if (err)
            callback(false);
        else
            callback(result.count > 0);
    });
}
With the new ES6-plus async stuff and Babel it's simpler. You can npm i -g babel and npm i babel-runtime, then compile and run the following with babel test.js --optional runtime --stage 2 | node. Please read the following example carefully to see how to adapt it to your use case:
let testData = [
    { id: 0, childIds: [1, 2] },
    { id: 1, childIds: [] }
];

function dbGet(ids) {
    return new Promise( r => {
        // this is an example; you could do any db
        // query here and call r with the results
        r(ids.map((id) => { return testData[id]; }));
    });
}

async function dbExists(ids) {
    let found = await dbGet(ids);
    return (found && found.length > 0);
}

async function test() {
    var exists = await dbExists([0]);
    console.log(exists);
}

test().then(f => {}).catch( e => { console.log('e', e); });

Wrapping MongoDB calls within a Promise

I'm using Meteor (1.0.3) in general, but for one particular case I'm using a raw server side route to render a file -- so I'm outside a Meteor method.
I'm using node fs.writeFile/fs.readFile and exec commands to call out to Linux command-line utilities too.
My only point in bringing this up is that the node calls are async, of course. And so I've opted to use the node Q library in order to manage the async callbacks.
This all worked until I added a line to call out to the MongoDB database.
A call like so:
var record_name = Mongo_Collection_Name.findOne({_personId: userId}, {fields: {'_id': 0}});
Produces the following error:
[Error: Can't wait without a fiber]
The error only occurs when I wrap the function in a Promise.
For example, something like this will throw:
getRecordExample = function () {
    var deferred = Q.defer();
    var record_name = Mongo_Collection_Name.findOne({_personId: userId}, {fields: {'_id': 0}});
    // do something
    // if no error
    deferred.resolve(record_name);
    return deferred.promise;
}
If I use the Meteor Fibers library I don't get the error:
getRecordExample = function () {
    var deferred = Q.defer();
    Fiber = Npm.require('fibers');
    var record_name;
    Fiber(function () {
        record_name = Mongo_Collection_Name.findOne({_personId: userId});
    }).run();
    // do something
    // if no error
    deferred.resolve(record_name);
    return deferred.promise;
}
but, the record_name variable is undefined outside the fiber, so I don't have a way to pass the variable outside of the Fiber scope as far as I can tell.
A More Precise Example
This is a little long, so you have to scroll down to see it all. I'm basically building a workflow here so there are processes and subprocesses.
// both/routes.js
Router.route('/get-route', function(req, res) {
    // get the userId then start the workflow below
    // using Promises here because these were firing concurrently
    Q(userId)
        .then(process_1)
        .then(process_2)
        .done();
}, { name: 'server-side-ir-route', where: 'server' });

// server.js
process_1 = function (userId) {
    sub_process_1(userId);
    sub_process_2(userId);
    return userId;
}

process_2 = function (userId) {
    sub_process_3(userId);
    sub_process_4(userId);
    return userId;
}

sub_process_1 = function (userId) {
    var result = get_record_1(userId);
    // do stuff with result
    // using Q library to call out to async fs.writeFile, return Promise
    fs_writeFile_promise(result)
        .catch(function (error) {
            console.log('error in sub_process_1_write', error);
        })
        .done(function () {
            console.log('done with sub_process_1');
        });
    return userId;
}.future() // <-- if no future() here, the exception is thrown.

sub_process_2 = function (userId) {
    var result = get_record_2(userId);
    // do stuff with result
    // using Q library to call out to async fs.writeFile, return Promise
    fs_writeFile_promise(result)
        .catch(function (error) {
            console.log('error in sub_process_2_write', error);
        })
        .done(function () {
            console.log('done with sub_process_2');
        });
    return userId;
}.future()

// async because of I/O operation (I think)
get_record_1 = function (userId) {
    var record_1 = Mongo_Collection_Name.findOne({'userId': userId});
    // do stuff
    return record_1;
}

get_record_2 = function (userId) {
    var record_2 = Mongo_Collection_Name.findOne({'userId': userId});
    // do stuff
    return record_2;
}

// async operation using Q library to return a Promise
fs_writeFile_promise = function (obj) {
    var deferred = Q.defer();
    fs.writeFile(obj.file, obj.datas, function (err, result) {
        if (err) deferred.reject(err);
        else deferred.resolve('write data completed');
    });
    return deferred.promise;
}
For now, let's assume that the process_2 function is exactly like process_1.
Also, we should assume I have console.log('step_start') and console.log('step_end') in each function. This is what it would look like on the command line:
start processes
end processes
start processes 1
end processes 1
start processes 2
start sub processes 1
getting record 1
start sub processes 2
getting record 2
returning record 1
end sub processes 1
called writeData in sub process 1
returning record 2
called writeData in sub process 2
end processes 2
ending sub processes 1
The reason I had to place a Fiber (future) on the sub_process_1() function was that when I placed the function process_1() in the Q chain at the top, I got the [Error: Can't wait without a fiber] error.
If I remove process_1() from the Q chain at the top and remove the .future() from sub_process_1(), no exception is thrown.
Questions
Why does calling out to a Mongo collection within a Promise cause a
fiber error within a Meteor application?
Does calling an async function within a sync function in general cause the sync function to become an async function?
How do I solve this problem?
The most common way to solve this is wrap your asynchronous callbacks that use Meteor functions in Meteor.bindEnvironment().
If you are using the Meteor core WebApp package to handle your server side route, the code would be like this (also in meteorpad):
WebApp.connectHandlers.use(
    '/test',
    Meteor.bindEnvironment(function(req, res, next) {
        var someSyncData = Players.findOne();
        res.write(JSON.stringify(someSyncData));
        res.end();
    })
);
Working with fibers or promises yourself is unnecessary unless you are trying to get multiple async events to run concurrently.
To deal with file reading or other functions that are not already synchronous, Meteor also provides Meteor.wrapAsync() to make them synchronous.
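As a rough sketch of what that looks like (the file path here is just an illustration), wrapping fs.readFile for use inside fibered Meteor server code might be done like this:
var fs = Npm.require('fs');

// wrapAsync turns a callback-style function into one that can be called
// synchronously from fibered Meteor server code (it throws on error)
var readFileSync = Meteor.wrapAsync(fs.readFile, fs);
var contents = readFileSync('/tmp/example.txt', 'utf8');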
There are also packages and a help page that give you other high level alternatives.

Nodejs Running Functions in Series

So right now I'm trying to use Nodejs to access files in order to write them to a server and process them.
I've split it into the following steps:
Traverse directories to generate an array of all of the file paths
Put the raw text data from each of the file paths in another array
Process the raw data
The first two steps are working fine, using these functions:
var walk = function(dir, done) {
    var results = [];
    fs.readdir(dir, function(err, list) {
        if (err) return done(err);
        var pending = list.length;
        if (!pending) return done(null, results);
        list.forEach(function(file) {
            file = path.resolve(dir, file);
            fs.stat(file, function(err, stat) {
                if (stat && stat.isDirectory()) {
                    walk(file, function(err, res) {
                        results = results.concat(res);
                        if (!--pending) done(null, results);
                    });
                } else {
                    results.push(file);
                    if (!--pending) done(null, results);
                }
            });
        });
    });
};
function processfilepaths(callback) {
    // reading each file
    for (var k in filepaths) { if (arrayHasOwnIndex(filepaths, k)) {
        fs.readFile(filepaths[k], function (err, data) {
            if (err) throw err;
            rawdata[k] = data.toString().split(/ *[\t\r\n\v\f]+/g);
            for (var j in rawdata[k]) { if (arrayHasOwnIndex(rawdata[k], j)) {
                rawdata[k][j] = rawdata[k][j].split(/: *|: +/);
            }}
        });
    }}
    if (callback) callback();
}
Obviously, I want to call the function processrawdata() after all of the data has been loaded. However, using callbacks doesn't seem to work.
walk(rootdirectory, function(err, results) {
if (err) throw err;
filepaths = results.slice();
processfilepaths(processrawdata);
});
This never causes an error. Everything seems to run perfectly except that processrawdata() is always finished before processfilepaths(). What am I doing wrong?
You are having a problem with callback invocation and asynchronously calling functions. IMO, I recommend that you use a library such as after-all to execute a callback once all your functions have executed.
Here's a example, here the function done will be called once all the functions wrapped with next are called.
var afterAll = require('after-all');

// Call `done` once all the functions
// wrapped with next() get called
next = afterAll(done);

// first execute this
setTimeout(next(function() {
    console.log('Step two.');
}), 500);

// then this
setTimeout(next(function() {
    console.log('Step one.');
}), 100);

function done() {
    console.log("Yay we're done!");
}
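A sketch of how this could map onto the file-reading loop from the question (assuming after-all's next() wrapper passes the underlying callback's arguments through, as suggested by the example above):
var next = afterAll(function() {
    // every readFile callback has run by the time this fires
    processrawdata();
});

filepaths.forEach(function(filepath, k) {
    fs.readFile(filepath, next(function(err, data) {
        if (err) throw err;
        rawdata[k] = data.toString().split(/ *[\t\r\n\v\f]+/g);
    }));
});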
I think for your problem, you can use async module for Node.js:
async.series([
function(){ ... },
function(){ ... }
]);
To answer your actual question, I need to explain how Node.js works:
Say you call an async operation (such as a MySQL db query); Node.js sends "execute this query" to MySQL. Since this query will take some time (maybe some milliseconds), Node.js performs the query using the MySQL async library, getting back to the event loop and doing something else there while waiting for MySQL to get back to us, like handling that HTTP request.
So, in your case both functions are independent and execute almost in parallel.
For more information:
Async.js for use with Node.js
function processfilepaths(callback) {
    // reading each file
    for (var k in filepaths) { if (arrayHasOwnIndex(filepaths, k)) {
        fs.readFile(filepaths[k], function (err, data) {
            if (err) throw err;
            rawdata[k] = data.toString().split(/ *[\t\r\n\v\f]+/g);
            for (var j in rawdata[k]) { if (arrayHasOwnIndex(rawdata[k], j)) {
                rawdata[k][j] = rawdata[k][j].split(/: *|: +/);
            }}
        });
    }}
    if (callback) callback();
}
Realize that you have:
for
readfile (err, callback) {... }
if ...
Node will call each readFile asynchronously, which only sets up the event and callback; then, when it is done issuing each readFile, it will reach the if, before the callbacks have probably even had a chance to be invoked.
You need to use either Promises or a flow-control module like async to coordinate it. What you would then do looks like:
async.XXXX(filepaths, processRawData,
    function (err, ...) {
        // function for when all are done
        if (callback) callback();
    }
);
Where XXXX is one of the functions from the library like series, parallel, each, etc. The only other thing you need to know is that inside your process-raw-data step, async gives you a callback to call when done. Unless you really need sequential access (I don't think you do), use parallel so that you can queue up as many I/O events as possible; it should execute faster, maybe only marginally, but it'll better leverage the hardware. A concrete sketch follows below.
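For instance, a minimal sketch using async.each (assuming filepaths and rawdata are the array and object from the question, and that only the read step needs to finish before processing):
var async = require('async');
var fs = require('fs');

// read every file in parallel and only then process the data
async.each(Object.keys(filepaths), function (k, done) {
    fs.readFile(filepaths[k], function (err, data) {
        if (err) return done(err);
        rawdata[k] = data.toString().split(/ *[\t\r\n\v\f]+/g);
        done();
    });
}, function (err) {
    if (err) throw err;
    processrawdata(); // every read has finished at this point
});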
