running async with two tasks using nodejs? - javascript

I want to be able to run two tasks inside each async.each iteration, using the "async" module.
For example:
async.each(openFiles, function(file, callback) {
    // task 1
    function() {
        callback();
    }
    // task 2, waits for task 1 to finish
    function() {
        callback();
    }
}, function(err) {
    console.log("done");
});
I'm using each because I'm looping through every value and need to apply two tasks to each element.

You should be able to run async.series inside async.each. This iterates over openFiles and runs the series for each file; the each loop only moves on to the next item once the series has finished.
async.each(openFiles, function(file, eachCallback) {
    async.series([
        function(seriesCallback) {
            // task 1
            seriesCallback();
        },
        function(seriesCallback) {
            // task 2, runs only after task 1 has called seriesCallback
            seriesCallback();
        }
    ], function(err) {
        // forward any series error so the final each callback sees it
        eachCallback(err);
    });
}, function(err) {
    console.log("done");
});

Here is some code for the 2-async approach:
async.each(openFiles, function(file, callback) {
    // cb() is called right after task(file) returns, so this form
    // assumes task1 and task2 do their work synchronously
    async.each([task1, task2], function(task, cb) {
        task(file); // exec tasks 1, 2
        cb();       // one completed
    }, callback);   // both completed
}, function(err) {
    console.log("done");
});
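If task1 and task2 are themselves asynchronous and task2 must wait for task1, one option is to make the inner loop an async.eachSeries and have each task accept a completion callback. A minimal sketch, assuming hypothetical task signatures of (file, done):

// sketch: hypothetical async tasks that call done() when finished
function task1(file, done) { /* ... */ done(); }
function task2(file, done) { /* ... */ done(); }

async.each(openFiles, function(file, callback) {
    // eachSeries starts task2 only after task1 has called its callback
    async.eachSeries([task1, task2], function(task, cb) {
        task(file, cb);
    }, callback);
}, function(err) {
    console.log("done");
});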

You can use a plain JavaScript callback here to make task2 depend on task1. Something like this:
async.each(openFiles, function(file, callback) {
    // run task 1, then task 2, then signal async.each
    task1(function() {
        task2(function() {
            // callback of async.each
            callback();
        });
    });
}, function(err) {
    console.log("done");
});
and your task1 and task2 functions will take the callback as an argument, something like this:
function task1(callback) {
    // do whatever task1 needs to do
    callback();
}
function task2(callback) {
    // do whatever task2 needs to do
    callback();
}
This way task2 will run only once task1 has completed, inside async.each.
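Since the async module is already in use, the same dependency could also be expressed with async.waterfall. A minimal sketch, assuming task1 and task2 take only a callback as defined above:

async.each(openFiles, function(file, callback) {
    async.waterfall([
        task1, // runs first
        task2  // starts only after task1 calls its callback
    ], callback); // marks this file's iteration as done
}, function(err) {
    console.log("done");
});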

Related

Waiting for Several CSV files to load with Papaparse

I have been trying to load several CSV files before running the code on my page, as it uses the data from those CSV files. I have used Papa Parse (PAPAPARSE.js) as a library to help me with this and I have come up with the following solution.
function loadData() {
    console.log("Loading Data!");
    loadNodeData();
    loadEdgeData();
    loadHeadendData();
    setup();
}
function loadNodeData() {
    Papa.parse("Data/CSV1.csv", {
        download: true,
        step: function(row) {
            NodeData.push(row.data);
        },
        complete: function() {
            console.log("Loaded Node Data!");
            load1 = true;
        }
    });
}
function loadEdgeData() {
    Papa.parse("Data/CSV2.csv", {
        download: true,
        step: function(row) {
            EdgeData.push(row.data);
        },
        complete: function() {
            console.log("Loaded Edge Data!");
            load2 = true;
        }
    });
}
function loadHeadendData() {
    Papa.parse("Data/CSV3.csv", {
        download: true,
        step: function(row) {
            HeadendArr.push(row.data);
        },
        complete: function() {
            console.log("Loaded Headend Data!");
            load3 = true;
        }
    });
}
function setup() {
    intervalID = setInterval(isDataLoaded, 100);
}
function isDataLoaded() {
    // Attempt to set up the page; this will only work if the data is loaded.
    if (load1 && load2 && load3) {
        console.log("LOADED");
        _setupSearchOptions();
    }
}
I have the following setup, however I don't know if this is the best way to go about doing something like this. loadData triggers on page load:
<head onload="loadData()">
Is this the correct way to make the program flow?
A more modern approach is to use promises.
You can cut down the code repetition by creating one function that takes the URL and the array to push each step's data into, and wraps the Papa.parse() call in a promise that is resolved in the complete callback.
Then use Promise.all() to call _setupSearchOptions() after all three promises resolve.
Something like:
function parseCsv(url, stepArr) {
    return new Promise(function(resolve, reject) {
        Papa.parse(url, {
            download: true,
            step: function(row) {
                stepArr.push(row.data);
            },
            complete: resolve
        });
    });
}
function loadData() {
    const nodeReq = parseCsv("Data/CSV1.csv", NodeData);
    const edgeReq = parseCsv("Data/CSV2.csv", EdgeData);
    const headReq = parseCsv("Data/CSV3.csv", HeadendArr);
    Promise.all([nodeReq, edgeReq, headReq]).then(_setupSearchOptions);
}
Note that no error handling has been considered here. Presumably the Papa.parse API also has some fail or error callback that you could use to call reject(), together with a catch() on Promise.all() to handle that failure.
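A minimal sketch of that error handling, assuming Papa.parse accepts an error callback in its config (check the Papa Parse docs for the exact option name and signature):

function parseCsv(url, stepArr) {
    return new Promise(function(resolve, reject) {
        Papa.parse(url, {
            download: true,
            step: function(row) {
                stepArr.push(row.data);
            },
            complete: resolve,
            error: reject // assumed error hook: reject the promise on failure
        });
    });
}

Promise.all([
    parseCsv("Data/CSV1.csv", NodeData),
    parseCsv("Data/CSV2.csv", EdgeData),
    parseCsv("Data/CSV3.csv", HeadendArr)
]).then(_setupSearchOptions)
  .catch(function(err) {
      console.error("Failed to load CSV data", err);
  });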

Using the error parameter to separate failed processes in async.mapLimit

I'm using async.mapLimit to run some concurrent operations over an array, with a limit of 10:
async.mapLimit(files, 10, function(file, callback) {
    // ... etc ...
}, function(error, files) {
    // ... etc ...
});
Inside the iterator function, I'm executing an async operation with child_process, and if everything happens as it should, I just call the callback:
callback(null, file);
But... when something bad happens, I also NEED to call the callback passing the file, because I don't want to end everything; I just assign an error property to the file and call the callback:
file.error = error;
callback(null, file);
So, when the second (final) async.mapLimit callback is fired, I have an array of files:
, function(error, files) {
    console.log(files);
});
output:
[
{
name: 'file_2',
error: 'something'...
},
{
name: 'file_1',
...etc
}
]
So, I need to separate the files that failed, doing:
var failedFiles = [];
var okFiles = [];
files.forEach(function(file) {
    if (file.error)
        failedFiles.push(file);
    else
        okFiles.push(file);
});
I would like to know whether it's possible to return the files that failed as an array, and access them via the error parameter of the final async.mapLimit callback.
Thanks in advance :).
async.mapLimit() will stop immediately when an iteration "returns" an error, so it's not possible to do what you want.
As an alternative, instead of using async.mapLimit() you could use async.eachLimit() and push the file objects into the respective array inside the iterator function:
var failedFiles = [];
var okFiles = [];
async.eachLimit(files, 10, function(file, callback) {
    if (SOME_ERROR) {
        failedFiles.push(file);
    } else {
        okFiles.push(file);
    }
    callback();
}, function(err) {
    // ...
});
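Depending on your version of the async library, another option worth checking in its docs is async.reflect(), which wraps the iterator so that per-item errors come back in the results array instead of aborting the whole map. A sketch, where doWork is a hypothetical async operation on a file:

async.mapLimit(files, 10, async.reflect(function(file, callback) {
    doWork(file, function(err) {
        if (err) return callback(err); // becomes { error: err } in the results
        callback(null, file);          // becomes { value: file } in the results
    });
}), function(err, results) {
    var okFiles = results
        .filter(function(r) { return !r.error; })
        .map(function(r) { return r.value; });
    var failedResults = results
        .filter(function(r) { return r.error; });
});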

How to run a second function after the first has ended in JavaScript?

function addItemToCart(id_1, id_2, id_3, id_etc) {
    jQuery.ajax({
    });
}
function clearCart() {
    jQuery.ajax({
    });
}
function buyButton() {
    clearCart();
    setTimeout(function() {
        // redirect to cart
        addItemToCart(283746, 1, 1, "Months", "1294");
    }, 500);
}
I'm trying to make it so that instead of using a setTimeout it waits until clearCart() has finished its Ajax request and then runs the functions inside the setTimeout, and I can't seem to figure this out. I tried doing a callback, but I'd say that it isn't correct:
function FirstFunction(callBack) {
    clearCart();
    if (callback) {
        callback();
    }
}
function SecondFunction() {
    buyButton();
}
FirstFunction(SecondFunction);
If you want to run some code when an Ajax response has arrived, put it in the done or success handler function. That is what those functions are for.
Don't guess how long the request is going to take.
Use the done or success callback to avoid an arbitrary waiting time, like below:
function clearCart() {
    return jQuery.ajax({
    });
}
function buyButton() {
    clearCart().done(function() {
        addItemToCart(283746, 1, 1, "Months", "1294");
    });
}
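If clearing the cart can fail, a .fail() handler can be chained on the same jqXHR; a minimal sketch using the clearCart() above:

function buyButton() {
    clearCart()
        .done(function() {
            addItemToCart(283746, 1, 1, "Months", "1294");
        })
        .fail(function(jqXHR, textStatus) {
            console.error("Could not clear cart:", textStatus);
        });
}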

Node Mocha Chai Async - Everything Passing even when it should fail

I was attempting to teach myself to use a testing framework for automating tests instead of having to do them by hand. After a bit of trial and error, I finally got the unit tests to start passing... but now my problem is that everything passes regardless of whether it should or not.
Currently I have the following code:
describe('create {authName, authPW}', function() {
    it('no name', function() {
        init({
            path: ':memory:',
            callback: function() {
                var arg;
                arg = {};
                //arg['authName'] = 'Name';
                arg['authPW'] = 'Pass';
                arg['callback'] = function(r) {
                    // r.should.equal('create error');
                    r.should.equal('foobar');
                    done();
                };
                create(arg);
            }
        });
    });
});
As you can guess... r should NOT equal 'foobar'.
What am I doing wrong here?
When creating async tests with Mocha you need to let it know when the test is done:
describe('an asynch piece of code', function() {
    var foo = new bar();
    it('should call the callback with a result', function(done) {
        foo.doAsynchStuff(function(result) {
            result.should.be.ok;
            done();
        });
    });
});
If done is present as an argument of the function passed to it(), Mocha will wait for done to be called. There is a timeout of 2 seconds that, if exceeded, fails the test. You can increase this timeout:
it('should resolve in less than 10 seconds', function(done) {
    this.timeout(10000);
    foo.doAsynchStuff(function(result) {
        result.should.be.ok;
        done();
    });
});
In your case, done has to be an argument of the function passed to it():
it('no name', function(done) {
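Putting that together with your test, a sketch of the corrected version (assuming init and create behave as in your snippet, and that the expected value really is 'create error'):

describe('create {authName, authPW}', function() {
    it('no name', function(done) { // accepting done makes Mocha wait
        init({
            path: ':memory:',
            callback: function() {
                var arg = {};
                arg['authPW'] = 'Pass';
                arg['callback'] = function(r) {
                    r.should.equal('create error'); // a failing assertion now fails the test
                    done();
                };
                create(arg);
            }
        });
    });
});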

How can I analyse my JS program to ensure a particular method is always called with 2 arguments?

We're using promises in an AngularJS project and want to ensure that the then method is always called with 2 arguments, the 2nd being an error handler, like so:
$http.get(url).then(function () {
    console.log('hooray!');
}, function (error) {
    console.log('boo! error');
});
We're using jshint on the project. Can that perform this analysis?
Some calls to then do not require an error handler, e.g. in a chain of handlers:
$http.get(url1).then(function () {
    console.log('hooray!');
    return $http.get(url2);
}).then(function () {
    console.log('hooray again! all our data loaded');
}, function (error) {
    console.log('boo! error in one of our 2 requests');
});
We could mark these up using jshint's /* jshint ignore:start */ comments or similar.
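For example, such a call could be marked up by wrapping the statement in JSHint's ignore comments; a sketch, noting that this exempts the whole wrapped statement from whatever rule ends up enforcing the two-argument convention:

/* jshint ignore:start */
$http.get(url1).then(function () {
    // no error handler on the first then() on purpose; the second then() has one
    return $http.get(url2);
}).then(function () {
    console.log('hooray again! all our data loaded');
}, function (error) {
    console.log('boo! error in one of our 2 requests');
});
/* jshint ignore:end */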
