Recursively traverse tree in JavaScript

This is a super simple task to do in Java, but the asynchronous nature of JavaScript makes it (for me) almost impossible, at least with my current knowledge. (I'm not trying to bash JavaScript. I love the language!)
It's very basic. A top-level tree has a parent of null in my MySQL database. It's easy to find the children. The children have lines available to them. The depth of the tree is variable.
private static Set<Tree> getBranches( Tree trunk ) {
    Set<Tree> treeSet = new HashSet<Tree>();
    if ( trunk != null ) {
        if ( trunk.hasLines() ) { // queries if tree has lines; returns true or false
            treeSet.add( trunk );
        }
        for ( Tree tree : trunk.treeList ) {
            treeSet.addAll( getBranches( tree ) );
        }
    }
    return treeSet;
}
Basically the method tests whether the tree has lines available. If it does, it adds the tree to a set; if not, it keeps descending until it finds lines.
The asynchronous nature of the mysql node library turns this task into hell.
Here is what I have now:
function hasLines(tree_id, callback) {
    var ret;
    pool.query('SELECT * from pkg_line_tree where tree_id = ?', [tree_id], function (err, rows) {
        if (rows.length > 0) {
            ret = true;
        } else {
            ret = false;
        }
        callback(ret);
    });
}
function dig(tree_id, treeArray, callback) {
    pool.query('SELECT * from tree where parent_id = ?', [tree_id], function (err, rows) {
        if (rows) {
            for (var i in rows) {
                hasLines(rows[i].tree_id, function (t) {
                    if (t) {
                        treeArray.push(rows[i].tree_id);
                    } else {
                        treeArray.concat(dig(rows[i].tree_id, treeArray));
                    }
                });
            }
            if (callback) {
                callback(treeArray);
            }
        }
    });
    return treeArray;
}
var treeArray = [];
dig(52, treeArray, function (t) {
    res.json(t);
});
I really just need to output all the children available under this root tree.
Please let me know if this doesn't make sense and I'll try to refactor; I hope I got some kind of point across. I'd hate to use something like Fibers to get this done, but I'm out of options. Thanks.

Your use of dig() isn't currently consistent:
// asynchronous with callback
dig(52, treeArray, function (t) {
    res.json(t);
});

// then synchronous with `return`?
treeArray.concat(dig(rows[i].tree_id, treeArray));
Also, the concat in the last line isn't actually doing much, since it doesn't alter the array it's called on. You probably wouldn't actually want it to, as dig passes around the same treeArray rather than defining a new treeSet the way getBranches does; if it did, it would append treeArray onto the end of itself each time.
You could still use concat with multiple treeSets, but you'd have to store its return value:
treeSet = treeSet.concat(subSet);
And you'll have to replace the for loop with an asynchronous iterator, as the loop won't wait for asynchronous operations before continuing. The async library has a few options for this, if you're up for trying it.
So, with multiple treeSets, concat, and async.forEachSeries, you could try:
function dig(tree_id, callback) {
    var treeSet = [];
    hasLines(tree_id, function (yep) {
        if (yep) {
            treeSet.push(tree_id);
        }
        pool.query('SELECT * from tree where parent_id = ?', [tree_id], function (err, rows) {
            function each(row, next) {
                dig(row.tree_id, function (subSet) {
                    treeSet = treeSet.concat(subSet);
                    next(null);
                });
            }
            function done() {
                callback(treeSet);
            }
            async.forEachSeries(rows, each, done);
        });
    });
}

dig(52, function (treeSet) {
    res.json(treeSet);
});

You have to use async: https://github.com/caolan/async
I have modified your dig function to use async's forEach method:
function dig(tree_id, treeArray, AllDone) {
    pool.query('SELECT * from tree where parent_id = ?', [tree_id], function (err, rows) {
        if (rows) {
            async.forEach(
                rows,
                function (row, callback) {
                    hasLines(row.tree_id, function (t) {
                        if (t) {
                            treeArray.push(row.tree_id);
                            callback();
                        } else {
                            dig(row.tree_id, treeArray, callback);
                        }
                    });
                },
                function (err) {
                    if (err) AllDone(err, treeArray);
                    else AllDone(null, treeArray);
                });
        } else {
            AllDone(null, treeArray);
        }
    });
}

treeArray = [];
dig(52, treeArray, function (err, t) {
    res.json(t);
});
Assuming rows is an array, forEach goes through each row and runs hasLines on it; each iteration calls its callback when it finishes, and AllDone is called once all of the callbacks have been called. The tricky part here is the recursion: each recursive call has its own forEach loop, and it calls its AllDone only when all of its callbacks have finished.
However, forEach executes in parallel, so order is not preserved.
I think this should work, if you don't care about order.
Edit: you can use forEachSeries to solve the order problem, as sketched below.
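For example, a minimal sketch of that change (the same dig body as above, with only the iterator call swapped):

// Inside dig, rows are now processed one at a time,
// so tree_ids are pushed to treeArray in row order.
async.forEachSeries(
    rows,
    function (row, callback) {
        hasLines(row.tree_id, function (t) {
            if (t) {
                treeArray.push(row.tree_id);
                callback();
            } else {
                dig(row.tree_id, treeArray, callback);
            }
        });
    },
    function (err) {
        AllDone(err, treeArray);
    });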

Related

Calling Recursive Function with Async Callbacks in JavaScript

Context: I'm preparing for some job interviews and brushing up on some of the computer science basics. I was implementing several algorithms for traversing binary sort trees, in particular in-order traversal, and I was able to do it just fine with synchronous functions as well as promises and the async/await keywords.
For an added set of practice, I thought porting it over to asynchronous callback functions would be a useful challenge, especially since I'm interviewing for Node.js positions. But I'm absolutely stuck on it, and while I'm better off moving on and studying other topics, this defeat has been bothering me and I keep spinning my wheels!
So consider this a fun yet challenging twist on a classic algorithm problem, not for the faint of heart!
Data
So imagine you're given an array of objects, each one representing a node. A node must have a value and optionally left and right attributes that correspond to the index number of another node in the array. So something like this:
[
    {"value": "root", "left": 1, "right": 2},
    {"value": "L1", "right": 3},
    {"value": "R1"},
    {"value": "R2"}
]
If you draw this out as a binary tree, it looks like:
       root
      /    \
     /      \
    L1      R1
      \
       \
        R2
And implementing in-order traversal, the values should appear as `L1, R2, root, R1`.
In the synchronous solution, I have written a couple of helper methods to get the root node (assuming the first element of the array is always the root) and another method to find the node at a specific index. The solution is as follows:
function SyncTree(data) {
    this.data = data;
};

SyncTree.prototype.getRoot = function () {
    return this.data[0];
};

SyncTree.prototype.getNode = function (index) {
    return this.data[index];
};

SyncTree.prototype.orderTree = function () {
    const results = [];
    const inorder = node => {
        if (node) {
            inorder(this.getNode(node.left));
            results.push(node.value);
            inorder(this.getNode(node.right));
        }
    }
    inorder(this.getRoot());
    return results;
}
And for the promise-based solution, I made my helper functions return a promise object instead of just the value, and I was able to handle that pretty easily because async/await makes the code look synchronous anyway.
function PromiseTree(data) {
    if (!Array.isArray(data)) {
        throw new Error('The data must be an Array');
    }
    this.data = data;
};

PromiseTree.prototype.getRoot = function (callback) {
    return Promise.resolve(this.data[0]);
};

PromiseTree.prototype.getNode = function (index, callback) {
    return Promise.resolve(this.data[index]);
};

PromiseTree.prototype.orderTree = function () {
    const results = [];
    const inorder = async node => {
        if (node) {
            const left = await this.getNode(node.left);
            await inorder(left);
            results.push(node.value);
            const right = await this.getNode(node.right);
            await inorder(right);
        }
    }
    return this.getRoot().then(async root => {
        await inorder(root);
        return Promise.resolve(results);
    });
}
For the callback challenge, I wrote some simple logic that makes the helpers return a value either immediately or after a random delay. I know that in the real world using callback functions for this use case would be absurd and the synchronous method would be preferred, but I'm banging my head on the wall and I'm stuck. So far, this is what I've got, but it returns inconsistent results because certain callbacks fire before others.
function CallbackTree(data) {
    if (!Array.isArray(data)) {
        throw new Error('The data must be an Array');
    }
    this.data = data;
};

function inconsistentCallback(value, callback) {
    if (Math.random() > 0.5) {
        setImmediate(function () {
            callback(null, value);
        });
    } else {
        callback(null, value);
    }
}

CallbackTree.prototype.getRoot = function (callback) {
    inconsistentCallback(this.data[0], callback);
};

CallbackTree.prototype.getNode = function (index, callback) {
    inconsistentCallback(this.data[index], callback);
};

CallbackTree.prototype.orderTree = function (callback) {
    const results = [];
    const inorder = (node, cb) => {
        if (node) {
            results.push(node.value)
            this.getNode(node.left, (err, left) => {
                this.getNode(node.right, (err, right) => {
                    inorder(node.left, cb)
                    inorder(node.right, cb)
                    cb()
                })
            })
        }
    }
    this.getRoot((err, root) => {
        inorder(root, () => {
            if (results.length === this.data.length) {
                callback(null, results)
            }
        });
    });
}
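For reference, one way to make the callback version deterministic is to only visit a node after its entire left subtree has called back, and only recurse right after pushing the node's value. This is just a sketch against the same CallbackTree helpers above, not the poster's final code:

CallbackTree.prototype.orderTree = function (callback) {
    const results = [];
    const inorder = (node, cb) => {
        if (!node) {
            return cb(); // nothing to traverse
        }
        this.getNode(node.left, (err, left) => {
            if (err) return cb(err);
            inorder(left, (err) => { // finish the left subtree first
                if (err) return cb(err);
                results.push(node.value); // then visit this node
                this.getNode(node.right, (err, right) => {
                    if (err) return cb(err);
                    inorder(right, cb); // then the right subtree
                });
            });
        });
    };
    this.getRoot((err, root) => {
        if (err) return callback(err);
        inorder(root, (err) => callback(err, results));
    });
};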

Returning results with callbacks

I'm trying to query a database, make an array of objects from the results, and then turn them into a JSON object.
I'm not used to Javascript's asynchronous nature and I'm really confused about how to implement something that needs to work synchronously. I know I probably need to use callbacks, but after looking at many tutorials I'm only left more confused.
This is the code without callbacks:
var foreignTable = (tablename, idArr) => {
    var dataArray = [];
    // call a query for each of the ids
    var objectToAdd;
    for (var id of idArr) {
        objectToAdd = queryForeignTable(tablename, id);
        dataArray.push(objectToAdd);
    }
    return dataArray;
    connection.end();
};

var queryForeignTable = (tablename, id) => {
    connection.query("SELECT * FROM " + tablename + " WHERE id=" + id, function (error, results, fields) {
        if (error) throw error;
        var objectToAddToArray = {};
        // Go through each field in a result and make the object
        for (packet of fields) {
            var label = packet.name;
            objectToAddToArray[label] = results[0][label];
        }
        return objectToAddToArray;
    });
};

var arrayOfDrivers = foreignTable("driver", [1, 2]);
outputJson["drive"] = arrayOfDrivers;
console.log(outputJson); // { drive: [ undefined, undefined ] }
I attempted foreignTable(tablename, idArr, callback) with the callback calling queryForeignTable with no luck.
Can someone explain how I can get this piece of code working with callbacks?
A callback function is a function passed into another function as an argument, which is then invoked inside the outer function to complete some kind of routine or action.
from MDN: Callback function
Callbacks are a way of telling a function what to do next, as in "after you're done, run this function".
For example:
first = function (callback) {
    console.log("First!");
    callback();
}

second = function () {
    console.log("Second!");
}

first(second);
Will produce:
First!
Second!
You can also use an anonymous function to produce the same result:
first(function () {
    console.log("Second!")
});
Regarding the specific example from your question, you are correct that you need to use callbacks a bit differently. Rather than using a return statement in each of your two functions, you'll need to use a callback. connection.query is asynchronously coming back with your results as results, but you can't return them from your queryForeignTable function. Instead, give queryForeignTable a callback function to run. The same idea goes for your foreignTable function.
I'm not connected to your database, obviously, so I stubbed a DB connection and simplified what you're trying to do, but it should look something like this:
// Stubbed DB connection
var connection = {};
connection.query = (id, cb) => {
    var results = [
        {
            id: id,
            name: 'Name of ' + id,
        },
    ];
    cb(null, results);
};

var foreignTable = (ids, cb) => {
    var data = [];
    for (var i = 0; i < ids.length; i++) {
        queryForeignTable(ids[i], (error, obj) => {
            data.push(obj);
            // Fire the callback once every id has produced a result.
            if (data.length === ids.length) {
                cb(null, data);
            }
        });
    }
};

var queryForeignTable = (id, cb) => {
    connection.query(id, (error, results) => {
        if (error) {
            cb(error, null);
            return;
        }
        cb(null, results[0]);
    });
};

foreignTable([1, 2], (error, data) => {
    if (error) {
        console.error(error);
    }
    console.log(data);
});
That produces:
[ { id: 1, name: 'Name of 1' }, { id: 2, name: 'Name of 2' } ]
In essence, when you have an urge to return some value(s) from a function in an asynchronous way, give the function a callback parameter, then invoke that callback with your return values.
You can run the code here: https://repl.it/K0YI/3
When you have an asynchronous call, like connection.query(statement, callback), then whatever you want to do with the results of that call needs to be done within the callback.
Bear in mind that async functions don't return the final value that you generally want (usually they return undefined). Instead of using a return value, you pass a callback as a way of saying, "when you're finished, carry on and do this with the results", aka. continuation-passing style.
One of the challenges in your code is that you're issuing separate queries for each ID, and then aggregating the results into an array of responses. This means you'll need to check when all the queries have completed, and only then proceed to display the final results.
Here's your example, re-written, commented, and simplified. Hopefully this helps to explain how the control flow works.
// Read row IDs 1 and 2 from the "driver" table.
readTable("driver", [1, 2], displayData);

// Print the results.
function displayData (arrayOfDrivers) {
    console.log(arrayOfDrivers);
}

// Read all rows matching IDs in `idArray` from `tablename`,
// put results into an array, and finally invoke `callback`.
function readTable (tablename, idArray, callback) {
    var resultsArray = [];
    // Queue up all the async queries.
    for (var id of idArray) {
        queryForeignTable(tablename, id, handleResponse);
    }
    // A query finished, so handle the result.
    function handleResponse (error, results, fields) {
        if (error) {
            throw error;
        }
        // Add the query result to the array of results.
        resultsArray.push(results[0]);
        // Check if all queries are done.
        if (resultsArray.length === idArray.length) {
            // Invoke the callback with the resultsArray.
            // The callback is in fact the `displayData` function.
            callback(resultsArray);
        }
    }
}

// Execute a query, using the `cb` callback to handle the response.
function queryForeignTable (tablename, id, cb) {
    var query = "SELECT * FROM " + tablename + " WHERE id=" + id;
    connection.query(query, cb);
}
Note that the handleResponse function is defined within the scope of the readTable function, so it can access the variables in readTable's scope, such as resultsArray and callback.
Hope that helps.

Sync independent callbacks result

I am searching for an elegant way to sync the results of independent callbacks invoked in unknown order.
function callback1() {
    var result;
};

function callback2() {
    var result;
};

// When done, then call
function success(res1, res2) {
    // do whatever
}
I know I can do something like:
var res = {};
var dfd = $.Deferred();

function callback1() {
    var result;
    res.res1 = result;
    (res.res1 && res.res2) && (dfd.resolve(res));
};

function callback2() {
    var result;
    res.res2 = result;
    (res.res1 && res.res2) && (dfd.resolve(res));
};

dfd.done(function (result) {
    // do whatever
});
but I would appreciate it if somebody came up with a more elegant solution.
If you return promises (builtin promises, not jQuery deferreds) and you don't care about order, then you can use Promise.all:
function callback1() {
    return Promise.resolve(1)
}

function callback2() {
    return Promise.resolve(2)
}

var ps = [callback1(), callback2()]

function add(x, y) {
    return x + y
}

Promise.all(ps).then(function (result) {
    return result.reduce(add)
}).then(console.log) // => 3
If you want to sequence them, you can do it in such a way that you apply a curried function expecting as many arguments as there are resolved promises, by lifting it into the promise world. In other words:
function apply(pa, pf) {
    return pf.then(function (f) {
        return pa.then(f)
    })
}

function lift(f, ps) {
    return ps.reduce(function (pa, pb) {
        return apply(pb, pa)
    }, Promise.resolve(f))
}

function add(x) {
    return function (y) {
        return x + y
    }
}

lift(add, ps).then(console.log) //=> 3
You can also sequence them in such a way that you don't need a curried function, by collecting the results in an array first then reducing it:
function sequence(ps) {
    return ps.reduceRight(function (pa, pb) {
        return pa.then(function (a) {
            return pb.then(function (b) {
                return [b].concat(a)
            })
        })
    }, Promise.resolve([]))
}

function add(x, y) {
    return x + y
}

// This looks similar to the Promise.all approach
// but they run in order
sequence(ps).then(function (result) {
    return result.reduce(add)
}).then(console.log) // => 3
There are libraries that do this, such as the async library, but here's a "from scratch" solution. I'm also avoiding promises to avoid overwhelming you, but you should really read about them as they are the most elegant solution, albeit complicated for first timers.
function runInParallel(jobs, done) {
    // Store all our results in an array.
    var results = [];
    // Count how many jobs have finished so far.
    var completedCount = 0;
    // If one job fails, set this to true and use it to
    // ignore all job results that follow.
    var failureOccurred = false;
    // Iterate over each of our registered jobs.
    jobs.forEach(function (runJob, index) {
        // Create a jobDone callback to pass to the job.
        var jobDone = function (err, result) {
            // If another job failed previously, abort processing
            // this job's result. We no longer care.
            if (failureOccurred) return;
            // If this job passed in an error, set failure to true
            // and pass the error to the final done callback.
            if (err) {
                failureOccurred = true;
                done(err);
                return;
            }
            // If we made it this far then put the job result into
            // the results array at the same position as the job in
            // the jobs array.
            results[index] = result;
            completedCount++;
            // If we have received as many results as the jobs array
            // had jobs then we have finished processing them all.
            // Invoke our done callback with an array of all results.
            if (completedCount === jobs.length) {
                done(null, results);
            }
        };
        // Begin the job and pass in our jobDone callback.
        runJob(jobDone);
    });
}
This will call all of your job functions in the array, passing in a jobDone callback the job should call when finished. If any job passes an error in then the function will immediately invoke the result callback with the error and ignore everything else. If the jobs succeed then you'll end up with an array of job results in the same positions as the jobs were in the jobs array. Simply modify your job functions to accept the jobDone callback.
var jobs = [
    function job1(done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    },
    function job2(done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    }
];

runInParallel(jobs, function (err, results) {
    if (err) {
        console.error(err);
        return;
    }
    // results[0] = jobs[0] result
    // results[1] = jobs[1] result
    // etc...
});
Instead of an array of jobs you could modify this code to accept an object with property names. Then instead of assigning the results to the same position as the jobs in the jobs array you could assign the results to an object using the same property names.
Example (without comments this time):
function runInParallel(jobs, done) {
    var results = {};
    var failureOccurred = false;
    Object.keys(jobs).forEach(function (jobName) {
        var jobDone = function (err, result) {
            if (failureOccurred) return;
            if (err) {
                failureOccurred = true;
                done(err);
                return;
            }
            results[jobName] = result;
            // Objects have no length, so compare key counts instead.
            if (Object.keys(results).length === Object.keys(jobs).length) {
                done(null, results);
            }
        };
        jobs[jobName](jobDone);
    });
}
Then you can consume it like this:
var jobs = {
    job1: function (done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    },
    job2: function (done) {
        try {
            var result;
            done(null, result);
        } catch (err) {
            done(err);
        }
    }
};

runInParallel(jobs, function (err, results) {
    if (err) {
        console.error(err);
        return;
    }
    // results.job1 = job1 result
    // results.job2 = job2 result
    // etc...
});
The parallel function in the async library does almost exactly what we've done above. It even accepts an array of jobs or an object of named jobs like we did :)
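For comparison, a minimal sketch of what the named-jobs version looks like with async.parallel (the job bodies here are just placeholders):

var async = require('async');

async.parallel({
    job1: function (done) {
        done(null, 'result of job1'); // placeholder work
    },
    job2: function (done) {
        done(null, 'result of job2');
    }
}, function (err, results) {
    if (err) {
        console.error(err);
        return;
    }
    // results.job1 and results.job2, the same shape as the hand-rolled version
    console.log(results);
});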
Assuming your tasks (callback1() and callback2()) are synchronous, you might choose to write a reusable generalisation of the code in the question, in the form of a function that returns a function, trapping a couple of private vars in a closure :
function resultAggregator(n, fn) {
    var results = {},
        count = 0;
    return function (id, res) {
        count++;
        results[id] = res;
        if (count == n) {
            fn(results);
        }
    }
}
So after calling resultAggregator(), you have a function that can be kept in scope of other functions or passed to other parts of your code base. It makes no assumptions about the ids or the nature of the results, except that they are synchronously derived. It will fire its callback when n results have been delivered.
var myResults = resultAggregator(2, function (results) {
    // do whatever;
});

// The following commands may be in different parts of your code base
myResults('id1', synchTask1());
...
myResults('id2', synchTask2());
...
myResults('id3', synchTask3());
// The second task to deliver its data (ostensibly `synchTask1()` and `synchTask2()`, but not necessarily) will trigger the callback.
This is just one way to perform result aggregation. You might do something different depending on the exact scenario, such as a slightly different formulation that records the order in which the results arrived.
Whatever you write, Deferreds/Promises are not necessary for the aggregation of synchronously derived data.
However, if any one task is, or may be, asynchronous then you may need a promise aggregator, eg jQuery.when() or Promise.all(), somewhere in the pattern.
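For example, a minimal sketch with jQuery.when, assuming each task can be rewritten to return a deferred/promise (the task bodies here are placeholders):

function task1() {
    var d = $.Deferred();
    // ... async work, then:
    d.resolve('res1');
    return d.promise();
}

function task2() {
    var d = $.Deferred();
    d.resolve('res2');
    return d.promise();
}

$.when(task1(), task2()).done(function (res1, res2) {
    // do whatever; res1 and res2 arrive in the order the promises were
    // passed to $.when, regardless of which one resolved first
});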

Node MySql Callback for Multiple Queries

I ran into an issue whilst attempting to create the logic to add rows to a new table I made on my MySQL database. When adding a row I need to query the database four times to check other rows and then add the correct value to the new row. I am using node.js and the mysql module to accomplish this. While coding I ran into a snag: the code does not wait for the four queries to finish before inserting the new row, which then gives the values being looked up a value of 0 every time. After some research I realized a callback function would be in order, looking something like this:
var n = 0;
connection.query("select...", function (err, rows) {
    if (err) throw err;
    else {
        if (rows.length === 1) ++n;
    }
    callback();
});

function callback() {
    connection.query("insert...", function (err) {
        if (err) throw err;
    });
}
Note: The select queries can only return one item, so the if condition should not affect this issue.
A callback function with only one query to wait on is clear to me, but I become a bit lost with multiple queries to wait on. The only idea I had was to create another variable that increments before the callback is called and is then passed in the callback function's arguments. Inside the callback, the insert query could be wrapped in an if statement whose condition is that variable equaling the number of queries that need to be called, four for my purposes here. I could see this working, but wasn't sure if this sort of situation already has a built-in solution or if there are other, better solutions already developed.
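For reference, the counter idea described above might look roughly like this (a sketch with placeholder SQL, not a built-in solution):

var n = 0;
var finished = 0;
var totalQueries = 4;

function checkDone() {
    finished++;
    if (finished === totalQueries) {
        // All four selects have come back, so it is now safe to insert.
        connection.query("insert...", function (err) {
            if (err) throw err;
        });
    }
}

["select 1...", "select 2...", "select 3...", "select 4..."].forEach(function (sql) {
    connection.query(sql, function (err, rows) {
        if (err) throw err;
        if (rows.length === 1) ++n;
        checkDone();
    });
});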
You need async (https://github.com/caolan/async). You can implement very complex logic with this module.
var data = {}; // You can do this in many ways, but one way is defining a shared object so you can add things to it and every function can see it

function firstQueryFunction(callback) {
    // do your stuff with mysql
    data.stuff = rows[0].stuff; // you can store stuff inside your data object
    callback(null);
}

function secondQueryFunction(callback) {
    // do your stuff with mysql
    callback(null);
}

function thirdQueryFunction(callback) {
    // do your stuff with mysql
    callback(null);
}

function fourthQueryFunction(callback) {
    // do your stuff with mysql
    callback(null);
}

// These functions will be executed at the same time
async.parallel([
    firstQueryFunction,
    secondQueryFunction,
    thirdQueryFunction,
    fourthQueryFunction
], function (err, result) {
    // This code will be executed after all previous queries are done (the order doesn't matter).
    // For example you can do another query that depends on the results of all the previous queries.
});
As per Gesper's answer, I'd recommend the async library; however, I would probably recommend running in parallel (unless the result of the 1st query is used as input to the 2nd query).
var async = require('async');

function runQueries(param1, param2, callback) {
    async.parallel([query1, query2, query3(param1, param2), query4],
        function (err, results) {
            if (err) {
                callback(err);
                return;
            }
            var combinedResult = {};
            for (var i = 0; i < results.length; i++) {
                combinedResult.query1 = combinedResult.query1 || results[i].query1;
                combinedResult.query2 = combinedResult.query2 || results[i].query2;
                combinedResult.query3 = combinedResult.query3 || results[i].query3;
                combinedResult.query4 = combinedResult.query4 || results[i].query4;
            }
            callback(null, combinedResult);
        });
}

function query1(callback) {
    dataResource.Query(function (err, result) {
        var interimResult = {};
        interimResult.query1 = result;
        callback(null, interimResult);
    });
}

function query2(callback) {
    dataResource.Query(function (err, result) {
        var interimResult = {};
        interimResult.query2 = result;
        callback(null, interimResult);
    });
}

function query3(param1, param2) {
    return function (callback) {
        dataResource.Query(param1, param2, function (err, result) {
            var interimResult = {};
            interimResult.query3 = result;
            callback(null, interimResult);
        });
    }
}

function query4(callback) {
    dataResource.Query(function (err, result) {
        var interimResult = {};
        interimResult.query4 = result;
        callback(null, interimResult);
    });
}
Query3 shows the use of parameters being 'passed through' to the query function.
I'm sure someone can show me a much better way of combining the results, but that is the best I have come up with so far. The reason for the interim object is that the results parameter passed to your callback is an array of results, and it can be difficult to determine which result belongs to which query.
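For what it's worth, one simpler way to combine the interim objects might be to merge them with Object.assign (available in Node 4+); a small sketch of what the combining loop could become:

var combinedResult = results.reduce(function (acc, partial) {
    // Each partial is one interim object such as { query3: ... };
    // copying its properties onto the accumulator merges them all.
    return Object.assign(acc, partial);
}, {});
callback(null, combinedResult);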
Good luck.

What is the right way (nested functions or...)

Are both my examples the same in terms of functionality, considering the fact that in error handling I'm terminating with res.json(400, err)? Also, I would like to know whether in my second example the second async.each always runs after the first async.each, so that using results1 in the second async.each is safe. Sorry, I'm new to Node and async!
Example 1, where I'm using the results of the first async.each, in its final callback, as the input of the second async.each:
var results1 = {};
var results2 = {};

async.each(inputs, function (input, callback) {
    // Do something here and add some data to results1
    callback();
}, function (err) {
    if (err) {
        // Handling error
    } else {
        async.each(results1, function (item, callback) {
            // Do something here and add some data to results2
        }, function (err) {
            if (err) {
                // Handling error
            } else {
                console.log("Final result", results2);
            }
        });
    }
});
or Example 2, where I have separate async.each blocks:
var results1 = {};
async.each(inputs, function (input, callback) {
    // Do something here and add some data to results1
    callback();
}, function (err) {
    if (err) {
        // Handling error
    }
});

var results2 = {};
async.each(results1, function (item, callback) {
    // Do something here and add some data to results2
    callback();
}, function (err) {
    if (err) {
        // Handling error
    } else {
        console.log("Final result", results2);
    }
});
UPDATED:
Since the second approach is not the right way, and it is not guaranteed that the second async.each runs after the first one, the problem is: does that mean I cannot have a simple for loop like in the following example either? If yes, it is easy to change that one to async.each, but the problem is this one is recursive, and that makes it complicated! If this needs to be async as well and not a for loop, do you know how I can keep this recursive functionality here?
var results1 = {};
var results2 = [];
var results3 = {};

async.each(inputs, function (input, callback) {
    // Do something here and add some data to results1
    callback();
}, function (err) {
    if (err) {
        // Handling error
    } else {
        // So in this case where I need to have nested functions, does it mean I cannot have a simple for loop like this as well?
        // If yes, it is easy to change this one to async.each, but the problem is this one is recursive and that makes it complicated!
        // If this needs to be async as well, do you know how I can have this recursive functionality here?
        for (var input in inputs) {
            inferFromUnion(inputs[input], results1);
            results2.push(inputs[input]);
        }
        async.each(results2, function (item, callback) {
            // Do something here and add some data to results3
        }, function (err) {
            if (err) {
                // Handling error
            } else {
                console.log("Final result", results3);
            }
        });
    }
});

// Here we just check each object's schema, and if it is missing any fields from results1 we add that field with a value of null
function inferFromUnion(obj, allFields) {
    Object.keys(allFields).forEach(function (key) {
        if (lodash.isUndefined(obj[key])) {
            if (lodash.isPlainObject(allFields[key])) {
                obj[key] = {};
                inferFromUnion(obj[key], allFields[key]);
            } else {
                obj[key] = null;
            }
        }
    });
}
The first example is the way to go if you want to use the results of the first bunch of calls in the second bunch. The second example won't work, because the second async.each() is guaranteed to run before the callbacks bound to your asynchronous operations.
Asynchronous recursion with loops is very much possible:
(function doSomeAsyncRecursion (results) {
    async.each(someItems, function (item, callback) {
        // ...
    }, function () {
        if (results /* ... (are incomplete) */) {
            doSomeAsyncRecursion(results);
        } else {
            // ... (results are complete now, do something with them)
        }
    });
})(/* initial value of results */);
These two examples are different in design. The first example will run the second async.each only after the first one succeeds, but the second example will run the second async.each every time, whether or not there is an error.
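If the nesting starts to get deep, one common alternative is async.waterfall, which runs steps in order and hands each step's result to the next. A rough sketch with the same placeholder bodies as above (not the poster's code):

async.waterfall([
    function (next) {
        var results1 = {};
        async.each(inputs, function (input, callback) {
            // Do something here and add some data to results1
            callback();
        }, function (err) {
            next(err, results1); // hand results1 to the next step
        });
    },
    function (results1, next) {
        var results2 = {};
        async.each(Object.keys(results1), function (key, callback) {
            // Do something here and add some data to results2
            callback();
        }, function (err) {
            next(err, results2);
        });
    }
], function (err, results2) {
    if (err) {
        // Handling error
    } else {
        console.log("Final result", results2);
    }
});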
