Will Mongo handle my service?

I've been using MongoDB with Node.js and the mongoose library. I decided to start using MongoDB because everywhere I read that it is a good fit for Node.js applications.
Although the response times of my API are good, I'm unsure whether MongoDB will hold up once the service scales.
I've noticed that a single query is often not enough to get all the data I need, so I end up issuing several queries and combining the results with JavaScript map/reduce functions (which is what worries me).
Look at this example:
User
    .find({ idol : true })
    .sort({ 'metas.followers' : -1 })
    .select('-password -__v -posts -email')
    .skip(offset)
    .limit(30)
    .exec(function(err, retData)
    {
        promisedIdols = retData.map(function(idol)
        {
            return idol.withStatistics(Post, Follow, req.user);
        });
        idols = [];
        if(promisedIdols.length == 0)
        {
            callback();
        }
        for(var i=0; i<promisedIdols.length; i++)
        {
            promisedIdols[i].delegate(function(result)
            {
                idols.push(result);
                if(idols.length == promisedIdols.length)
                {
                    callback();
                }
            });
        }
    });
I've used a map to gather an array of promises that will be resolved after running the following code:
var obj = this.toObject();
var deferred = new Promise();
Post
    .find({ idol : obj._id })
    .lean()
    .exec(function(err, posts)
    {
        var postViews = 0;
        var postLikes = 0;
        var postShares = 0;
        posts.reduce(function(prev, next)
        {
            postViews += next.views.length;
            postLikes += next.likes.length;
            postShares += next.shares.length;
        }, 0);
        obj.metas.postViews = postViews;
        obj.metas.postLikes = postLikes;
        obj.metas.postShares = postShares;
        obj.metas.postCount = posts.length;
        Subscription
            .count({ idol : obj._id }, function(err, count)
            {
                obj.metas.subscribers = count;
                deferred.deliver(obj);
            });
    });
that uses a reduce function.
I can't see this code working well at scale. Should I restructure my database? Should I switch to a different database system? Am I using MongoDB the wrong way?
Experts?
Thanks.

Mongo can handle a lot, if you set up a good data model. There are a few things to keep in mind when you want to scale.
Try to avoid normalizing the data too much and splitting it across many collections.
Data duplication is (sometimes, when used wisely) your friend: it lets you write simpler queries and have the related data populated right away. Yes, that may mean that when you update data you'll have to update it in two places, but Mongo copes well with a lot of writes if you do them asynchronously (promises or not).
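For example, here is a rough sketch of keeping such a denormalized counter in sync at write time; the field names are borrowed from your metas object, and the postId, viewerId and idolId variables are hypothetical:
// when a post receives a view, also bump the denormalized counter on the idol's user document
Post.update({ _id: postId }, { $push: { views: viewerId } }, function(err) {
    // second, fire-and-forget write; 'metas.postViews' mirrors the field used in the question
    User.update({ _id: idolId }, { $inc: { 'metas.postViews': 1 } }, function(err) {});
});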
For your specific query, I don't see the full data model, but maybe you can use the aggregation framework. The pipeline is native (C++, as opposed to mapReduce's JavaScript) and will run really, really fast.
Something like:
db.post.aggregate([
    // First $match to reduce the dataset
    {
        $match: { idol : obj._id }
    },
    // then group and aggregate your data
    {
        $group: {
            _id: '$idol', // group by that idol thing
            postViews: { $sum: '$postViews' },
            postLikes: { $sum: '$postLikes' }
        }
    },
    // Then use $project to arrange the result the way you like it
    {
        $project: {
            _id: false, // or true if you need it
            metas: {
                postViews: '$postViews'
            },
            likeCountOfPosts: '$postLikes', // that's how you'd rename
            whatIsIt: {$literal: 'a great post'}
        }
    }
]);
You can also do a lot in the pipeline: conditionals, grouping, sorting, unwinding arrays, and generally mixing and reshaping the documents.
It's much, much faster than Mongo's mapReduce.
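As an illustration, if each post kept its likes in an array, a sketch like the following (the likes field and the sort stage are assumptions, not taken from the question) would unwind and re-group them before sorting:
db.post.aggregate([
    { $match: { idol : obj._id } },
    // flatten the (assumed) likes array so each like becomes its own document
    { $unwind: '$likes' },
    // re-group per idol, counting the unwound likes
    { $group: { _id: '$idol', likeCount: { $sum: 1 } } },
    // sort by the computed count, highest first
    { $sort: { likeCount: -1 } }
]);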

How to update array inside object, inside array mongoDB

So the document contains an array of objects, each object containing its own array. How would I go about updating one of the elements in the array that's inside the object which is inside another array? I've read some things about $, but I don't completely understand how to use it to address a position. I know the position of the element, but I can't just use $[] because the position is defined in a variable and not a string...
I've tried doing a simple
db.collection.findOne({...}, (err, data) => {...});
and then changing the arrays inside the objects in that array with a simple:
data.arr[x].type[y] = z; data.save().catch(err => {console.log(err)});
But it doesn't save the new values I set for the element of the array.
Sample structure after the proposed solution from @Tom Slabbaert:
Data.findOne({
    userID: 'CMA'
}, (err, doc) => {
    if(err) { console.log(err) }
    if(doc) {
        for(var i = 0; i < CMA.stockMarket.length; i++) {
            if(CMA.stockMarket[i].name == data.userID) {
                for(var z = 0; z < CMA.stockMarket[i].userStock.length; z++) {
                    if(z == company) {
                        var updateAmount = CMA.stockMarket[i].userStock[z] + args[1]
                        var updateKey = `stockMarket.${i}.userStock.${z}`
                        Data.updateOne({userID: 'CMA'}, {'$set': {[updateKey]: updateAmount}})
                    }
                }
            }
        }
    }
});
-------------------------EDIT-------------------------
So I tried changing some things around in the database to see if that would fix the problem I was having. I modified the updated code that was provided by @Tom Slabbaert, but nothing seems to work for some reason :/ Here's what I have so far; at this point I hope it's just a syntax error somewhere, because this is really frustrating. Note that I'm still using the for loops here to check whether the info exists, and if not, push that info into the database. This might only be temporary until I find a better way / if there is a better way.
for(var i = 0; i < CMA.userStocks.length; i++) {
    if(CMA.userStocks[i].name == data.userID) {
        for(var z = 0; z < CMA.userStocks[i].shares.length; z++) {
            //console.log(CMA.userStocks[i].shares[z].companyName)
            if(CMA.userStocks[i].shares[z].companyName == args[0]) {
                var updateKey = `CMA.userStocks.$[elem1].shares.$[elem2].amount`
                Data.updateOne(
                    {userID: 'CMA'},
                    {
                        "$inc": {
                            [updateKey]: args[1]
                        }
                    },
                    {
                        arrayFilters: [
                            {
                                "elem1.name": data.userID,
                                "elem2.companyName": args[0]
                            }
                        ]
                    }
                )
                purchaseComplete(); return;
            }
        }
        CMA.userStocks[i].shares.push({companyName: args[0], amount: parseInt(args[1])})
        CMA.save().catch(err => {console.log(err)});
        purchaseComplete(); return;
    }
}
CMA.userStocks.push({name: data.userID, shares: [{companyName: args[0], amount: parseInt(args[1])}]});
CMA.save().catch(err => {console.log(err)});
purchaseComplete(); return;
The data I'm trying to find and change is structured like the following, and what I'm trying to change in the end is the 'amount' (which is an integer):
_id: (Not relevant in this question)
userID: 'CMA'
stockMarket: [...] (Not relevant in this question)
userStocks: [
    Object: (position 0 in userStocks array)
        name: 'string' (equal to data.userID in the code)
        shares: [
            Object: (position 0 in shares array)
                companyName: 'string' (this is args[0] in the code)
                amount: integer
        ]
]
You can just prepare the "key" ahead of time, like so:
const updateKey = `arr.${x}.type.${y}`
db.collection.updateOne(
    {...},
    {
        "$set": {
            [updateKey]: z
        }
    })
Mongo Playground
Mongo's positional operators ($ and $[]) are usually only required when you don't know the position in the array and want to match the element with a condition instead.
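For example, a minimal sketch of the conditional form (the name field used to match the array element is hypothetical):
db.collection.updateOne(
    // the condition on "arr.name" selects which array element to target
    { "arr.name": "someName" },
    // "$" then refers to the first element matched by that condition
    { "$set": { "arr.$.type.0": z } }
)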
------ EDIT-----
Given your sample code, you just have a minor syntax error:
var updateKey = `stockMarket.${i}.userStock.${z}`
Should just be:
var updateKey = `CMA.stockMarket.${i}.userStock.${z}`
However, after seeing your code I recommend the following solution instead, which uses a single update with arrayFilters; it cleans up the code quite a bit:
const updateKey = `CMA.stockMarket.$[elem1].userStock.${company}`;
db.collection.update(
    {userID: 'CMA'},
    {
        "$inc": {
            [updateKey]: args[1]
        }
    },
    {
        arrayFilters: [
            {
                "elem1.name": data.userID
            }
        ]
    })
Mongo Playground
Well, I found something that worked. Apparently the db.collection.updateMany call didn't save unless I added a .then() to the end of it. I have no idea why, but it's the same with an aggregate I made. (It basically does the same thing as a Data.findOne followed by a save, but it isn't limited by the parallel save error.)
Solution I found with aggregation:
<collection field> = <new data for collection field>
Data.aggregate([
    {
        $match: { // This is used to create a filter
            ['<insert field>']: <insert filter>
        }
    }, {
        $addFields: { // This is used to update existing data, or create a new field containing the data if the field isn't found
            ['<collection field>']: <new data for collection field>
        }
    }, {
        $merge: { // This is used to merge the new data / document with the rest of the collection, having the same effect as a standard save
            into: {
                db: '<insert database name>',
                coll: '<insert collection name>'
            }
        }
    }
]).then(() => {
    // After it's done, do something here, or nothing at all; it doesn't matter as long as the .then() statement remains. I found that not having this part breaks the code and makes it not save / work for some reason.
}); return;
Solution I found with db.collection.updateMany
db.collection.updateMany(
    {<insert field>: filter}, {$set: {'<insert field>': <new data>}}
).then(() => {
    // This .then() statement in my case was needed for the updateMany function to work correctly. It wouldn't save data without it for some reason; it does not need to contain any actual logic, as long as it's here.
});
With this new info I could simply access and change the data I was trying to before, using the previous instructions provided by @Tom Slabbaert together with my new method of actually making it save the changes to the document.

Painful querying API with firebase?

I'm starting to learn Firebase with Firestore.
I have spent more hours than I would have liked understanding the reference type and trying to get it to work with a simple query that references a portfolio's category.
This is the code:
try {
    const portfolioSnap = await db.collection("portfolio").get();
    let portfolioDoc = portfolioSnap.docs;
    let categoriesRef = [];
    portfolioDoc.forEach(p => {
        categoriesRef.push(p.data().category.get());
    });
    let categories = await Promise.all(categoriesRef);
    let portfolio = [];
    portfolioDoc.map((p, i) => {
        let portfolioObject = {
            ...p.data(),
            category: categories[i].data().name
        };
        portfolio.push(portfolioObject);
    });
    return portfolio;
} catch (error) {
    console.warn("ERROR: ", error);
}
I'm not sure if this makes sense.
I'm trying to get the category for each portfolio document but I feel this is over-engineered or I'm totally doing it the wrong way.
And that's not counting references for images or files, which I feel would make things... well, not pretty.
Nothing strange here. This is the way that NoSQL databases work (since there is no join operation, nor are there any explicit relationships between documents other than what you define).
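If it helps readability, the two passes over the documents can be collapsed into one; here is a sketch under the same assumptions as your code (each portfolio document has a category reference whose document has a name field):
try {
    const portfolioSnap = await db.collection("portfolio").get();
    // resolve each document and its category reference in a single pass
    const portfolio = await Promise.all(
        portfolioSnap.docs.map(async p => {
            const categoryDoc = await p.data().category.get();
            return { ...p.data(), category: categoryDoc.data().name };
        })
    );
    return portfolio;
} catch (error) {
    console.warn("ERROR: ", error);
}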

Bulk insert in MongoDB using mongoose

I currently have a collection in MongoDB, say "Collection1".
I have the following array of objects that needs to be inserted into MongoDB. I am using the Mongoose API. For now, I am iterating through the array and inserting each object into Mongo.
This is OK for now, but it will become a problem when the data gets too big.
I need a way of inserting the data in bulk into MongoDB without repetition.
I am not sure how to do this. I could not find a bulk option in Mongoose.
My code below
myData = [Obj1, Obj2, Obj3.......]
myData.forEach(function(ele){
    //console.log(ele)
    saveToMongo(ele);
});
function saveToMongo(obj){
    (new Collection1(obj)).save(function (err, response) {
        if (err) {
            // console.log('Error while inserting: ' + obj.name + " " + err);
        } else {
            // console.log('Data successfully inserted');
        }
    });
    return Collection1(obj);
}
You might want to use the insertMany() method here if you're using Mongoose version 4.4.x or greater. It essentially uses Model.collection.insertMany() under the hood, and the driver may handle parallelizing batches of >= 1000 docs for you.
myData = [Obj1, Obj2, Obj3.......];
Collection1.insertMany(myData, function(error, docs) {});
or using Promises for better error handling
Collection1.insertMany(myData)
    .then(function(docs) {
        // do something with docs
    })
    .catch(function(err) {
        // error handling here
    });
It works by creating a batch of documents, calling .validate() on them in parallel, and then calling the underlying driver's insertMany() on the result of toObject({ virtuals: false }) for each doc.
Although insertMany() doesn't trigger pre-save hooks, it has better performance because it only makes 1 round-trip to the server rather than 1 for each document.
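Since you mention avoiding repetition: if the collection has a unique index on the field that identifies duplicates, a sketch like the following (the ordered option is available in newer Mongoose versions, so treat this as an assumption to verify) inserts the new documents and skips the duplicates instead of aborting:
// assumes a unique index exists on the deduplicating field
Collection1.insertMany(myData, { ordered: false })
    .then(function(docs) {
        // everything that wasn't a duplicate was inserted
    })
    .catch(function(err) {
        // duplicate-key errors for the skipped docs end up here; the rest were still inserted
    });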
For Mongoose versions ~3.8.8, ~3.8.22, 4.x which support MongoDB Server >=2.6.x, you could use the Bulk API as follows
var bulk = Collection1.collection.initializeOrderedBulkOp(),
    counter = 0;
myData.forEach(function(doc) {
    bulk.insert(doc);
    counter++;
    if (counter % 500 == 0) {
        bulk.execute(function(err, r) {
            // do something with the result
            bulk = Collection1.collection.initializeOrderedBulkOp();
            counter = 0;
        });
    }
});
// Catch any docs in the queue under or over the 500's
if (counter > 0) {
    bulk.execute(function(err, result) {
        // do something with the result here
    });
}
You can also pass an array of objects to the Mongoose model's create function:
var Collection1 = mongoose.model('Collection1');
Collection1.create(myData, function(err){
    if(err) ...
});

Why is hashtagseen[] empty after I call the addposthashtags function?

I am trying to add each hashtag from the post's hashtags[] array to the user's hashtagseen[] array as an object with a num: 1 field if it is not already in it, or else add 1 to num if the hashtag is already in the hashtagseen[] array. How do I fix my code? Here is the code, thanks in advance.
edit: I think I am not finding post.hashtag with this.hashtag, and that is why it will not go to the else branch. Just a guess.
The user object
Accounts.createUser({
    username: username,
    password: password,
    email: email,
    profile: {
        hashtagsl: [],
    }
});
collections/post.js
var post = _.extend(_.pick(postAttributes, 'title', 'posttext', 'hashtags'), {
    userId: user._id,
    username: user.username,
    submitted: new Date().getTime(),
    commentsCount: 0,
    upvoters: [], votes: 0,
});
calling it
Meteor.call('addposthashtags',this.hashtags,Meteor.user().profile.hashtagsl);
lib/usershash
Meteor.methods({
    addposthashtags: function (hashtags, hashtagsl) {
        // supposed to make hashtagseen an array with the names from the hashtagsl objects
        var hashtagseen = _.pluck(hashtagsl, 'name');
        // supposed to run once for each hashtag in the post's array
        for (var a = 0; a < hashtags.length; a++) {
            // supposed to set hashnumber to the index that indexOf returns
            var hashnumber = hashtagseen.indexOf(hashtags[a]);
            // supposed to check whether the current hashtags[a] === an item in hashtagseen
            if(hashnumber === -1){
                var newhashtag = this.hashtags[a];
                // supposed to push an object with name equal to the current hashtag
                Meteor.users.update({"_id": this.userId}, {"$push": {"profile.hashtagsl": {name: newhashtag, num: 1}}})
            } else {
                var hashi = hashtagseen[hashnumber];
                // supposed to add one to the num field of the current object in hashtagsl
                Meteor.users.update({"_id": this.userId, "profile.hashtagsl.name": hashi}, {"$inc": {"profile.hashtagsl.num": 1}});
            }
        }
    }
});
Your addposthashtags function is full of issues. You also haven't provided a "schema" for hashtag objects.
addposthashtags: function () {
    for (a = 0; a < this.hashtag.length; a++) {
        // Issue1: You're querying out the user for every iteration of the loop!?
        for (i = 0; i < Meteor.user().profile.hashtagseen.length; i++) {
            // Issue2: You're comparing two _objects_ with ===
            // Issue3: Even if you use EJSON.equals - the `num` property wont match
            // Issue4: You're querying out the user again?
            if (this.hashtag[a] === Meteor.user().profile.hashtagseen[i]) {
                // Issue5: no `var` statement for hashtagseeni?
                // Issue6: You're querying out the user again??
                hashtagseeni = Meteor.user().profile.hashtagseen[i];
                // Issue7: undefined hashtagsli?
                // Issue8: Calling multiple methods for the one action (eg in a loop) is a waste of resources.
                Meteor.call('addseen', hashtagsli);
            } else {
                // Issue9: no `var` statement for newhashtag?
                newhashtag = this.hashtag[a];
                newhashtag.num = 1;
                // Issue8b: Calling multiple methods for the one action (eg in a loop) is a waste of resources.
                Meteor.call('updateUser', newhashtag, function (err, result) {
                    if (err)
                        console.log(err);
                });
            }
        }
    }
}
Also, the method has similar issues:
addseen: function (hashtagseeni) {
    // Issue10: var `profile` is undefined
    // Issue11: should use `this.userId`
    // Issue12: hashtagseeni wouldn't match profile.hashtagseen due to "num" field.
    Meteor.users.update({"_id": Meteor.userId, "profile.hashtagseen": profile.hashtagseeni}, {"$inc": {"profile.hashtagseen.$.num": 1}});
}
New issues with your new set of code:
Meteor.methods({
    addposthashtags: function (hashtags, hashtagsl) {
        // Issue1: `hashtag` is undefined, guessing you mean `hashtags`
        // Issue2: no `var` for a
        for (a = 0; a < hashtag.length; a++) {
            // Issue3: no `var` for i
            // Issue4: Why are you looping through both?
            // don't you just want to check if hashtag[a] is in hashtagsl?
            for (i = 0; i < hashtagsl.length; i++) {
                if (hashtags[a] === hashtagsl[i].name) {
                    var hashi = hashtagsl[i].name;
                    // supposed to add one to the num variable within the current object in hashtagsl.
                    // Issue5: This query wont do what you think. Test until you've got it right.
                    Meteor.users.update({"_id": Meteor.userId, 'profile.hashtagsl': hashi}, {"$inc": {"num": 1}});
                } else {
                    // Issue6: `this.hashtag` isn't defined. guessing you mean `hashtags[a]`
                    var newhashtag = this.hashtag[a];
                    // Issue7: superfluous statement
                    var newhashtagnum = num = 1;
                    // Issue8: Obvious syntax errors
                    // Perhaps try Meteor.users.update({"_id": this.userId},{"$push":{"profile.hashtagsl": {name: newhashtag, num: 1}}})
                    Meteor.users.update({"_id": Meteor.userId, 'profile'}, {"$addToSet": {"hashtagsl"[newhashtag]=newhashtagnum}})
                };
            };
        };
    };
});
I'd suggest you try the following:
1) Assuming that after newhashtag = hashtag[a] you get a JSON object in the newhashtag variable, try replacing newhashtag:{num:1}; with newhashtag.num = 1 - this will add the num variable to the object and set its value.
1.a) For debugging purposes, try adding console.log(JSON.stringify(newhashtag)); after each of the two lines where you set and change the newhashtag variable - this way you'll know exactly what you're trying to add to the MongoDB document.
2) The update to increment the count also doesn't seem like it will work. A couple of things to note here: in $set:{'profile.hashtagseen[i]':num++}, MongoDB won't be able to resolve the 'i' in 'profile.hashtagseen[i]', and 'num++' is not how increments are done in Mongo.
I'd suggest you look into $inc and the positional update documentation of MongoDB.
Your final increment update statement will look something like
Meteor.users.update({"_id": Meteor.userId, "profile.hashtagseen": profile.hashtagseen[i]}, {"$inc":{"profile.hashtagseen.$.num":1}});
I see that addposthashtags is executed on the client, and you must pay attention because this function will run against Minimongo, which doesn't support all operations. First try to execute the operation directly in Mongo; if it works there, you should create the function inside the server folder.
Here is the relevant text from the Minimongo documentation:
In this release, Minimongo has some limitations:
$pull in modifiers can only accept certain kinds of selectors.
findAndModify, aggregate functions, and map/reduce aren't supported.
All of these will be addressed in a future release. For full Minimongo
release notes, see packages/minimongo/NOTES in the repository.
Minimongo doesn't currently have indexes. It's rare for this to be an
issue, since it's unusual for a client to have enough data that an
index is worthwhile.
Try creating a method on the server with the same operation.
Server:
Meteor.methods({
    updateUser: function (newhashtag) {
        Meteor.users.update(this.userId,
            {
                $addToSet: {'profile.$.hashtagseen': newhashtag}
            });
    }
});
Client:
Meteor.call('updateUser', newhashtag, function(err, result){
    if (err)
        console.log(err); // here you can print the error if there is one
});
Minimongo doesn't support all operations; as a test, you can execute the method in the console to see whether it is supported. After that you can execute the operation directly in Mongo, which should clear your doubts.

Best method to string together variety of DB calls in Node js

I basically need to make about 3 calls to get the data for a JSON object. It's basically a JSON array of JSON objects that have some attributes, one of which is an array of other values selected using a second query; that one also has an array inside, which is selected with another DB call.
I tried using async.concatSeries so that I could dig down into the bottom call and put together all the information I collected for one root JSON object, but that's creating a lot of unexpected behaviour.
Example of JSON
[
    {
        "item" : "firstDbCall",
        "children" : [ {
            "name" : "itemDiscoveredWithSecondDBCall",
            "children" : [ itemsDiscoveredwith3rdDBCall ]
        } ]
    }
]
This is really difficult using node.js. I really need to figure out how to do this properly since I have to do many of these for different purposes.
EDIT
This is the code I have. There's some strange behaviour with async.concatSeries: the results callback gets called multiple times after each one of the functions finishes for each array, so I had to put a check in place. I know it's very messy code, but I've just been putting band-aids all over it for the past 2 hours to make it work.
console.log("GET USERS HAREDQARE INFO _--__--_-_-_-_-_____");
var query = "select driveGroupId from tasks, driveInformation where agentId = '"
+ req.params.agentId + "' and driveInformation.taskId = tasks.id order by driveInformation.taskId desc;";
connection.query(query, function(err, rows) {
if (rows === undefined) {
res.json([]);
return;
}
if(rows.length<1) { res.send("[]"); return;}
var driveGroupId = rows[0].driveGroupId;
var physicalQuery = "select * from drives where driveGroupId = " + driveGroupId + ";";
connection.query(physicalQuery, function(err, rows) {
console.log("ROWSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSSS");
console.log(rows);
async.concatSeries(rows, function(row, cb) {
console.log("-------------------------------SINGLE ROW-------------------------------------");
console.log(row);
if(row.hasLogicalDrives != 0) {
console.log("HAS LOGICAL DRIVES");
console.log(row.id);
var query = "select id, name from logicalDrives where driveId = " + row.id;
connection.query(query, function(error, drives) {
console.log("QUERY RETURNED");
console.log(drives);
parseDriveInfo(row.name, row.searchable, drives, cb);
});
}
else
var driveInfo = { "driveName" : row.name, "searchable" : row.searchable};
console.log("NO SUB ITEMS");
cb(null, driveInfo);
}, function(err, results) {
console.log("GEETTTTINGHERE");
console.log(results);
if(results.length == rows.length) {
console.log("RESULTS FOR THE DRIVE SEARCH");
console.log(results);
var response = {"id": req.params.agentId};
response.driveList = results;
console.log("RESPONSE");
console.log(response);
res.json(response);
}
});
});
});
};
parseDriveInfo = function(driveName, searchable, drives, cb) {
    async.concatSeries(drives, function(drive, callback) {
        console.log("SERIES 2");
        console.log(drive);
        console.log("END OF DRIVE INFO");
        var query = "select name from supportedSearchTypes where logicalDriveId = " + drive.id;
        connection.query(query, function(error, searchTypes) {
            drive.searchTypes = searchTypes;
            var driveInfo = { "driveName" : driveName,
                "searchable" : searchable,
                "logicalDrives" : drive
            };
            callback(null, driveInfo);
        });
    }, function (err, results) {
        console.log("THIS IS ISISIS ISISISSISISISISISISISISISIS");
        console.log(results);
        if(results.length === drives.length) {
            console.log("GOTHERE");
            cb(null, results);
        }
    });
}
Getting good enough with async to use exactly the right combination of methods under the right circumstances takes a fair amount of experience. Most likely your case in particular can be handled with async.waterfall, if it's query1, then query2(dataFoundByQuery1), then query3(dataFoundByQuery2). But depending on the circumstances you need to mix and match async methods appropriately, and sometimes use two levels - for example a "big picture" async.waterfall where some of the steps in the waterfall do async.parallel or async.series as needed. I've never used async.concat, and given your needs I think you have chosen the wrong method. The workhorses are async.each, async.eachSeries, async.waterfall, and async.map, at least for the web app & DB query use cases I mostly encounter, so make sure you really understand those before exploring the more specific convenience methods.
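For illustration, here is a minimal async.waterfall sketch of the three-step shape of your code; the query strings, the placeholder support, and the agentId variable are assumptions to adapt to your schema and driver:
var async = require('async');

async.waterfall([
    function(cb) {
        // step 1: find the drive group for the agent
        connection.query('select driveGroupId from tasks where agentId = ?', [agentId], function(err, rows) {
            cb(err, rows && rows[0] && rows[0].driveGroupId);
        });
    },
    function(driveGroupId, cb) {
        // step 2: use the result of step 1 to fetch the physical drives
        connection.query('select * from drives where driveGroupId = ?', [driveGroupId], function(err, drives) {
            cb(err, drives);
        });
    },
    function(drives, cb) {
        // step 3: enrich each drive with its logical drives
        async.map(drives, function(drive, done) {
            connection.query('select id, name from logicalDrives where driveId = ?', [drive.id], function(err, logical) {
                drive.logicalDrives = logical;
                done(err, drive);
            });
        }, cb);
    }
], function(err, results) {
    // results is the fully assembled array (or err if any step failed)
});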
EDIT: This is a more in-depth example based on the connection library you seem to be using. Please note, some of this is JavaScript pseudocode. Things like adding objects to the resultsArray are clearly not complete; the only thing I took time to make sure was correct is the "flow of logic" as it pertains to callbacks. Everything else is for you to implement. In order to support multiple calls to the same callback function and maintain state from call to call, the best way is to wrap the set of callbacks in a closure. This allows the callbacks to share some state with the main event loop, and lets you pass arguments to the callbacks without actually having to pass them as arguments - much like class variables in C++, or even globals in JavaScript, but without polluting the global scope :)
function queryDataBase(query) {
    // wrap the whole query in a function so the callbacks can share some
    // variables with similar scope. This is called a closure
    var rowCounter = 0;
    var dataRowsFromStep2;
    var resultsArray = {};
    connection.query(query, dataBaseQueryStep2);
    function dataBaseQueryStep2(err, rows) {
        // do something with err and rows
        dataRowsFromStep2 = rows;
        var query = getQueryFromRow(dataRowsFromStep2[rowCounter++]); // Always zero the first time. Might need to double check rows isn't empty!
        connection.query(query, dataBaseQueryStep3);
    }
    function dataBaseQueryStep3(err, rows) {
        // do something with err and rows
        if(rowCounter < dataRowsFromStep2.length) {
            resultsArray.add(rows); // Probably needs to be more interesting, but you get the idea
            // since this is within the same closure, rowCounter maintains its state
            var query = getQueryFromRow(dataRowsFromStep2[rowCounter++]);
            // recursively call query using dataBaseQueryStep3 as its callback repeatedly until
            // we run out of rows to call it on.
            connection.query(query, dataBaseQueryStep3)
        } else {
            // when the if statement fails we have no more rows to run queries on, so return to main program flow
            returnToMainProgramLogic(resultsArray);
        }
    }
}
function returnToMainProgramLogic(results) {
    // continue running your program here
}
I personally like the above logic better than the syntax async produces... I believe the heart of your problem rests in your nested calls to async, and the fact that async itself runs the series of functions asynchronously, but in order (confusing, I know). If you write your program like this, you won't have to worry about it!
I would strongly suggest using sequelize.js. It provides a really powerful ORM that allows you to chain queries together. It also allows you to load your data directly into JS objects, write dynamic SQL, and connect to many different databases. Picture ActiveRecord from the Ruby world, for Node.
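As a rough sketch of what the nested fetch could look like with Sequelize associations (the model and column names are assumptions, and the exact API depends on your Sequelize version):
const { Sequelize, DataTypes } = require('sequelize');
const sequelize = new Sequelize('mysql://user:pass@localhost/mydb');

// hypothetical models mirroring the drives / logicalDrives tables
const Drive = sequelize.define('Drive', { name: DataTypes.STRING, searchable: DataTypes.BOOLEAN });
const LogicalDrive = sequelize.define('LogicalDrive', { name: DataTypes.STRING });
Drive.hasMany(LogicalDrive);

// one call loads each drive with its logical drives nested, instead of hand-rolled callbacks
Drive.findAll({ include: LogicalDrive })
    .then(drives => {
        // drives[i].LogicalDrives is already populated here
    });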
