Sending multiple emails in Node.js - javascript

A web app I'm building will send out invoices to clients every third month. This will be a scheduled event that runs in the middle of the night, but during development I have put this code into a route so I can test it.
In short, I want the code to do the following:
Query all unsent invoices from the DB.
Make a call to Mandrill for each invoice (in this call I'm also invoking a function that creates a Mandrill message object from the invoice).
For every message Mandrill sends, update the invoice in the DB with sent: true.
When all invoices are sent, make the final callback in the async.waterfall.
The code below works, but I have some concerns regarding the _.each.
invoices.post('/invoices/send/', function(req, res, next) {
  async.waterfall([
    // Query all unsent invoices
    function(callback) {
      db.invoices.find({sent: false}).toArray(callback);
    },
    // Send all unsent invoices
    function(invoices, callback) {
      if (invoices.length === 0) {
        var err = new Error('There are no unsent invoices');
        err.status = 400;
        return next(err); // Quick escape if there are no matching invoices to process
      }
      // Make a call to Mandrill transactional email service for every invoice.
      _.each(invoices, function(invoice) {
        mandrillClient.messages.sendTemplate({template_name: "planpal-invoice", template_content: null, message: mandrillClient.createInvoiceMessage(invoice)}, function(sendResult) {
          console.log(sendResult);
          db.invoices.updateById(invoice._id, {$set: {sent: true}}, function(err, saveResult) {
            console.log(saveResult);
          });
        }, function(err) {
          return next(err);
        });
      });
      callback(null, 'done');
    }
  ],
  function(err, result) {
    if (err) {
      return next(err);
    }
    res.json(result);
  });
});
I'm thinking I should use async.eachLimit instead... but I don't know how to write it.
I have no idea what I should set the limit to, but I guess several parallel requests would be better than running all Mandrill requests in series like above, am I wrong? EDIT: _.each runs the callbacks in parallel. The difference from async.each is that I don't get a "final callback".
Conclusion: Should I use async.eachLimit above? If yes, what is a good limit value?

I think you can use the https://github.com/caolan/async#each function.
It will execute the queries in parallel too.
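For reference, a rough sketch of how the second waterfall step could look with async.eachLimit, reusing the mandrillClient and db calls from the question; the limit of 10 is an arbitrary illustration, not a value recommended by Mandrill:

// Hypothetical rewrite of the second waterfall step using async.eachLimit.
// The limit (10 here) caps how many Mandrill calls run at the same time.
function(invoices, callback) {
  if (invoices.length === 0) {
    var err = new Error('There are no unsent invoices');
    err.status = 400;
    return callback(err);
  }
  async.eachLimit(invoices, 10, function(invoice, done) {
    mandrillClient.messages.sendTemplate({
      template_name: "planpal-invoice",
      template_content: null,
      message: mandrillClient.createInvoiceMessage(invoice)
    }, function(sendResult) {
      // Mark the invoice as sent once Mandrill accepts the message
      db.invoices.updateById(invoice._id, {$set: {sent: true}}, function(err) {
        done(err);
      });
    }, function(err) {
      done(err); // Mandrill reported an error for this invoice
    });
  }, function(err) {
    // Fires once every invoice has been processed (or on the first error)
    callback(err, 'done');
  });
}

With eachLimit the final callback only fires after every invoice has been handled, which is the missing piece compared to _.each. The right limit depends on your Mandrill account's rate limits, so a low double-digit value is only a starting point.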

Related

How to avoid OVER_QUERY_LIMIT nodejs

I'm hitting this error every time I want to create a new place in my database. I was trying to implement setTimeout() to avoid this case, but I'm not sure I'm doing this properly. Any advice?
router.post("/",middleware.isLoggedIn ,upload.single("image"),function(req, res){
geocoder.geocode(req.body.place.location, function(err, data){
while(status == google.maps.GeocoderStatus.OVER_QUERY_LIMIT)
{
setTimeout(3000);
}
if ( err){
req.flash("error", err.message);
console.log("error");
return res.redirect("back");
}
req.body.place.lat = data[0].latitude;
req.body.place.lng = data[0].longitude;
//cloudinary configuration
cloudinary.uploader.upload(req.file.path, function(result){
// add cloudinary url for the image to the campground object under image property
req.body.place.image = result.secure_url;
// add author to campground
req.body.place.author = {
id: req.user._id,
username: req.user.username
};
Place.create(req.body.place, function(err, newlyCreated){
if(err){
res.send(err);
}else{
res.redirect("/");
}
});
});
});
});
From the Google documentation concerning query limits:
"Upon receiving a response with status code OVER_QUERY_LIMIT, your application should determine which usage limit has been exceeded. This can be done by pausing for 2 seconds and resending the same request. If status code is still OVER_QUERY_LIMIT, your application is sending too many requests per day. Otherwise, your application is sending too many requests per second."
Your first step needs to be the 2-second pause and retry to determine the cause. If it is too many requests per second, reduce the rate until you don't get the error any more.
Cd&

Node.js and Heroku

Getting started with Node.js and Heroku; I am trying to make sense of the following code, in order to build something of my own:
app.get('/db', function (request, response) {
  pg.connect(process.env.DATABASE_URL, function(err, client, done) {
    client.query('SELECT * FROM test_table', function(err, result) {
      done();
      if (err) {
        console.error(err);
        response.send("Error " + err);
      } else {
        response.render('pages/db', {results: result.rows});
      }
    });
  });
});
Where can I find a tutorial or some comments or explanations for that?
Even though I can do some guessing, a good deal of this code is pretty mysterious.
Currently my main concerns are:
What happens if I change the SQL query, replacing it by 'SELECT count(*) FROM test_table'? How do I then render the result?
What does "done();" do? Is it something I can modify or make use of?
The parameter "request" is never used. Can it be used for something at some point?
Before handling Heroku, you should first look at tutorials about web applications in Node.js, which will answer your last question.
You can see how express.js, a web framework, works.
Then look at the node-postgres documentation. You will find the answer to your second question there:
//this initializes a connection pool
//it will keep idle connections open for a 30 seconds
//and set a limit of maximum 10 idle clients
var pool = new pg.Pool(config);

// to run a query we can acquire a client from the pool,
// run a query on the client, and then return the client to the pool
pool.connect(function(err, client, done) {
  if (err) {
    return console.error('error fetching client from pool', err);
  }
  client.query('SELECT $1::int AS number', ['1'], function(err, result) {
    // call `done()` to release the client back to the pool
    done();
    if (err) {
      return console.error('error running query', err);
    }
    console.log(result.rows[0].number);
    // output: 1
  });
});
And finally, why don't you just log the result after changing the SQL query and see what you get?
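For the count(*) question, a minimal sketch of what the route could look like, assuming the same pg setup as the snippet in the question; the '/count' path and the pages/count template name are made up for illustration:

app.get('/count', function (request, response) {
  pg.connect(process.env.DATABASE_URL, function(err, client, done) {
    // count(*) comes back as a single row; PostgreSQL names the column "count"
    client.query('SELECT count(*) FROM test_table', function(err, result) {
      done(); // release the client back to the pool
      if (err) {
        console.error(err);
        response.send("Error " + err);
      } else {
        // result.rows[0].count holds the value (node-postgres returns it as a string)
        response.render('pages/count', {count: result.rows[0].count});
      }
    });
  });
});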

Node Express 4 send a response after multiple API calls

I am building a NodeJS server using Express 4. I use this server as a middleman between a frontend Angular app and a 3rd party API.
I created a certain path that my frontend app requests, and on that path I wish to call the API multiple times, merge all of the responses, and then send the resulting response.
I am not sure how to do this as I need to wait until each API call is finished.
Example code:
app.post('/SomePath', function(req, res) {
  var merged = [];
  for (var i in req.body.object) {
    // APIObject.sendRequest uses superagent module to handle requests and responses
    APIObject.sendRequest(req.body.object[i], function(err, result) {
      merged.push(result);
    });
  }
  // After all is done send result
  res.send(merged);
});
As you can see, I'm calling the API within a loop, as many times as there are objects in the request body.
How can I send a response after everything is done and the API responses are merged?
Thank you.
Check out this answer; it uses the Async module to make a few requests at the same time and then invokes a callback when they are all finished.
As per #sean's answer, I believe each would fit better than map.
It would then look something like this:
var async = require('async');

var merged = [];
async.each(req.body.object, function(item, callback) {
  APIObject.sendRequest(item, function(err, result) {
    if (err) {
      callback(err);
    } else {
      merged.push(result);
      callback();
    }
  });
}, function(err) {
  if (err)
    res.sendStatus(500); // Example
  else
    res.send(merged);
});
First of all, you can't just fire async calls in a loop and send the response right after; the loop finishes before the callbacks do.
You can use the async module's map function.
app.post('/SomePath', function(req, res) {
  async.map(req.body.object, APIObject.sendRequest, function(err, result) {
    if (err) {
      res.status(500).send('Something broke!');
      return;
    }
    res.send(result);
  });
});
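One caveat with passing APIObject.sendRequest directly to async.map: it gets called as sendRequest(item, callback), so this only works if that is its exact signature and it does not depend on this. If in doubt, a thin wrapper keeps that explicit (a sketch, reusing the names from the question):

app.post('/SomePath', function(req, res) {
  // Wrap the call so the argument order and `this` binding are explicit
  async.map(req.body.object, function(item, callback) {
    APIObject.sendRequest(item, callback);
  }, function(err, results) {
    if (err) {
      return res.status(500).send('Something broke!');
    }
    // results is in the same order as req.body.object
    res.send(results);
  });
});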

ExpressJS response to POST is undefined after accessing MongoDB?

I'm making a web application using the MEAN framework and MVC design pattern. I am trying to perform a POST request from the Angular front-end for finding a document in my server-side MongoDB (version 2.4.9). The console logs show that the query is successful, but when I try to send the response back to the client, the query result is undefined.
I understand that NodeJS is asynchronous and uses callbacks, but I am having trouble understanding what is wrong with my code. I tried using returns and callbacks but I can't get it working. I'm confused how to use the controller to access the model and have the controller ultimately send the response.
Here is my code to connect to the database (model):
module.exports = {
  readDocument : function(callback, coll, owner) {
    // Connect to database
    MongoClient.connect("mongodb://localhost:27017/tradingpost", function(err, db) {
      if (err) {
        console.log("Cannot connect to db (db.js)");
        callback(err);
      }
      else {
        console.log("Connected to DB from db.js: ", db.databaseName);
        // Read document by owner
        // Get the documents collection
        var collection = db.collection(coll);
        // Find document
        collection.find({owner: owner}).toArray(function (err, result) {
          if (err) {
            console.log(err);
          } else if (result.length) {
            console.log('Found:', result);
          } else {
            console.log('No document(s) found with defined "find" criteria!');
          }
          // Close connection
          db.close();
          return callback(result);
        });
      }
    });
  }
};
And here is my controller that sends the response:
var model = require('../models/db');

exports.sendRecentPosts = function (req, res) {
  // Connect to the DB
  // Run query for recent posts
  // Close the connection
  // Send the data to the client
  var result = model.readDocument(dbCallback, "gs", "Mana");
  res.end( result );
};
Client's post request:
// Use post for secure queries
// Need recent posts for display
$http.post('/recent').
  success(function(responseData) {
    $scope.testValue = responseData;
  }).
  error(function(responseData) {
    console.log('Recent posts POST error. Received: ', responseData);
  });
Snippet for my express route:
var goodsServices = require('../controllers/gs-server-controller.js');
app.post('/recent', goodsServices.sendRecentPosts);
I have been struggling with this for a long time and searched the forum for solutions but could not find any. Thanks for any feedback.
I do not know why this question has not been answered yet. When I faced the same problem, I learnt that the responses to all DB queries are returned only after the DB transaction is complete. Try placing db.close() within the success callback of the find() call.
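The other thing worth noting is that model.readDocument never returns a value; it hands the result to the callback you pass in, so assigning its return value and calling res.end() immediately will always send undefined. A minimal sketch of the controller written around the callback, reusing the readDocument signature from the question (note that it calls the callback with an error on failure and with the result array on success, so a real handler should distinguish the two):

var model = require('../models/db');

exports.sendRecentPosts = function (req, res) {
  // Pass a callback; readDocument invokes it once the query has finished
  model.readDocument(function (result) {
    // Send the query result back to the Angular client as JSON
    res.json(result);
  }, "gs", "Mana");
};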

Express | Making multiple http requests asynchronously

I am currently building a small node application that makes a few api calls and renders a webpage with charts on it. I'm using express and jade as the render engine.
The problem is that I'm quite new to JavaScript and I don't know how to structure my HTTP requests so that, when there is more than one request, I can collect the variables I get from the API (HTTP GET) into a single object and send it to the Jade rendering engine.
Here is what I have so far :
app.get('/test', function(req, res) {
  apiRequestGoesHere(name, function(error, profile) {
    //Get some data here
  });
  anotherApiRequest(tvshow, function(error, list) {
    //Get some data here
  });
  res.render('test', data);
});
As it is right now, the page renders before the requests are done, and if I place res.render inside one of the requests, I can't access the other's data.
So what I want is a way to set it up so I can make multiple API calls, build an object out of some elements of what the REST API returns, and send it to Jade so I can use the data on the page.
You probably want to use async to help with this. async.parallel is a good choice for something simple like this:
app.get('/test', function(req, res) {
  async.parallel([
    function(next) {
      apiRequestGoesHere(name, function(error, profile) {
        //Get some data here
        next(null, firstData);
      });
    },
    function(next) {
      anotherApiRequest(tvshow, function(error, list) {
        //Get some data here
        next(null, secondData);
      });
    }
  ], function(err, results) {
    // results is [firstData, secondData]
    res.render('test', ...);
  });
});
The first argument to those next callbacks should be an error if relevant (I put null); as soon as one is called with an error, the final function will be called with that same error and the rest of the callbacks will be ignored.
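To make the render step concrete, here is a sketch of how the two results could be handed to the template; the profile/shows local names are made up for illustration, and errors are forwarded instead of being swallowed:

app.get('/test', function(req, res) {
  async.parallel([
    function(next) {
      apiRequestGoesHere(name, function(error, profile) {
        next(error, profile); // forward errors instead of dropping them
      });
    },
    function(next) {
      anotherApiRequest(tvshow, function(error, list) {
        next(error, list);
      });
    }
  ], function(err, results) {
    if (err) {
      return res.status(500).send('API request failed');
    }
    // results[0] is the profile, results[1] is the list
    res.render('test', { profile: results[0], shows: results[1] });
  });
});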
You can use async.parallel.
async.parallel([
  function(callback){
    // Make http requests
    // Invoke callback(err, result) after http request success or failure
  },
  function(callback){
    // Make http requests
    // Invoke callback(err, result) after http request success or failure
  }
],
// optional callback
function(err, results){
  // results will be an array of the results from the callbacks
});
The reason your page renders too early is that the callbacks haven't "called back" yet. To do what you want, you would need to do something like:
app.get('/test', function(req, res) {
  apiRequestGoesHere(name, function(error, profile) {
    //Get some data here
    anotherApiRequest(tvshow, function(error, list) {
      //Get some data here
      res.render('test', data);
    });
  });
});
This strategy leads to what is known as "pyramid code" because your nested callback functions end up deeper and deeper.
I would also recommend the step library by Tim Caswell. It would make your code look something like:
var step = require('step');

app.get('/test', function(req, res) {
  step(
    function () {
      apiRequestGoesHere(name, this);
    },
    function (error, profile) {
      if (error) throw error;
      anotherApiRequest(tvshow, this);
    },
    function done(error, list) {
      if (error) throw error;
      res.render('test', list);
    }
  );
});
You could also use the group method to make the calls in parallel and still maintain the sequence of your callbacks (see the sketch below).
Gl,
Aaron
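A sketch of what the group variant could look like, based on step's this.group() helper; apiRequestGoesHere and anotherApiRequest are the placeholder calls from the question, and the profile/list keys are made up for illustration:

var step = require('step');

app.get('/test', function(req, res) {
  step(
    function () {
      // group() hands out one callback per call; step waits for all of them
      var group = this.group();
      apiRequestGoesHere(name, group());
      anotherApiRequest(tvshow, group());
    },
    function done(error, results) {
      if (error) throw error;
      // results is an array in the order the group callbacks were created
      res.render('test', { profile: results[0], list: results[1] });
    }
  );
});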
