node.js design pattern for creating db connection once - javascript

I am looking for help with a design pattern for creating a database connection in my node.js application.
It seems obvious to do:
module1:
var db;

exports.get_db = function (callback) {
  if (db == null) {
    dblibrary.create(connection_params, function (error, conn) {
      if (error == null) {
        db = conn;
        callback(null, db);
      } else {
        callback(error);
      }
    });
  } else {
    callback(null, db);
  }
};
module2:
exports.do_something = function () {
  module1.get_db(function (err, conn) {
    if (err == null) {
      // continue using query
    }
  });
};
It seems painful to have to penalize every single person who wants to get the db connection with the requirement of using a callback.
I could do this:
module1:
var db;

dblibrary.create_connection(connection_params, function (err, conn) {
  if (err != null) {
    console.log("can't create connection");
    console.log(err);
    process.exit();
  } else {
    db = conn;
  }
});

exports.get_db = function () {
  return db;
};
This makes it so that getting the db connection is simple and fast, but means we have to "wait" at node startup time for the connection to be established.
Which is the better design? Is there a better way of doing things?

mydb.js module:
var db

exports.db = function() {
  if (!db) {                       // db starts out undefined, so check truthiness
    db = dblibrary.createClient()
  }
  return db
}
Other modules:
var db = require('mydb').db()
...
db.query(...)
This creates the DB client instance once, on first use, and then reuses it. I like this solution because the creation code is encapsulated in a separate module and the other modules can get access to the client with a single require() statement.

Best answer I've seen for this is:
in start.js:
function init_done() {
  app.listen(8080);
}

init_databases(init_done);
in databases.js:
function init_databases(init_done_cb) {
  db.create_async(/* connect data */, function (err, res) {
    if (err == null) init_done_cb();
  });
}
This way you can do the async startup of the database server without that awkward / dangerous waiting period.
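To make the connected handle reachable from other modules after startup, databases.js can also cache it and expose a plain getter. A minimal sketch building on the pseudo-API above (get_db and the db_handle variable are names I'm introducing, not part of the original answer):
// databases.js (sketch)
var db_handle;

exports.init_databases = function (init_done_cb) {
  db.create_async(/* connect data */, function (err, conn) {
    if (err != null) return init_done_cb(err);
    db_handle = conn;          // cache the connection once it is ready
    init_done_cb(null);
  });
};

// synchronous getter; safe because app.listen() only runs after init_databases finishes
exports.get_db = function () {
  return db_handle;
};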

I wrote connect-once just for solving this kind of problem. There are two main goals achieved by this module:
Connection should be initialized before the first request arrives
Connection should be initialized once, even if multiple requests come in at the same time
You can look at express-mongo-db and express-mongoose-db as examples of usage.
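The underlying idea, independent of the module itself, is to start connecting on the first call and queue any callers that arrive while the connection is still being established. A rough sketch of that pattern, reusing the pseudo-API from the question (illustrative only, not the connect-once API):
var db = null;
var connecting = false;
var waiting = [];   // callbacks queued while the connection is being established

exports.get_db = function (callback) {
  if (db) return callback(null, db);     // already connected: answer immediately
  waiting.push(callback);
  if (connecting) return;                // a connection attempt is already in flight
  connecting = true;
  dblibrary.create(connection_params, function (err, conn) {
    if (!err) db = conn;
    connecting = false;
    // flush everyone who asked while we were connecting
    var callbacks = waiting;
    waiting = [];
    callbacks.forEach(function (cb) { cb(err, db); });
  });
};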

Related

Searching for best practice to use one mongodb connection in multiple javascript-files

At the moment, I am developing a node.js REST web service with Express. I used MongoDB + Mongoose for the database.
Now I have the problem that I can only use the db connection in the file where I established the connection. I found a way to use the connection in other files by exporting the _db variable via module.exports, but I don't know if this is best practice. Here is my code:
databaseManager.js
// Establish a connection to the database.
mongoose.Promise = global.Promise
mongoose.connect('mongodb://' + cfg.db.ip + ':' + cfg.db.port + '/' + cfg.db.name)

var _db = mongoose.connection

_db.on('error', console.error.bind(console, 'DB connection error'))

_db.once('open', function()
{
    console.log("DatabaseM: Connected to the database")
})

[...]

module.exports =
{
    db : _db,
}
otherFile.js
var database = require('./databaseManager')
[...]
database.db.collection('users').findOne({ name: "ashton" }, function(err, user)
{
    if (err) return callback(consts.ERROR_DB, null)
    if (!user) return callback(consts.WARN_DB_NO_CLIENT)
    callback(null, user)
})
It works great. But is there a risk that I am not seeing?
Thanks a lot :-)
In your app.js file:
var url = "mongodb://localhost:27017/dbname";
mongoose.connect(url); // opens the default connection for mongodb, managed by mongoose
Now perform whatever tasks you want:
mongoose.connection.on('connected', function () {
    console.log('Mongoose default connection open to ' + url);
});
Bring all your database models into the app.js file like this:
var model1 = require('./models/model1');
model1.js
var mongoose = require('mongoose');

var data = new mongoose.Schema({
    name: {type: String, required: true}
});

module.exports = mongoose.model('collectionName', data);
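Any other file can then require the same model and query it; mongoose routes the query through its default connection, so no extra connection setup is needed. A small usage sketch (the file name and query values are just examples):
// someOtherFile.js
var Model1 = require('./models/model1');

Model1.findOne({ name: 'ashton' }, function (err, doc) {
  if (err) return console.error(err);
  console.log(doc);
});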
You can also listen for the default connection being disconnected like this:
mongoose.connection.on('disconnected', function () {
    console.log('Mongoose default connection disconnected');
});
If any error occurs on the connection, handle it like this:
mongoose.connection.on('error', function (err) {
    console.log('Mongoose default connection error: ' + err);
});
If the node service exits, close the connection using this code:
process.on('SIGINT', function() {
    mongoose.connection.close(function () {
        console.log('Mongoose default connection disconnected through app termination');
        process.exit(0);
    });
});

LoopBack: cannot call method 'post' of undefined

I am new to loopback and node.js.
I have created two models: Rating and RatingsAggregate
Using the LoopBack explorer, I can query and post against the API just fine.
I am trying to set up some basic business logic, so I am editing the file Rating.js in common/models.
Here is the content of it:
module.exports = function(Rating) {
  Rating.afterRemote('**', function(ctx, inst, next) {
    var loopback = require('loopback');
    var app = loopback();
    var ratingsaggregate = app.models.ratingsaggregate;
    ratingsaggregate.post({"source":"foobar","restaurantID":"foobar","itemMenuName":"foobar","itemSectionName":"foobar","itemName":"foobar","nRatings1":123,"nRatings2":123,"nRatings3":123,"nRatings4":123,"nRatings5":123,"hasImage":true,"imageSize":123,"latestImageRatingID":"foobar","imageCount":123,"lastUpdated":"foobar"}, function(err, response) {
      if (err) console.error(err);
      next();
    });
  });
};
I can load my API, but whenever I run a get statement against it, I get this error:
TypeError: Cannot call method 'post' of undefined
My guess is that somehow ratingsaggregate never gets a value... but I don't know what I am doing wrong. Obviously this is not the end state of my business logic, but I am trying some basic CRUD right now between two models
And... here is the answer. There was a getModel function hidden in the documentation:
module.exports = function(Rating) {
  Rating.afterRemote('create', function(ctx, inst, next) {
    var loopback = require('loopback');
    var ratingsaggregate = loopback.getModel('ratingsaggregate');
    ratingsaggregate.create({"source":"foobar","restaurantID":"foobar","itemMenuName":"foobar","itemSectionName":"foobar","itemName":"foobar","nRatings1":123,"nRatings2":123,"nRatings3":123,"nRatings4":123,"nRatings5":123,"hasImage":true,"imageSize":123,"latestImageRatingID":"foobar","imageCount":123,"lastUpdated":"foobar"}, function(err, response) {
      if (err) console.error(err);
      next();
    });
  });
};
This fixes everything, and the behaviour is as expected.

Express app.get given an array

I'm reading code from https://github.com/FrankHassanabad/Oauth2orizeRecipes which demonstrates the use of OAuth2orize, which can be used to implement an OAuth2 authorization server.
The question I'm asking is nothing fancy though. I just have trouble with the basics of Express 3.x.
In app.js:
oauth2 = require('./oauth2')
. . .
app.get('/dialog/authorize', oauth2.authorization);
In Oauth2.js:
exports.authorization = [
  login.ensureLoggedIn(),
  server.authorization(function (clientID, redirectURI, scope, done) {
    db.clients.findByClientId(clientID, function (err, client) {
      if (err) {
        return done(err);
      }
      if (client) {
        client.scope = scope;
      }
      // WARNING: For security purposes, it is highly advisable to check that
      // redirectURI provided by the client matches one registered with
      // the server. For simplicity, this example does not. You have
      // been warned.
      return done(null, client, redirectURI);
    });
  }),
  function (req, res, next) {
    // Render the decision dialog if the client isn't a trusted client
    // TODO Make a mechanism so that if this isn't a trusted client, the user can record that they have consented
    // but also make a mechanism so that if the user revokes access to any of the clients then they will have to
    // re-consent.
    db.clients.findByClientId(req.query.client_id, function(err, client) {
      if (!err && client && client.trustedClient && client.trustedClient === true) {
        // This is how we short call the decision like the dialog below does
        server.decision({loadTransaction: false}, function(req, callback) {
          callback(null, { allow: true });
        })(req, res, next);
      } else {
        res.render('dialog', { transactionID: req.oauth2.transactionID, user: req.user, client: req.oauth2.client });
      }
    });
  }
];
So, is it because app.get() can take an array of middlewares? I'm trying to find the code for app.get() to figure it out, but I can't find it.
EDIT:
I'm on Express 3.6, so according to Infer-on's answer (correct me if I'm wrong): you mean the oauth2.authorization array instead of the module?
app.VERB goes to this._router[method].apply(this._router, arguments);
where arguments is an array-like object with exactly one item, which is the oauth2.authorization array.
Then goes to router/index.js in the function defined by:
methods.forEach(function(method){
  Router.prototype[method] = function(path){
    var args = [method].concat([].slice.call(arguments));
    this.route.apply(this, args);
    return this;
  };
});
Here, what previously was arguments is now path, and then becomes args. So the original array given by oauth2.authorization is still there, as an item inside args, which has a length of 2: the first item is the method name "get" and the second is the array.
this.route is defined in the same file:
Router.prototype.route = function(method, path, callbacks){
  var method = method.toLowerCase()
    , callbacks = utils.flatten([].slice.call(arguments, 2));

  // ensure path was given
  if (!path) throw new Error('Router#' + method + '() requires a path');

  // ensure all callbacks are functions
  callbacks.forEach(function(fn){
    if ('function' == typeof fn) return;
    var type = {}.toString.call(fn);
    var msg = '.' + method + '() requires callback functions but got a ' + type;
    throw new Error(msg);
  });

  // create the route
  debug('defined %s %s', method, path);
  var route = new Route(method, path, callbacks, {
    sensitive: this.caseSensitive,
    strict: this.strict
  });

  // add it
  (this.map[method] = this.map[method] || []).push(route);
  return this;
};
Since there is utils.flatten([].slice.call(arguments, 2)), the array from oauth2.authorization gets flattened, so it's as if the items weren't passed as an array but as normal arguments. (I don't know what the "2" is doing.) The third element of oauth2.authorization is the callback, which is easy to understand. The first is login.ensureLoggedIn(), which is a middleware? The second is server.authorization(), but I'm not entirely sure what it's doing.
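In other words, because the router flattens its callback arguments, handing app.VERB an array of middleware ends up equivalent to listing them one by one. A tiny sketch of that equivalence (the handlers and paths are placeholders, not from the recipe code):
function first(req, res, next) { next(); }
function second(req, res) { res.send('ok'); }

// both registrations produce the same flattened callback chain
app.get('/demo-array', [first, second]);
app.get('/demo-args', first, second);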
For the get method, the application takes the first argument as the route path, then passes the remaining arguments on to the related handler(s):
this._router[method].apply(this._router, arguments);
app.js
app.get('/', routes.index);
index.js
// controller
exports.index = function(req, res){
  res.render('index', { title: 'Express' });
};
application.js
methods.forEach(function(method){
  app[method] = function(path){
    if ('get' == method && 1 == arguments.length) return this.set(path);

    // deprecated
    if (Array.isArray(path)) {
      console.trace('passing an array to app.VERB() is deprecated and will be removed in 4.0');
    }

    // if no router attached yet, attach the router
    if (!this._usedRouter) this.use(this.router);

    // setup route
    this._router[method].apply(this._router, arguments);
    return this;
  };
});
so
app.get('/dialog/authorize', oauth2.authorization);
means the /dialog/authorize route is handled by the authorization middleware exported by the oauth2 module.
EDIT
I'm not sure about the array export; try something like the Implement Authorization Endpoint example:
app.get('/dialog/authorize',
  login.ensureLoggedIn(),
  server.authorization(function (clientID, redirectURI, scope, done) {
    db.clients.findByClientId(clientID, function (err, client) {
      if (err) {
        return done(err);
      }
      if (client) {
        client.scope = scope;
      }
      // WARNING: For security purposes, it is highly advisable to check that
      // redirectURI provided by the client matches one registered with
      // the server. For simplicity, this example does not. You have
      // been warned.
      return done(null, client, redirectURI);
    });
  }),
  function (req, res, next) {
    // Render the decision dialog if the client isn't a trusted client
    // TODO Make a mechanism so that if this isn't a trusted client, the user can record that they have consented
    // but also make a mechanism so that if the user revokes access to any of the clients then they will have to
    // re-consent.
    db.clients.findByClientId(req.query.client_id, function(err, client) {
      if (!err && client && client.trustedClient && client.trustedClient === true) {
        // This is how we short call the decision like the dialog below does
        server.decision({loadTransaction: false}, function(req, callback) {
          callback(null, { allow: true });
        })(req, res, next);
      } else {
        res.render('dialog', { transactionID: req.oauth2.transactionID, user: req.user, client: req.oauth2.client });
      }
    });
  });

How to use node-mysql correctly with Express.js?

I'm wondering how to use the module node-mysql correctly in Node.js (using Express.js). I have a main router with this:
var Post = require('./models/post.js');
app.get('/archives', function (req, res) {
  Post.findArchives(function(posts, err) {
    if (err)
      res.send('404 Not found', 404);
    else
      res.render('archives', { posts: posts });
  });
});
And here's the content of the file post.js:
var mysql = require('mysql');
var dbURL = 'mysql://root@localhost/mydatabase';

exports.findArchives = function(callback) {
  var connection = mysql.createConnection(dbURL);
  connection.query('SELECT * FROM blog_posts_view WHERE status != 0 ORDER BY date DESC', function(err, rows) {
    if (err) throw err;
    callback(rows, err);
    connection.end();
  });
};
How can I improve it? Improve the error handling? Also, there's a handleDisconnect(connection) function on their GitHub (https://github.com/felixge/node-mysql) that I'm not sure how to integrate so that the application will not crash when the database stops responding.
Thanks!
Take a look at the mysql-simple library. It combines node-mysql with a pooling library to create a connection pool, and also includes the code to handle the disconnects.
If you want to make it super easy, you could just use that module.
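If you'd rather stick with node-mysql itself, versions that ship mysql.createPool let you do the same thing without an extra library. A minimal sketch of findArchives rewritten against a pool (the pool options are illustrative; the query and the (rows, err) callback order are kept from the question):
var mysql = require('mysql');

// one pool for the whole process; connections are reused across requests
var pool = mysql.createPool({
  host: 'localhost',
  user: 'root',
  database: 'mydatabase',
  connectionLimit: 10
});

exports.findArchives = function(callback) {
  pool.query('SELECT * FROM blog_posts_view WHERE status != 0 ORDER BY date DESC', function(err, rows) {
    callback(rows, err);
  });
};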

Node.js Mongodb-native driver connection sharing

main.js
var http = require('http');
var UserModel = require('./models/user.js');

var server = http.createServer(function(req, res){
  UserModel.create({}, function(e, o){
    if (e) { console.log(e); } else { console.log(o); }
  });
}).listen(3000);
connections.js
var mongo = require('mongodb');
module.exports = {
  dbMain: new mongo.Db('main', new mongo.Server('127.0.0.1', 27017, { auto_reconnect: true }, {})),
  dbLog: new mongo.Db('log', new mongo.Server('127.0.0.1', 27017, { auto_reconnect: true }, {}))
};
/models/user.js
var mongodb = require('mongodb');
var db = require('./connections.js').dbMain;
module.exports = {
  create: function(newData, callback){
    db.open(function(e, db){
      db.collection('users', function(e, collection){
        collection.insert(newData, callback);
      });
    });
  }
}
When I use the code above, the server crashes the SECOND time a request comes in, because the database connection is still open. So let's add db.close() to our create function:
create: function(newData, callback){
  db.open(function(e, db){
    db.collection('users', function(e, collection){
      collection.insert(newData, function(e, o){
        db.close(); // Voila.
        callback(e, o);
      });
    });
  });
}
At this stage the server CAN still crash because of multiple connections being open; I don't understand why or how this can happen, but it does.
How do I organize my project into models (I don't want to use Mongoose; my validation is done in a different layer, not the model, so Mongoose would be overkill for me)? Also, how do I handle connections in the project?
You could have a library that wraps all of this up nicely. It means that only one connection to the database is opened, and the same (open) connection is returned for subsequent requests. If you are getting 1000+ requests per second, not re-opening the connection on every request is a make-or-break issue.
users.js:
var connections = require('./connections.js');
var serverCache = connections('127.0.0.1', 27017);
module.exports = {
  create: function(newData, callback){
    serverCache('main', 'users', function(e, collection){
      collection.insert(newData, callback);
    })
  }
}
connections.js
var mongo = require('mongodb');
// a mongo connection cache
// pass in host & port
// it returns a function accepting dbName, collectionName & callback
var mongoCache = function(host, port){

  // keep our open connections
  var mongoDatabases = {};

  var ensureDatabase = function(dbName, readyCallback){
    // check if we already have this db connection open
    if(mongoDatabases[dbName]){
      readyCallback(null, mongoDatabases[dbName]);
      return;
    }

    // get the connection
    var server = new mongo.Server(host, port, {auto_reconnect: true});
    // get a handle on the database
    var db = new mongo.Db(dbName, server);

    db.open(function(error, databaseConnection){
      if(error) throw error;

      // add the database to the cache
      mongoDatabases[dbName] = databaseConnection;

      // remove the database from the cache if it closes
      databaseConnection.on('close', function(){
        delete(mongoDatabases[dbName]);
      })

      // return the database connection
      readyCallback(error, databaseConnection);
    })
  }

  var ensureCollection = function(dbName, collectionName, readyCallback){
    ensureDatabase(dbName, function(error, databaseConnection){
      if(error) throw error;

      databaseConnection.createCollection(collectionName, function(error, collection) {
        if(error) throw error;

        // return the collection finally
        readyCallback(error, collection);
      })
    })
  }

  return ensureCollection;
}

module.exports = mongoCache;
I'm currently using a global connection across multiple HTTP requests. In the past I wrote a complex library that created several connections to MongoDB and picked one at random for each request.
Later I found that the native driver can do that for me, which is pretty neat. Currently I'm using a single object, and the driver chooses which connection to send each query to.
var srvOpts = {
  auto_reconnect: true,
  poolSize: 10
};

var conn = new Mongo.Server("localhost", 27017, srvOpts),
    db = new Mongo.Db("dbname", conn, {});

db.open(function (){});
As you can see, this is a great idea. I'm thinking of copying it into the Redis driver that I'm using, but I'm short on time, so I doubt I will do it any time soon.
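Once db.open has completed, request handlers can all share that single db object and let the driver pick one of the pooled connections per operation. A small sketch of how the open callback above could be used (the users collection and port are examples, not from the original answer):
var http = require('http');

db.open(function (err) {
  if (err) throw err;
  http.createServer(function (req, res) {
    // each operation is dispatched over one of the 10 pooled connections
    db.collection('users').find().toArray(function (err, users) {
      res.end(JSON.stringify(err ? [] : users));
    });
  }).listen(3000);
});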
