Using async.queue to control DB connection requests - javascript

I'm building an app with Node and the MongoDB native driver. I'm working on a db module that I can require and call in other modules, so that I end up using just one connection. The module db.js started out with this code:
var MongoClient = require('mongodb').MongoClient;
var config = require('./config'); // assumed: app config exposing db.port and db.name

var _db = null;

var getDb = module.exports.getDb = function (callback) {
    if (_db) {
        console.log('_db returned');
        return callback(null, _db);
    }
    MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
        if (err) return callback(err);
        console.log('_db created');
        _db = db;
        callback(err, _db);
    });
};
In my other modules that need a db connection I do this:
db.getDb(function (err, connection) {
    // Do something with connection
});
It works fine. But an unpleasant problem is that if my code calls getDb multiple times in a very short time span, I end up with several copies of a connection, e.g. if I do my db.js requires and getDb calls at the very beginning of every module that needs a db connection.
I'm now thinking about controlling the calls to getDb by queuing them, so that only the very first call creates a connection and saves it in _db, and all later calls get the already-created connection _db in return. I believe async.queue will help me with this.
The problem is that I don't understand how to write this with async.queue. The documentation is a little vague, and I haven't found any better examples online. Maybe you can give me some hints. This is what I've got so far:
var dbCalls = async.queue(function (task, callback) {
    if (_db) {
        console.log('_db returned');
        return callback(null, _db);
    }
    MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
        if (err) return callback(err);
        console.log('Connected to mongodb://localhost:' + config.db.port + '/' + config.db.name);
        _db = db;
        callback(null, _db);
    });
}, 1);
// I guess this .push() must be the exposed (exported) API for other modules to get a connection, but how do I return it to them?
dbCalls.push(null, function (err) {
    console.log('finished processing foo');
});
dbCalls.push(null, function (err) {
    console.log('finished processing bar');
});
I don't understand the object passed as the first argument to .push(). What should I use it for? Right now it's null. And how do I pass the connection, and any error, all the way out to the module that made the call?

A quick and dirty solution without async.queue:
var _db = null;
var _err = null;
var _queue = [];
var _pending = false;

var getDb = module.exports.getDb = function (callback) {
    if (_err || _db) {
        console.log('_db returned');
        return callback(_err, _db);
    } else if (_pending) { // already a connect() request pending
        _queue.push(callback);
    } else {
        _pending = true;
        _queue.push(callback);
        MongoClient.connect(..., function (err, db) {
            _err = err;
            _db = db;
            _queue.forEach(function (queuedCallback) {
                queuedCallback(err, db);
            });
            _queue = []; // every queued callback has fired; drop the references
        });
    }
};
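For reference, here is a sketch of the async.queue version the question is aiming at. It assumes the async library's queue semantics: the first argument to push() is the task object handed to the worker (unused here, so an empty object is fine), and the arguments the worker passes to its callback are forwarded to the per-task callback given to push():
var async = require('async');
var _db = null;

// Concurrency 1: only the first queued task actually connects; every
// task behind it finds _db already set and returns immediately.
var dbCalls = async.queue(function (task, done) {
    if (_db) return done(null, _db);
    MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
        if (err) return done(err);
        _db = db;
        done(null, _db);
    });
}, 1);

// Exported API: the task payload is irrelevant, so push an empty object;
// async forwards (err, db) from the worker's callback to this callback.
module.exports.getDb = function (callback) {
    dbCalls.push({}, callback);
};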

Related

Returning queries from couchdb using nano

I've written a simple module to handle my CouchDB CRUD operations using nano; however, I'm having trouble returning the results I query from the Couch database. My code is as follows.
couchdb.js
// Select from couch view
exports.couchSelect = function (_db, document, view) {
    return _db.view(document, view, function (err, body) {
        if (!err) {
            var rows = body.rows; // the rows returned
            console.log(rows);
            return rows;
        } else {
            console.log(err);
        }
    });
};
routes.js
var couchdb = require('./couchdb');
app.get("/orders", function (req, res) {
    var db = couchdb.couchConnect('ezyextension_orders');
    var insert = couchdb.couchSelect(db, 'orders', 'orders');
    console.log(insert);
});
On executing, the output only contains Node HTTP request parameters, without the returned rows. I need help returning the actual JSON rows queried. Thanks.
You're using nano, which uses callbacks to make async calls. Returning _db.view doesn't give you the rows; the data only arrives later, in the callback. I added comments to show what is happening:
exports.couchSelect = function (_db, document, view) {
    _db.view(document, view, function (err, body) {
        // This will be called after the couchSelect request.
        if (!err)
            console.log("Callback : " + body.rows);
    });
};

// When you use it
var couchdb = require('./couchdb');
app.get("/orders", function (req, res) {
    var db = couchdb.couchConnect('ezyextension_orders');
    var insert = couchdb.couchSelect(db, 'orders', 'orders');
    // This is synchronous. This will be called before the callback is called.
    console.log(insert);
});
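To actually get the rows out with callbacks, couchSelect can accept a callback of its own and forward nano's result to it. A minimal sketch (couchConnect is assumed to behave as in the question):
// couchdb.js
exports.couchSelect = function (_db, document, view, callback) {
    _db.view(document, view, function (err, body) {
        if (err) return callback(err);
        callback(null, body.rows); // hand the rows to the caller
    });
};

// routes.js
app.get("/orders", function (req, res) {
    var db = couchdb.couchConnect('ezyextension_orders');
    couchdb.couchSelect(db, 'orders', 'orders', function (err, rows) {
        if (err) return res.status(500).send(err.message);
        res.send(rows);
    });
});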
I decided to use nano-blue, which uses promises instead of callbacks, and the code is as below.
couchdb.js
var nano = require('nano-blue')('http://localhost:5984');
// Select from couch view
exports.couchSelect = function (_db, document, view) {
    return _db.view(document, view);
};
routes.js
app.get("/orders", function (req, res) {
var db = couchdb.couchConnect('ezyextension_orders');
couchdb.couchSelect(db, 'orders', 'all').spread(function (body,header) {
res.send(body.rows);
}).catch(function(err) {
console.log(err.message);
});
});
This works perfectly.

Node.js: Exporting available Mongo database names doesn't work

I am struggling to export available Mongo databases to ./routes/index.js.
Related part of app.js:
var ACCESSIBLE_DATABASES = [];
var Db = require('mongodb').Db,
    MongoClient = require('mongodb').MongoClient,
    Server = require('mongodb').Server,
    assert = require('assert');

var db = new Db('test', new Server('localhost', 27017));
db.open(function (err, db) {
    var existing_databases = [];
    var adminDb = db.admin();
    // List all the available databases
    adminDb.listDatabases(function (err, dbs) {
        assert.equal(null, err);
        assert.ok(dbs.databases.length > 0);
        ACCESSIBLE_DATABASES = dbs.databases;
        db.close();
    });
});

// The code below exports an empty array
module.exports.accessible_databases = ACCESSIBLE_DATABASES;

// A few milliseconds later the array already contains the databases
setTimeout(function () {
    console.log(ACCESSIBLE_DATABASES);
}, 100);
I'd like to avoid code repetition in my index.js file, but I couldn't achieve it. Exporting works neither inside the adminDb.listDatabases callback nor later in the setTimeout callback. (I'd like to use the result later in the app.js file, so migrating the code into the index.js file is not an option.)
I suspect the reason is the asynchronous code execution.
Since this involves async operations, the way you're trying to export won't work. You'll have to pass a callback to get the databases once the operation has finished.
For example:
function getDBs(db, adminDb, callback) {
    // List all the available databases
    adminDb.listDatabases(function (err, dbs) {
        assert.equal(null, err);
        assert.ok(dbs.databases.length > 0);
        db.close();
        callback(null, dbs);
    });
}

function openHandler(callback) {
    return function (err, db) {
        var adminDb = db.admin();
        getDBs(db, adminDb, callback);
    };
}

module.exports.databases = function (callback) {
    db.open(openHandler(callback));
};

// usage in routes/index.js
const dbs = require('./app').databases;
dbs(function (err, availableDBs) {
    console.log(availableDBs);
});
You are exporting the accessible_databases object before it is initialized.
Try something like this:
var DB_OBJ = {};

adminDb.listDatabases(function (err, dbs) {
    assert.equal(null, err);
    assert.ok(dbs.databases.length > 0);
    // export the array here
    DB_OBJ.accessible_databases = ACCESSIBLE_DATABASES = dbs.databases;
    db.close();
});

module.exports = DB_OBJ;

// A few milliseconds later the array already contains the databases
setTimeout(function () {
    console.log(ACCESSIBLE_DATABASES);
}, 100);

How to return JSON from MongoDB in Node.js?

I have a mongodb database called pokemon with a collection called pokemons. Here is my attempt to write a function that will do a find() operation in mongodb:
'use strict';
var MongoClient = require('mongodb').MongoClient;
var assert = require('assert');

// db url
var url = 'mongodb://localhost:27017/pokemon';

exports.getPokemonByName = function (name) {
    MongoClient.connect(url, function (err, db) {
        assert.equal(null, err);
        var cursor = db.collection('pokemons').find({name: name});
        // how to return json?
    });
};
I then call this function in another file:
var express = require('express');
var router = express.Router();

router.get('/pokedex', function (req, res) {
    res.jsonp(db.getPokemonByName('Dratini'));
});
This link is helpful in showing how to log MongoDB data to the console by using an each() method on the cursor object, but I don't know how to return JSON through the getPokemonByName function. I tried defining an empty array in the root scope of the getPokemonByName function and pushing data into it on each iteration of the .each() method shown in that link, but I think I still can't return that array, because the pushing happens after the function has already returned.
BTW, I'm really just doing this for fun and to learn about MongoDB and Node.js, so I don't want to use an ODM like Mongoose to do some of this work for me.
I was able to answer my question with help from Node's native MongoDB driver GitHub page: see here.
In essence, what I did was to define my exported function within the MongoClient's connection callback. For some reason I thought Node exports had to be in the root of the module, but that's not the case. Here's a finished version:
'use strict';
var MongoClient = require('mongodb').MongoClient;
var assert = require('assert');

// db url
var url = 'mongodb://localhost:27017/pokemon';

var findDocuments = function (db, callback) {
    // Get the documents collection
    var collection = db.collection('pokemons');
    // Find some documents
    collection.find({name: 'Dratini'}).toArray(function (err, docs) {
        assert.equal(err, null);
        // assert.equal(2, docs.length);
        console.log("Found the following records");
        callback(docs);
    });
};

// Use connect method to connect to the Server
MongoClient.connect(url, function (err, db) {
    assert.equal(null, err);
    console.log("Connected correctly to server");
    findDocuments(db, function (docs) {
        console.log(docs);
        exports.getPokemonByName = function () {
            return docs;
        };
        db.close();
    });
});
And then in another file:
var express = require('express');
var router = express.Router();

router.get('/pokedex', function (req, res) {
    res.jsonp(db.getPokemonByName());
});
Of course, this solution requires that I hardcode queries, but I'm okay with that for now. Will cross that bridge when I come to it.
Found a simple tweak for this. Let's say the callback to findOne returns result; then you can convert result to a plain JSON object like this:
result = JSON.parse(JSON.stringify(result));
Now you can access the result and its fields simply with the dot operator.
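For instance, inside a findOne callback (a sketch, reusing the collection from the question):
collection.findOne({name: 'Dratini'}, function (err, result) {
    result = JSON.parse(JSON.stringify(result)); // plain JSON object now
    console.log(result.name); // fields readable with the dot operator
});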
This may help:
var cursor = db.collection('pokemons').find({name: name}).toArray(function (err, arr) {
    return arr;
});
You can use a callback on the find function to return the JSON.
Try:
exports.getPokemonByName = function (name, callback) {
    MongoClient.connect(url, function (err, db) {
        assert.equal(null, err);
        db.collection('pokemons').find({name: name}).toArray(function (err, result) {
            if (err) {
                return callback(err, null);
            }
            callback(null, result);
        });
    });
};
router.get('/pokedex', function (req, res) {
    db.getPokemonByName('Dratini', function (err, result) {
        if (result) {
            res.jsonp(result);
        }
    });
});

Postgresql+Node.js queries start failing after a few tries

I have a Node.js app with a PostgreSQL database running on Heroku. For some reason, SELECT queries break and start failing after many successful attempts, with no apparent cause.
Example code:
pg.connect(database_url, function (err, client) {
    if (client) {
        var date = new Date();
        var query = client.query('SELECT * FROM accounts', function (err, result) {
            if (!err) {
                res.send(JSON.stringify(result.rows));
                accounts = JSON.stringify(result.rows);
            } else {
                res.send('failed');
            }
        });
    } else {
        res.send(JSON.stringify(err));
    }
});
The above code works perfectly about 10 times and then out of nowhere seems to break.
How can I remedy this issue?
If it works 10 times and fails on the 11th, then it's probably because the pool of connections has run out.
You should call done in your code after you have finished with the request, to send the database connection back to the pool.
pg.connect(database_url, function (err, client, done) {
    if (client) {
        var date = new Date();
        var query = client.query('SELECT * FROM accounts', function (err, result) {
            if (!err) {
                res.send(JSON.stringify(result.rows));
                accounts = JSON.stringify(result.rows);
            } else {
                res.send('failed');
            }
            done(); // call done to return the connection to the pool
        });
    } else {
        res.send(JSON.stringify(err));
    }
});
See the documentation for connect.

Memory Leak with socket.io + node.js

I appear to have a memory leak in my Node.js application. I built it quickly, and my JavaScript isn't too strong, so this might be easy.
I've done some heap dumps on it, and it's the String object(?) leaking memory, at a rate of about 1 MB every 5 minutes. When I expand String, it's actually String.Array(?).
Heap stack: (screenshot not included here). The application code:
#!/usr/local/bin/node
var port = 8081;
var io = require('socket.io').listen(port),
    sys = require('sys'),
    daemon = require('daemon'),
    mysql = require('mysql-libmysqlclient');

var updateq = "SELECT 1=1";
var countq = "SELECT 2=2";

io.set('log level', 2);

process.on('uncaughtException', function (err) {
    console.log(err);
});

var connections = 0;
var conn = mysql.createConnectionSync();
dbconnect();

io.sockets.on('connection', function (client) {
    connections++;
    client.on('disconnect', function () { connections--; });
});

process.on('exit', function () {
    console.log('Exiting');
    dbdisconnect();
});

function dbdisconnect() {
    conn.closeSync();
}

function dbconnect() {
    conn.connectSync('leet.hacker.org', 'user', 'password');
}

function update() {
    if (connections == 0)
        return;
    conn.query(updateq, function (err, res) {
        if (err) {
            dbdisconnect();
            dbconnect();
            return;
        }
        res.fetchAll(function (err, rows) {
            if (err) {
                throw err;
            }
            io.sockets.json.send(rows);
        });
    });
}

function totals() {
    if (connections == 0)
        return;
    conn.query(countq, function (err, res) {
        if (err) {
            // Chances are that the server has just disconnected, lets try reconnecting
            dbdisconnect();
            dbconnect();
            throw err;
        }
        res.fetchAll(function (err, rows) {
            if (err) {
                throw err;
            }
            io.sockets.json.send(rows);
        });
    });
}

setInterval(update, 250);
setInterval(totals, 1000);
setInterval(function () {
    console.log("Number of connections: " + connections);
}, 1800000);

daemon.daemonize('/var/log/epiclog.log', '/var/run/mything.pid', function (err, pid) {
    // We are now in the daemon process
    if (err) return sys.puts('Error starting daemon: ' + err);
    sys.puts('Daemon started successfully with pid: ' + pid);
});
Current version:
function totals() {
    if (connections > 0) {
        var q = "SELECT query FROM table";
        db.query(q, function (err, results, fields) {
            if (err) {
                console.error(err);
                return false;
            }
            for (var row in results) {
                io.sockets.send("{ ID: '" + results[row].ID + "', event: '" + results[row].event + "', free: '" + results[row].free + "', total: '" + results[row].total + "', state: '" + results[row]$
                row = null;
            }
            results = null;
            fields = null;
            err = null;
            q = null;
        });
    }
}
Still leaking memory, but seemingly only under these conditions:
- From startup, with no clients -> fine
- 1st client connection -> fine
- 2nd client (even with the 1st client disconnecting and reconnecting) -> leaking memory
- Stop all connections -> fine
- 1 new connection (connections = 1) -> leaking memory
Do yourself a favour and use node-mysql; it's a pure JavaScript MySQL client, and it's fast. Other than that, you should be using asynchronous code to stop I/O from blocking while you're working. The async library will help you here; it has waterfall callback passing, among other things. A sketch of the switch follows.
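A minimal sketch of what that switch could look like for the totals query above, assuming the node-mysql (npm "mysql") package's standard createConnection/query API; the host and credentials are the question's own placeholders:
var mysql = require('mysql');

var db = mysql.createConnection({
    host: 'leet.hacker.org',
    user: 'user',
    password: 'password'
});
db.connect();

function totals() {
    if (connections === 0) return;
    // query() is asynchronous: rows arrive in the callback, nothing blocks
    db.query(countq, function (err, rows) {
        if (err) return console.error(err);
        io.sockets.json.send(rows);
    });
}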
As for your memory leak, it probably isn't socket.io. Although I haven't used it in a few months, I have had many thousands of concurrent connections without leaking memory, and my code wasn't the best either.
Two things, however. Firstly, your code is fairly unreadable. I suggest looking into properly formatting your code (I use two spaces for every indentation level, but some people use four). Secondly, printing the number of connections every half an hour seems a little silly, when you could do something like:
setInterval(function () {
    process.stdout.write('Current connections: ' + connections + ' \r');
}, 1000);
The \r causes the cursor to return to the start of the line and overwrite the characters there, replacing the line in place instead of creating a huge amount of scrollback. This will help with debugging if you choose to put debugging details in your logging.
You can also use process.memoryUsage() for quickly checking the memory usage (or how much node thinks you're using).
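For example:
setInterval(function () {
    var mem = process.memoryUsage();
    console.log('heapUsed: ' + mem.heapUsed + ' bytes, rss: ' + mem.rss + ' bytes');
}, 5000);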
Could this be related to the connected clients array not clearing properly when a client disconnects? The array value gets set to NULL rather than being dropped from the array.
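If you do keep your own registry of clients, make sure entries are actually removed on disconnect rather than set to null, so the garbage collector can reclaim them. A minimal sketch (the clients map here is hypothetical, not taken from the code above):
var clients = {};
io.sockets.on('connection', function (client) {
    clients[client.id] = client;
    client.on('disconnect', function () {
        delete clients[client.id]; // drop the reference entirely
    });
});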
