Asynchronously load NPM module - javascript

I'm trying to create a module that connects to my Database (couchDB using Cradle). In the end, the module exports the 'db' variable.
Here is the code:
var cradle = require('cradle'),
    config = require('./config.js');

var db = new(cradle.Connection)(config.couchURL, config.couchPort, {
    auth: {
        username: config.couchUsername,
        password: config.couchPassword
    },
    cache: true,
    retries: 3,
    retryTimeout: 30 * 1000
}).database('goblin'); //database name

//Check if DB exists
db.exists(function (err, exists) {
    if (err && exists) {
        console.log("There has been an error finding your CouchDB. Please make sure you have it installed and properly pointed to in '/lib/config.js'.");
        console.log(err);
        process.exit();
    } else if (!exists) {
        db.create();
        console.log("Welcome! New database created.");
    } else {
        console.log("Talking to CouchDB at " + config.couchURL + " on port " + config.couchPort);
    }
});

module.exports = db;
The problem is that the db.exists call is async, so I think the module exports the db variable before the check is done, which affects the rest of the system.
It's included in the rest of the app the normal way:
var db = require('./couchdb.js');
Is there a way to prevent this from happening, or any best practices to tackle a problem like this without having a giant nested callback?
For reference, you can see the entire application here (https://github.com/maned/goblin), and the bug filed for the project here (https://github.com/maned/goblin/issues/36).

Embrace the async style. Instead of exporting db from the module, export an async function like this:
module.exports = {
    getDb: function (callback) {
        db.exists(function (err, exists) {
            if (exists && !err) {
                callback(db);
            } else {
                /* complain */
            }
        });
    }
};
Now the application can just require('mymodule').getDb(appReady) where appReady accepts a db object that is known to be valid and usable.
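For example, a minimal sketch of the application side (the document id here is hypothetical):

require('./couchdb.js').getDb(function appReady(db) {
    // db is known to be valid and usable at this point
    db.get('some-doc-id', function (err, doc) {
        if (err) console.log(err);
        else console.log(doc);
    });
});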

Related

MongoClient's findOne() never resolves on Electron, even when collection is populated

I'm working on a web app with Electron. I successfully connected to a MongoDB Atlas database and I'm able to send information to it; however, I seem to be unable to retrieve it. The first snippet of code that I included is how I connected to the database.
MongoClient.connect(URI, (err, client) => {
    if (err) {
        console.log("Something unexpected happened connecting to MongoDB Atlas...");
    }
    console.log("Connected to MongoDB Atlas...");
    currentDatabase = client.db('JukeBox-Jam-DB'); /* currentDatabase contains a Db */
});
Then, this second snippet is how I've been writing to the database, which seems to work perfectly fine.
ipc.on('addUserToDatabase', (event, currentUser) => {
    const myOptions = {
        type: 'info',
        buttons: ['Continue'],
        defaultId: 0,
        title: 'Success',
        message: 'Your account has been created.'
    };
    dialog.showMessageBox(mainWindow, myOptions);
    currentCollection = currentDatabase.collection('UsersInformation');
    currentCollection.insertOne(currentUser);
});
Lastly, this is the code that I've been trying to use to retrieve information from the database. I don't see what mistake I could be making such that writing works but retrieving doesn't. From my understanding, findOne() called without parameters should simply return a Promise that resolves to the first entry matching the query passed to it. If no query is provided, it resolves to the item that was put into the database first, and if no entry matches the query, it resolves to null. Any ideas why this isn't working?
ipc.on('checkUsernameRegistration', (event) => {
    currentCollection = currentDatabase.collection('UsersInformation');
    let myDocument = currentCollection.findOne(); /* I don't understand why this isn't working! */
    console.log(myDocument); /* This prints Promise { <pending> } */
    if (myDocument !== null) { /* If myDocument is not null, there is already someone with that username in the DB. */
    }
});
Thanks to everyone that is attempting to help me! I've been stuck in this for several hours now.
Try using async/await:

ipc.on('checkUsernameRegistration', async (event) => {
    currentCollection = currentDatabase.collection('UsersInformation');
    let myDocument = await currentCollection.findOne({ _id: value });
    if (myDocument !== null) {
        console.log(myDocument);
    }
});
Or pass a callback, like this:

currentCollection.findOne({ country: 'Croatia' }, function (err, doc) {
    if (err) console.error(err);
    console.log(doc);
});
This happens because findOne() returns a promise rather than the document itself, so you have to either await it or give it a callback. I also recommend studying the difference between async and sync code in Node.js, just to understand where a callback should be passed to a function and where you can simply write await Model.method({ options }).

How to extend pg-promise interface with function

I have a node.js module pg-promise instantiated as follows.
const pgp = require('pg-promise')();

// Database connection details;
const cn = {
    host: 'localhost', // 'localhost' is the default;
    ...
}

// Create db connection and verify it
var db = pgp(process.env.DATABASE_URL || cn);
db.one('Select version()')
    .then(data => {
        log.info('Connected: ', data);
    })
    .catch(error => {
        log.error("Error connecting to db", error);
    })

// extension methods
db.findById = function (table, id) {
    log.debug('read ', table, id);
    return db.one('Select * from ' + table + ' where id = $1', id);
}

module.exports = db;
The db object is an instance of the interface type pgPromise.IDatabase<{}, pg.IClient>.
I would like to be able to call the functions provided by this lib along with my own functions:
const db = require('../db');

db.any('Select query..')
    .then(data => { res.send(data); })
    .catch(err => { log.error(err); });

db.findById('users', 1)
    .then(data => { res.send(data); })
    .catch(err => { log.error(err); });
But when I run it I get the error
TypeError: db.findById is not a function
I tried this too but with the same effect.
module.exports = db;
module.exports.findById = function()...;
The only solution I could come up with was this:

module.exports = {
    db: db,
    findById: function () {
        ...
    }
}

But it is ugly to use in other modules, as I always have to ask specifically for the db property.
From the author of pg-promise.
The database protocol in pg-promise is extendable, supporting event extend, which lets you extend the protocol on all levels. You need this level of automation because, when it comes to the essential Tasks and Transactions, which encapsulate the allocated connection, the protocol becomes dynamic, so you need a special provision to make the protocol extension work automatically, which is exactly what event extend does.
In order to understand it better, I wrote pg-promise-demo to show how to do it correctly, plus some other high-level stuff that comes useful most of the time.
pg-promise seems to use an annoying pattern where it freezes every object and makes every property read-only, so you'll be unable to simply add properties to it manually like you're attempting. The library supports extensions through the extend property of initOptions, like this:
const initOptions = {
    extend(obj, dc) {
        obj.findById = function () {
            ...
        }
        // add other extension methods or properties here
    }
};

const pgp = require('pg-promise')(initOptions);
// now any databases created with pgp will contain those extension methods
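With that in place, the extension is available directly on any database object, so your module can keep exporting db on its own. A minimal sketch, reusing the connection setup and log object from the question:

const db = pgp(process.env.DATABASE_URL || cn);

db.findById('users', 1)
    .then(data => { log.debug(data); })
    .catch(err => { log.error(err); });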
Alternatively, you can define a Proxy over your export object that defers either to the db or to your own custom functions:

const extension = {
    findById: function () {
        ...
    },
    // other functions
};

module.exports = new Proxy(extension, {
    get(target, name) {
        if (db[name] !== undefined) return db[name];
        return target[name];
    }
});
But you should prefer the natively supported way to do this using initOptions.

Adding multiple BigQuery JSON credential files in Node project

I've been working on a Node project that involves fetching some data from BigQuery. Everything has been fine so far; I have my credential.json file (from BigQuery) and the project works as expected.
However, I want to implement a new feature in the project and this would involve fetching another set of data from BigQuery. I have an entirely different credential.json file for this new dataset. My project seems to recognize only the initial credential.json file I had (I named them differently though).
Here's a snippet of how I linked my first credential.json file:
function createCredentials() {
    try {
        const encodedCredentials = process.env.GOOGLE_AUTH_KEY;
        if (typeof encodedCredentials === 'string' && encodedCredentials.length > 0) {
            const google_auth = atob(encodedCredentials);
            if (!fs.existsSync('credentials.json')) {
                fs.writeFile("credentials.json", google_auth, function (err, google_auth) {
                    if (err) console.log(err);
                    console.log("Successfully Written to File.");
                });
            }
        }
    }
    catch (error) {
        logger.warn(`Ensure that the environment variable for GOOGLE_AUTH_KEY is set correctly; the full error is given here: ${error.message}`)
        process.kill(process.pid, 'SIGTERM')
    }
}
Is there a way to fuse my two credential.json files together? If not, how can I separately declare which credential.json file to use?
If not, how can I separately declare which credential.json file to use?
What I would do is create a function that acts as the single exit point to BigQuery, and pass it an identifier saying which credential to generate. That credential is then used when calling BigQuery.
The code below assumes you changed this:

function createCredentials() {
    try {
        const encodedCredentials = process.env.GOOGLE_AUTH_KEY;

to this:

function createCredentials(auth) {
    try {
        const encodedCredentials = auth;
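For completeness, a hypothetical sketch of the fully modified function. Note that for the keyFilename usage below to work, the function has to return the path of the file it writes; the id parameter and the per-identifier file name are assumptions, not part of the original code, and logger is reused from the question:

const fs = require('fs');

function createCredentials(auth, id) {
    try {
        const fileName = `credentials-${id}.json`; // one file per credential set (hypothetical naming)
        if (typeof auth === 'string' && auth.length > 0) {
            // Buffer.from(...) is the Node equivalent of the original's atob()
            const googleAuth = Buffer.from(auth, 'base64').toString('utf8');
            if (!fs.existsSync(fileName)) {
                fs.writeFileSync(fileName, googleAuth);
            }
        }
        return fileName; // so it can be passed as BigQuery's keyFilename
    } catch (error) {
        logger.warn(`Ensure that the credentials are set correctly; the full error is given here: ${error.message}`);
        process.kill(process.pid, 'SIGTERM');
    }
}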
And you can use it like this:

import BigQuery from '@google-cloud/bigquery';
import { GoogApi } from "../apiManager"; // Private code to get Token from client DB

if (!global._babelPolyfill) {
    var a = require("babel-polyfill");
}

describe('Check routing', async () => {
    it('Test stack ', async (done, auth) => {
        // Fetch client Auth from local Database
        // Replace the 2 values below with real values
        const tableName = "myTest";
        const dataset = "myDataset";
        try {
            const bigquery = new BigQuery({
                projectId: `myProject`,
                keyFilename: this.createCredentials(auth) // createCredentials must return the credentials file path
            });
            await bigquery.createDataset(dataset)
                .then(args => {
                    console.log(`Create dataset, result is: ${args}`);
                })
                .catch(err => {
                    console.log(`Error in the process: ${err.message}`);
                });
        } catch (err) {
            console.log("err", err);
        }
    });
});
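With that shape, switching between your two datasets is just a matter of passing the right credential to the same function; the environment variable names here are hypothetical:

const clientA = new BigQuery({
    projectId: 'myProject',
    keyFilename: createCredentials(process.env.GOOGLE_AUTH_KEY_A, 'a')
});
const clientB = new BigQuery({
    projectId: 'myProject',
    keyFilename: createCredentials(process.env.GOOGLE_AUTH_KEY_B, 'b')
});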

How does this JavaScript object modification work? I don't understand the scope; it doesn't persist

I'm using Express, Node.js and a Mongo DB. I've simplified this code somewhat from my actual code to try to cut out the noise, I hope it makes sense.
My basic problem is that I'm modifying an object and I am expecting the data change to persist across client-server calls, but it isn't. The comments explain more.
// server side routes.js
var LM = require('./modules/login-manager'); // database accessing functions
var DM = require('./modules/data-manager'); // database accessing functions

module.exports = function (app) {
    app.post('/', function (req, res) {
        // LM.manualLogin does a MongoDB call that looks fine, returns the expected data
        LM.manualLogin(req.body['user'], req.body['pass'], function (e, o) {
            if (!o) {
                res.status(400).send(e);
            } else {
                req.session.user = o;
                // calculateRank adds arbitrary fields to the user object;
                // this change persists to GET /dashboard
                calculateRank(req.session.user);
                // DM.getListOfData() does a MongoDB call that looks fine
                // and returns the expected data in o
                DM.getListOfData(function (e, o) {
                    if (!o) {
                        res.status(400).send(e);
                    } else {
                        // availableData doesn't already exist. This assignment
                        // operation looks good in the debugger, but "availableData"
                        // is NOT in the req.session when we get to GET /dashboard
                        req.session.availableData = o;
                    }
                });
                res.status(200).send(o);
            }
        });
    });
    app.get('/dashboard', function (req, res) {
        if (req.session.user == null) {
            res.redirect('/');
        } else {
            // "availableData" is not there!
            // but changes to req.session.user are
            res.render('dashboard', {
                user: req.session.user,
                data: req.session.availableData
            });
        }
    });
}
// client side
$('#login').ajaxForm({
    beforeSubmit: function (formData, jqForm, options) {
        formData.push({
            name: 'remember-me',
            value: $('.button-rememember-me-glyph').hasClass('glyphicon-ok')
        });
        return true;
    },
    success: function (responseText, status, xhr, $form) {
        if (status == 'success') window.location.href = '/dashboard';
    },
    error: function (e) {
        // login error
    }
});
So I click my login button, which drives the client-side ajax function. Server side, I get the POST / and when that returns, the client side redirects to /dashboard. The problem is that server side, in GET /dashboard, some of my data is missing. The thing I really don't understand is that the data set by req.session.user = o; and calculateRank(req.session.user) persists, while the data set by req.session.availableData = o; doesn't; in GET /dashboard it is as if that operation never happened (but the other two did). When I debug the assignment operation it looks fine and I can see the data in the req object.
Can anyone help explain this to me please? Thanks!
You're sending the response, res.status(200).send(o);, before req.session.availableData = o; has had time to run, since it's an async operation.
Sending the response inside the getListOfData callback would fix your problem.
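Applied to the route above, that looks roughly like this (the inner callback's parameter is renamed so it doesn't shadow the user object):

DM.getListOfData(function (e, data) {
    if (!data) {
        res.status(400).send(e);
    } else {
        req.session.availableData = data;
        // respond only once the session has actually been populated
        res.status(200).send(req.session.user);
    }
});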

Handling timeouts with Node.js and mongodb

I am currently testing how some code stands up against the following scenario:
Node.js application is started and successfully establishes a connection to mongodb
After it has successfully set up a connection, the mongodb server dies and all subsequent requests fail
To do this I have got the following code, which makes use of the official driver (found here: https://github.com/mongodb/node-mongodb-native):

MongoClient.connect('mongodb://localhost:27017/testdb', function (err, db) {
    app.get('/test', function (req, res) {
        db.collection('users', function (err, collection) {
            console.log(err);
            if (err) {
                // ## POINT 1 ##
                // Handle the error
            } else {
                collection.find({ 'username': username }, { timeout: true }).toArray(function (err, items) {
                    console.log(err);
                    if (err) {
                        // ## POINT 2 ##
                        // Handle the error
                    } else {
                        if (items.length > 0) {
                            // Do some stuff with the document that was found
                        } else {
                            // Handle not finding the document
                        }
                    }
                });
            }
        });
    });
});
As the mongodb server is no longer running when the request is handled, I had assumed that at one of the points labelled ## POINT 1 ## or ## POINT 2 ## it would return an error indicating a timeout; this, however, isn't the case.
I have tried a number of different settings (including the one you can see here that explicitly allows the cursor to time out), but I cannot seem to enable it in any way. In every configuration I've tried, Node.js simply keeps waiting for the find() operation to call back, and it never does.
If I start the Node.js application before running mongodb, it catches the error in the connect callback fine, but if the connection dies after that it doesn't seem to handle it in any way.
Is there a setting I am missing or is there no way to detect connections being terminated after they've been established?
Edit: just to be clear, the username variable used in the find method is actually declared in my full code; the code in this post is a cut-down version to illustrate the structure and error checking.
UPD:
Based on this post, it looks like they've deployed a fix that does the same as what we do here. Not sure if this is already in npm (15.10.13): https://github.com/mongodb/node-mongodb-native/issues/1092#ref-commit-2667d13
After some investigation I've managed to understand what is going on there:
Every time you call any method that deals with the database (find, update, insert, etc.), it creates a cursor that has its own ID and registers itself with the Db's EventEmitter to be called back later. In the meantime it registers itself in the _notReplied object within the same CallBackStore.
But once the connection is closed, I couldn't locate anything that would iterate through the _notReplied cursors and trigger them with errors, nor any logic with timers (it still might be somewhere in there). So I've written a small workaround that force-triggers the cursors with an error when the Db emits a close event:
new mongodb.Db('testdb', new mongodb.Server('localhost', 27017, { }), { safe: true }).open(function (err, db) {
    if (!err) {
        db.on('close', function () {
            if (this._callBackStore) {
                for (var key in this._callBackStore._notReplied) {
                    this._callHandler(key, null, 'Connection Closed!');
                }
            }
        });
        // ...
    } else {
        console.log(err);
    }
});
I recommend using the first approach instead of MongoClient. There are a few reasons: for example, when you close the connection and then call .find, it will properly trigger the error in the callback, while with MongoClient it won't.
If you are using MongoClient:
MongoClient.connect('mongodb://localhost:27017/testdb', function (err, db) {
    if (!err) {
        db.on('close', function () {
            if (this._callBackStore) {
                for (var key in this._callBackStore._notReplied) {
                    this._callHandler(key, null, 'Connection Closed!');
                }
            }
        });
        // ...
    } else {
        console.log(err);
    }
});
What will this do? Once the connection is closed, it will iterate through all _notReplied cursors and trigger their callbacks with the error Connection Closed!.
Test case:
items.find({ }).toArray(function (err, data) {
    if (!err) {
        console.log('Items found successfully');
    } else {
        console.log(err);
    }
});
db.close();
That will force-close the database connection and trigger the close event you handled earlier, making sure the cursor is closed.
UPD:
I've added an issue on GitHub: https://github.com/mongodb/node-mongodb-native/issues/1092; we'll see what they say regarding this.
I had the same problem and found this page through Google.
But the accepted answer didn't resolve the problem; as with you, this._callBackStore wasn't usable.
Instead I tried wrapping Mongo, and that seems to work fine:
var MongoClient = require('mongodb').MongoClient;
var mongo = {};

mongo.init = function () {
    MongoClient.connect('mongodb://localhost:27017/testdb', function (err, db) {
        if (err) {
            mongo.DB = '';
        } else {
            mongo.DB = db;
            // register the handlers only when we actually have a db object
            db.on('close', function () {
                mongo.DB = '';
            });
            db.on('reconnect', function () {
                mongo.DB = db;
            });
        }
    });
}

mongo.getdb = function (callback) {
    if (mongo.DB) {
        callback(null, mongo.DB);
    } else {
        callback('can not connect to db', null);
    }
}

module.exports = mongo;
First start the server and call init(); then you can require it and use it:
mongo.getdb(function (err, db) {
    if (err) {
        console.log(err);
    } else {
        db.collection('user').find({ 'xxx': 'xxx' }).toArray(function (err, items) {
            console.log(items);
        });
    }
});
After some further investigation, it seems that you can't specify "offline" timeouts for a scenario like the one outlined above. The only timeout that can be specified is one which tells the server to time out the cursor after 10 minutes of inactivity; however, since the connection to the server is down in the scenario above, this does not work.
For reference, I found the information here: https://github.com/mongodb/node-mongodb-native/issues/987#issuecomment-18915263, from someone I believe to be one of the main contributors to the project.
I'm building an API with Hapi and MongoDB (without mongoose). Features:
Start responding to API request only if mongo db is available
Stop responding if mongo dies during cycle
Re-start when mongo available again
Keep single connection for all requests
Combining some ideas from other answers and this post https://productbuilder.wordpress.com/2013/09/06/using-a-single-global-db-connection-in-node-js/ my approach is this:
server.js
Utilities.initializeDb(() => {
    server.start((err) => {
        if (err) throw err;
        console.log('Server running at:', server.info.uri);
    });
}, () => {
    server.stop((err) => {
        if (err) throw err;
        console.log('Server stopped');
    });
});
Utilities.js
"use strict";
const MongoClient = require('mongodb').MongoClient;
const MongoUrl = 'mongodb://localhost:27017/db';
export const Utilities = {
initializeDb: (next, onCrash) => {
const ConnectToDatabase = (params) => {
MongoClient.connect(MongoUrl, (err, db) => {
if (err !== null) {
console.log('#t4y4542te Can not connect to mongo db service. Retry in 2 seconds. Try #' + params.retry);
console.error(err);
setTimeout(() => {
ConnectToDatabase({retry: params.retry + 1});
}, 2000);
} else {
db.on('close', () => {
onCrash();
console.log('#21df24sf db crashed!');
ConnectToDatabase({retry: 0});
});
global.db = global.db || db;
next();
}
});
};
ConnectToDatabase({retry: 0});
}
};
I'm exporting the db connection to the global space. It doesn't feel like the best solution, but I've had projects where the db connection was passed as a parameter to all modules, and that was worse. Maybe there should be some modular approach where you import the db connection where you need it, but in my situation I need it almost everywhere, so I would have to write that include statement in most files. This API is pointless without a connection to the db, so I think this might be the best solution, even though I'm against having something floating magically in the global space.
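For what it's worth, a minimal sketch of that modular alternative: connect once, cache the promise, and have each module require this file instead of reading a global (this assumes a driver version, like the 2.x one used above, where connect() resolves to the db handle):

// db.js
const MongoClient = require('mongodb').MongoClient;
const MongoUrl = 'mongodb://localhost:27017/db';

let dbPromise = null;

module.exports.getDb = () => {
    // lazily connect on first use, then reuse the same connection
    if (!dbPromise) {
        dbPromise = MongoClient.connect(MongoUrl);
    }
    return dbPromise;
};

// elsewhere:
// require('./db').getDb().then(db => db.collection('users').find().toArray());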
