Why is my callback not working correctly? - javascript

This method runs on the Node server:
const express = require("express");
const app = express();
const fs = require("fs");
const connectDb = require("./config/db");

const __init__ = (local = false) => {
  fs.writeFile(
    "./config/default.json",
    `{
      "mongoURI": ${
        local
          ? `"mongodb://127.0.0.1:27017/test"`
          : `"mongodb+srv://admin:<password>#abc-xxghh.mongodb.net/test?retryWrites=true&w=majority"`
      }
    }`,
    function (err) {
      if (err) {
        return console.log(err);
      }
      connectDb();
    }
  );
};

__init__(true);
The problem is that if mongoURI originally points to 127.0.0.1:27017 and I call __init__(false), Node still tries to connect to 127.0.0.1:27017 when it should be connecting to the +srv URI.
If I run __init__(false) AGAIN, then it connects to the appropriate URI.
Likewise, if I then run __init__(true), it connects to the +srv URI when it should be connecting to the local one, and only if I run __init__(true) again does it connect locally.
What am I doing wrong here? I'm using the callback as I'm supposed to, no?
Edit:
//config/db
// mongoose handles the MongoDB connection
const mongoose = require("mongoose");
// require the config package
const config = require("config");
// read mongoURI from the JSON config file
const db = config.get("mongoURI");

const connectDb = async () => {
  try {
    console.log("connecting to mongodb", db);
    await mongoose.connect(db, {
      useNewUrlParser: true,
      useCreateIndex: true,
      useFindAndModify: false,
      useUnifiedTopology: true
    });
    console.log("Mongo DB connected");
  } catch (err) {
    console.log("unable to connect to mongodb");
    console.log(err.message);
    // exit if failure
    process.exit(1);
  }
};

module.exports = connectDb;
I've even tried doing the following:
.....
console.log("Developing locally:", local);
// require the config package
const config = require("config");
// read mongoURI from the JSON config file
const db = config.get("mongoURI");
connectDb(db);
.....
But it still reads the old value.

The problem is the execution order, since require is synchronous.
The order now is:
const connectDb = require("./config/db");
const config = require("config");
const db = config.get("mongoURI"); // this has the OLD VALUE
fs.writeFile(...
await mongoose.connect(db, { // this is using the OLD REFERENCE
So you need to change your connectDb function like this:
const connectDb = async () => {
  const config = require("config");
  // read mongoURI from the JSON config file
  const db = config.get("mongoURI");
  try {
    console.log("connecting to mongodb", db);
    await mongoose.connect(db, {
      useNewUrlParser: true,
      useCreateIndex: true,
      useFindAndModify: false,
      useUnifiedTopology: true
    });
    console.log("Mongo DB connected");
  } catch (err) {
    console.log("unable to connect to mongodb");
    console.log(err.message);
    // exit if failure
    process.exit(1);
  }
};
Anyway, I don't think this is the nicest way to load config based on the environment, so I would suggest improving it with a factory pattern, as sketched below.
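For example, here is a minimal sketch of that idea; the file name, the makeDbUri helper, and the MONGO_ATLAS_URI environment variable are assumptions, not part of the original code. It picks the URI at call time instead of writing it to default.json first:
// config/uriFactory.js (hypothetical file)
// Returns the right Mongo URI for the requested environment at call time,
// so nothing needs to be written to disk before connecting.
const makeDbUri = (local = false) =>
  local
    ? "mongodb://127.0.0.1:27017/test"
    : process.env.MONGO_ATLAS_URI; // assumed env variable holding the +srv string

module.exports = makeDbUri;

// usage, assuming connectDb is changed to accept the URI as a parameter:
// const makeDbUri = require("./config/uriFactory");
// connectDb(makeDbUri(true));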

Your code for choosing the local vs +srv URL is correct. The problem I can see is the placement of connectDb().
The signature is fs.writeFile(firstArg - URL, secondArg - content, thirdArg - error callback);
in your code connectDb() is placed inside that error callback, when it should go after the callback is closed.

Related

Node server gets stuck at startup when the listen function is inside the MongoDB connectToServer() function

I am trying to connect my MongoDB server with Express, but the server does not start listening when I put the listen call inside connectToServer(). The following snippet is the index.js file.
const express = require("express");
const { connectToServer } = require("./utils/dbConnect");
const usersRoute = require('./routes/users.route.js');

const app = express();
const port = 5000;

connectToServer((err) => {
  app.listen(port, () => {
    console.log({ port });
  });
});

app.use('/users', usersRoute);

app.get("/", (req, res) => {
  res.send("Hello World");
});
Here is the dbConnect.js snippet:
const { MongoClient } = require("mongodb");

const connectionString = "mongodb://localhost:27017";
const client = new MongoClient(connectionString, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

let dbConnection;

module.exports = {
  connectToServer: function (callback) {
    client.connect(function (err, db) {
      if (err || !db) {
        return callback(err);
      }
      dbConnection = db.db("users");
      console.log("Successfully connected to MongoDB.");
      return callback();
    });
  },
  getDb: function () {
    return dbConnection;
  },
};
The server gets stuck at [nodemon] starting node index.js.
I was expecting the server to start running and listening, but it doesn't.
Here is the updated dbConnect.js snippet:
const { MongoClient } = require("mongodb-legacy");

const connectionString = "mongodb://localhost:27017";
const client = new MongoClient(connectionString, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});

let dbConnection;

module.exports = {
  connectToServer: function (callback) {
    client.connect(function (err, db) {
      if (err || !db) {
        return callback(err);
      }
      dbConnection = db.db("users");
      console.log("Successfully connected to MongoDB.");
      return callback();
    });
  },
  getDb: function () {
    return dbConnection;
  },
};
Here the callback form of client.connect() is deprecated since mongodb v5, so I used another package that keeps supporting the legacy callback API, from here:
Legacy-Driver
That package on GitHub says:
The next major release of the driver (v5) will drop support for callbacks. This package will also have a major release at that time to update the dependency requirement to ^5.0.0. Users can expect to be able to upgrade to v5 adopting the changes and features shipped in that version while using this module to maintain any callback code they still need to work on migrating.
Lastly, I changed the MongoClient import to this:
const { MongoClient } = require("mongodb-legacy");
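Alternatively, if you prefer to stay on the current mongodb driver instead of mongodb-legacy, a rough sketch of the same module with the promise-based API might look like this (treat it as an assumption, not a tested drop-in replacement):
const { MongoClient } = require("mongodb");

const connectionString = "mongodb://localhost:27017";
const client = new MongoClient(connectionString);

let dbConnection;

module.exports = {
  // async/await replaces the deprecated connect(callback) form
  connectToServer: async function () {
    await client.connect();
    dbConnection = client.db("users");
    console.log("Successfully connected to MongoDB.");
  },
  getDb: function () {
    return dbConnection;
  },
};
The caller in index.js would then await it, e.g. connectToServer().then(() => app.listen(port, () => console.log({ port }))).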

How to switch between Mongo databases using Mongoose?

What I want to do is to use different databases for different users, for example I have 3 users that would connect to:
www.user1.myfrontend.com
www.user2.myfrontend.com
www.user3.myfrontend.com
Let's suppose that each user wants to display the list of products he has: a GET request is sent to the backend, and from there I connect to that user's database:
mongodb://mongodatabse:secret#server/databaseOfUser1
mongodb://mongodatabse:secret#server/databaseOfUser2
mongodb://mongodatabse:secret#server/databaseOfUser3
What I did so far:
I connect to the database called config at the start of the app:
db.js
const connect = (uri, app, database = "config") => {
  const db = mongoose
    .createConnection(uri, { useNewUrlParser: true, useUnifiedTopology: true });
  db.on('open', () => {
    mongoose.connection.useDb("config");
    app.emit('dbReady');
    return true;
  });
  db.on('error', (err) => {
    console.log(`Couldn't connect to database': ${err.message}`);
    return false;
  });
};
server.js
db.connect(process.env.MONGODB_URL, app);
app.on('dbReady', function () {
  server.listen(PORT, () => {
    console.info(`> Frontend is hosted #: ${process.env.BASE_URL}`);
    console.info(`> Database is hosted #: ${process.env.mongodb_url}`);
    console.info(`> Listening on port ${PORT}`);
  });
});
Then whenever I receive a request I check in the config database for the database to use:
app.js:
const AccessConfig = require('./models/schemas/AccessConfigSchema');
const db = require('./models/db');

app.use(async (req, res, next) => {
  const subdomain = req.subdomains[req.subdomains.length - 1];
  try {
    let database = await AccessConfig.findOne({ subdomain: subdomain });
    if (!database)
      database = await AccessConfig.findOne({ subdomain: "demo" });
    console.log(database);
    db.useDb(database);
    next();
  } catch (e) {
    console.log(e.message);
    return res.status(500).send('Error: ' + e.message);
  }
});
So far it seems like the database isn't changing, and I'm not even sure whether this is the correct implementation, whether too many connections end up open, etc.
I figured out that you can create separate connections:
// DB_URI_USER1=mongodb://mongodatabse:secret#server/databaseOfUser1
// DB_URI_USER2=mongodb://mongodatabse:secret#server/databaseOfUser2
const user1Connection = mongoose.createConnection(process.env.DB_URI_USER1, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
})
const user2Connection = mongoose.createConnection(process.env.DB_URI_USER2, {
  useNewUrlParser: true,
  useUnifiedTopology: true,
})
Then you create a model for each connection:
const User1 = user1Connection.model(...)
const User2 = user2Connection.model(...)
Now in the API you query the desired model.
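To make that concrete, here is a small sketch; the productSchema, the "Product" model name, and the /products route are made up for illustration, while user1Connection and user2Connection are the connections created above:
const mongoose = require("mongoose");

// hypothetical schema, just for illustration
const productSchema = new mongoose.Schema({ name: String, price: Number });

// each connection gets its own model bound to its own database
const Product1 = user1Connection.model("Product", productSchema);
const Product2 = user2Connection.model("Product", productSchema);

// in a route, pick the model that matches the subdomain
app.get("/products", async (req, res) => {
  const subdomain = req.subdomains[req.subdomains.length - 1];
  const Product = subdomain === "user1" ? Product1 : Product2;
  res.json(await Product.find());
});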
Working for me :)

Node.js: use MongoDB in an imported module

I have an app.js that connects to a MongoDB database and displays it with Express. My app.js is starting to get quite long, so I'm trying to do "modular design". I need a "timer.js" that will do some work in my MongoDB on a timer.
I want to import this function from "checking.js", but that file requires mongodb, some constants from dotenv, etc., so I need an import/export relation between them. How do I do it?
App.js (main file)
require('dotenv').config()
const POWER = process.env.POWER;

// express/app were missing from the snippet but are used below
const express = require('express');
const app = express();

var mongoDb = require('mongodb');
var mongoClient = mongoDb.MongoClient;
const serverUrl = process.env.ENV_SERVEUR_MONGO_URL;
const useDB = process.env.ENV_MONGO_DATABASE;

app.get('/top', function (req, res) {
  var resultArray = [];
  mongoClient.connect(serverUrl, function (err, client) {
    if (err) throw err;
    var db = client.db(useDB);
    var cursor = db.collection('top').find().sort({ _id: -1 });
    cursor.forEach(function (doc, err) {
      resultArray.push(doc);
    }, function () {
      client.close();
      res.render('pages/top', { items: resultArray })
    });
  });
});

var checking = require('./checking')
Checking.js
function checkingdatabase() {
  // ERROR require mongodb, variable undefined etc.
  mongoClient.connect(serverUrl, function (err, client) {
    var db = client.db(useDB);
    if (err) throw err;
    // do stuff
  });
}

setInterval(checkingActiveOffer, 5000);

module.exports = Object.assign({ checkingdatabase })
Create a DB.js file and share the MongoDB connection:
const mongoose = require('mongoose');

mongoose.connect(process.env.ENV_SERVEUR_MONGO_URL, { useFindAndModify: false, useUnifiedTopology: true, useNewUrlParser: true })
  .then(function (res) {
    console.log('Successfully connected to: ' + process.env.ENV_SERVEUR_MONGO_URL);
    exports.isReady = true;
    exports.connection = res;
    exports.con = res.connection;
  });
Checking.js
var db = require('./DB');

app.get('/top', function (req, res) {
  db.con.collection('top').find().sort({ _id: -1 }).toArray()
    .then(r => {
      res.render('pages/top', { items: r });
    });
});
You can do it in two different ways:
1 - You pass the values you need as parameters to the Checking function, i.e. you pass your envs and your Mongo client when you invoke it. Not advisable.
2 - You can, and should, declare the things you need inside the Checking file. Your envs and mongoClient can simply be required there, and it will make your code cleaner.
Take a look at the sketch below and see if that approach suits your use case.
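As a rough sketch of option 2 (the env variable names are copied from App.js, everything else is assumed), checking.js can require its own dependencies:
// checking.js
require('dotenv').config();
const { MongoClient } = require('mongodb');

const serverUrl = process.env.ENV_SERVEUR_MONGO_URL;
const useDB = process.env.ENV_MONGO_DATABASE;

function checkingdatabase() {
  MongoClient.connect(serverUrl, function (err, client) {
    if (err) return console.error(err);
    const db = client.db(useDB);
    // do stuff with db here, then close the connection
    client.close();
  });
}

setInterval(checkingdatabase, 5000);

module.exports = { checkingdatabase };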

NeDB not saving to file with Express.js

I am writing a bulk import function for a password manager for myself, and I have come across an issue.
There is an array of passwords to import, and I'm using a forEach() method to iterate through each password.
I call the insert function and everything just stops: no error, no callback, no saving to file. Here is my code:
const express = require('express')
const app = express()
const { encrypt, decrypt } = require('./crypto')
const Datastore = require('nedb')

app.post('/bulkimport', checkAuthenticated, (req, res) => {
  var passwords = JSON.parse(req.body.passwords)
  var dbForUser = new Datastore('./passwords/' + req._passport.session.user + '.db')
  passwords.forEach(password => {
    function doc(code, link, name, password) {
      this.code = code
      this.link = link
      this.name = name
      this.password = password
    }
    var entry = new doc(password.name, password.url, password.username, password.password)
    console.log(entry)
    console.log('before insert') // gets logged
    dbForUser.insert(entry, function(err, doc) {
      console.log('after insert') // doesn't get logged
      if (err) return res.status(500).send()
      console.log(doc)
    })
  });
})
Middlewares I'm using:
app.use(bodyParser.json())
app.use(express.urlencoded({ extended: false }))
app.use(flash())
app.use(session({
  secret: process.env.SESSION_SECRET,
  resave: false,
  saveUninitialized: false
}))
app.use(passport.initialize())
app.use(passport.session())
app.use(methodOverride('_method'))
Thanks for the help!
I see two problems. According to the nedb docs, db.insert takes a plain JS object, and you're calling res.send() inside a forEach (which could result in the "Cannot set headers after they are sent" error). You can also skip parsing the body yourself by using the body-parser module.
const express = require('express')
// added
const bodyParser = require('body-parser')
const app = express()
const { encrypt, decrypt } = require('./crypto')
const Datastore = require('nedb')

// added
app.use(bodyParser.json())

app.post('/bulkimport', checkAuthenticated, (req, res) => {
  // changed
  var passwords = req.body.passwords
  var dbForUser = new Datastore('./passwords/' + req._passport.session.user + '.db')
  // changed: forEach would just keep running, potentially causing errors
  for (let password of passwords) {
    // changed
    var entry = {
      link: password.url,
      password: password.password,
      name: password.username,
      code: password.name,
    }
    // changed to remove res.send from callback
    let e = false
    dbForUser.insert(entry, (err, doc) => {
      if (err) {
        e = true
      }
    })
    // added to exit from route if an error
    if (e) {
      res.status(500).send()
      return
    }
  }
  res.status(201).send()
})
EDIT:
In the chat we also discovered that nedb doesn't actually do anything on disk unless you create the datastore with autoload: true or call loadDatabase, which was happening on one of the databases but not on the one in the Express route (docs). Adding that got things working.
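For reference, a minimal sketch of that change, applied to the datastore created inside the route (same path expression as in the question):
var dbForUser = new Datastore({
  filename: './passwords/' + req._passport.session.user + '.db',
  autoload: true // load the file from disk so inserts are actually persisted
})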

Server file works fine locally, but once deployed (on Heroku), MongoDB returns "false" instead of JSON

When attempting to load JSON data from MongoDB on the deployed version of the website, it returns false. There are no issues when doing it locally. Checking heroku logs in the CLI tells me that in the issues function (located in issues.js) it "can't read property toArray() of undefined".
There are three main files: server.js
'use strict';
const express = require('express'),
  app = express(),
  issues = require('./routes/issues'),
  users = require('./routes/users'),
  bodyParser = require('body-parser');

app.use(bodyParser.json());

// routing for issue management
app.get('/issues', issues.getAllIssues);
app.put('/issues/issues.json', issues.putNewIssue);
app.post('/issues/:issue_id.json', issues.postIssueUpdate);
app.delete('/issues/:issue_id.json', issues.deleteIssue);

// routing for user management
app.get('/users', users.getAllUsers);
app.put('/users/users.json', users.putNewUser);

app.get('/', (req, res) => {
  res.header('Content-Type', 'text/html');
  res.end('<h1>Issue Manager</h1>');
});

let port = process.env.PORT;
if (port == null || port == '') {
  port = 3000;
}
app.listen(port);
... the function from the routing.js that produces the "false" results
const im = require('issue_manager');

exports.getAllIssues = async (req, res) => {
  let allIssues = await im.issues();
  console.log(allIssues);
  console.log('Operation complete.');
  res.json(allIssues);
}
... the module, issues.js, that contains the MongoDb client and processes the request
'use strict';
const MongoClient = require('mongodb').MongoClient;

let db, allIssues;

(async function() {
  const uri = 'censored';
  const dbName = 'IssueManager';
  let client;
  client = MongoClient.connect(uri,
    { useNewUrlParser: true,
      useUnifiedTopology: true })
    .then(data => {
      return data;
    })
    .catch(err => {
      return err;
    });
  try {
    db = (await client).db(dbName);
    allIssues = (await db).collection('Issues').find();
  } catch (e) {
    console.log(e);
  }
})();

exports.issues = async () => {
  try {
    return await allIssues
      .toArray()
      .then((data) => {
        return data;
      })
      .catch((err) => {
        return err;
      });
  } catch (e) {
    console.error(e);
    return false;
  }
}
If your code runs fine in your local environment then I think you should:
Confirm that your deployed application can connect to your MongoDB server.
If it can't, verify that you are using the right environment variables / credentials in your deployed application.
This is a common mistake we all experience when deploying. =)
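For instance, here is a hedged sketch of what issues.js could look like if the URI comes from an environment variable (MONGODB_URI is an assumed name; set it in Heroku's config vars) and a failed connection is logged instead of being silently swallowed:
const { MongoClient } = require('mongodb');

const uri = process.env.MONGODB_URI; // assumed env variable, configured on Heroku
const client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true });

let db;

async function init() {
  try {
    await client.connect();
    db = client.db('IssueManager');
    console.log('Connected to MongoDB');
  } catch (err) {
    // surfacing the error makes "can't read property toArray of undefined" easier to diagnose
    console.error('Mongo connection failed:', err.message);
  }
}
init();

exports.issues = async () => {
  if (!db) return false; // keep the question's "false on failure" behaviour
  return db.collection('Issues').find().toArray();
};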
