I am trying to connect multiple MongoDB databases into a single Node.js project. Here is my current structure and issue at hand.
Node Version: v6.12.1
Express.js Version: 4.16.2
Mongoose Version: 4.13.6
Current Structure:
primaryDB.js
var mongoose = require('mongoose');
var configDB = require('./database.js');
//Connect to MongoDB via Mongoose
mongoose.Promise = require('bluebird');
//mongoose.Promise = global.Promise;
mongoose.connect(configDB.url, { useMongoClient: true });
//Check for successful DB connection
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function() {
console.log("Primary DB Successfully Connected..");
});
module.exports = mongoose;
secondaryDB.js
var mongoose = require('mongoose');
mongoose.connect('mongodb://mongodb_address_goes_here:27017/db_name', { useMongoClient: true });
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function() {
console.log("Secondary DB Successfully Connected..");
});
module.exports = mongoose;
Each DB connection is then imported into its respective schema file, and each schema file exports its model.
Issue at hand
When I run my application it starts fine and connects to both DBs successfully. However, I believe mongoose is getting overwritten somewhere, because a findOne() on the primary model might work while the same call on the secondary model fails, or vice versa.
Example:
var async = require('async');
var Primary = require('../../../models/general/primary');
var Secondary = require('../../../models/general/secondary');
function getInfo() {
async.waterfall([
getPrimaryName,
getSecondaryName
], function (err, info) {
});
};
function getPrimaryName(callback){
Primary.findOne({}, function (err, primaryInfo){
if (err) {
console.log("Error" + err);
}
console.log('Primary info is : ' + primaryInfo);
callback(null,primaryInfo);
});
}
function getSecondaryName(primaryInfo, callback) {
console.log(primaryInfo); //Make sure its being passed
Secondary.findOne({}, function (err, secondaryInfo) {
if (err) {
console.log("Error" + err);
}
console.log('Secondary Info is : ' + secondaryInfo);
callback(null, secondaryInfo);
});
}
The problem with the above is that I might get data back from the call to Primary but not Secondary, which again I believe is caused by something being overridden.
Any help appreciated. Sorry about the verbosity.
Use mongoose.createConnection to create your connections, for example:
const conn1 = mongoose.createConnection('first server options')
const conn2 = mongoose.createConnection('second server options')
read more here
http://mongoosejs.com/docs/connections.html#multiple_connections
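To spell that out a bit: mongoose.connect() and mongoose.model() always operate on the single default connection, so when both files call mongoose.connect() the second call ends up replacing the first, which is why one of the two models stops returning data. With createConnection each database gets its own connection object, and you register the models on that connection. A rough sketch using the URIs from the question (the schemas and model names below are placeholders, not the real ones):

const mongoose = require('mongoose');
const configDB = require('./database.js');

// one dedicated connection per database
const primaryConn = mongoose.createConnection(configDB.url, { useMongoClient: true });
const secondaryConn = mongoose.createConnection('mongodb://mongodb_address_goes_here:27017/db_name', { useMongoClient: true });

// register each model on its own connection instead of on the default mongoose instance
// (the schemas here are placeholders)
const Primary = primaryConn.model('Primary', new mongoose.Schema({ name: String }));
const Secondary = secondaryConn.model('Secondary', new mongoose.Schema({ name: String }));

module.exports = { Primary, Secondary };

The schema files would then require this module and use Primary/Secondary directly, rather than requiring mongoose again.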
I am following the documentation and, based on what I read, I am doing it right. I am connecting to my Mongo Atlas server. The server connects and I am able to reach the DB and the collection, yet the DB and the collection are not being assigned to the db object.
I have tried console logging the values and refactoring my logic, and yet still no solution.
let express = require("express");
let assert = require("assert");
let app = express();

// MongoDB Connection Setup
let db = {};
let MongoClient = require("mongodb").MongoClient;
let uri = process.env.MONGODB_CONNECT_URL;
let client = new MongoClient(uri, { useNewUrlParser: true });
client.connect(err => {
assert.strictEqual(null, err);
console.log('Connected Successfully to MongoDB!');
db.client = client.db("cosmosdb");
db.collection = client.db('cosmosdb').collection('cosmos');
console.log("Database Values: ", db) // This actually returns values
return db;
});
console.log('Database: ', db); // Not returning values
app.set('port', process.env.PORT || 3000);
let server = app.listen(app.get('port'), () => {
console.log(`Express server listening on port: `, server.address().port)
});
server.db = db;
When I console.log db I am expecting to see
Database: {
client: // values
collection: // values
}
yet this is what I am getting back
Database: {}
EDITED
Is your uri assigned like below? (mongodb+srv)
let uri = `mongodb+srv://${dbUser}:${dbPwd}@${dbHost}/test?retryWrites=true`;
let client = new MongoClient(uri, { useNewUrlParser: true });
There is a parameter missing on the connect() callback: you have (err), but it should be (err, client). For me it looks as follows:
var assert = require('assert');
var db = {};
var MongoClient = require('mongodb').MongoClient;
//Use connect method to connect to the Server
MongoClient.connect(process.env.MONGODB_CONNECT_URL, { useNewUrlParser: true }, function (err, client) {
assert.equal(null, err);
db.client = client;
db.collection = client.db('newswatcherdb').collection('newswatcher');
console.log("Connected to MongoDB server");
});
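The other thing worth noting is timing: the console.log('Database: ', db) at the bottom and app.listen run synchronously, before the connect callback has fired, so db is still {} at that point even though it gets filled in later. One way around that, sketched below under the assumption that the express app and port setup stay as in the question, is to start the server only once the connection is ready:

var assert = require('assert');
var MongoClient = require('mongodb').MongoClient;
var db = {};

MongoClient.connect(process.env.MONGODB_CONNECT_URL, { useNewUrlParser: true }, function (err, client) {
    assert.strictEqual(null, err);
    db.client = client.db('cosmosdb');
    db.collection = client.db('cosmosdb').collection('cosmos');
    console.log('Connected Successfully to MongoDB!');

    // only start listening once db has been populated
    var server = app.listen(app.get('port'), function () {
        console.log('Express server listening on port:', server.address().port);
    });
    server.db = db;
});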
After using my Node applications for a while I always run into problems with too many open MySQL connections. Currently I am using a MySQL connection pool, which has already improved stability but isn't a final solution.
Is there a better way to connect to the MySQL database and close the connection directly after the query?
General "MySQL Connection":
const mysql = require('mysql');
const config = require('config');
const dbConfig = config.get('Azure.dbConfig');
console.log(`used mysql information: ${dbConfig}`);
const con = mysql.createPool(dbConfig);
con.on('enqueue', () => {
console.log('connection enqued')
});
con.on('connection', function (connection) {
connection.query('SET SESSION auto_increment_increment=1')
});
con.on('release', function (connection) {
console.log('Connection %d released', connection.threadId);
});
module.exports = con;
Example for a MySQL query:
const con = require('../db.js');
const bodyParser = require('body-parser');
module.exports = function (app) {
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({
extended: true
}));
app.get('/dbCompareName', function (req, res) {
const ingredients = [];
con.query('SELECT wm_wine.name, wm_wine.avocandoIngredient, wm_wine.jahrgang, wm_wine.sorte, wm_cart.anzahl, wm_wine.typ FROM wm_wine INNER JOIN wm_cart ON wm_cart.id = wm_wine.id', function (err, result, info) {
if (err) {
console.log('while querying the mysql database, an error occurred');
console.log(err);
if (err.fatal === true) {
con.connect(err => {
if (err) throw err;
console.log('reconnected to mysql database');
});
}
} else {
const transfer = result;
res.json(transfer);
}
});
});
};
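For reference, a pool already manages the connect/close cycle for you: pool.query() checks a connection out of the pool, runs the query and releases the connection again, and dead connections are replaced on the next checkout, so the manual con.connect() reconnect logic shouldn't be necessary. A rough sketch of the same route using only the pool, inside the same module.exports = function (app) wrapper as above (the SQL is shortened here for illustration):

app.get('/dbCompareName', function (req, res) {
    // pool.query() = getConnection() + query() + release() in one call
    con.query('SELECT wm_wine.name, wm_cart.anzahl FROM wm_wine INNER JOIN wm_cart ON wm_cart.id = wm_wine.id', function (err, result) {
        if (err) {
            console.log(err);
            return res.status(500).send('database error');
        }
        res.json(result);
    });
});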
I am new to Node.js and am trying to learn how to connect to a MySQL database from an EJS file. I tried to search for sample code, however the code is not working. Can someone please check it for me? Thank you.
function loaddata() {
    var mysql = require("mysql");
    var con = mysql.createConnection({});
    con.connect(function (err) {
        if (err) {
            console.log('Error connecting to Db');
            return;
        }
        console.log('Connection established');
    });
    con.query('update students set name="sus" where email="smn14@mail.aub.edu"', function (err, rows) {
        if (err) throw err;
        console.log('Data received from Db:\n');
        console.log(rows);
    });
    con.end(function (err) {
        // The connection is terminated gracefully.
        // Ensures all previously enqueued queries are still executed
        // before sending a COM_QUIT packet to the MySQL server.
    });
}
The createConnection call is the problem: it is missing the connection options.
var mysql = require('mysql');
var connection = mysql.createConnection({
    host     : 'localhost',
    user     : 'me',
    password : 'secret',
    database : 'my_db'
});

connection.connect();

connection.query('SELECT 1 + 1 AS solution', function (err, rows, fields) {
    if (err) throw err;
    console.log('The solution is: ', rows[0].solution);
});

connection.end();
From this example, you can learn the following:
Every method you invoke on a connection is queued and executed in sequence.
Closing the connection is done using end(), which makes sure all remaining queries are executed before sending a quit packet to the MySQL server.
Docs
I now understand the server/client process. Makes sense, otherwise you would be able to see the database passwords stored in Client.js. :-)
But there is one way that works for me: the client calls a JavaScript function and sends a message to the server. The server receives this message, runs a database query, and sends the result back to the client via socket.io.
At the client in the file.ejs
<script type='text/javascript'>
let socket = io.connect();
function getSql(userId) {
socket.emit('start-new-sql-querie',{
userId: userId
});
}
socket.on('new-sql-result', function (data){ // listen for the new sql result
console.log(data.userStatus); // foo something with the new data
})
</script>
<button onclick="getSql(1)">Test sql query</button>
database connection.js at server side
const connection = {
connectionLimit: 10,
host: "localhost",
user: "Abc",
password: "1234",
database: "d001",
multipleStatements: true
};
module.exports = connection;
yourapp.js at server side
const express = require('express');
const port = process.env.PORT || 1234;
const app = express();
const server = require('http').createServer(app);
const mysql = require('mysql2');
const config = require('./routes/connection'); // SQL-Connection
const pool = mysql.createPool(config);
let io = require('socket.io')(server);
io.sockets.on('connection', function(socket) {
socket.on('start-new-sql-querie', function(data) { // listen from the clients
let user_id = data.userId;
pool.getConnection((error, connection) => { // check a connection out of the pool
console.log("user_id: ", user_id)
connection.query('SELECT * FROM user WHERE id = ?', [user_id], (err, result) => { // parameterised query avoids SQL injection
socket.emit('new-sql-result', { // send the sql result back to the requesting client
userStatus: result[0].status
})
connection.release(); // return the connection to the pool once the query has finished
})
})
});
})

server.listen(port, () => console.log(`Server listening on port ${port}`)); // start the HTTP server so socket.io can accept connections
When I run my code I get an error.
What I'm trying to do: when someone logs on to my site, it logs the IP and other data into a database. It seems to work, but then I get this error and it exits out of my app:
{ [Error: Trying to open unclosed connection.] state: 1 }
Connection to database has been established
/home/azura/node_modules/mongoose/lib/index.js:343
throw new mongoose.Error.OverwriteModelError(name);
^
OverwriteModelError: Cannot overwrite `dataType` model once compiled.
at Mongoose.model (/home/azura/node_modules/mongoose/lib/index.js:343:13)
at Namespace.<anonymous> (/home/azura/Desktop/dbWrite.js:19:37)
at Namespace.EventEmitter.emit (events.js:95:17)
at Namespace.emit (/home/azura/node_modules/socket.io/lib/namespace.js:205:10)
at /home/azura/node_modules/socket.io/lib/namespace.js:172:14
at process._tickCallback (node.js:415:13)
The code that I'm using is:
var mongoose = require("mongoose");
var express = require("express");
var app = express();
var http = require("http").Server(app);
var io = require("socket.io")(http);
app.get("/", function (req, res) {
res.sendFile(__dirname + "/index.html");
});
io.on("connection", function (socket) {
var ip = socket.request.socket.remoteAddress;
var dataBase = mongoose.connection;
mongoose.connect("mongodb://localhost:27017/NEW_DB1");
dataBase.on("error", console.error);
console.log("Connection to database has been established");
var collectedData = new mongoose.Schema({
ipAddress: String,
time: Number
});
var collectionOfData = mongoose.model("dataType", collectedData);
var Maindata = new collectionOfData({
ipAddress: ip,
time: 100000000000000000
});
Maindata.save(function (err, Maindata) {
if (err) {
return console.error(err);
} else {
console.dir(Maindata);
}
});
});
http.listen(10203, function () {
console.log("Server is up");
});
The index.html file has nothing important in it.
I'm just wondering why I'm getting this error.
What can I do to fix it?
Move this code out of the connection handler. There is no need to create the schema and compile the model every time a new connection event fires.
var dataBase = mongoose.connection;
mongoose.connect("mongodb://localhost:27017/NEW_DB1");
dataBase.on("error", console.error);
console.log("Connection to database has been established");
var collectedData = new mongoose.Schema({
ipAddress: String,
time: Number
});
var collectionOfData = mongoose.model("dataType", collectedData);
io.on("connection", function (socket) {
var ip = socket.request.socket.remoteAddress;
var Maindata = new collectionOfData({
ipAddress: ip,
time: 100000000000000000
});
Maindata.save(function (err, Maindata) {
if (err) {
return console.error(err);
} else {
console.dir(Maindata);
}
});
});
Every time a client connects, the "connection" event is emitted, so
mongoose.connect("mongodb://localhost:27017/NEW_DB1");
will execute many times, and this causes the error.
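If the model lookup really has to stay inside a handler for some reason, mongoose also keeps every compiled model on mongoose.models, so a small guard like the sketch below avoids compiling dataType twice; moving the schema and model out of the handler, as shown above, is still the cleaner fix.

// reuse the already-compiled model if it exists, otherwise compile it once
var collectionOfData = mongoose.models.dataType || mongoose.model("dataType", collectedData);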
I have an iOS app which is sending a JSON packet to a webserver. The webserver code looks like this:
var express = require('express');
var bodyParser = require('body-parser');
var mongoose = require('mongoose');
var app = express();
mongoose.connect('mongodb://localhost/test');
var db = mongoose.connection;
db.on('error', console.error.bind(console, 'connection error:'));
db.once('open', function (callback) {
console.log("MongoDB connection is open.");
});
// Mongoose Schema definition
var Schema = mongoose.Schema;
var LocationSchema = new Schema({
X: Number,
Y: Number,
Orientation: Number,
UserID: String,
Time: String
});
// Mongoose Model definition
var LocationsCollection = mongoose.model('locations', LocationSchema);
// create application/json parser
var jsonParser = bodyParser.json();
// URL management
app.get('/', function (req, res) {
res.sendFile(__dirname + '/index.html');
});
app.post('/update', jsonParser, function (req, res) {
if (!req.body) return res.sendStatus(400);
else {
console.log(req.body);
}
});
// Start the server
var server = app.listen(3000, function () {
var host = server.address().address
var port = server.address().port
console.log('App listening at %s:%s',host, port)
});
The key part is the app.post method, which processes the incoming HTTP request sent from my iOS app. At the moment the method just prints req.body to the console, and the output looks like this:
{
datapoint_1:
{ timestamp: '2015-02-06T13:02:40:361Z',
x: 0.6164286615466197,
y: -0.6234909703424794,
id: 'B296DF8B-6489-420A-97B4-6F0F48052758',
orientation: 271.3345946652066 },
datapoint_2:
{ timestamp: '2015-02-06T13:02:40:961Z',
x: 0.6164286615466197,
y: -0.6234909703424794,
id: 'B296DF8B-6489-420A-97B4-6F0F48052758',
orientation: 273.6719055175781 }
}
So, you can see the request is a nested JSON object. Ideally, I'd like to loop through the request objects (i.e. the datapoints) and insert them into the MongoDB database (via mongoose). However, I can't figure out how to loop over req.body or how to parse the nested JSON so that it matches the mongoose schema. Can anyone provide some guidance on how to insert these datapoints into the database?
Set body-parser's extended property to true to allow parsing nested objects.
var express = require('express');
var app = express()
var bodyParser = require('body-parser');
app.use(bodyParser.urlencoded({
extended: true
}));
Answering my own question. But, after figuring out how to access the key/value pairs inside the nested JSON object... it became relatively easy to figure out the rest. The updated app.post function now looks like this:
app.post('/update', jsonParser, function (req, res) {
if (!req.body) return res.sendStatus(400);
else {
for(var datapoint in req.body){
//create new instance of LocationCollection document
var point = new LocationsCollection({
X:Number(req.body[datapoint]["x"]),
Y:Number(req.body[datapoint]["y"]),
Orientation:Number(req.body[datapoint]["orientation"]),
Time:req.body[datapoint]["timestamp"],
UserID:req.body[datapoint]["id"]
});
//insert the newly constructed document into the database
point.save(function(err, point){
if(err) return console.error(err);
else console.dir(point);
});
}
res.sendStatus(200); // acknowledge the request once the inserts have been queued
}
});
I can test if this worked by putting the following method inside the callback function once the mongodb connection is first established:
//Find all location points and print to the console.
console.log("Searching for all documents in Location Points Collection");
LocationsCollection.find(function(err,data){
if(err) console.error(err);
else console.dir(data);
});
This will print any documents that have been previously added to the database. Hopefully this helps.
Try something like this.
var express = require('express');
var app = express();
var bodyParser = require('body-parser');
app.use(bodyParser.json({limit:1024*1024, verify: function(req, res, buf){
try {
JSON.parse(buf);
} catch(e) {
res.send({
error: 'BROKEN_JSON'
});
}
}}));
It should be a simple for (var key in obj) loop:
app.post('/update', jsonParser, function (req, res) {
    var locationObject = req.body,
        insertObjects = [],
        key;
    for (key in locationObject) { // loop through each object and add it to our array of objects to insert
        insertObjects.push(locationObject[key]);
    }
    if (!insertObjects.length) { // if we don't have any objects to insert we still return a 200, we just don't insert anything
        return res.status(200).send({
            success: true,
            message: 'Nothing inserted, 0 locations in POST body',
            count: 0
        });
    }
    LocationsCollection.create(insertObjects, function (err, docs) {
        if (err) {
            return res.status(400).send({
                success: false,
                message: err.message
            });
        }
        // we have successfully inserted our objects. let's tell the client.
        res.status(200).send({
            success: true,
            message: 'successfully inserted locations',
            count: insertObjects.length
        });
    });
});
Mongo allows for inserting multiple documents with a single callback, which makes this a lot easier.
This also checks the schema to ensure only proper documents are created.