can't obtain mongoose model inside one service from another using senecajs - javascript

I have two Seneca services running on ports 3000 and 3001. I am trying to create a separate Mongoose connection in the service on port 3000, create a Mongoose model there, and obtain it in the service running on port 3001.
connection-service.js:
require('dotenv').config();
console.log("HOST", process.env.DB_PORT);
const seneca = require('seneca');
const plugins = require('./seneca_plugins/plugins');
const service = seneca();
service.use(plugins.user, {
  collection: { UserAccountsCollection: "users" },
  db: 'accounts'
});
service.listen({ port: 3000, pin: 'service:db_connection' });
Plugin code for connection-service.js:
const UserSchema = require('../db/models/user/user');
module.exports = async function (options) {
  const dbConn = await require('./utils/dbConnections')(process.env.DB_HOST, process.env.DB_PORT, options.db);
  this.add('service:db_connection,usage:GetUserAccountsConnection', async function (args, done) {
    const UserModel = await dbConn.model('UserModel', UserSchema, options.collection.UserAccountsCollection);
    console.log(UserModel); // works
    // done(null, { somekey: "someRandom_string_or_object" }); // works
    done(null, { model: UserModel }); // passes an empty object, i.e. null, {}
  });
};
service.js (running as the client):
const service = require('seneca')();
const plugin = require('./seneca_plugins/plugin');
service.client({ port: 3000, pin: 'service:db_connection' });
service.use(plugin);
Plugin code for the client service:
module.exports = function (options) {
  this.act('service:db_connection,usage:GetUserAccountsConnection', function (msg, reply) {
    console.log("I am acting");
    let model = reply.model; // reply is returned as {}
    console.log(model); // prints undefined in the console
  });
  this.add(..... // other code follows
};
However, when I use done(null, {somekey:"someRandom_string_or_object"}) it works, but it does not work when I pass the model I created with done(null, {model:UserModel}).

You can't mix callbacks and async/await. Try https://www.npmjs.com/package/seneca-promisify if you'd like to use async await.
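For reference, here is a rough sketch of the promisified style that link suggests. It assumes seneca-promisify exposes message() for registering async actions and post() for calling them, and it reuses the pattern string from the question; the paths and names are placeholders, and the reply is kept as plain data because it has to cross the network transport between the two services. Treat it as an untested sketch, not a drop-in fix.
// rough sketch, assuming seneca-promisify's message()/post() helpers
const Seneca = require('seneca');
const SenecaPromisify = require('seneca-promisify');
const UserSchema = require('./db/models/user/user');

const service = Seneca().use(SenecaPromisify);

// register the action as an async function: return a value instead of calling done()
service.message('service:db_connection,usage:GetUserAccountsConnection', async function (msg) {
  const dbConn = await require('./utils/dbConnections')(process.env.DB_HOST, process.env.DB_PORT, 'accounts');
  const UserModel = dbConn.model('UserModel', UserSchema, 'users');
  console.log(UserModel); // the model is usable here, inside the service that owns the connection
  return { collection: 'users' }; // reply with plain, serializable data
});

// on the caller's side, post() returns a promise instead of taking a callback
async function run() {
  const reply = await service.post('service:db_connection,usage:GetUserAccountsConnection');
  console.log(reply);
}
run();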

Related

Mongo DB problem - connections accumulation

I have a problem with the approach I use to connect to MongoDB.
I use the following method:
import { Db, MongoClient } from "mongodb";

let cachedConnection: { client: MongoClient; db: Db } | null = null;

export async function connectToDatabase(mongoUri?: string, database?: string) {
  if (!mongoUri) {
    throw new Error(
      "Please define the MONGO_URI environment variable inside .env.local"
    );
  }
  if (!database) {
    throw new Error(
      "Please define the DATABASE environment variable inside .env.local"
    );
  }
  if (cachedConnection) return cachedConnection;
  cachedConnection = await MongoClient.connect(mongoUri, {
    useNewUrlParser: true,
    useUnifiedTopology: true,
  }).then((client) => ({
    client,
    db: client.db(database),
  }));
  return cachedConnection!;
}
Every time I need to connect to MongoDB I do the following:
const { db } = await connectToDatabase(config.URI, config.USERS_DATABASE);
const myUniversity = await db
  .collection(config.MY_COLLECTION)
  .findOne({});
Everything seems OK, so what is the problem?
The problem is that the connections to my DB don't close after I use them. In fact, I thought that my server was stateless, so the connections would end every time I used my DB. But that is not true! They stay alive, and after a few hours of using my app, MongoDB Atlas sends me an email saying that the connection limit is exceeded.
As you can see in this screenshot, the chart keeps growing. That means the connections stay open and accumulate. How do you think I can solve this problem?
Keep in mind that cachedConnection is only reused if the same connection is used again. If I call a different API than the first one, it creates another connection: it doesn't enter the if (cachedConnection) block, but goes forward to the end.
You can try this simple demo, which will allow you to use the same connection throughout the application in different modules. There are three modules: index.js is the starter program, dbaccess.js is where you create and maintain a connection that can be used again and again, and apis.js is where you use the database connection to retrieve data.
index.js:
const express = require('express');
const mongo = require('./dbaccess');
const apis = require('./apis');

const app = express();

const init = async () => {
  await mongo.connect();
  app.listen(3000);
  apis(app, mongo);
};

init();
dbaccess.js:
const { MongoClient } = require('mongodb');

class Mongo {
  constructor() {
    this.client = new MongoClient("mongodb://127.0.0.1:27017/", {
      useNewUrlParser: true,
      useUnifiedTopology: true
    });
  }
  async connect() {
    await this.client.connect();
    console.log('Connected to MongoDB server.');
    this.db = this.client.db('test');
    console.log('Database:', this.db.databaseName);
  }
}

module.exports = new Mongo();
apis.js:
module.exports = function(app, mongo) {
  app.get('/', function(req, res) {
    mongo.db.collection('users').find().limit(1).toArray(function(err, result) {
      res.send('Doc: ' + JSON.stringify(result));
    });
  });
};
Change the URL, database name, and collection name to appropriate values before trying.
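As a small illustrative sketch of the same idea (the file and function names here are made up), any other module can reuse that one client simply by requiring the dbaccess singleton:
// reports.js (hypothetical): reuses the single client created in dbaccess.js
const mongo = require('./dbaccess');

async function countUsers() {
  // index.js has already awaited mongo.connect(), so mongo.db is ready here
  return mongo.db.collection('users').countDocuments();
}

module.exports = { countUsers };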

How do I solve a problem with my DB connection and require?

I have an Express app that was created with the Express generator. I have a standard app.js file that exports app. I also have a standard www file that imports app and is the starting point of the application:
const app = require('../app')
const debug = require('debug')('img-final:server')
const http = require('http')
const Mongo = require('../utils/dbConnection/dbConnection')

const port = normalizePort(process.env.PORT || '3000')
app.set('port', port)

/**
 * Create HTTP server.
 */
const server = http.createServer(app)

/**
 * Listen on provided port, on all network interfaces.
 */
async function startServer() {
  try {
    await Mongo.init()
    debug('Connected correctly to DB')
    server.listen(port)
  } catch (err) {
    debug(err)
  }
}

startServer()

// some more unrelated code.
I also have a utility file for connecting to the DB, dbConnection.js:
const MongoClient = require('mongodb').MongoClient

class Mongo {
  async init() {
    const client = new MongoClient(`mongodb://localhost:27017/img-new`, {
      useNewUrlParser: true,
      useUnifiedTopology: true,
    })
    await client.connect()
    this.db = client.db('img-new')
  }
  getConnection() {
    return this.db
  }
}

module.exports = new Mongo()
My problem is that when I start my app, const app = require('../app') obviously runs first, so wherever I use getConnection() in my route controllers, the connection is undefined at that point, because Mongo.init() runs after const app = require('../app').
I'm trying to understand how to solve this in a sane way. I guess I could move all the requires and the rest of the code inside startServer, after await Mongo.init(), but it seems like there should be a better solution. Thank you.
Edit:
Is this an OK solution?
const debug = require('debug')('img-final:server')
const http = require('http')
const Mongo = require('../utils/dbConnection/dbConnection')

async function startServer() {
  try {
    await Mongo.init()
    const app = require('../app')
    const port = normalizePort(process.env.PORT || '3000')
    app.set('port', port)
    const server = http.createServer(app)
    server.listen(port)
  } catch (err) {
    debug(err)
  }
}

startServer()
I have a solution, but I'm not sure it satisfies your expectations.
In the getConnection method, check whether this.db is undefined. If it is, call the init() method and then return this.db. If not, return this.db directly.
The code looks like this:
async getConnection() {
  if (!this.db) {
    // connection to db is not established yet, we call the `init` method
    await this.init();
  }
  // this.db is defined here, we return the connection
  return this.db;
}
And you don't have to call await Mongo.init() in the startServer() function
The previous answer by Đăng Khoa Đinh points in the right direction. I add a bit of defensive coding to prevent multiple this.init() calls from running at the same time. Note: I did not code against errors while connecting.
const MongoClient = require('mongodb').MongoClient

class Mongo {
  isConnected = null;

  init() {
    // Gerard: we set isConnected to a promise
    this.isConnected = new Promise(async (resolve, reject) => {
      const client = new MongoClient(`mongodb://localhost:27017/img-new`, {
        useNewUrlParser: true,
        useUnifiedTopology: true,
      })
      await client.connect()
      this.db = client.db('img-new')
      resolve();
    });
  }

  async getConnection() {
    if (this.isConnected === null) {
      // connection to db is not established yet, we call the `init` method
      this.init();
    }
    // now either the DB is already connected or a connection is in progress; we wait
    await this.isConnected;
    // this.db is defined here, we return the connection
    return this.db;
  }
}

module.exports = new Mongo()
The caller will then just do
connection = await Mongo.getConnection();
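For example (just a sketch; the route path and collection name are placeholders), a route controller could then look like this:
// routes/images.js (hypothetical)
const express = require('express')
const Mongo = require('../utils/dbConnection/dbConnection')
const router = express.Router()

router.get('/images', async (req, res, next) => {
  try {
    const db = await Mongo.getConnection() // connects lazily on the first call, reused afterwards
    const images = await db.collection('images').find().toArray()
    res.json(images)
  } catch (err) {
    next(err)
  }
})

module.exports = router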

Node JS mssql exporting database connection

I have a hard time understanding why my code doesn't work. I am using the node package mssql and want to have the database pool connection initiated in a separate file:
databaseConnection.js:
const sql = require("mssql/msnodesqlv8");

const config = {
  database: process.env.DB_NAME,
  server: process.env.DB_SERVER,
  driver: "msnodesqlv8",
  options: {
    trustedConnection: true
  }
};

let pool = sql.connect(config);

module.exports = pool;
Then I have my express route file data.js:
const express = require("express");
const router = express.Router();
const db = require("../configs/databaseConnection");

router.get("/dataList", async (req, res) => {
  let allData = await db.request().query("select * from dataList");
  console.log(allData);
  res.render("dataList", { title: "Data list" });
});

module.exports = router;
However, when I start the server and go to the route, I get an error:
(node:13760) UnhandledPromiseRejectionWarning: TypeError: db.request is not a function
The thing is, if I set it up precisely as in this example from the mssql documentation (where everything is done in the route), it works. However, if the database connection is in a separate file, it doesn't work.
I would appreciate any help understanding this.
Regards,
Rokas
sql.connect returns a promise, so once we know that, we can either do a .then(result => ...) or use await. For example, if you want to store the db object at startup for later use, I'd suggest changing the line:
const db = require("../configs/databaseConnection");
to
let db = null;
require("../configs/databaseConnection").then(pool => {
  db = pool;
});
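Alternatively, since the exported value is itself a promise that resolves to the connected pool, you can keep the export as-is and await it inside the route handler. A sketch of data.js in that style:
// data.js sketch: await the exported connection promise inside the handler
const express = require("express");
const router = express.Router();
const poolPromise = require("../configs/databaseConnection");

router.get("/dataList", async (req, res) => {
  const pool = await poolPromise; // resolves once sql.connect(config) has finished
  const allData = await pool.request().query("select * from dataList");
  console.log(allData);
  res.render("dataList", { title: "Data list" });
});

module.exports = router;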

Node.js Async/Await module export

I'm kind of new to module creation and was wondering about module.exports and waiting for async functions (like a Mongo connect function, for example) to complete before exporting the result. The variables get properly defined using async/await in the module, but when I try to log them by requiring the module, they show up as undefined. If someone could point me in the right direction, that'd be great. Here's the code I've got so far:
// module.js
const MongoClient = require('mongodb').MongoClient
const mongo_host = '127.0.0.1'
const mongo_db = 'test'
const mongo_port = '27017';

(async module => {
  var client, db
  var url = `mongodb://${mongo_host}:${mongo_port}/${mongo_db}`
  try {
    // Use connect method to connect to the Server
    client = await MongoClient.connect(url, {
      useNewUrlParser: true
    })
    db = client.db(mongo_db)
  } catch (err) {
    console.error(err)
  } finally {
    // Exporting mongo just to test things
    console.log(client) // logging the client here works; it doesn't show 'undefined' the way test.js does
    module.exports = {
      client,
      db
    }
  }
})(module)
And here's the js that requires the module
// test.js
const {client} = require('./module')
console.log(client) // Logs 'undefined'
I'm fairly familiar with JS and am still actively learning and looking into features like async/await, but yeah... I can't really figure this one out.
You have to export synchronously, so it's impossible to export client and db directly. However, you could export a Promise that resolves to client and db:
module.exports = (async function() {
  const client = await MongoClient.connect(url, {
    useNewUrlParser: true
  });
  const db = client.db(mongo_db);
  return { client, db };
})();
So then you can import it as:
const {client, db} = await require("yourmodule");
(this has to be in an async function itself)
PS: console.error(err) is not a proper error handler; if you can't handle the error, just crash.
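A minimal consumer sketch of that pattern, wrapped in an async IIFE (top-level await would need an ES module or a recent Node version); the collection name here is just an assumption:
// test.js sketch: consume the exported promise inside an async IIFE
(async () => {
  const { client, db } = await require('./module');
  const doc = await db.collection('test').findOne({}); // 'test' is a placeholder collection
  console.log(doc);
  await client.close();
})().catch(console.error);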
The solution provided above by @Jonas Wilms works, but it requires calling require in an async function each time we want to reuse the connection. An alternative way is to use a callback function to return the MongoDB client object.
mongo.js:
const MongoClient = require('mongodb').MongoClient;
const uri = "mongodb+srv://<user>:<pwd>@<host and port>?retryWrites=true";

const mongoClient = async function(cb) {
  const client = await MongoClient.connect(uri, {
    useNewUrlParser: true
  });
  cb(client);
};

module.exports = { mongoClient };
Then we can use the mongoClient method in a different file (an Express route or any other JS file).
app.js:
var client;
const mongo = require('path to mongo.js');

mongo.mongoClient((connection) => {
  client = connection;
});

// declare express app and listen....

// simple post request to store a student
app.post('/', async (req, res, next) => {
  const newStudent = {
    name: req.body.name,
    description: req.body.description,
    studentId: req.body.studentId,
    image: req.body.image
  };
  try {
    await client.db('university').collection('students').insertOne(newStudent);
  } catch (err) {
    console.log(err);
    return res.status(500).json({ error: err });
  }
  return res.status(201).json({ message: 'Student added' });
});

Mongodb + Node: When to close

I am working on a Koa + MongoDB backend. My question is: when should I close the db connection, or does MongoDB manage that? I am not closing any connections right now and it seems fine.
// app.js
const Koa = require('koa')
const database = require('./database')
const app = new Koa()

database
  .connect()
  .then(() => { app.listen(8080) })
  .catch((err) => { console.error(err) })
// ./database.js
const MongoClient = require('mongodb').MongoClient
const Model = require('./model')

class Database {
  async connect() {
    if (!this.db) {
      this.db = await MongoClient.connect('mongodb://localhost:27017')
      this.item = new Model(this.db, 'item_collection')
    }
  }
}

module.exports = new Database()
// ./model.js
class Model {
  constructor(db, collectionName) {
    this.name = collectionName
    this.db = db
  }
  async findAll() {
    const result = await this.db.collection(this.name).find().toArray()
    if (!result) {
      throw new Error('error')
    }
    return result
  }
}

module.exports = Model
I also ran a stress test using vegeta, making API requests to the server at 100 requests/second, and the response time is good. So, am I worrying about premature optimization here? If not, when should I close the db connection?
As long as Koa keeps running (and in your case listening on port 8080), you should not close the db connection.
If you are running scripts that are expected to end (tasks running on cron, etc.), you should manually close the connection when you are finished with all of your db tasks.
You can take a look at this example for express.js (Koa's sister framework).
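For the second case, here is a rough sketch of a short-lived script that closes its connection when done (using the 3.x-style driver API, where connect() resolves to a client; the URI, database, and collection names are placeholders):
// one-off task sketch: open, query, then close explicitly
const MongoClient = require('mongodb').MongoClient

async function run() {
  const client = await MongoClient.connect('mongodb://localhost:27017')
  try {
    const items = await client.db('test').collection('item_collection').find().toArray()
    console.log(items.length)
  } finally {
    await client.close() // release the connection pool once the task is done
  }
}

run().catch(console.error)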
