Is it okay to include my connection in all requests in Node.js? - javascript

I have 3 files. db.js, app.js, commentController.js.
I am including my connection in every request in my app so that I won't be repeating the connection code again and again. Is this a bad / insecure practice? Is there a better/proper way to implement this?
db.js
const mysql = require('mysql');

const pool = mysql.createPool({
  host     : 'host',
  user     : 'user',
  password : 'password',
  database : 'dbname'
});

exports.pool = pool;
app.js
const db = require('./db');
app.use((req, res, next) => {
  req.pool = db.pool;
  next();
});
commentController.js
exports.showComments = (req, res) => {
  req.pool.getConnection((err, conn) => {
    conn.query(`SELECT * FROM comments`, (err, results, fields) => {
      conn.release();
      if (err) throw err;
      res.render('comments', { results });
    });
  });
};

If your only reason for doing this is to avoid duplicating code, then I think it's a bad idea. People looking at your code (or you looking at your code in a year) aren't going to naturally expect a db connection to be a property of req. And you aren't really saving yourself any trouble.
Just require() the database pool in the file and use it.
commentController.js
const db = require('./db');
require() will return the same pool to all your modules (Node caches modules, so every require('./db') gets the same object).
It's also not clear why you are requesting a connection rather than using the pool (I'm making some assumptions about the lib you're using).
Normally you should be able to do:
const db = require('./db');
exports.showComments = (req, res) => {
  db.pool.query(`SELECT * FROM comments`, (err, results, fields) => {
    if (err) throw err;
    res.render('comments', { results });
  });
};
This saves the trouble of requesting and returning connections and just lets the pool do its work.
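If you also want to keep the SQL itself out of your controllers, one option (just a sketch, reusing the same mysql pool from db.js and a hypothetical getComments helper) is to export small query functions alongside the pool:
// db.js
const mysql = require('mysql');

const pool = mysql.createPool({
  host     : 'host',
  user     : 'user',
  password : 'password',
  database : 'dbname'
});

// pool.query() borrows a connection, runs the query and releases it for you
exports.pool = pool;
exports.getComments = (callback) => pool.query('SELECT * FROM comments', callback);

// commentController.js
const db = require('./db');

exports.showComments = (req, res, next) => {
  db.getComments((err, results) => {
    if (err) return next(err); // let Express's error handler respond instead of throwing
    res.render('comments', { results });
  });
};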

Related

How to create a loop for SELECT function, to keep data updated?

I'm using an Oracle database, and every time it updates, the server doesn't pick up the update and I need to drop/restart it for the data to refresh.
const express = require('express');
const oracledb = require('oracledb');
const app = express();
var cors = require('cors');

app.use(cors());
app.engine('html', require('ejs').renderFile);
app.set('view engine', 'html');

// Connection details for the Oracle database
const connectionString = 'dbprod';
const user = 'sapiensproducao';
const password = 'fabrica';

// Connect to the database
oracledb.getConnection(
  {
    connectionString: connectionString,
    user: user,
    password: password
  },
  function(err, connection) {
    if (err) {
      console.error(err.message);
      return;
    }
    console.log('Connection was successful!');
    // Execute a SQL query
    const query = 'SELECT CODEMP,CODORI,NUMORP,SEQEOQ,DATREA,HORREA,CODPRO,CODDER,QTDRE1,QTDRFG,CODLOT,OBSEOQ from USU_VPROEXT ORDER BY DATREA DESC, HORREA DESC';
    connection.execute(query, [], (err, result) => {
      if (err) {
        console.error(err.message);
        return;
      }
      console.log('Query was successful!');
      console.log();
      // Render the HTML template and pass the query results as a local variable
      app.get('/teste', (req, res) => {
        res.json(result.rows);
      });
    });
  }
);

app.listen(3000, () => {
  console.log('Server is listening on port 3000');
});
I thought of creating a loop for this SELECT function, but how can I create it?
How can I keep running this select in a loop, to keep the data always updated?
In the current structure of your web server, you only ever query the database once and then create an endpoint to serve that data. Instead, create an endpoint which queries the database whenever it's invoked, which may look more like this:
// define the endpoint
app.get('/teste', (req, res) => {
  // within the endpoint, query the database
  oracledb.getConnection(
    {
      connectionString: connectionString,
      user: user,
      password: password
    },
    function(err, connection) {
      if (err) {
        console.error(err.message);
        // DON'T DO THIS, return an actual response to the user
        return;
      }
      console.log('Connection was successful!');
      const query = 'SELECT CODEMP,CODORI,NUMORP,SEQEOQ,DATREA,HORREA,CODPRO,CODDER,QTDRE1,QTDRFG,CODLOT,OBSEOQ from USU_VPROEXT ORDER BY DATREA DESC, HORREA DESC';
      connection.execute(query, [], (err, result) => {
        if (err) {
          console.error(err.message);
          // DON'T DO THIS, return an actual response to the user
          return;
        }
        console.log('Query was successful!');
        console.log();
        // return the results to the user
        res.json(result.rows);
      });
    }
  );
});
The key difference here is that instead of wrapping the endpoint in the query, you wrap the query in the endpoint. So every time the endpoint is invoked it re-queries the database.
Please also note the comments for your error handling. If you just return; from the function and never return a response to the client, the client will just hang until it times out. Return an actual response, which can include error codes, messages, anything you like. Even just res.json(false); would be better than no response at all.
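Going a step further (this is only a sketch, assuming node-oracledb 5.x and the same credentials as above), you can create a connection pool once at startup, always release the connection when you are done, and send a real error status on failure:
const express = require('express');
const oracledb = require('oracledb');

const app = express();

async function start() {
  // one pool for the whole app, created at startup
  await oracledb.createPool({
    connectString: 'dbprod', // assumption: same connect details as above
    user: 'sapiensproducao',
    password: 'fabrica'
  });

  app.get('/teste', async (req, res) => {
    let connection;
    try {
      connection = await oracledb.getConnection(); // borrows from the default pool
      // same SELECT as above, shortened here for readability
      const result = await connection.execute(
        'SELECT CODEMP, CODORI, NUMORP, DATREA, HORREA FROM USU_VPROEXT ORDER BY DATREA DESC, HORREA DESC'
      );
      res.json(result.rows);
    } catch (err) {
      console.error(err.message);
      res.status(500).json({ error: 'database query failed' });
    } finally {
      if (connection) await connection.close(); // always give the connection back to the pool
    }
  });

  app.listen(3000, () => console.log('Server is listening on port 3000'));
}

start();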

SQL data is not loading

Does anyone know why my request just gets stuck loading when trying to access my database?
My database name is test. If I set database: "books" or something like that, for example, then it returns the error that the database books is unknown, so I assume my password is correct and it just isn't finding the test database.
// To import these packages remember to add "type":"module" to package.json
import express from "express";
import mysql from "mysql";

const app = express();

const db = mysql.createConnection({
  host: "localhost",
  user: "root",
  password: "keks000207",
  database: "test",
});

// This is an API request with an Express server
app.get("/", (req, res) => {
  res.json("Hello this is the backend");
});

app.get("/books", (req, res) => {
  const q = "SELECT * FROM books";
  db.query(q, (err, data) => {
    if (err) return res.json(err);
    return data;
  });
});

app.listen(8800, () => {
  console.log("Connected to backend!");
});
Try calling db.connect() (or whichever similar method is available) in that file.
And instead of return data inside the db.query callback, use res.send(data); then you will get a response from the GET /books API.
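Putting those two suggestions together, a minimal sketch of the corrected part (same connection object as above) could look like this:
db.connect((err) => {
  if (err) return console.error('MySQL connection failed: ' + err.message);
  console.log('Connected to MySQL!');
});

app.get('/books', (req, res) => {
  const q = 'SELECT * FROM books';
  db.query(q, (err, data) => {
    if (err) return res.status(500).json(err);
    return res.json(data); // actually send the rows back instead of returning them
  });
});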

Using a variable as a query in axios

I'm attempting to use a variable userEmail (which holds the email of the user currently logged into my app) as a query for my API, since I only want to search for and return documents from my MongoDB that relate to that specific email. I'm struggling to figure out how to pass this variable from my React app to my API and use it there. I don't even know if it is possible, so any help would be great.
I'm using axios to make my GET request.
api.js contains my api helper functions
export async function surveyApiGet () {
  let response = await axios.get('http://localhost:6000/'); // dateApi.js
  return response.data;
}
dateApi.js is the API surveyApiGet() calls, and at the moment my query is just a regex that matches any email, rather than filtering to just the current user.
let MongoClient = require('mongodb').MongoClient;
var url = "mongodb://127.0.0.1:27017/";

const express = require('express');
const app = express();
app.use(express.json());

app.get('/', function (req, res) {
  MongoClient.connect(url, function(err, db) {
    if (err) throw err;
    var dbo = db.db("manderleydb");
    var query = { userId : /^(.)/ };
    dbo.collection("manderleySurveyCompleted").find(query).sort({ date: 1 }).limit(1)
      .toArray(function(err, result) {
        if (err) throw err;
        console.log(result);
        db.close();
        res.status(200).json(JSON.stringify(result));
      });
  });
});

app.listen(6000, () => {
  console.log('listening on port 6000');
});
Landing.js is where I make the call to my api helper function
surveyApiGet()
  .then(response => {
    // code
  })
  .catch(err => {
    err.message;
  });
For this, you can do a POST request to your Node.js API and include the user's email with that request. To do so, you can do something like the following.
const usersEmail = "example@email.com";
Then, through Axios, you can post it to the Node.js API as follows.
const data = await axios.post("http://localhost:6000/", {
  userEmail: usersEmail
});
After you send this to Node.js, you can read the user's email from the request body as follows (change your endpoint to post instead of get).
app.post("/", function (req, res) {
  const userEmail = req.body.userEmail;
  // Put your MongoDB query and the status code here.
});
Drop a comment if you have any unclear points.
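For completeness, a sketch of both sides (assuming the email is stored in the userId field, as the original regex query suggests):
// api.js (React side) -- posts the logged-in user's email to the API
import axios from 'axios';

export async function surveyApiGet(userEmail) {
  const response = await axios.post('http://localhost:6000/', { userEmail });
  return response.data;
}

// dateApi.js -- uses the posted email to filter the collection
app.post('/', function (req, res) {
  const userEmail = req.body.userEmail; // parsed by express.json()
  MongoClient.connect(url, function (err, db) {
    if (err) return res.status(500).json({ error: err.message });
    const dbo = db.db('manderleydb');
    dbo.collection('manderleySurveyCompleted')
      .find({ userId: userEmail }) // assumption: the user's email lives in userId
      .sort({ date: 1 })
      .limit(1)
      .toArray(function (err, result) {
        db.close();
        if (err) return res.status(500).json({ error: err.message });
        res.status(200).json(result);
      });
  });
});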

How to organize communication with database in Node.Js?

I'm having trouble designing the communication with a MySQL database in my Node.js app.
The biggest problem is that queries are asynchronous, which makes it complicated to structure my project. For example, I have excercises.js
EXCERCISES.JS
var express = require('express');
var database = require('../database/database.js');
var router = express.Router();

console.log(database.db(saveDbData))

/* GET users listing. */
router.get('/', function(req, res, next) {
  res.render('exercises', { title: 'Exercises', ex: #DATABASE RESULT });
});

module.exports = router;
I need to write the query's result into the ex field.
Then I wrote a module to handle the MySQL connection:
DATABASE.JS
var mysql = require('mysql');

var pool = mysql.createPool({
  connectionLimit: 10000,
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'Example'
});

var results;

var db = function(){
  pool.query('SELECT name FROM Exercises', function(error, results, fields){
    if (error) throw error;
    res = results;
  })
  return res;
}

module.exports = {
  db: db,
}
Obviously, it doesn't work because pool.query is asynchronous.
The only alternative that I've found on the web is something like this:
EXERCISES.JS
var pool = mysql.createPool({
  connectionLimit: 10000,
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'Example'
});

pool.query('SELECT name FROM Exercises', function(error, results, fields){
  if (error) throw error;
  router.get('/', function(req, res, next) {
    res.render('exercises', { title: 'Exercises', ex: results[0].name });
  });
})
But in this way, the MySQL parts and the routing/rendering parts are mixed. Is it still a well-designed solution? Are there more elegant solutions?
EDIT:
I have modified the files and used a Promise, like this:
EXERCISES.JS
var express = require('express');
var database = require('../database/database.js');
var router = express.Router();

var data = database.db()
  .then(function(data){
    console.log("Resolved");
    /* GET users listing. */
    router.get('/', function(req, res, next) {
      res.render('exercises', { title: 'Exercises', ex: data[0].name });
    });
  })
  .catch(error => console.error(error));

module.exports = router;
DATABASE.JS
var mysql = require('mysql');

var pool = mysql.createPool({
  connectionLimit: 10000,
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'Example'
});

var res;

var db = function(){
  return new Promise(function(resolve, reject){
    pool.query('SELECT name FROM Exercises', function(error, results, fields){
      if (error) reject(error);
      resolve(results)
    })
  })
}

module.exports = {
  db: db,
}
And it works, but I don't think it is the best solution. For example, what if I want to get the data for the render from more than one query?
I'm new to these technologies, so I'm not able to figure out the best way to integrate the database with HTML page rendering.
Have you ever wondered why all web frameworks in Node require you to return responses using the res object instead of just return? It's because all web frameworks expect that you may need to do something asynchronous.
Consider a web framework design similar to Laravel (PHP) or Spring Framework (Java):
// Theoretical synchronous framework API:
app.get('/path', function (request) {
  return "<html><body> Hello World </body></html>";
});
Then if you need to do anything async you will face the issue that the data you're fetching hasn't returned by the time you need to return the HTTP request:
// Theoretical synchronous framework API:
app.get('/path', function (request) {
  return ??? // OH NO! I need to return now!!
});
It is for this reason that web frameworks in javascript don't act on return values. Instead they pass you a callback to call when you are done:
// Express.js
app.get('/path', function (request, response) {
  doSomethingAsync((err, result) => {
    response.send(result);
  });
});
So for your code you just need to do:
router.get('/', function(req, res) {
  pool.query('SELECT name FROM Exercises', function(error, results, fields){
    if (error) throw error;
    res.render('exercises', { title: 'Exercises', ex: results[0].name });
  });
});
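If you'd rather not crash the request with throw, a small variation (sketch) hands the error to Express's error handler via next():
router.get('/', function(req, res, next) {
  pool.query('SELECT name FROM Exercises', function(error, results) {
    if (error) return next(error); // Express will respond via its error handler
    res.render('exercises', { title: 'Exercises', ex: results[0].name });
  });
});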
Exporting the database
Exporting the database is as simple as exporting pool:
db.js
var mysql = require('mysql');

var pool = mysql.createPool({
  connectionLimit: 10000,
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'Example'
});

module.exports = {
  pool: pool
}
exercises.js
let db = require('./db');
// you can now use db.pool in the rest of your code
// ..
Reusing your queries
Instead of coding SELECT statements in your controllers (routes) you can (and should) code them in your db module(s):
db.js
var mysql = require('mysql');

var pool = mysql.createPool({
  connectionLimit: 10000,
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'Example'
});

function getExerciseNames (callback) {
  pool.query('SELECT name FROM Exercises', callback);
}

module.exports = {
  pool: pool,
  getExerciseNames: getExerciseNames
}
Then in your controller logic you just need to do:
router.get('/', function(req, res) {
  db.getExerciseNames(function(error, results, fields){
    if (error) throw error;
    res.render('exercises', {
      title: 'Exercises',
      ex: results[0].name
    });
  });
});
Caching
If your intention is to query the db only once and cache the value of Exercises, then don't invert the Express routing flow. Instead, implement the caching at your db layer:
db.js:
var exerciseNamesCache = [];
var exerciseNamesFields = [];

function getExerciseNames (callback) {
  if (exerciseNamesCache.length > 0 && exerciseNamesFields.length > 0) {
    return callback(null, exerciseNamesCache, exerciseNamesFields); // serve from cache, skip the query
  }
  pool.query('SELECT name FROM Exercises', function(error, results, fields){
    if (!error) {
      exerciseNamesCache = results;
      exerciseNamesFields = fields;
    }
    callback(error, results, fields);
  });
}
Promises
Promises are a design pattern for handling callbacks. They are comparable to Java's Futures (CompletionStage etc.), only a lot more lightweight. If an API you are using returns a promise instead of accepting a callback, then you need to call res.render() inside the promise's .then() method:
router.get('/', function(req, res, next) {
  doSomethingAsync()
    .then(function(result){
      res.send(result);
    })
    .catch(next); // remember to pass on asynchronous errors to next()
});
If the API you're using accepts a callback, then whether or not you wrap it in a promise is more a matter of taste. I personally wouldn't do it unless you are also using another API that returns a promise.
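For example, Node's built-in util.promisify can wrap the callback-style pool.query so it returns a promise (a sketch, using the same pool variable as the earlier snippets):
const util = require('util');
const query = util.promisify(pool.query).bind(pool); // promise-returning version of pool.query

router.get('/', function(req, res, next) {
  query('SELECT name FROM Exercises')
    .then(results => res.render('exercises', { title: 'Exercises', ex: results[0].name }))
    .catch(next);
});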
async/await
One advantage of promises is that you can use them with await. Express specifically works well with async/await. Just remember you can only use await inside a function marked with async:
router.get('/', async function(req, res, next) {
  let result = await doSomethingAsync();
  res.send(result);
});
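One caveat (in Express 4, at least): a rejected await inside an async handler is not forwarded to next() automatically, so wrap the body in try/catch:
router.get('/', async function(req, res, next) {
  try {
    let result = await doSomethingAsync();
    res.send(result);
  } catch (err) {
    next(err); // hand asynchronous errors to Express's error handler
  }
});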
Multiple asynchronous operations
Fetching multiple pieces of asynchronous data can be as simple as:
router.get('/', function(req, res, next) {
  doSomethingAsync(function(result1){
    doSomethingElse(function(result2) {
      res.json([result1, result2]);
    });
  });
});
With promises that would be:
router.get('/', function(req, res, next) {
  doSomethingAsync()
    .then(function(result1){
      return doSomethingElse()
        .then(function(result2) {
          return [result1, result2];
        });
    })
    .then(function(results){
      res.json(results);
    })
    .catch(next);
});
But both of the above perform the requests sequentially (get result1, then get result2). If you want to fetch both pieces of data in parallel you can do this with Promises:
router.get('/', function(req, res, next) {
  Promise.all([
    doSomethingAsync(), // fetch in parallel
    doSomethingElse()
  ])
  .then(function(results){
    res.json(results);
  })
  .catch(next);
});
With callbacks it's a little bit more complicated. There is a design pattern you can use, and someone has actually implemented it as a library called async.js, but often the easiest solution is to wrap them in Promises and use Promise.all(). Still, do check out async.js, since it has functionality useful for things like batching requests, performing async operations while a condition is true, etc. (the promise-based counterpart of that library is async-q).
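Applied to the original question (rendering a page from more than one query), a sketch using the promisified query helper from above, with a hypothetical Categories table purely for illustration:
router.get('/', function(req, res, next) {
  Promise.all([
    query('SELECT name FROM Exercises'),  // both queries run in parallel
    query('SELECT name FROM Categories')  // hypothetical second table, just to show the shape
  ])
  .then(function([exercises, categories]) {
    res.render('exercises', { title: 'Exercises', ex: exercises[0].name, categories: categories });
  })
  .catch(next);
});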
You can use npm modules to achieve async tasks with MySQL.
I recommend choosing Sequelize or jm-ez-mysql. If you go with jm-ez-mysql, your code structure looks like this:
server.js
require('./config/database.js');
./config/database.js
const sql = require('jm-ez-mysql');

// Init DB Connection
const connection = sql.init({
  host: process.env.DBHOST,
  user: process.env.DBUSER,
  password: process.env.DBPASSWORD,
  database: process.env.DATABASE,
  dateStrings: true,
  charset: 'utf8mb4',
  timezone: 'utc',
  multipleStatements: true,
});

module.exports = {
  connection,
};
After that, you can use MySQL asynchronously.
./exercise.js
const sql = require('jm-ez-mysql');

const exerciseUtil = {};

exerciseUtil.searchUserById = async (id) => {
  try {
    // table name, db fields, condition, values
    const result = await sql.first('users', ['id, name, email'], 'id = ?', [id]);
    return result; // returns the matching row as an object {}
  } catch (err) {
    console.log(err);
    throw err;
  }
};

module.exports = exerciseUtil;
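A route that uses this helper might look like the following (a sketch, assuming an Express router and that exercise.js sits next to it):
const express = require('express');
const exerciseUtil = require('./exercise');

const router = express.Router();

router.get('/users/:id', async (req, res) => {
  try {
    const user = await exerciseUtil.searchUserById(req.params.id);
    res.json(user);
  } catch (err) {
    res.status(500).json({ error: 'database query failed' });
  }
});

module.exports = router;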
I hope it helps.
Happy Coding :)

In a Node web app, do you open one MongoDB connection for each HTTP request?

I'm adding MongoDB to my Express.js Node web app. This is what I got so far:
// in app.js
var mongodb = require('mongodb');
var mongourl = /* … */;

// These are just examples:
app.get('/write', function (req, res) {
  mongodb.connect(mongourl, function (err, db) {
    db.collection('Users', function (err, coll) {
      coll.insert(/* stuff */, function (err) {
        res.send(200, 'Done.');
      });
    });
  });
});

app.get('/read', function (req, res) {
  mongodb.connect(mongourl, function (err, db) {
    db.collection('Users', function (err, coll) {
      coll.find({}, function (err, cursor) {
        cursor.toArray(function (err, items) {
          res.send(200, items);
        });
      });
    });
  });
});
Assuming that I want to stick with the default mongodb driver (for now):
Is this pattern right? Do I have to open a new connection to the database in each of my different routes that perform database operations?
If the pattern is right, then how do I deal with the obvious code repetition going on here? Obviously, as it stands now, the code is not acceptable.
Use the new standard, MongoClient. It manages the pool for you; the pool size defaults to 5.
// require as a module to be used anywhere.
module.exports = {};

var MongoClient = require('mongodb').MongoClient;
var mongoURI = /* … */;

MongoClient.connect(mongoURI, function(err, db) {
  if (err) throw err;
  module.exports.users = db.collection('users');
  console.log('Connected to Mongo!');
});
then
var db = require('./db.js')
//once connected
//db.users.find()... etc
check out:
http://mongodb.github.io/node-mongodb-native/driver-articles/mongoclient.html
pooling details:
http://mongodb.github.io/node-mongodb-native/driver-articles/mongoclient.html#connection-pool-configuration
Do not close and reopen the connection; you're just losing resources :s
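For reference, a route using the exported collection might look like the sketch below. Note that module.exports.users is only set once the asynchronous connect has finished, so a request that arrives before that would still find it undefined:
var express = require('express');
var db = require('./db.js');

var app = express();

app.get('/read', function (req, res) {
  // db.users is populated once MongoClient has connected
  db.users.find({}).toArray(function (err, items) {
    if (err) return res.status(500).send(err.message);
    res.send(items);
  });
});

app.listen(3000);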
