I want to connect to two different databases on the server side so I can perform queries that involve both of them using Node.
I have a config.js like this:
module.exports = {
  database: {
    user: 'brunojs',
    password: 'bdpf5',
    connectString: 'localhost:1521/orcl'
  },
  jwtSecretKey: "jmvhDdDBMvqb=M#6h&QVA7x"
};
This stores my credentials for the first database.
Then I have a list.js file which performs the query:
var oracledb = require('oracledb');
var jwt = require('jsonwebtoken');
var config = require(__dirname + '../../config.js');
function get(req, res, next) {
oracledb.getConnection(
config.database,
function(err, connection){
if (err) {
return next(err);
}
connection.execute(
'select num_sequencial, notes, des_especialidade, dt_diag ' +
'from organite_repository ',
{},//no binds
{
outFormat: oracledb.OBJECT
},
function(err, results){
if (err) {
connection.release(function(err) {
if (err) {
console.error(err.message);
}
});
return next(err);
}
res.status(200).json(results.rows);
connection.release(function(err) {
if (err) {
console.error(err.message);
}
});
}
);
}
);
}
module.exports.get = get;
Everything works fine.
The thing is, right now, I want to perform queries using another database. How can I do that?
The right solution is to use pools: https://github.com/oracle/node-oracledb/blob/master/doc/api.md#createpool
Define one pool configuration per database in your config:
module.exports = {
  database: [
    {
      user: 'brunojs',
      password: 'bdpf5',
      connectString: 'localhost:1521/orcl',
      poolAlias: 'database1'
    },
    {
      user: 'brunojs',
      password: 'bdpf5',
      connectString: 'localhost2:1521/orcl',
      poolAlias: 'database2'
    }
  ],
  jwtSecretKey: "jmvhDdDBMvqb=M#6h&QVA7x"
};
During web server initialization, create the pools:
const oracledb = require('oracledb');
const dbConfig = require('../config/database.js');

async function initialize() {
  // create one pool per config entry; each pool is registered under its poolAlias
  for (const item of dbConfig.database) {
    await oracledb.createPool(item);
  }
}
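The pools have to exist before any request handler tries to use them, so one option is to wire initialize() into startup before you start listening. A sketch, where app and the port are assumptions from a typical Express setup:
// hypothetical startup wiring; 'app' is your Express app
initialize()
  .then(() => {
    app.listen(3000, () => console.log('Server is listening on port 3000'));
  })
  .catch((err) => {
    console.error('Failed to create connection pools:', err);
    process.exit(1);
  });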
Then you can use the created pools when calling the connection procedure:
conn = await oracledb.getConnection('database1');
const execresult = await conn.execute(context.execsql, execbinds, context.opts);
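Putting it together, a request handler can grab a connection from whichever pool it needs by its alias and release it when done. This is only a sketch under the config above; the query is the one from your list.js and the handler name is made up:
const oracledb = require('oracledb');

// hypothetical Express route handler; assumes initialize() has already completed
async function get(req, res, next) {
  let conn;
  try {
    // 'database2' is the poolAlias defined in the config above
    conn = await oracledb.getConnection('database2');
    const result = await conn.execute(
      'select num_sequencial, notes, des_especialidade, dt_diag from organite_repository',
      {}, // no binds
      { outFormat: oracledb.OBJECT }
    );
    res.status(200).json(result.rows);
  } catch (err) {
    next(err);
  } finally {
    if (conn) {
      await conn.close(); // return the connection to its pool
    }
  }
}
module.exports.get = get;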
First, add a second credentials object in your config.js
module.exports = {
  database: {
    user: 'brunojs',
    password: 'bdpf5',
    connectString: 'localhost:1521/orcl'
  },
  database2: {
    user: 'user2',
    password: 'password',
    connectString: 'someotherhost:1521/orcl'
  },
  jwtSecretKey: "jmvhDdDBMvqb=M#6h&QVA7x"
};
then use one or the other here:
oracledb.getConnection(
config.database, // you could change this line to config.database2
function(err, connection){
if (err) { ...
If you want to query one database, then another, you'd need to keep references to both connection objects (error checking omitted for brevity):
oracledb.getConnection(
  config.database,
  function(err, connection1) {
    oracledb.getConnection(
      config.database2,
      function(err, connection2) {
        // in this function you can use either connection object
        connection1.execute(...);
        connection2.execute(...);
      }
    );
  }
);
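One thing this snippet leaves out (besides error checking) is releasing both connections when you are done, just as list.js does for a single connection. A minimal sketch of that cleanup, placed inside the innermost callback after both queries have finished:
// after both queries have completed, inside function(err, connection2) { ... }
connection1.release(function (err) {
  if (err) { console.error(err.message); }
});
connection2.release(function (err) {
  if (err) { console.error(err.message); }
});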
This is slightly out of scope for your question, but you could also take a look at Waterline. It supports setting up multiple databases and then tying models to them, so that knowing where certain data models are stored is abstracted away.
You can always use database links on the DB side, so your Node code does not have to connect to another DB. For example:
select num_sequencial, notes, des_especialidade, dt_diag
from organite_repository#linkA
UNION
select num_sequencial, notes, des_especialidade, dt_diag
from organite_repository#linkB
/* ... */
I'm using an Oracle database, and every time the data changes the server doesn't see the update; I have to restart it before it serves the new data.
const express = require('express');
const oracledb = require('oracledb');
const app = express();
var cors = require('cors')
app.use (cors())
app.engine('html', require('ejs').renderFile);
app.set('view engine', 'html');
// Connection details for the Oracle database
const connectionString = 'dbprod';
const user = 'sapiensproducao';
const password = 'fabrica';
// Connect to the database
oracledb.getConnection(
{
connectionString: connectionString,
user: user,
password: password
},
function(err, connection) {
if (err) {
console.error(err.message);
return;
}
console.log('Connection was successful!');
// Execute a SQL query
const query = 'SELECT CODEMP,CODORI,NUMORP,SEQEOQ,DATREA,HORREA,CODPRO,CODDER,QTDRE1,QTDRFG,CODLOT,OBSEOQ from USU_VPROEXT ORDER BY DATREA DESC, HORREA DESC';
connection.execute(query, [], (err, result) => {
if (err) {
console.error(err.message);
return;
}
console.log('Query was successful!');
console.log()
// Render the HTML template and pass the query results as a local variable
app.get('/teste', (req, res) => {
res.json(result.rows)
});
});
}
);
app.listen(3000, () => {
console.log('Server is listening on port 3000');
});
I thought of creating a loop for this SELECT function, but how can I create it?
How can I keep running this select in a loop, to keep the data always updated?
In the current structure of your web server, you only ever query the database once and then create an endpoint that serves that snapshot. Instead, create an endpoint which queries the database whenever it's invoked, which may look more like this:
// define the endpoint
app.get('/teste', (req, res) => {
// within the endpoint, query the database
oracledb.getConnection(
{
connectionString: connectionString,
user: user,
password: password
},
function(err, connection) {
if (err) {
console.error(err.message);
// DON'T DO THIS, return an actual response to the user
return;
}
console.log('Connection was successful!');
const query = 'SELECT CODEMP,CODORI,NUMORP,SEQEOQ,DATREA,HORREA,CODPRO,CODDER,QTDRE1,QTDRFG,CODLOT,OBSEOQ from USU_VPROEXT ORDER BY DATREA DESC, HORREA DESC';
connection.execute(query, [], (err, result) => {
if (err) {
console.error(err.message);
// DON'T DO THIS, return an actual response to the user
return;
}
console.log('Query was successful!');
console.log();
// return the results to the user
res.json(result.rows);
});
});
});
The key difference here is that instead of wrapping the endpoint in the query, you wrap the query in the endpoint. So every time the endpoint is invoked it re-queries the database.
Please also note the comments for your error handling. If you just return; from the function and never return a response to the client, the client will just hang until it times out. Return an actual response, which can include error codes, messages, anything you like. Even just res.json(false); would be better than no response at all.
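For example, you could factor the two error branches into a small helper. The helper name, status code, and payload below are just one possibility; adapt them to your app:
function handleDbError(res, err) {
  console.error(err.message);
  // send the client a real response instead of silently dropping the request
  res.status(500).json({ error: 'Database error' });
}
Each if (err) { ... } block in the endpoint then becomes return handleDbError(res, err);.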
I have a problem designing the communication with a MySQL database in my Node.js app.
The biggest problem is that queries are async, so it becomes complicated to design my project. For example, I have exercises.js:
EXERCISES.JS
var express = require('express');
var database = require('../database/database.js');
var router = express.Router();
console.log(database.db(saveDbData))
/* GET users listing. */
router.get('/', function(req, res, next) {
res.render('exercises',{title: 'Exercises', ex: #DATABASE RESULT});
});
module.exports = router;
I need to write the query's result into the ex field.
Then I wrote a module to handle the MySQL connection:
DATABASE.JS
var mysql = require('mysql');
var pool = mysql.createPool({
  connectionLimit: 10000,
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'Example'
});
var res;
var db = function(){
  pool.query('SELECT name FROM Exercises', function(error, results, fields){
    if (error) throw error;
    res = results;
  })
  return res;
}
module.exports = {
db: db,
}
Obviously, it doesn't work because pool.query is async.
The only alternative I've found on the web is something like this:
EXERCISES.JS
var pool = mysql.createPool({
connectionLimit: 10000,
host: 'localhost',
user: 'root',
password: 'password',
database: 'Example'
});
pool.query('SELECT name FROM Exercises', function(error, results, fields){
if (error) throw error;
router.get('/', function(req, res, next) {
res.render('exercises',{title: 'Exercises', ex: results[0].name});
});
})
But this way, the MySQL parts and the routing/rendering parts are mixed together. Is this still a well-designed solution? Are there more elegant solutions?
EDIT:
I have modified the files and used a Promise, like this:
EXERCISES.JS
var express = require('express');
var database = require('../database/database.js');
var router = express.Router();
var data = database.db()
.then(
function(data){
console.log("Resolved");
/* GET users listing. */
router.get('/', function(req, res, next) {
res.render('exercises',{title: 'Exercises', ex: data[0].name});
});
})
.catch(
error => console.error(error));
module.exports = router;
DATABASE.JS
var mysql = require('mysql');
var pool = mysql.createPool({
connectionLimit: 10000,
host: 'localhost',
user: 'root',
password: 'password',
database: 'Example'
});
var db = function(){
  return new Promise(function(resolve, reject){
    pool.query('SELECT name FROM Exercises', function(error, results, fields){
      if (error) return reject(error);
      resolve(results);
    });
  });
};
module.exports = {
db: db,
}
And it works, but I don't think it is the best solution. For example, what if I want to get the data to render from several queries?
I'm new to these technologies, so I can't figure out the best way to integrate the database with HTML page rendering.
Have you ever wondered why all web frameworks in Node require you to return responses using the res object instead of a plain return? It's because they all expect that you may need to do something asynchronous.
Consider a web framework design similar to Laravel (PHP) or Spring Framework (Java):
// Theoretical synchronous framework API:
app.get('/path', function (request) {
return "<html><body> Hello World </body></html>";
});
Then if you need to do anything async you will face the issue that the data you're fetching hasn't arrived by the time you need to return the HTTP response:
// Theoretical synchronous framework API:
app.get('/path', function (request) {
return ??? // OH NO! I need to return now!!
});
It is for this reason that web frameworks in JavaScript don't act on return values. Instead, they pass you a response object to call when you are done:
// Express.js
app.get('/path', function (request, response) {
doSomethingAsync((err, result) => {
response.send(result);
});
});
So for your code you just need to do:
router.get('/', function(req, res) {
pool.query('SELECT name FROM Exercises', function(error, results, fields){
if (error) throw error;
res.render('exercises',{title: 'Exercises', ex: results[0].name});
});
});
Exporting the database
Exporting the database is as simple as exporting pool:
db.js
var pool = mysql.createPool({
connectionLimit: 10000,
host: 'localhost',
user: 'root',
password: 'password',
database: 'Example'
});
module.exports = {
pool: pool
}
exercises.js
let db = require('./db');
// you can now use db.pool in the rest of your code
// ..
Reusing your queries
Instead of coding SELECT statements in your controllers (routes) you can (and should) code them in your db module(s):
db.js
var pool = mysql.createPool({
connectionLimit: 10000,
host: 'localhost',
user: 'root',
password: 'password',
database: 'Example'
});
function getExerciseNames (callback) {
  pool.query('SELECT name FROM Exercises', callback);
}
module.exports = {
  pool: pool,
  getExerciseNames: getExerciseNames
}
Then in your controller logic you just need to do:
router.get('/', function(req, res) {
db.getExerciseNames(function(error, results, fields){
if (error) throw error;
res.render('exercises',{
title: 'Exercises',
ex: results[0].name
});
});
});
Caching
If your intention is to query the db only once to cache the value of Exercises then don't invert the Express routing flow. Instead implement the caching at your db layer:
db.js:
var exerciseNamesCache = [];
var exerciseNamesFields = [];
function getExerciseNames (callback) {
  if (exerciseNamesCache.length > 0 && exerciseNamesFields.length > 0) {
    // serve from the cache and skip the query
    return callback(null, exerciseNamesCache, exerciseNamesFields);
  }
  pool.query('SELECT name FROM Exercises', function(error, results, fields){
    if (!error) {
      exerciseNamesCache = results;
      exerciseNamesFields = fields;
    }
    callback(error, results, fields);
  });
}
Promises
A promise is a design pattern for handling callbacks. It is comparable to Java's Futures (CompletionStage etc.), only a lot more lightweight. If an API you are using returns a promise instead of accepting a callback, then you need to call res.render() inside the promise's .then() method:
router.get('/', function(req, res, next) {
doSomethingAsync()
.then(function(result){
res.send(result);
})
.catch(next); // remember to pass on asynchronous errors to next()
});
If the API you're using accepts a callback, then whether or not you wrap it in a promise is more a matter of taste. I personally wouldn't do it unless I were also using another API that returns a promise.
async/await
One advantage of promises is that you can use them with await. Express specifically works well with async/await. Just remember you can only use await inside a function marked with async:
router.get('/', async function(req, res, next) {
let result = await doSomethingAsync();
res.send(result);
});
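One caveat, at least with Express 4: if the awaited call rejects, the error won't reach Express's error handling on its own, so it is worth adding a try/catch yourself. A minimal sketch:
router.get('/', async function(req, res, next) {
  try {
    let result = await doSomethingAsync();
    res.send(result);
  } catch (err) {
    next(err); // forward asynchronous errors to Express's error handler
  }
});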
Multiple asynchronous operations
Fetching multiple pieces of asynchronous data can be as simple as:
router.get('/', function(req, res, next) {
doSomethingAsync(function(result1){
doSomethingElse(function(result2) {
res.json([result1, result2]);
});
});
});
With promises that would be:
router.get('/', function(req, res, next) {
doSomethingAsync()
.then(function(result1){
return doSomethingElse()
.then(function(result2) {
return [result1, result2];
});
})
.then(function(results){
res.json(results);
})
.catch(next);
});
But both of the examples above perform the requests sequentially (get result1, then get result2). If you want to fetch both in parallel, you can do it with promises:
router.get('/', function(req, res, next) {
  Promise.all([
    doSomethingAsync(), // fetch in parallel
    doSomethingElse()
  ])
  .then(function(results){
    res.json(results);
  })
  .catch(next);
});
With callbacks it's a little bit more complicated. There is a design pattern you can use and someone has actually implemented it as a library called async.js but often the easiest solution is to wrap them in Promises and use Promise.all(). Still, do check out async.js since it has functionality useful for things like batching requests, perform async operations while a condition is true etc. (the promise based counterpart of that library is async-q)
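Applied to the MySQL pool from earlier, one way to get promises is to wrap pool.query with util.promisify and then use Promise.all. A sketch; the second query and the extra template variable are hypothetical:
const util = require('util');
// pool is the mysql pool created earlier; promisify gives a promise-returning query
const query = util.promisify(pool.query).bind(pool);

router.get('/', function(req, res, next) {
  Promise.all([
    query('SELECT name FROM Exercises'),
    query('SELECT COUNT(*) AS total FROM Exercises') // hypothetical second query
  ])
  .then(function([exercises, counts]){
    res.render('exercises', {
      title: 'Exercises',
      ex: exercises[0].name,
      total: counts[0].total
    });
  })
  .catch(next);
});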
You can use npm modules to handle the async work with MySQL.
I recommend choosing Sequelize or jm-ez-mysql. If you go with jm-ez-mysql then your code structure looks like this:
server.js
require('./config/database.js');
./config/database.js
const sql = require('jm-ez-mysql');
// Init DB Connection
const connection = sql.init({
host: process.env.DBHOST,
user: process.env.DBUSER,
password: process.env.DBPASSWORD,
database: process.env.DATABASE,
dateStrings: true,
charset: 'utf8mb4',
timezone: 'utc',
multipleStatements: true,
});
module.exports = {
connection,
};
After that, you can use MySQL asynchronously.
./exercise.js
const sql = require('jm-ez-mysql');
const exerciseUtil = {};
exerciseUtil.searchUserById = async (id) => {
try {
// table name, db fields, condition, values
const result = await sql.first('users', ['id, name, email'], 'id = ?', [id]);
return result; // Which return your result in object {}
} catch (err) {
console.log(err);
throw err;
}
};
module.exports = exerciseUtil;
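Usage could then look roughly like this, assuming server.js has already required ./config/database.js so the connection is initialised:
// somewhere in your route or service code
const exerciseUtil = require('./exercise');

(async () => {
  const user = await exerciseUtil.searchUserById(1);
  console.log(user); // e.g. { id: ..., name: ..., email: ... }
})();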
I hope it helps.
Happy Coding :)
Currently I am playing around with the MySQL library in Node.js; however, I have a question about the correct/most efficient way to use it.
According to W3Schools, the correct way to make a single query is to use code like this:
var mysql = require('mysql');
var con = mysql.createConnection({
host: "localhost",
user: "yourusername",
password: "yourpassword",
database: "mydb"
});
con.connect(function(err) {
if (err) throw err;
con.query("SELECT * FROM customers", function (err, result, fields) {
if (err) throw err;
console.log(result);
});
});
However, say I wanted to make multiple queries which would be executed by an event, for example. How would I handle this? Should I create an "initialise" function which is executed as soon as the program runs, such as this?
var mysql = require('mysql');
var database;
//Initialise database
function setupDatabase() {
database = mysql.createConnection({
host: token.host,
user: token.user,
password: token.password,
database: token.database,
port: token.port
});
}
//Imagine this could be called at any time after execution
function event() {
if(database != null) {
database.connect(function(err) {
if (err) throw err;
database.query("SELECT * FROM customers", function (err, result, fields) {
if (err) throw err;
console.log(result);
});
});
}
}
Also, do I have to connect to the database each time I make a query, or can I add the "database.connect" call to my setupDatabase function, such as this?
var mysql = require('mysql');
var database;
//Initialise database
function setupDatabase() {
database = mysql.createConnection({
host: token.host,
user: token.user,
password: token.password,
database: token.database,
port: token.port
});
if(database != null) {
database.connect(function(err) {
if (err) throw err;
});
}
}
//Imagine this could be called at any time after execution
function event() {
if(database != null) {
database.query("SELECT * FROM customers", function (err, result, fields) {
if (err) throw err;
console.log(result);
});
}
}
My main concern is that calling the con.connect function every single time I make a query would be slow, and although these calls are asynchronous I want to use the correct/most efficient approach. Feel free to correct any mistakes in the last two code snippets; I have only tested the first one so far.
You have to make the database connection only once per application lifetime (unless you get disconnects). Then you may run as many queries as you want.
Just put the database connection routine in a separate file and then require it in your application's initialisation step.
// mysql.js
const mysql = require('mysql');
module.exports = mysql.createConnection({
host: "localhost",
user: "yourusername",
password: "yourpassword",
database: "mydb"
});
Or require it anywhere you need the database connection; it will return the connected database object without rerunning that code again and again, because Node caches modules.
// index.js
const database = require('./mysql')
database.query("SELECT * FROM customers", function (err, result) {
  if (err) throw err;
  console.log(result);
});
I created a file which includes a function that holds a pool and handles the connection to the database, like this:
const mysql = require('mysql');

let _this = {};
let POOL = null;

function getPool() {
  return new Promise((resolve, reject) => {
    if (POOL != null) {
      resolve(POOL);
    } else {
      // create the connection pool on first use (the credentials here are placeholders)
      POOL = mysql.createPool({
        connectionLimit: 10,
        host: 'localhost',
        user: 'yourusername',
        password: 'yourpassword',
        database: 'mydb'
      });
      resolve(POOL);
    }
  });
}

function closePool() {
  // close the pool when shutting down
  if (POOL != null) {
    POOL.end();
    POOL = null;
  }
}

_this.getPool = getPool;
_this.closePool = closePool;
module.exports = _this;
Now you can call getPool() and you will receive a pool of connections on which you can execute your queries.
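Usage from another file could look roughly like this; the file name and query are just placeholders:
// somewhere else in the app; './pool' is whatever you named the file above
const db = require('./pool');

db.getPool()
  .then(pool => {
    pool.query('SELECT * FROM customers', (err, rows) => {
      if (err) { return console.error(err.message); }
      console.log(rows);
    });
  })
  .catch(console.error);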
In my project, I am using a MySQL database. The problem is that I don't know how to write a simple database module in one js file.
My code is:
File: database.js
var mysql = require("mysql");
var connection = mysql.createConnection({
host: "localhost",
user: "root",
password: "",
database: "mydb"
});
module.exports = class Database {
constructor() {
connection.connect(function (err) {
if (err) {
console.error(err);
throw err;
} else {
console.log("Connection to database was successful");
}
});
}
getConnection() { return connection; }
};
I have a lot of files in the project and each one needs to use the database. The problem is that when I import database.js the constructor is called every time, which I think is a bad approach.
Is there a better way to use the database connection in the project? Something like calling getConnection() from the database.js file without calling the constructor.
Thank you for any help.
A possible solution:
const mysql = require("mysql"),
connection = mysql.createConnection({
host: "localhost",
user: "root",
password: "",
database: "mydb"
});
connection.connect(function (err) {
if (err) {
console.error(err);
throw err;
} else {
console.log("Connection to database was successful");
}
});
module.exports = {
getConnection() { return connection; }
};
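Any file that needs the database can then simply require the module; because Node caches modules, createConnection and connect only ever run once. The path and the query below are just examples:
// users.js (or any other file)
const db = require('./database');

db.getConnection().query('SELECT * FROM Exercises', function (err, rows) {
  if (err) throw err;
  console.log(rows);
});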
In your database.js file, export an instance instead:
module.exports = new Database();
And use
require('./path/to/database-file')
wherever you need the instance; you will always get the same object, because Node caches the module.
How can I get all the documents from MongoDB using the Node.js driver? I tried the following approach to get all the documents but couldn't get any, although insertion works fine.
// Connection URL
var url = config.mongodbConnectionString;
var db: any;
// Use connect method to connect to the Server
MongoClient.connect(url, function (err: any, database: any) {
assert.equal(null, err);
console.log("Connected correctly to server");
db = database;
});
export class MongodbProvider implements IDbProvider {
public getMenus(): any {
var menus: any = [];
try {
db.open(function (err, db) {
var cursor = db.collection('menus').find(function (err, cursor) {
cursor.each(function (err, doc) {
console.log(doc);
menus.push(doc);
});
});
});
}
catch (err) {
console.log(err);
}
return menus;
}
}
Can you please help me with this?
The first parameter of find is a filter. To fetch everything, it should be an empty document, something like:
var cursor = db.collection('menus').find({}, function (err, cursor) {
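Note also that getMenus returns menus before the asynchronous query has finished, so callers always see an empty array. One way around that is to hand the results back through a callback once they have actually arrived. A sketch against the db object you store in MongoClient.connect, with the class wrapper and type annotations dropped for brevity:
// return the documents through a callback instead of returning synchronously
function getMenus(callback) {
  db.collection('menus').find({}).toArray(function (err, docs) {
    if (err) { return callback(err); }
    callback(null, docs); // every document in the 'menus' collection
  });
}

// usage
getMenus(function (err, menus) {
  if (err) { return console.log(err); }
  console.log(menus);
});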