I am trying to create a new collection and get MongoDB connection error: { MongoError: Authentication failed. error
require("dotenv").config()
const mongo = require("mongodb").MongoClient
const mongoose = require("mongoose")
const dbRoute = `mongodb://${process.env.REACT_APP_SERVER_ADMIN}:${
process.env.REACT_APP_SERVER_ADMIN_PASSWORD
}>#${process.env.REACT_APP_DB}`
mongoose.connect(dbRoute, { useNewUrlParser: true })
let db = mongoose.connection
db.once("open", () => console.log("connected to the database"))
// checks if connection with the database is successful
db.on("error", console.error.bind(console, "MongoDB connection error:"))
REACT_APP_SERVER_ADMIN is one of the Users from mlab db and REACT_APP_SERVER_ADMIN_PASSWORD is the password that I used when I created it in mlab dashboard.
REACT_APP_DB is in the format <...>.mlab.com:PORT/DBNAME
There isn't any schema or a model defined as of now.
Nothing complex about this as far as I can see so I am probably missing something basic. Any ideas?
Related
I have inherited a legacy system made in nodeJS and postgres. Whenever I encounter a database call error e.g let's say an insert violates a duplicate constraint the db client throws an error which I handle but its unable to make subsequent queries to the db and it hangs.
I have tried adding an error listener which recreates the client on error message but to no avail. I have many files calling the db so Its not ideal to recreate the client on each catch clause.
db connection class
const { Client } = require('pg');
const config = require('../../config');
const log = require('../../logger').LOG;
const client = new Client({
connectionString: config.dbUrl
});
client.on('error', err => {
log.info('client connection Error!'+ err.stack);
client = null;
client = new Client({
connectionString: config.dbUrl
});
client.connect();
});
client.on('end', () => {
log.info('client connection! End client sent');
});
client.on('notification', msg => {
log.info('client connection! notification message sent'+ msg);
});
client.connect();
// Export the Postgres Client module
module.exports = client;
sample query that is encountering
function createGame(gameHash) {
Model.query("INSERT INTO games (hash) values($1) RETURNING gId",[gameHash], function(err,db_res) {
if(err) {
log.info('Game record creation error: '+err.stack);
}
log.info('create record db resp: '+JSON.stringify(db_res));
gameId = db_res.rows[0].gId;
});
}
UPDATE:
so after reviemwing the logs keenly I have observed the issue occurs when the client decides to call an update command instead of the insert command in the query above. Interestingly the query passed to it is hard coded string clearly indicating insert but for some reason the update command is called.
2023-02-03 01:34:04 : create record db resp- with hash=>:
{"command":"UPDATE","rowCount":1,"oid":null,"rows":[],"fields":[],"_types":{"_types":{"arrayParser":{},
"builtins":{"BOOL":16,"BYTEA":17,"CHAR":18,"INT8":20,"INT2":21,"INT4":23,"REGPROC":24,"TEXT":25,"OID":26,"TID":27,"XID":28,"CID":29,"JSON":114,
"XML":142,"PG_NODE_TREE":194,"SMGR":210,"PATH":602,"POLYGON":604,
"CIDR":650,"FLOAT4":700,"FLOAT8":701,"ABSTIME":702,"RELTIME":703,
"TINTERVAL":704,"CIRCLE":718,"MACADDR8":774,"MONEY":790,"MACADDR":829,"INET":869,"ACLITEM":1033,"BPCHAR":1042,"VARCHAR":1043,"DATE":1082,
"TIME":1083,"TIMESTAMP":1114,"TIMESTAMPTZ":1184,"INTERVAL":1186,
"TIMETZ":1266,"BIT":1560,"VARBIT":1562,"NUMERIC":1700,"REFCURSOR":1790,"REGPROCEDURE":2202,"REGOPER":2203,"REGOPERATOR":2204,"REGCLASS":2205,"REGTYPE":2206,"UUID":2950,"TXID_SNAPSHOT":2970,"PG_LSN":3220,
"PG_NDISTINCT":3361,"PG_DEPENDENCIES":3402,"TSVECTOR":3614,"TSQUERY":3615,"GTSVECTOR":3642,"REGCONFIG":3734,"REGDICTIONARY":3769,
"JSONB":3802,"REGNAMESPACE":4089,"REGROLE":4096}},"text":{},"binary":{}},"RowCtor":null,"rowAsArray":false}
I am using Node Express API to run SQL queries to populate a dashboard of data. I am using the mssql-node package to do so. Sometimes it runs flawlessly, other times I get the following error:
[Error: [Microsoft][SQL Server Native Client 11.0]Query timeout expired]
I am creating a poolPromise with a connectionPool to the db, then I pass that object to my other controllers which run the specific queries to populate data. I run the server which initiates the db.js script and connects to MSSQL with a pool connection.
db.js:
// for connecting to sql server
const sql = require('mssql/msnodesqlv8');
// db config to connect via windows auth
const dbConfig = {
driver: 'msnodesqlv8',
connectionString: 'Driver={SQL Server Native Client 11.0};Server={my_server};Database={my_db};Trusted_Connection={yes};',
pool: {
idleTimeoutMillis: 60000
}
};
// create a connectionpool object to pass to controllers
// this should keep a sql connection open indefinitely that we can query when the server is running
const poolPromise = new sql.ConnectionPool(dbConfig)
.connect()
.then(pool => {
console.log('Connected to MSSQL');
return pool;
})
.catch(err => console.log('Database Connection Failed! Bad Config: ', err))
module.exports = { sql, poolPromise };
An example of one of my controllers and how I use the poolPromise object is below. I currently have about 7 of these controllers that run their own specific query to populate a specific element on the dashboard. The performance of the queries each run within 1-10 seconds (depending on current server load, as I am querying an enterprise production server/db, this can vary). As I mentioned earlier, the queries run flawlessly sometimes and I have no issues, but at other times I do have issues. Is this a symptom of me querying from a shared production server? Is it preferred to query from a server that has less load? Or am I doing something in my code that could be improved?
const { sql, poolPromise } = require('../db');
// function to get data
const getData = async (req, res) => {
try {
// create query parameters from user request
let id= req.query.id;
// create query from connectionPool
let pool = await poolPromise;
let qry = `
select * from tbl where id = #Id
`
let data = await pool.request()
.input('Id', sql.VarChar(sql.MAX), id)
.query(qry);
// send 200 status and return records
res.status(200);
res.send(data.recordset);
} catch(err) {
console.log('Error:');
console.log(err);
res.sendStatus(500);
}
};
module.exports = { getData };
Hi I am new to node and oracle.I have created a app and made a successfull connection to db.
I need to use connection object across the application how can i do that?
Below is my index.js file
const express = require("express");
const app = express();
const authRoute = require("./routes/auth");
app.use(express.json());
app.use("/api",authRoute) ;
app.listen(3000,function(){
console.log("Node Server : Running on port 3000...");
})
database connection file => connect.js
const oracledb = require('oracledb');
const dotenv = require('dotenv');
dotenv.config();
const connection = oracledb.getConnection(
{
user : process.env.USER,
password : process.env.PASS,
connectString : process.env.ConnectString
},
function(err, connection)
{
if (err) {
console.error(err.message);
return;
}
console.log('Connection was successful!');
connection.close(function(err){
if (err) {
console.error(err.message);
return;
}
});
});
module.exports = connection;
I want to use this db connection in my auth.js file
const router = require('express').Router();
const db = require('../database/connect');
router.post("/authenticate",function(req,res){
//console.log(req);
const user = req.body.username;
const username = {"name" : user};
const pass = req.body.key;
const password = {"pass" : pass};
//const result = db.execute('select * from usertable');// this doesn't work
//console.log(result.rows);
res.send('success');
});
module.exports = router;
when i run const result = db.execute('select * from usertable'); I get the error below.
TypeError: Cannot read property 'execute' of undefined
What am i doing wrong.Can anyone please help.Thanks in advance
I had faced this problem. You must install Oracle install client v 19 in your machine. You have to go to web install oracle instant client base on your machine.
(Update: there is a multi-part series with code showing what you want at https://github.com/oracle/oracle-db-examples/tree/main/javascript/rest-api)
Use a connection pool that is opened at app start. Then the pool cache can be used to get the pool (and then connections) in other modules.
For a web app like yours you definitely want to use a connection pool for performance.
There's a big section on connection pooling in the documentation. E.g see Connection Pool Cache which says:
When pools are created, they can be given a named alias. The alias can
later be used to retrieve the related pool object for use. This
facilitates sharing pools across modules and simplifies getting
connections.
The examples are worth reviewing.
Background
Making a small web app that connects to a Mongo DB hosted with Mlab. I've created the DB on mlab, and created users with read/write permission. I've also created a users collection with several records.
The Problem
When I try and connect to the database using the code on mongo.github.io, I hit the error:
/home/ed/dev/mongo-demo/node_modules/mongodb/lib/operations/mongo_client_ops.js:466
throw err;
^
TypeError: Cannot read property 'db' of null
The Code
var MongoClient = require('mongodb').MongoClient;
const url = 'mongodb://<user>:<pass>#ds115434.mlab.com:15434';
// Database Name
const dbName = 'princee3-music';
// Use connect method to connect to the server
MongoClient.connect(url, function(err, client) {
console.log("Connected successfully to server");
const db = client.db(dbName);
client.close();
});
What I Have Tried
Oddly, if I connect through the shell using:
mongo ds115434.mlab.com:15434/princee3-music -u <dbuser> -p <dbpassword>
That works fine, or if I wrap the connection in an anonymous self-calling async function, it also connects.
Async Wrapper
const MongoClient = require('mongodb').MongoClient;
const mongoUrl = 'mongodb://<user>:<pass>#ds115434.mlab.com:15434/';
const dbName = 'princee3-music';
(async() => {
const client = await MongoClient.connect(mongoUrl, { useNewUrlParser: true});
const db = client.db(dbName);
db.collection('users').insertOne({
email: user.email,
pass: hashedPassword,
admin: true
}, (err, result) => {
if (err) {
reject({error: err});
} else {
resolve({message: 'okay'});
}
});
client.close();
})();
Any pointers on where I may be going wrong would be great.
The official mLab docs advise to connect like below. It has to be asynchronous , in order to wait for the connection to occur, or the client will be null, thus throwing an error saying that it can’t read property db of null.
On the other hand, you async has useNewUrlParser which might be the key to have a successful connection, see this issue
MongoClient.connect(url, { useNewUrlParser: true }).then(client => client.db())
I'm new to MongoDB.
When I create my node.js server I use only one DB connection (on start I connect to it).
But imagine: I have one database with some generic tables, and more databases - each for a custom client.
How can I create those DB at runtime?
start.js:
const mongoose = require("mongoose");
// import environmental variables from variables.env file
require("dotenv").config({ path: "variables.env" });
mongoose.connect(process.env.DATABASE);
mongoose.Promise = global.Promise;
mongoose.connection.on("error", err => {
console.error(`🚫 → ${err.message}`);
});
require("./models/MaintenanceType");
require("./models/Maintenance");
const app = require("./app");
app.set("port", process.env.PORT || 7777);
const server = app.listen(app.get("port"), () => {
console.log('started');
});
variables.env (example):
NODE_ENV=development
DATABASE=mongodb://db:qwe123#sometest.server.com:412345/webtest
PORT=1234
SECRET=webtest
KEY=webtestcom
and controller:
const mongoose = require("mongoose");
const Maintenance = mongoose.model("Maintenance");
exports.createMaintenance = async (req, res) => {
const maintenance = await new Maintenance(req.body).save();
// ALSO create a db and table if not exists for this client and use it somehow
res.json(maintenance);
};
is it possible to do?
You can create new connection
mongoose.connect('URI_FOR_ANOTHER_DATABASE')
But it's bad idea to create new connections, so the driver has a feature to use existing connections to query another database, for this purpose you can check useDb() method as shown here