Node JS Multiple Select - javascript

Hi, I am trying to use two SELECT statements in one JS file with Node.js and SQL Server, and I am unable to figure out the syntax for this. I need one SELECT to get all the persons from a table and another SELECT to count the total number of persons in that table. Is it possible to put those two SELECTs in a single JS file? If so, can someone help me with the syntax?
Here is the code I tried; I am getting the error
"Can't set headers after they are sent"
var sql = require("mssql");
var dbConfig = {
    server: "XXXXX",
    database: "XXXXX",
    user: "XXXXX",
    password: "XXXX",
    port: 1433
};

exports.list = function(req, res) {
    sql.connect(dbConfig, function (err) {
        if (err) console.log(err);
        var request = new sql.Request();
        request.query('select * from PERSON', function (err, recordset) {
            if (err)
                console.log(err)
            else
                console.log(recordset)
            res.render('personinfo_itwx', { data: recordset });
        });
        request.query('select count(*) from PERSON', function (err, recordset) {
            if (err)
                console.log(err)
            else
                console.log(recordset1)
            res.render('personinfo_itwx', { data: recordset1 });
        });
    });
};

@Aditya I'm not sure it's the best way to do this, but I would simply make two separate requests to achieve what you need. As I mentioned in my comment, the easiest way would be to use (for instance) the async library. Here's the example you asked for.
WARNING: I did not check the mssql docs
const async = require('async')
// {
async.series([
    function(next)
    {
        new sql.Request()
            .query('SELECT * from PERSON', next) // next receives (err, resultList)
    },
    function(next)
    {
        new sql.Request()
            .query('SELECT COUNT(*) from PERSON', next) // next receives (err, count)
    }
], (err, result) =>
{
    /*
        err:
            - if any of the calls shown above returns an error, the whole chain is cancelled.
        result: Array
            - if both requests are successful, you'll end up with an array of both results.
        ---
        Now you can render both results to your template at once.
    */
})
// }
Of course, if you want to work with the errors or results once you get them, you can always pass them into a new function, play with your data, and invoke the callback afterwards. Like so:
function(next)
{
    new sql.Request()
        .query('SELECT * from PERSON', (err, resultList) =>
        {
            if (err)
            {
                return next(err, null)
            }
            /*
                data manipulation
            */
            return next(null, resultList)
        })
},
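For reference, the original "Can't set headers after they are sent" error comes from calling res.render twice for the same request: whichever query finishes second tries to send a second response. A minimal sketch of an alternative, assuming a recent mssql release whose connect/query calls return promises (dbConfig and the view name are taken from the question), runs both queries and renders exactly once:
// Sketch only: assumes the promise API of a recent `mssql` release.
var sql = require("mssql");

exports.list = async function (req, res) {
    try {
        var pool = await sql.connect(dbConfig);
        var persons = await pool.request().query('select * from PERSON');
        var total = await pool.request().query('select count(*) as total from PERSON');
        // Render a single response containing both results.
        res.render('personinfo_itwx', {
            data: persons.recordset,
            count: total.recordset[0].total
        });
    } catch (err) {
        console.log(err);
        res.status(500).send('Database error');
    }
};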

Related

How do I set up a grouping query on the server side? I want to set up the query so I can use it for a pie chart on the front end, using d3.js.

I am using this query to count and group occupation types. I need to do this in order to create a pie chart on the client side. I don't think I need to pass any input; I just want the information grouped and counted. user_id is referenced only in case I get errors with authorization of the JWT token; it is not referenced in the table I am pulling information from and is there purely for authorization purposes.
router.get("/occupation/:user_id", authorization, async (req, res) => {
try {
console.log(req);
const result = await pool.query(
"SELECT occupation,COUNT(occupation) FROM resources GROUP BY occupation;",
[req.params.user_id][req.body.occupation]
// [req.body.json
// ]
);
console.log(req.body);
res.status(200).json({
status: "success",
data: {
occupation: result.rows, //this gets the one row we need
},
});
} catch (err) {
console.error(err.message);
}
});
router.get("/occ", authorization, async (req, res) => {
try {
console.log(req);
const result = await pool.query(
// "SELECT occupation,COUNT(occupation) FROM resources GROUP BY occupation;",
"SELECT occupation,COUNT(occupation) as values FROM resources GROUP BY occupation"
);
console.log(req);
console.log(result);
res.status(200).json({
status: "success",
data: {
occupationResults: result.rows, //this gets the one row we need
},
});
} catch (err) {
console.error(err.message);
}
});
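Since the GROUP BY query takes no bind parameters, a minimal sketch of such a route, assuming the same Express router, authorization middleware, and node-postgres pool as in the code above, would omit the parameter array entirely:
// Sketch only: not the asker's final code. user_id is consumed by the
// authorization middleware, so nothing is passed to pool.query here.
router.get("/occupation/:user_id", authorization, async (req, res) => {
    try {
        const result = await pool.query(
            "SELECT occupation, COUNT(occupation) AS count FROM resources GROUP BY occupation"
        );
        // result.rows is shaped like [{ occupation: '...', count: '2' }, ...];
        // node-postgres returns COUNT as a string, so convert with Number() before charting.
        res.status(200).json({
            status: "success",
            data: { occupation: result.rows },
        });
    } catch (err) {
        console.error(err.message);
        res.status(500).json({ status: "error" });
    }
});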

Oracledb (Node.js) works when I create a new pool each time but not otherwise. I need it to use the already created pool

I followed a tutorial to get this working for stored procedures inside Oracle. My GET/SELECT statements work correctly: based on the user making the GET call, the code switches pools so that the SELECTs run as the correct user.
Pool creation that works for GET/SELECT:
async function initialize() {
    await oracledb.createPool({
        user: 'user1',
        password: 'pass1',
        connectString: 'oracledb.website/dev',
        poolAlias: 'pool1'
    });
    await oracledb.createPool({
        user: 'user2',
        password: 'pass2',
        connectString: 'oracledb.website/dev',
        poolAlias: 'pool2'
    });
}
The tutorial I followed for stored procedures can be found here: https://blogs.oracle.com/opal/using-dbmsoutput-with-nodejs-and-node-oracledb
You will see that in this example he has a new pool being created for every request.
oracledb.createPool(
    dbconfig,
    function(err, pool) {
        if (err)
            console.error(err.message)
        else
            doit(pool);
    });
var doit = function(pool) {
Note that the dbconfig used above is an object like:
dbconfig.hrPool.user = 'user3';
dbconfig.hrPool.password = 'pass3';
dbconfig.hrPool.connectString = 'oracle.site/dev';
This will cause issues if you specify a poolAlias: you will quickly end up trying to create a pool alias that already exists, with an error like:
"NJS-046: poolAlias "pool1" already exists in the connection pool cache."
I have attempted to update this code myself, but I am not familiar enough with async/waterfalls/callbacks to get it working.
What I attempted is below (it never actually runs anything):
return new Promise(async (resolve, reject) => {
    async.waterfall(
        [
            function(cb) {
                oracledb.getConnection('pool1');
            },
            enableDbmsOutput,
            createDbmsOutput,
            fetchDbmsOutputLine
        ],
        function (err, conn, cb) {
            if (err) {
                console.error("In waterfall error cb: ==>", err, "<== THIS IS WHERE THE ORACLE ERROR WILL SHOW!");
                // Release the Oracle Connection
                conn.release(function (err) {
                    if (err) console.error(err.message);
                });
            }
        }
    );

    var enableDbmsOutput = function (conn, cb) {
        conn.execute(
            "BEGIN DBMS_OUTPUT.ENABLE(NULL); END;",
            function(err) { return cb(err, conn); });
    };

    var createDbmsOutput = function (conn, cb) {
        console.log('I NEVER MAKE IT HERE')
        conn.execute(query,
            function(err) { return cb(err, conn); });
    };

    var fetchDbmsOutputLine = function (conn, cb) {
        conn.execute(
            "BEGIN DBMS_OUTPUT.GET_LINE(:ln, :st); END;",
            { ln: { dir: oracledb.BIND_OUT, type: oracledb.STRING, maxSize: 32767 },
              st: { dir: oracledb.BIND_OUT, type: oracledb.NUMBER } },
            function(err, result) {
                if (err) {
                    return cb(err, conn);
                } else if (result.outBinds.st == 1) {
                    return cb(null, conn); // no more output
                } else {
                    resolve(result);
                    return fetchDbmsOutputLine(conn, cb);
                }
            });
    };
})
}
Would really appreciate any help!
The blog you quoted is a command-line script and only creates a pool once; that happens at the start of the script. It is also an old blog post: all of its async-module calls and JS callbacks should now be replaced by Node.js's newer async/await syntax. Also avoid using new Promise() directly; the code gets too confusing.
Since you are creating some kind of web listener, you should create the pool during app start-up, not for each web request.
Check the node-oracledb example webapp.js.
async function init() {
    try {
        await oracledb.createPool({
            user: dbConfig.user,
            password: dbConfig.password,
            connectString: dbConfig.connectString
        });

        // Create HTTP server and listen on port httpPort
        const server = http.createServer();
        server.on('error', (err) => {
            console.log('HTTP server problem: ' + err);
        });
        server.on('request', (request, response) => {
            handleRequest(request, response);
        });
        await server.listen(httpPort);
        console.log("Server is running at http://localhost:" + httpPort);
    } catch (err) {
        console.error("init() error: " + err.message);
    }
}

async function handleRequest(request, response) {
    ... // Your code to handle each web request goes here.
}

init();
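A minimal sketch of what handleRequest could look like, assuming the pool created in init() is the default pool so oracledb.getConnection() with no alias picks it up (the query here is only a placeholder, not from your code):
// Sketch only: grabs a connection from the pool created at start-up,
// runs a placeholder query, and returns the connection to the pool.
async function handleRequest(request, response) {
    let connection;
    try {
        connection = await oracledb.getConnection(); // from the default pool; no new pool per request
        const result = await connection.execute("SELECT sysdate FROM dual");
        response.writeHead(200, { 'Content-Type': 'application/json' });
        response.end(JSON.stringify(result.rows));
    } catch (err) {
        response.writeHead(500);
        response.end(err.message);
    } finally {
        if (connection) {
            await connection.close(); // releases the connection back to the pool
        }
    }
}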
You may also want to review the PL/SQL procedure example plsqlproc.js.

Calling a mysql column using template literals node js

I am trying to reference a MySQL field within my Node.js file using template literals but am unable to obtain the value. Please take a look at my post.controller.js file below, where it says message: Post ${body.post_id} was successfully created, and post_id is a field in my MySQL database.
//The following code is in post.service.js file
const pool = require("../../config/database");

module.exports = {
    //Create new post
    createPost: (data, callBack) => {
        pool.query(
            `insert into posts(userhandle, post_body)
             values(?,?)`,
            [
                data.userhandle,
                data.post_body
            ],
            (error, results, fields) => {
                if (error) {
                    return callBack(error);
                }
                return callBack(null, results);
            }
        );
    }
}

//The following code is in post.controller.js file
const {
    createPost,
} = require("./post.service");

module.exports = {
    //Controller for creating new post
    createPost: (req, res) => {
        const body = req.body;
        createPost(body, (err, results) => {
            if (err) {
                console.log(err);
                return res.status(500).json({
                    success: 0,
                    message: "Error. Unable to create post"
                });
            }
            return res.status(200).json({
                success: 1,
                message: `Post ${body.post_id} was successfully created`,
                data: results
            });
        });
    }
}
I'm guessing post_id is an auto-incremented primary key; if so, try results.post_id, since results is the object returned from the callback.
If that doesn't work, do console.log(results) and see whether post_id is in it.
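For what it's worth, with the mysql/mysql2 drivers the result object of an INSERT is an OkPacket whose generated key is exposed as insertId rather than under the column name, so a hedged variant of the controller callback might read:
// Sketch only: assumes the `mysql`/`mysql2` driver, where an INSERT
// returns an OkPacket carrying the generated key in `insertId`.
createPost(body, (err, results) => {
    if (err) {
        console.log(err);
        return res.status(500).json({
            success: 0,
            message: "Error. Unable to create post"
        });
    }
    return res.status(200).json({
        success: 1,
        // results.insertId holds the auto-generated post_id
        message: `Post ${results.insertId} was successfully created`,
        data: results
    });
});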

synchronicity javascript error while accessing the database twice in an endpoint using node/express

I have a company that has a job opening, and other users who want to work there have to place orders for that job position. Every order has the id of the user that made it. I also want to show the name of that user when the company wants to see all the orders for a job.
So what I was doing was getting all the orders with Order.getOrders, then fetching the name and email of the user for every order and adding them to what I am going to return.
The error I'm getting is TypeError: Cannot read property 'then' of undefined on the last then.
router.get("/orders", verifyToken, (req, res) => {
Order.getOrders(req.userId, (err, rows) => {
for (x in rows) {
console.log(rows[x].id);
User.forge({id: rows[x].id}).fetch({columns:['email','name']}).then(user => {
rows[x].nameTester = user.get('name');
rows[x].emailTester = user.get('email');
});
}
}).then(function(err, rows) {
res.json({orders: rows});
});
});
And this
Order.getOrders = (userData, callback)=>{
if (connection) {
const query = ...sql query...
connection.query(query, [userData], (err, result, fields) => {
if (err) {
throw err;
} else {
callback(null, result);
}
});
}
};
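The TypeError comes from calling .then on the return value of Order.getOrders, which is a plain callback-style function and therefore returns undefined. A minimal sketch of one way around this, keeping the callback-style Order.getOrders and the Bookshelf-style User.forge().fetch() from the question, collects the per-order fetch promises and responds only once they have all resolved:
// Sketch only: waits for every User lookup before sending the response.
router.get("/orders", verifyToken, (req, res) => {
    Order.getOrders(req.userId, (err, rows) => {
        if (err) {
            return res.status(500).json({ error: err.message });
        }
        const lookups = rows.map(row =>
            User.forge({ id: row.id })
                .fetch({ columns: ['email', 'name'] })
                .then(user => {
                    row.nameTester = user.get('name');
                    row.emailTester = user.get('email');
                })
        );
        Promise.all(lookups)
            .then(() => res.json({ orders: rows }))
            .catch(e => res.status(500).json({ error: e.message }));
    });
});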

Javascript nodejs tedious mssql is there a way to get json?

I'm using Node.js and the tedious connector to get data from an MS SQL server. In the documentation, I only see this one way to retrieve data:
var request = new Request("select Name, Value, Article_Id from [tableone] where Id = '1'", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', function (rows) {
...
bigArrat.push(JSON.stringify(rows));
});
But in my case I want all rows, and not just one property but several. Currently it returns one cell per entry, e.g. rows[0].value returns Name, rows[1].value returns Value, and so on; for me that is useless.
I want to get all the information as a JSON array of objects, not all the metadata or a single property. Is there a way to do this, or is there a better connector for Node.js and SQL Server?
The rows value sent to your initial callback is the array of rows being sent back:
var request = new Request("select Name, Value, Article_Id from [tableone] where Id = '1'", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
console.log(rows) // this is the full array of row objects
// it just needs some manipulating
jsonArray = []
rows.forEach(function (columns) {
var rowObject ={};
columns.forEach(function(column) {
rowObject[column.metadata.colName] = column.value;
});
jsonArray.push(rowObject)
});
return callback(null, rowCount, jsonArray);
});
In SQL Server 2016 you can format query results as JSON text using the FOR JSON option; see https://msdn.microsoft.com/en-us/library/dn921882.aspx.
You just need to read the JSON fragments returned by the query.
Add this to your config.
rowCollectionOnRequestCompletion: true
var config = {
    userName: '', // update me
    password: '', // update me
    server: '', // update me
    options: {
        database: '', // update me
        encrypt: true,
        rowCollectionOnRequestCompletion: true
    }
}
Then, in your query callback, you can get the data from rows.
var executeQuery = (res, query) => {
    request = new Request(query, (err, rowCount, rows) => {
        console.log("Rows: ", rows);
        res.send(rows);
    });
    connection.execSql(request);
}
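A hypothetical usage of that helper from an Express route (the route path is an assumption, not from the answer):
// Sketch only: wiring the executeQuery helper above into a route.
app.get('/tableone', (req, res) => {
    executeQuery(res, "select Name, Value, Article_Id from [tableone] where Id = '1'");
});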
I learned it from:
http://tediousjs.github.io/tedious/api-request.html
EDIT
Updated so the response does not include the metadata:
var data = []
request = new Request(query, (err, rowCount, rows) => {
    if (err) {
        console.log(err)
        res.send({ status: 500, data: null, message: "internal server error."})
    } else {
        console.log(rowCount + ' row(s) returned')
        res.send({ status: 200, data: data, message: "OK"})
    }
})
request.on('row', function(row) {
    data.push({
        last_name: row[0].value,
        first_name: row[1].value
    })
})
connection.execSql(request)
If you are using Express on the server side, I can recommend express4-tedious (see https://www.npmjs.com/package/express4-tedious). It lets you write APIs for SQL connections with very little code and streams the JSON result into the response.
Connection:
var express = require('express');
var tediousExpress = require('express4-tedious');
var app = express();

app.use(function (req, res, next) {
    req.sql = tediousExpress(req, {connection object});
    next();
});
Example API:
/* GET from tableone, streams json result into response */
router.get('/', function (req, res) {
    req.sql("select Name, Value, Article_Id from [tableone] where Id = '1' for json path")
        .into(res);
});
You can then call these APIs, e.g. from the front end.
I tried that way but it did not work for me; perhaps my knowledge of JS and callbacks is not good enough. So here is my solution. I had to add rowCollectionOnRequestCompletion to my connection config to make the rows argument of new Request work; you will have to do that too (the relevant option is shown in my connection config below).
The second thing I did is pretty simple.
var jsonArray = [];

var request = new Request("SELECT TOP 5 * FROM tableName", function(err, rowCounts, rows)
{
    if (err)
    {
        console.log(err);
    }
    else
    {
        console.log(rowCounts + " rows returned");
    }
    //Now parse the data from each of the rows and populate the array.
    for (var i = 0; i < rowCounts; i++)
    {
        var rowObject = {}; // a fresh object per row, so earlier rows are not overwritten
        var singleRowData = rows[i];
        //console.log(singleRowData.length);
        for (var j = 0; j < singleRowData.length; j++)
        {
            var tempColName = singleRowData[j].metadata.colName;
            var tempColData = singleRowData[j].value;
            rowObject[tempColName] = tempColData;
        }
        jsonArray.push(rowObject);
    }
    //This line will print the array of JSON objects.
    console.log(jsonArray);
});
And to show you what my connection config looks like:
static config: any =
{
    userName: 'username',
    password: 'password',
    server: 'something.some.some.com',
    options: { encrypt: false, database: 'databaseName',
               rowCollectionOnRequestCompletion: true }
};//End: config
And this is how I am passing it to the connection:
static connection = new Connection(Server.config);
Complementing the answer from @Jovan MSFT:
var request = new Request('select person_id, name from person for json path', function(err) {
    if (err) {
        console.log(err);
    }
    connection.close();
});
And, finally, in the row event:
request.on('row', function(columns) {
    var obj = JSON.parse(columns[0].value);
    console.log(obj[0].name);
});
P.S.: the code above does not iterate over the columns parameter because FOR JSON PATH returns a single array of objects in a single row and column.
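One caveat worth hedging: for larger result sets SQL Server can split the FOR JSON output across several rows, so a more defensive sketch accumulates the fragments and parses once the request completes (reusing the request object from above):
// Sketch: concatenate FOR JSON fragments in case the JSON text
// spans multiple rows, then parse once at the end.
var jsonText = '';
request.on('row', function (columns) {
    jsonText += columns[0].value;
});
request.on('requestCompleted', function () {
    var objects = jsonText ? JSON.parse(jsonText) : [];
    console.log(objects);
});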
Applying a map/reduce over the returned rows:
rows.map(r => {
    return r.reduce((a, k) => {
        a[k.metadata.colName] = k.value
        return a
    }, {})
})
This is a combination of a few responses above. This uses FOR JSON AUTO in the SELECT statement and parses the "column" as JSON. The row/column nomenclature may be a bit misleading for folks unfamiliar with this API. In this case, the first "columns" value will be an array of the rows in your table:
var request = new Request("SELECT Name, Value, Article_Id FROM [tableone] WHERE Id = '1' FOR JSON AUTO", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', (columns) => {
const json = JSON.parse(columns[0].value);
});
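One detail the snippet leaves implicit: the request still has to be handed to an open connection to actually run, e.g. (assuming a tedious connection object as in the earlier answers):
// Sketch: execute the request on an existing tedious connection.
connection.execSql(request);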
