I am developing a REST API. One of the endpoints receives a list of data like the one below.
[
  {
    "iduser": 3,
    "title": "House in kandala",
    "description": "Built a house in kandala area"
  },
  {
    "iduser": 3,
    "title": "House in NYC",
    "description": "Built a house in greater NYC area"
  }
]
I need to save the list into the database. Below is my code.
const mysql = require('mysql2');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createConnection({
  host: prop.get('server.host'),
  user: prop.get("server.username"),
  password: prop.get("server.password"),
  port: prop.get("server.port"),
  database: prop.get("server.dbname")
});
exports.saveSellerPortfolioItem = (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;
  if (event.body == null && event.body == undefined) {
    var response = errorCodes.missing_parameters;
    callback(null, response);
  }
  else {
    let body = JSON.parse(event.body);
    console.log("body", body);
    let iduser = Number(body.iduser);
    let title = body.title;
    let description = body.description;
    if (isNaN(iduser)) {
      var response = errorCodes.invalid_parameter;
      callback(null, response);
    }
    else {
      // allows for using callbacks as finish/error-handlers
      const sql = "INSERT INTO seller_portfolio_item (iduser, title, description) VALUES (?,?,?)";
      con.execute(sql, [iduser, title, description], function (err, result) {
        if (err) {
          console.log(err.toString());
          if (err.toString().indexOf('cannot be null') >= 0) {
            var response = errorCodes.not_null_parameters;
            callback(null, response);
          }
          var response = errorCodes.internal_server_error;
          callback(null, response);
        }
        else {
          var response = {
            "statusCode": 200,
            "headers": {
              "Content-Type": "application/json"
            },
            "body": JSON.stringify({ insertId: result.insertId }),
            "isBase64Encoded": false
          };
          callback(null, response);
        }
      });
    }
  }
};
My code can insert just one record, so it is not suitable for saving multiple records when I am sending a list. As a result, the client program has to call the same method again and again in a loop.
How can I read the list and insert multiple records?
Going forward it is better to use the mysql package instead of mysql2 here. Below is one approach that can be used to batch-insert multiple records.
Be sure to run npm install mysql --save to ensure you have the necessary package installed.
Working with multiple records requires some additional thinking and planning as well. You should consider:
does your table contain any unique keys other than the primary?
is it possible your API function will ever attempt to insert a duplicate?
in the event of a duplicate how should it be handled?
do you need to know the insert ID for every new record created?
will every object in your list always have the same number of entries, the same keys, and expected values?
Depending on your answers to the above considerations, the example provided below could require additional code and handling. This example is the simplest implementation of the idea.
// package changed, remember to npm install…
const mysql = require('mysql');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createPool({
  connectionLimit: 10,
  host: prop.get('server.host') || '127.0.0.1',
  user: prop.get("server.username") || 'local_user',
  password: prop.get("server.password") || 'local_password',
  database: prop.get("server.dbname") || 'local_database',
  multipleStatements: true, // necessary to run chained queries
  charset: 'utf8mb4' // necessary if you might need support for emoji characters - table charset must match
});
exports.saveSellerPortfolioItem = (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;
  // It is better to check for the existence of your
  // expected request body in the controller stage of
  // your app but I've included this for consistency
  // with your original code.
  let query_object = event.body ? JSON.parse(event.body) : null;
  console.log('query_object', query_object);
  if (!query_object || !query_object.length) {
    let response = errorCodes.missing_parameters;
    callback(null, response);
  }
  else {
    // use the keys of the first object to define the field names.
    // you don't have to use this approach but it provides flexibility
    // if you will not always use the same fields
    let keys = Object.keys(query_object[0]);
    // map the values into a supported format
    let values = query_object.map(obj => keys.map(key => obj[key]));
    let sql = 'INSERT INTO seller_portfolio_item (' + keys.join(',') + ') VALUES ?;';
    // note the extra array around values: the bulk-insert syntax expects
    // a single placeholder bound to an array of row arrays
    con.query(sql, [values], function (error, results, fields) {
      if (error) return callback(null, error);
      // when inserting multiples you will only get back the
      // insert id of the first record. if there are updates
      // due to duplicate keys, you won't even get that.
      // results will look like this:
      console.log(results);
      // Expected output
      // OkPacket {
      //   fieldCount: 0,
      //   affectedRows: 3,
      //   insertId: 1,
      //   serverStatus: 2,
      //   warningCount: 6,
      //   message: '&Records: 3 Duplicates: 0 Warnings: 6',
      //   protocol41: true,
      //   changedRows: 0
      // }
      let response = {
        "statusCode": 200,
        "headers": {
          "Content-Type": "application/json"
        },
        "body": JSON.stringify({ records_inserted: results.affectedRows }),
        "isBase64Encoded": false
      };
      callback(null, response);
    });
  }
};
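If duplicates turn out to be a concern (see the considerations above), one way to handle them is MySQL's ON DUPLICATE KEY UPDATE clause. A minimal sketch reusing keys, values, and con from the code above; it assumes the table has a unique key that can collide, e.g. UNIQUE (iduser, title), which is an assumption and not something from the question:

// Sketch only: requires a unique key on the table, e.g. UNIQUE (iduser, title),
// for the duplicate-handling path to ever trigger.
let sql = 'INSERT INTO seller_portfolio_item (' + keys.join(',') + ') VALUES ? ' +
  'ON DUPLICATE KEY UPDATE description = VALUES(description);';
con.query(sql, [values], function (error, results) {
  if (error) return callback(null, error);
  // affectedRows counts 1 for each insert and 2 for each updated duplicate,
  // so it no longer equals the number of new records.
  console.log(results.affectedRows);
});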
I am developing a REST API in Node.js using AWS Lambda.
Below is my code. I am trying to get a JSON input from the user and save it into the database.
const mysql = require('mysql2');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createConnection({
  host: prop.get('server.host'),
  user: prop.get("server.username"),
  password: prop.get("server.password"),
  port: prop.get("server.port"),
  database: prop.get("server.dbname")
});
exports.updateUser = (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;
  if (event.body == null && event.body == undefined) {
    var response = errorCodes.missing_parameters;
    callback(null, response);
  }
  else {
    let body = JSON.parse(event.body);
    console.log("body", body);
    let iduser = body.iduser;
    let first_name = body.first_name;
    let created_date = body.created_date;
    // allows for using callbacks as finish/error-handlers
    const sql = "UPDATE user SET first_name=?, created_date=? WHERE iduser=?";
    con.execute(sql, [first_name, created_date, iduser], function (err, result) {
      if (err) {
        console.log(err.toString());
        if (err.toString().indexOf('cannot be null') >= 0) {
          var response = errorCodes.not_null_parameters;
          callback(null, response);
        }
        var response = errorCodes.internal_server_error;
        callback(null, response);
      }
      else {
        var response = {
          "statusCode": 200,
          "headers": {
            "Content-Type": "application/json"
          },
          "body": JSON.stringify({ insertId: result.insertId }),
          "isBase64Encoded": false
        };
        callback(null, response);
      }
    });
  }
};
This is my JSON input:
{
  "iduser": 3,
  "first_name": "Ranjith",
  "created_date": "2021-07-28 00:00:00"
}
Now, notice my iduser is always a number, first_name is a string, and created_date is a date.
Right now, there is no data type validation, so I can even pass a text like hello to the created_date field. The value will not get saved, but the database field will be set to null.
Instead, I am thinking of validating the data types before sending, so I can convert iduser to a number, first_name to a string, and created_date to a date.
I have two questions.
Is this kind of data type validation good practice in Node.js? I am asking because I am a Java guy; there, things always have a specific data type.
If data type validation is a good thing, how can I convert the above data to the appropriate types?
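For illustration, a minimal validation-and-conversion sketch for the payload above; the validateBody helper and its error messages are assumptions for the example, not an existing API:

// Hypothetical helper: checks and converts the three fields,
// returning { ok: true, value } on success or { ok: false, error } on failure.
function validateBody(body) {
  const iduser = Number(body.iduser);
  if (!Number.isInteger(iduser)) {
    return { ok: false, error: "iduser must be an integer" };
  }
  if (typeof body.first_name !== "string" || body.first_name.length === 0) {
    return { ok: false, error: "first_name must be a non-empty string" };
  }
  const created_date = new Date(body.created_date);
  if (Number.isNaN(created_date.getTime())) {
    return { ok: false, error: "created_date must be a valid date" };
  }
  return { ok: true, value: { iduser, first_name: body.first_name, created_date } };
}

Schema libraries such as joi or ajv cover the same ground declaratively and are common in Node.js, so hand-rolled checks like this are usually only worth it for very small payloads.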
I'm working with the node-globaloffensive library and want to send multiple requests to the Game Coordinator to get the rank of every user in my database. With my current code it just repeats all over again and I'm only getting one request back. Do you have any ideas how I can send and receive every profile?
My current code:
const SteamUser = require('steam-user');
const SteamTotp = require('steam-totp');
const GlobalOffensive = require('globaloffensive');
const config = require('./config');
const db = require('./database');
var SteamID = require('steamid');
var mysql = require('mysql');
let user = new SteamUser();
let csgo = new GlobalOffensive(user);
csgo.on('debug', console.log);
user.on('error', console.error);
var pool = mysql.createPool({
  supportBigNumbers: true,
  bigNumberStrings: true,
  connectionLimit: 10,
  host: db.host,
  user: db.user,
  password: db.password,
  database: db.dbname
});
const logInOptions = {
  accountName: config.accountName,
  password: config.password
}
user.logOn(logInOptions);
user.on('loggedOn', res => {
  console.log("Logged into Steam as " + user.steamID.getSteam3RenderedID());
  user.setPersona(SteamUser.EPersonaState.Online);
  user.gamesPlayed(730);
});
csgo.on("connectedToGC", function() {
console.log("connectedToGC");
const checkData = setInterval(()=>{
//Check connection to game coordinator
if ( csgo.haveGCSession ) {
//Check Database
pool.getConnection(function(err, connection) {
if (err) throw err;
connection.query("SELECT * FROM Users WHERE (MainSteamGroup = '1' || CommunitySteamGroup = '1' || vip = '1') && BotInFriendlist = '1'", function (err, rows, fields) {
connection.release();
if (err) throw err;
rows.forEach( (row) => {
var account_id = new SteamID(`${row.SteamID64}`);
//Request Data from CS:GO
csgo.requestPlayersProfile(account_id, function(data) {
console.log(data);
console.log("before");
setTimeout(function(){
//do what you need here
}, 2000);
console.log("after");
});
});
});
});
};
}, 10000);
});
Log:
Logged into Steam as [U:1:xxxxxxxxxx]
Sending GC message ClientHello
Sending hello, setting timer for next attempt to 2000 ms
Sending GC message ClientHello
Sending hello, setting timer for next attempt to 4000 ms
Got handled GC message ClientWelcome
Unknown SO type 2 with 1 items
Unknown SO type 7 with 1 items
GC connection established
connectedToGC
Got handled GC message MatchmakingGC2ClientHello
Got unhandled GC message 9194
Sending GC message ClientRequestPlayersProfile
Sending GC message ClientRequestPlayersProfile
Got handled GC message PlayersProfile
{ my_current_event_teams: [],
my_current_event_stages: [],
account_id: xxxxxxxx,
ongoingmatch: null,
global_stats: null,
penalty_seconds: null,
penalty_reason: null,
vac_banned: null,
ranking:
{ account_id: xxxxxxxx,
rank_id: 16,
wins: 1073,
rank_change: null },
commendation: { cmd_friendly: 57, cmd_teaching: 50, cmd_leader: 50 },
medals:
{ display_items_defidx:
[ 1331, 4551, 1376, 970, 1367, 4356, 1358, 1337, 1341, 1329 ],
medal_team: null,
medal_combat: null,
medal_weapon: null,
medal_global: null,
medal_arms: null,
featured_display_item_defidx: 1331 },
my_current_event: null,
my_current_team: null,
survey_vote: null,
activity: null,
player_level: 24,
player_cur_xp: 327680185,
player_xp_bonus_flags: null }
before
after
Sending GC message ClientRequestPlayersProfile
Sending GC message ClientRequestPlayersProfile
Got handled GC message PlayersProfile
{ my_current_event_teams: [],
my_current_event_stages: [],
A couple of problems here:
Callback-style functions inside forEach don't work the way you are expecting them to. Either use Promise.all or for..of.
mysql can be used with promises (e.g. via mysql2/promise or promise-mysql), so there is no need for callback style, which makes things complicated and hard to read.
Convert the callback style to promises.
I have modified the code. It will look something like this:
function getRequestPlayer(account_id) {
  return new Promise((resolve, reject) => {
    csgo.requestPlayersProfile(account_id, function (data) {
      console.log(data);
      resolve(data);
    });
  });
}
csgo.on("connectedToGC", async function () {
  console.log("connectedToGC");
  // Check connection to game coordinator
  if (csgo.haveGCSession) {
    // Check Database
    // (assumes a promise-based pool, e.g. one created with mysql2/promise)
    const connection = await pool.getConnection();
    const [rows] = await connection.query("SELECT * FROM Users WHERE (MainSteamGroup = '1' || CommunitySteamGroup = '1' || vip = '1') && BotInFriendlist = '1'");
    connection.release();
    await Promise.all(rows.map(row => {
      const account_id = new SteamID(`${row.SteamID64}`);
      return getRequestPlayer(account_id);
    }));
  }
});
You might need to tweak a few things here and there, but this should give you the idea. Hope this helps.
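If firing all the requests at once trips Game Coordinator rate limits, the for..of alternative mentioned above runs them sequentially instead. A minimal sketch reusing getRequestPlayer from the code above; the sleep helper and the 2000 ms delay are assumptions, not a documented GC limit:

// Hypothetical helper used to space out GC requests.
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

async function requestAllProfiles(rows) {
  for (const row of rows) {
    const account_id = new SteamID(`${row.SteamID64}`);
    const data = await getRequestPlayer(account_id);
    // do what you need with data here
    await sleep(2000);
  }
}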
I'm stuck at the point of indexing a data collection in Elasticsearch.
The following is the code I'm using to try to index the data from Mongo.
const elasticsearch = require('elasticsearch');
// instantiate an Elasticsearch client
var MongoClient = require('mongodb').MongoClient;
var ObjectID = require('mongodb').ObjectID;
var mongoDBName = 'mydb'; // Name of mongodb goes here
var mongoCollectionName = 'mycollection'; // Collection name of mongodb goes here
var connectionString = 'mongodb://127.0.0.1:27017/'; // put username and password for mongo here
var esIndexName = 'new-collection'; // Elasticsearch index name will go here
var bulk = [];
const client = new elasticsearch.Client({
  hosts: ['http://localhost:9200']
});
// ping the client to be sure Elasticsearch is up
client.ping({
  requestTimeout: 30000,
}, function(error) {
  // At this point, Elasticsearch is down, please check your Elasticsearch service
  if (error) {
    console.error('Elasticsearch cluster is down!');
  } else {
    console.log('Everything is ok');
  }
});
MongoClient.connect(connectionString + mongoDBName, function(err, db) {
  if (err) throw err;
  // for each object in a collection
  var collection = db.collection(mongoCollectionName);
  var counter = 0;
  collection.find().each(function(err, item, response, status) {
    console.log(item);
    Array.from(item).forEach(itemdata => {
      bulk.push({
        index: {
          _index: esIndexName,
          _type: mongoCollectionName,
        }
      });
      bulk.push(itemdata);
    });
    // perform bulk indexing of the data passed
    client.bulk({ body: bulk }, function(err, response) {
      if (err) {
        console.log("Failed Bulk operation".red, err);
      } else {
        console.log("Successfully imported %s".green, mongoCollectionName.length);
      }
      console.log(response);
    });
    if (item != null) {
      if (counter % 100 == 0) console.log("Syncing object id: " + item['_id'] + " #: " + counter);
      client.indices.create(
        { index: esIndexName },
        function(error, response) {
          if (error) {
            console.log(error);
          } else {
            console.log("created a new index", response);
          }
        }
      );
    }
    counter += 1;
  });
});
So here I'm trying to index data into Elasticsearch. I'm able to create the index, but I fail to insert the data into it. Can anyone help me here?
Where am I going wrong, and what mistake am I making?
I'm using Node.js here, just a simple function to test; later I will add a Lambda function to update/delete and watch for changes.
First of all, I would suggest tidying up your code; it's very difficult to see how the blocks are nested.
Now, there are several problems with your code:
Why are you doing Array.from(item).forEach(itemdata => {? item is a document object from Mongo, so calling Array.from on it has no effect.
You are calling the bulk API inside the .each callback, meaning you'll make an API call for each document. I don't think this is what you want.
You are creating the index after the bulk operation. This is wrong. You should create your ES index once and for all before inserting documents. It's important because in the future you'll want a more advanced configuration to process your documents.
Your ping call to ES is nice, but it doesn't prevent the rest of your code from running if the cluster is down.
So what you should do:
Create your ES index before iterating over you documents.
Iterate over your MongoDB documents and accumulate them in your body object.
When you have a batch of n documents, call the bulk API and reset your body (a sketch follows below).
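A minimal sketch of that flow, reusing client, esIndexName, and mongoCollectionName from the question; the batch size of 500 and the async cursor loop are assumptions, not part of the original code:

const BATCH_SIZE = 500; // arbitrary; tune to your document size

async function syncCollection(collection) {
  // create the index once, before inserting any documents
  await client.indices.create({ index: esIndexName });
  // iterate the MongoDB documents, accumulating a bulk body
  let body = [];
  const cursor = collection.find();
  while (await cursor.hasNext()) {
    const item = await cursor.next();
    body.push({ index: { _index: esIndexName, _type: mongoCollectionName } });
    body.push(item);
    // flush every BATCH_SIZE documents, then reset the body
    if (body.length >= BATCH_SIZE * 2) {
      await client.bulk({ body });
      body = [];
    }
  }
  // flush the final partial batch
  if (body.length) {
    await client.bulk({ body });
  }
}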
Here is the solution you are looking for
index.js
//MongoDB client config
var MongoClient = require('mongodb').MongoClient;
var mongoDBName = 'mydb'; // Name of mongodb goes here
var mongoCollectionName = 'mycollection'; // Collection name of mongodb goes here
var connectionString = 'mongodb://127.0.0.1:27017/'; // put username and password for mongo here
//Elasticsearch client config
const { Client } = require('@elastic/elasticsearch')
const esClient = new Client({ node: 'http://localhost:9200' });
var esIndexName = 'new-collection'; // Elasticsearch index name will go here
let bulk = [];
async function indexData() {
  const client = await MongoClient.connect(connectionString, { useNewUrlParser: true })
    .catch(err => { console.log(err); });
  if (!client) {
    return;
  }
  try {
    const db = client.db(mongoDBName);
    let collection = db.collection(mongoCollectionName);
    await collection.find().forEach((doc) => {
      bulk.push({
        index: {
          _index: esIndexName,
        }
      });
      let { _id, ...data } = doc;
      bulk.push(data);
    });
    console.log(bulk);
    await esClient.indices.create({
      index: esIndexName,
    }, { ignore: [400] });
    const { body: bulkResponse } = await esClient.bulk({ refresh: true, body: bulk });
    if (bulkResponse.errors) {
      const erroredDocuments = [];
      // The items array has the same order of the dataset we just indexed.
      // The presence of the `error` key indicates that the operation
      // that we did for the document has failed.
      bulkResponse.items.forEach((action, i) => {
        const operation = Object.keys(action)[0];
        if (action[operation].error) {
          erroredDocuments.push({
            // If the status is 429 it means that you can retry the document,
            // otherwise it's very likely a mapping error, and you should
            // fix the document before to try it again.
            status: action[operation].status,
            error: action[operation].error,
            operation: bulk[i * 2],
            document: bulk[i * 2 + 1]
          });
        }
      });
      console.log(erroredDocuments);
    }
    const { body: count } = await esClient.count({ index: esIndexName });
    console.log(count);
  } catch (err) {
    console.log(err);
  } finally {
    client.close();
  }
}
indexData();
package.json
{
  "name": "elastic-node-mongo",
  "version": "1.0.0",
  "description": "Simple example to connect ElasticSearch, MongoDB and NodeJS",
  "main": "index.js",
  "dependencies": {
    "@elastic/elasticsearch": "^7.3.0",
    "mongodb": "^3.3.2",
    "nodemon": "1.18.3"
  },
  "scripts": {
    "dev": "nodemon",
    "start": "node index.js"
  },
  "keywords": [
    "nodejs",
    "node",
    "mongodb",
    "elasticsearch",
    "docker"
  ],
  "author": "Sathishkumar Rakkiasmy",
  "license": "ISC"
}
Clarifications
I'm able to create the collection index but failed to insert the data
in an index of elastic search.
The sentence above makes sense, because the bulk variable is unaltered at the time client.bulk is called.
Refer to the links below for why the bulk variable is unaltered.
Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
How do I return the response from an asynchronous call?
To know more about asynchronous programming
https://developer.mozilla.org/en-US/docs/Learn/JavaScript/Asynchronous
https://developer.mozilla.org/en-US/docs/Learn/JavaScript/Asynchronous/Async_await
You can use Logstash to import data from MongoDB into Elasticsearch. Please find the configuration below for your reference.
input {
  mongodb {
    codec => "json"
    uri => 'mongodb://localhost:27017/NewDb'
    placeholder_db_dir => '/home/devbrt.shukla/Desktop/scalaoutput/ELK/logstash-6.4.1/db_dir'
    placeholder_db_name => 'Employee_sqlite.db'
    collection => 'Employee'
    batch_size => 5000
    generateId => 'true'
    parse_method => "simple"
  }
}
filter {
  mutate {
    remove_field => [ "_id" ]
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    index => "employee-%{+YYYY.MM.dd}"
  }
  stdout { codec => rubydebug }
}
In Logstash there are three sections: Input, Filter, and Output.
Input: takes data in from sources such as SQL, MongoDB, MySQL, etc.
Filter: in this section, we can shape a customized JSON document to index into Elasticsearch.
Output: in this section we put the index name, doc type, and address of the output target, i.e. Elasticsearch.
So, I have this POST request I made
$("#pacotes").on('click', ".produto", function () {
console.log(this.id);
$.post("http://localhost:3000/pacote?idPacote=" + this.id);
});
The log returns a number on the client side, as it should.
The post then goes through my route and arrives here
exports.Pacote = function (req, res) {
  console.log("gato");
  var pacote = req.idPacote;
  console.log(pacote);
  connection.connection();
  global.connection.query('SELECT * FROM Pacote WHERE idPacotes = ? LIMIT 1', [pacote], function (err, result) {
    if (result.length > 0) {
      if (result) {
        var object = JSON.parse(JSON.stringify(result));
        var packObject = object[0];
        if (result.length > 0) {
          if (result) {
            res.render('home', { title: 'pacote', layout: 'pacote', data: packObject });
          }
        }
      } else if (err) {
        console.log(err);
      }
    }
  });
}
The first log is just a flag to see if the request is reaching that point, which it is.
But the second log should print a number, yet it prints undefined.
I'm not very experienced in this subject, but this approach has always worked for me.
I don't understand where I went wrong, as my login function is nearly the same thing and returns actual values as expected. Maybe it is because of body-parser, but I don't know.
It just bothers me that the id comes through properly on the client side but is undefined on the server side.
I also tried the same thing with GET and the result didn't change.
You are passing "idPacote" in query string. You will get the the query string parameters in "req.query" if you are using Express with NodeJS. Try this
var pacote = req.query.idPacote;
instead of
var pacote = req.idPacote;
The var pacote = req.idPacote; should be replaced with the following (provided that you send it as a route parameter, e.g. /pacote/:idPacote):
var pacote = req.params.idPacote;
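For context, a minimal Express sketch showing where each property comes from; the routes are illustrative, not taken from the question:

const express = require('express');
const app = express();

// /pacote?idPacote=3 -> query string, read via req.query
app.post('/pacote', function (req, res) {
  res.send('query param: ' + req.query.idPacote);
});

// /pacote/3 -> route parameter, read via req.params
app.post('/pacote/:idPacote', function (req, res) {
  res.send('route param: ' + req.params.idPacote);
});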
A side note: you should be using connection pooling in order to improve performance in your app, for example:
var mysql = require("mysql");
//Database connection parameters
var config = {
connectionLimit: 10000,
host: "127.0.0.1",
user: "user",
password: "password",
database: "database",
charset: "utf8_general_ci",
connectTimeout: 4000
};
//Pool
var pool = mysql.createPool(config);
function connection(){
//Assign connection pool for further reuse
this.init = function () {
this.pool = pool;
};
//Get connection
this.acquire = function(callback){
this.pool.getConnection(function(error, con){
if (error) {
if (this.pool)
//Close all connections in pool
this.pool.end(function(err){});
console.log("\x1b[31m" + error, "\x1b[0m");
}
else {
callback(error, con);
}
});
};
}
Read more here.
I'm using Node.js and the tedious connector to get data from SQL Server. In the documentation, I only see this one way to retrieve data:
var request = new Request("select Name, Value, Article_Id from [tableone] where Id = '1'", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', function (rows) {
...
bigArrat.push(JSON.stringify(rows));
});
But in my example I want all rows with all of their columns, not just one property. Currently it returns one cell per column in each row event, e.g. rows[0].value returns Name, rows[1].value returns Value, and so on; for me that is rubbish.
I want to get all the information as a JSON array of objects, without all the metadata and without going property by property. Is there a way to do this, or is there a better connector for Node.js and SQL Server?
The rows value sent to your initial callback is the array of rows being sent back:
var request = new Request("select Name, Value, Article_Id from [tableone] where Id = '1'", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
console.log(rows) // this is the full array of row objects
// it just needs some manipulating
jsonArray = []
rows.forEach(function (columns) {
var rowObject ={};
columns.forEach(function(column) {
rowObject[column.metadata.colName] = column.value;
});
jsonArray.push(rowObject)
});
return callback(null, rowCount, jsonArray);
});
In SQL Server 2016 you can format query results as JSON text using the FOR JSON option; see https://msdn.microsoft.com/en-us/library/dn921882.aspx.
You just need to read the JSON fragments returned by the query.
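A minimal sketch of reading those fragments with tedious, assuming a connection like the ones configured elsewhere in this thread; long results are split across several row events, so the fragments are concatenated before parsing:

let jsonText = '';
const request = new Request("select Name, Value, Article_Id from [tableone] where Id = '1' for json path", function (err) {
  if (err) {
    console.log(err);
  } else {
    // all fragments received; the accumulated text is one JSON document
    console.log(JSON.parse(jsonText));
  }
});
// each row event carries one fragment of the JSON text in a single column
request.on('row', function (columns) {
  jsonText += columns[0].value;
});
connection.execSql(request);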
Add this to your config.
rowCollectionOnRequestCompletion: true
var config = {
  userName: '', // update me
  password: '', // update me
  server: '', // update me
  options: {
    database: '', // update me
    encrypt: true,
    rowCollectionOnRequestCompletion: true
  }
}
Then in your query callback you can now get the data from rows.
var executeQuery = (res, query) => {
  request = new Request(query, (err, rowCount, rows) => {
    console.log("Rows: ", rows);
    res.send(rows);
  });
  connection.execSql(request);
}
I learned it from:
http://tediousjs.github.io/tedious/api-request.html
EDIT
Updated so that the response does not include metadata:
var data = []
request = new Request(query, (err, rowCount, rows) => {
  if (err) {
    console.log(err)
    res.send({ status: 500, data: null, message: "internal server error." })
  } else {
    console.log(rowCount + ' row(s) returned')
    res.send({ status: 200, data: data, message: "OK" })
  }
})
request.on('row', function (row) {
  data.push({
    last_name: row[0].value,
    first_name: row[1].value
  })
})
connection.execSql(request)
If you are using Express on the server side, I can recommend express4-tedious (see https://www.npmjs.com/package/express4-tedious). It lets you write APIs for SQL connections with little code and streams the JSON result into the response.
Connection:
var express = require('express');
var tediousExpress = require('express4-tedious');
var app = express();
app.use(function (req, res, next) {
  req.sql = tediousExpress(req, {connection object});
  next();
});
Example API:
/* GET from tableone, streams json result into response */
router.get('/', function (req, res) {
  req.sql("select Name, Value, Article_Id from [tableone] where Id = '1' for json path")
    .into(res);
});
You can then call these APIs, e.g. from the frontend.
I tried that way but it did not work for me; perhaps my knowledge of JS and callbacks is not good enough. So, here is my solution. I had to add rowCollectionOnRequestCompletion to my connection config to make the rows argument of the Request callback work; you would also have to do this (see the config at the end of this answer).
The second thing I did is pretty simple.
var jsonArray = [];
var request = new Request("SELECT TOP 5 * FROM tableName", function (err, rowCounts, rows) {
  if (err) {
    console.log(err);
  } else {
    console.log(rowCounts + " rows returned");
  }
  // Now parse the data from each of the rows and populate the array.
  for (var i = 0; i < rowCounts; i++) {
    // a fresh object per row; otherwise every array entry
    // would point to the same object
    var rowObject = {};
    var singleRowData = rows[i];
    //console.log(singleRowData.length);
    for (var j = 0; j < singleRowData.length; j++) {
      var tempColName = singleRowData[j].metadata.colName;
      var tempColData = singleRowData[j].value;
      rowObject[tempColName] = tempColData;
    }
    jsonArray.push(rowObject);
  }
  // This line will print the array of JSON objects.
  console.log(jsonArray);
});
And to show you what my connection config looks like:
static config: any = {
  userName: 'username',
  password: 'password',
  server: 'something.some.some.com',
  options: {
    encrypt: false,
    database: 'databaseName',
    rowCollectionOnRequestCompletion: true
  }
}; // End: config
And this is how I am passing it to the connection:
static connection = new Connection(Server.config);
Complementing the answer from #Jovan MSFT:
var request = new Request('select person_id, name from person for json path', function(err) {
  if (err) {
    console.log(err);
  }
  connection.close();
});
And, finally, in the row event:
request.on('row', function(columns) {
  var obj = JSON.parse(columns[0].value);
  console.log(obj[0].name);
});
P.S.: the code above does not iterate over columns parameter because for json path returns a single array of objects in a single row and column.
Applying a map/reduce function to the returned rows:
rows.map(r => {
  return r.reduce((a, k) => {
    a[k.metadata.colName] = k.value;
    return a;
  }, {});
});
This is a combination of a few responses above. This uses FOR JSON AUTO in the SELECT statement and parses the "column" as JSON. The row/column nomenclature may be a bit misleading for folks unfamiliar with this API. In this case, the first "columns" value will be an array of the rows in your table:
var request = new Request("SELECT Name, Value, Article_Id FROM [tableone] WHERE Id = '1' FOR JSON AUTO", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', (columns) => {
const json = JSON.parse(columns[0].value);
});