I am developing a REST API in Node.js using AWS Lambda.
Below is my code. I am trying to get a JSON input from the user and save it into the database.
const mysql = require('mysql2');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createConnection({
host: prop.get('server.host'),
user: prop.get("server.username"),
password: prop.get("server.password"),
port: prop.get("server.port"),
database: prop.get("server.dbname")
});
exports.updateUser = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
if (event.body == null) { // loose equality also matches undefined
var response = errorCodes.missing_parameters;
callback(null, response)
}
else {
let body = JSON.parse(event.body)
console.log("body", body);
let iduser = body.iduser;
let first_name = body.first_name;
let created_date = body.created_date;
// allows for using callbacks as finish/error-handlers
const sql = "UPDATE user SET first_name=? created_date=? WHERE iduser=? ";
con.execute(sql, [first_name, created_date, iduser ], function (err, result) {
if (err) {
console.log(err.toString());
if (err.toString().indexOf('cannot be null') >= 0) {
var response = errorCodes.not_null_parameters;
return callback(null, response); // return so the 500 below is not also sent
}
var response = errorCodes.internal_server_error;
callback(null, response);
}
else {
var response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": JSON.stringify({ insertId: result.insertId }),
"isBase64Encoded": false
};
callback(null, response)
}
});
}
};
This is my JSON input:
{
"iduser": 3,
"first_name": "Ranjith",
"created_date": "2021-07-28 00:00:00",
}
Now, notice that my iduser is always a number, first_name is a string, and created_date is a date.
Right now there is no data type validation, so I can even pass text like "hello" to the created_date field. It will not get saved there, but the database field will be set to null.
Instead, I am thinking of validating the data types before sending. So I would convert iduser to a number, first_name to a string, and created_date to a date.
I have two questions.
Is this data type validation a good practice in Node.js? I am asking because I come from Java, where things always have a specific data type.
If data type validation is a good thing, how can I convert the above data to the appropriate types?
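Something like this is what I have in mind, just as a sketch of the idea (untested; errorCodes.invalid_parameter is one of my existing error responses):
let body = JSON.parse(event.body);

let iduser = Number(body.iduser);               // NaN if not numeric
let first_name = String(body.first_name || ''); // coerce to a string
let created_date = new Date(body.created_date); // Invalid Date if unparsable

if (isNaN(iduser) || first_name.length === 0 || isNaN(created_date.getTime())) {
    return callback(null, errorCodes.invalid_parameter);
}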
Related
I am developing a REST API. One of my endpoints receives a list of data like the one below.
[
{
"iduser": 3,
"title": "House in kandala",
"description": "Built a house in kandala area"
},
{
"iduser": 3,
"title": "House in NYC",
"description": "Built a house in greater NYC area"
}
]
I need to save the list into the database. Below is my code.
const mysql = require('mysql2');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createConnection({
host: prop.get('server.host'),
user: prop.get("server.username"),
password: prop.get("server.password"),
port: prop.get("server.port"),
database: prop.get("server.dbname")
});
exports.saveSellerPortfolioItem = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
if (event.body == null) { // loose equality also matches undefined
var response = errorCodes.missing_parameters;
callback(null, response)
}
else {
let body = JSON.parse(event.body)
console.log("body", body);
let iduser = Number(body.iduser);
let title = body.title;
let description = body.description;
if (isNaN(iduser)) {
var response = errorCodes.invalid_parameter;
callback(null, response);
}
else {
// allows for using callbacks as finish/error-handlers
const sql = "INSERT INTO seller_portfolio_item (iduser, title, description) VALUES (?,?,?)";
con.execute(sql, [iduser, title, description], function (err, result) {
if (err) {
console.log(err.toString());
if (err.toString().indexOf('cannot be null') >= 0) {
var response = errorCodes.not_null_parameters;
return callback(null, response); // return so the 500 below is not also sent
}
var response = errorCodes.internal_server_error;
callback(null, response);
}
else {
var response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": JSON.stringify({ insertId: result.insertId }),
"isBase64Encoded": false
};
callback(null, response)
}
});
}
}
};
My code can insert just one record, so it is not suitable for saving multiple records when I am sending a list. As a result, the client program would have to call the same method again and again in a loop.
How can I read the list and insert multiple records ?
You are correct that going forward it is better to use mysql instead of mysql2 (although mysql2's query() understands the same bulk VALUES ? syntax used below). Below is one approach that can be used to batch-insert multiple records.
Be sure to run npm install mysql --save to ensure you have the necessary package installed.
Working with multiple records requires some additional thinking and planning as well. You should consider:
does your table contain any unique keys other than the primary?
is it possible your API function will ever attempt to insert a duplicate?
in the event of a duplicate how should it be handled?
do you need to know the insert ID for every new record created?
will every object in your list always have the same number of entries, the same keys, and expected values?
Depending on your answers to the above considerations, the example I provide below may require additional code and complexity. This example is the simplest implementation of the idea.
// package changed, remember to npm install…
const mysql = require('mysql');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createPool({
connectionLimit: 10,
host: prop.get('server.host') || '127.0.0.1',
user: prop.get("server.username") || 'local_user',
password: prop.get("server.password") || 'local_password',
database: prop.get("server.dbname") || 'local_database',
multipleStatements: true, // necessary to run chained queries
charset: 'utf8mb4' // necessary if you might need support for emoji characters - table charset must match
});
exports.saveSellerPortfolioItem = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
// It is better to check for the existence of your
// expected request body in the controller stage of
// your app but I've included this for consistency
// with your original code.
let query_object = event.body ? JSON.parse(event.body) : null;
console.log('query_object', query_object);
if (!query_object || !query_object.length) {
let response = errorCodes.missing_parameters;
callback(null, response)
}
else {
// use the keys of the first object to define the field names.
// you don't have to use this approach but it provides flexibility
// if you will not always use the same fields
let keys = Object.keys(query_object[0]);
// map the values into a supported format
let values = query_object.map( obj => keys.map( key => obj[key]));
let sql = 'INSERT INTO seller_portfolio_item (' + keys.join(',') + ') VALUES ?;'
con.query(sql, [values], function(error, results, fields) { // the nested array must be wrapped once more for bulk expansion
if (error) return callback(null, error);
// when inserting multiples you will only get back the
// insert id of the first record. if there are updates
// due to duplicate keys, you won't even get that.
// results will look like this:
console.log(results);
// Expected output
// OkPacket {
// fieldCount: 0,
// affectedRows: 3,
// insertId: 1,
// serverStatus: 2,
// warningCount: 6,
// message: '&Records: 3 Duplicates: 0 Warnings: 6',
// protocol41: true,
// changedRows: 0
// }
let response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": JSON.stringify({ records_inserted: results.affectedRows }),
"isBase64Encoded": false
};
callback(null, response)
});
}
};
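To make the bulk expansion concrete: the driver replaces the single ? after VALUES with one parenthesized row per entry in the nested array. With the example list from the question:
// keys   = ['iduser', 'title', 'description']
// values = [ [3, 'House in kandala', 'Built a house in kandala area'],
//            [3, 'House in NYC', 'Built a house in greater NYC area'] ]
//
// con.query(sql, [values], ...) therefore sends MySQL:
//
// INSERT INTO seller_portfolio_item (iduser,title,description)
// VALUES (3, 'House in kandala', 'Built a house in kandala area'),
//        (3, 'House in NYC', 'Built a house in greater NYC area');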
I used the Amplify CLI to create the templated API ('amplify add api') with a new Lambda function and Cognito authentication. This is the code generated in the index.js file of the Lambda function:
/************************************
index.js
*************************************/
const awsServerlessExpress = require('aws-serverless-express');
const app = require('./app');
const server = awsServerlessExpress.createServer(app);
exports.handler = (event, context) => {
console.log(`EVENT: ${JSON.stringify(event)}`);
awsServerlessExpress.proxy(server, event, context);
};
/************************************
HTTP put method for insert object in app.js
*************************************/
app.put(path, function(req, res) {
if (userIdPresent) {
req.body['userId'] = req.apiGateway.event.requestContext.identity.cognitoIdentityId || UNAUTH;
} else {
// Get the unique ID given by cognito for this user, it is passed to lambda as part of a large string in event.requestContext.identity.cognitoAuthenticationProvider
let userSub = req.apiGateway.event.requestContext.identity.cognitoAuthenticationProvider.split(':CognitoSignIn:')[1];
let requestIp = req.apiGateway.event.requestContext.identity.sourceIp;
let userPoolId = process.env.AUTH_TDOCWEBAPP001_USERPOOLID;
let request = {
UserPoolId: userPoolId, // Set your cognito user pool id
AttributesToGet: [
'email',
'given_name',
'family_name',
],
Filter: 'sub = "' + userSub + '"',
Limit: 1
}
//let users = await cognitoClient.listUsers(request).promise(); //Doesn't work because await not allowed
let users = cognitoClient.listUsers(request);
console.log("got user in put:", users[0]);
// TODO: Get the group that the user belongs to with "cognito:groups"?
// Set userId and sortKey
req.body['userId'] = users[0].sub;
req.body['sortKey'] = sortKeyValue;
req.body['updatedByIp'] = requestIp;
req.body['createdAt'] = new Date().toISOString(); //ISO 8601 supported by DynamoDB
req.body['updatedAt'] = new Date().toISOString();
req.body['isDeleted'] = false;
}
let putItemParams = {
TableName: tableName,
Item: req.body
}
dynamodb.put(putItemParams, (err, data) => {
if(err) {
res.statusCode = 500;
res.json({error: err, url: req.url, body: req.body});
} else{
res.json({success: 'put call succeed!', url: req.url, data: data})
}
});
});
So right now, when I call the Lambda via the API, I get that users is undefined. I'm trying to get the user object and then the groups it belongs to. I'm not sure how to do it if the function doesn't allow async... Please help
I found the solution myself.
let users = await cognitoClient.listUsers(request).promise();
or
let users = cognitoClient.listUsers(request, function(err, data) {...});
I needed to wait for the users to come back from Cognito.
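That in turn means the surrounding route handler must be declared async. A sketch of how the put handler could look with aws-sdk v2 (the attribute names come from the AttributesToGet list above):
app.put(path, async function(req, res) { // note the async keyword
    // ... build the listUsers request object exactly as before ...
    try {
        // .promise() turns the AWS.Request into a real Promise (aws-sdk v2)
        const result = await cognitoClient.listUsers(request).promise();
        const users = result.Users; // listUsers responds with { Users: [...] }
        console.log("got user in put:", users[0]);
        // the requested attributes (email, given_name, family_name)
        // are in users[0].Attributes as { Name, Value } pairs
        // ... set req.body fields and call dynamodb.put as before ...
    } catch (err) {
        res.statusCode = 500;
        res.json({ error: err, url: req.url });
    }
});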
I'm working with the node-globaloffensive library and want to send multiple requests to the GC to get every rank from every user in my database. With my current code it repeats all over again and I'm only getting one request back. Do you have any ideas how I can send and receive every profile?
My current code:
const SteamUser = require('steam-user');
const SteamTotp = require('steam-totp');
const GlobalOffensive = require('globaloffensive');
const config = require('./config');
const db = require('./database');
var SteamID = require('steamid');
var mysql = require('mysql');
let user = new SteamUser();
let csgo = new GlobalOffensive(user);
csgo.on('debug', console.log);
user.on('error', console.error);
var pool = mysql.createPool({
supportBigNumbers: true,
bigNumberStrings: true,
connectionLimit: 10,
host: db.host,
user: db.user,
password: db.password,
database: db.dbname
});
const logInOptions = {
accountName: config.accountName,
password: config.password
}
user.logOn(logInOptions);
user.on('loggedOn', res => {
console.log("Logged into Steam as " + user.steamID.getSteam3RenderedID());
user.setPersona(SteamUser.EPersonaState.Online);
user.gamesPlayed(730);
});
csgo.on("connectedToGC", function() {
console.log("connectedToGC");
const checkData = setInterval(()=>{
//Check connection to game coordinator
if ( csgo.haveGCSession ) {
//Check Database
pool.getConnection(function(err, connection) {
if (err) throw err;
connection.query("SELECT * FROM Users WHERE (MainSteamGroup = '1' || CommunitySteamGroup = '1' || vip = '1') && BotInFriendlist = '1'", function (err, rows, fields) {
connection.release();
if (err) throw err;
rows.forEach( (row) => {
var account_id = new SteamID(`${row.SteamID64}`);
//Request Data from CS:GO
csgo.requestPlayersProfile(account_id, function(data) {
console.log(data);
console.log("before");
setTimeout(function(){
//do what you need here
}, 2000);
console.log("after");
});
});
});
});
};
}, 10000);
});
Log:
Logged into Steam as [U:1:xxxxxxxxxx]
Sending GC message ClientHello
Sending hello, setting timer for next attempt to 2000 ms
Sending GC message ClientHello
Sending hello, setting timer for next attempt to 4000 ms
Got handled GC message ClientWelcome
Unknown SO type 2 with 1 items
Unknown SO type 7 with 1 items
GC connection established
connectedToGC
Got handled GC message MatchmakingGC2ClientHello
Got unhandled GC message 9194
Sending GC message ClientRequestPlayersProfile
Sending GC message ClientRequestPlayersProfile
Got handled GC message PlayersProfile
{ my_current_event_teams: [],
my_current_event_stages: [],
account_id: xxxxxxxx,
ongoingmatch: null,
global_stats: null,
penalty_seconds: null,
penalty_reason: null,
vac_banned: null,
ranking:
{ account_id: xxxxxxxx,
rank_id: 16,
wins: 1073,
rank_change: null },
commendation: { cmd_friendly: 57, cmd_teaching: 50, cmd_leader: 50 },
medals:
{ display_items_defidx:
[ 1331, 4551, 1376, 970, 1367, 4356, 1358, 1337, 1341, 1329 ],
medal_team: null,
medal_combat: null,
medal_weapon: null,
medal_global: null,
medal_arms: null,
featured_display_item_defidx: 1331 },
my_current_event: null,
my_current_team: null,
survey_vote: null,
activity: null,
player_level: 24,
player_cur_xp: 327680185,
player_xp_bonus_flags: null }
before
after
Sending GC message ClientRequestPlayersProfile
Sending GC message ClientRequestPlayersProfile
Got handled GC message PlayersProfile
{ my_current_event_teams: [],
my_current_event_stages: [],
A couple of problems here:
Callback-style functions inside forEach don't work the way you are expecting them to. Either use Promise.all or for..of.
The mysql driver can be used through promises (for example via a promise wrapper; see the note after the code), so there is no need for callback style that makes things complicated and hard to read.
Convert the callback style to promises.
I have modified the code. It will look something like this:
function getRequestPlayer(account_id) {
return new Promise((resolve, reject) => {
csgo.requestPlayersProfile(account_id, function (data) {
console.log(data);
resolve(data);
});
});
}
csgo.on("connectedToGC", async function () {
console.log("connectedToGC");
// Check connection to game coordinator
if (csgo.haveGCSession) {
// Check Database
const connection = await pool.getConnection();
const rows = await connection.query("SELECT * FROM Users WHERE (MainSteamGroup = '1' || CommunitySteamGroup = '1' || vip = '1') && BotInFriendlist = '1'");
await Promise.all(rows.map(row => {
const account_id = new SteamID(`${row.SteamID64}`);
return getRequestPlayer(account_id);
}));
}
});
You might need to tweak a few things here and there, but you get the idea. Hope this helps.
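Note: the plain mysql pool from the question is callback-based, so pool.getConnection and connection.query do not return promises out of the box. A minimal sketch of one way to bridge that with Node's util.promisify (pool is the mysql.createPool(...) instance from the question):
const util = require('util');

// promise-capable wrappers around the callback-based mysql pool
const getConnection = util.promisify(pool.getConnection).bind(pool);

async function queryRows(sql) {
    const connection = await getConnection();
    try {
        const query = util.promisify(connection.query).bind(connection);
        return await query(sql); // resolves with the rows
    } finally {
        connection.release(); // always hand the connection back to the pool
    }
}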
I have a problem regarding cross-file calls and their lifecycles. I want to query DynamoDB for a username to log in the entered user. I run the query in an external file to minimize queries, as I need the data in another file too. The order of events seems to be off, though, and I really don't know why. The query result arrives after POST /login, although the input from the form is available earlier (it would print the query params before the POST /login call too if I logged them). The callback does not wait for the actual data. I have browsed other posts about asynchronous callback functions but couldn't figure out why the callback completely ignores the query function. The problem is not the communication between browser and server, but between files/classes in the Node script. The data is available for the query before the POST statement, but the query gets executed after. What can I do to prevent that?
the console output (for debugging purposes) is:
GET /stylesheets/style.css 304 3ms
callback
undefined
POST /login 500 38ms - 280b
querying ...
information found
[queryResponse Object]
the query file:
const AWS = require("aws-sdk");
var exports = module.exports = {};
const dynamodb = new AWS.DynamoDB({
apiVersion: "2012-08-10",
// accessKeyId: "", //TODO
// secretAccessKey: "", //TODO
// region: "eu-central-1" //? TODO
//testing purposes
"region": "us-west-2",
"accessKeyId": "abcde",
"secretAccessKey": "abcde",
"endpoint": "http://localhost:8000"
});
var dataAfterQuery = null;
exports.query = function(queryParams,callback) {
/*prevent unnecessary queries*/
var queryNow = dynamodb.query(queryParams,
function(err,data) {
if(err) {
console.error(err);
return done(err);
}
console.log('querying ...');
if(data.Count > 0) {
console.log('information found');
} else {
console.log('"' + JSON.stringify(queryParams) + '" is not in the db yet');
}
dataAfterQuery = data;
console.log(JSON.stringify(dataAfterQuery));
return dataAfterQuery;
});
if(typeof callback == 'function') {
console.log("callback");
callback();
return queryNow;
}
}
/*function to receive queried data*/
exports.getQueriedData = function() {
return dataAfterQuery;
}
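From reading about callbacks, I suspect the fix would be to call my callback from inside the completion function that dynamodb.query runs, something like this (untested):
exports.query = function(queryParams, callback) {
    dynamodb.query(queryParams, function(err, data) {
        if (err) {
            console.error(err);
            if (typeof callback == 'function') callback(err);
            return;
        }
        dataAfterQuery = data;
        // hand the data to the caller only once it actually exists
        if (typeof callback == 'function') callback(null, data);
    });
}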
the login file:
module.exports = function(passport) {
passport.use("login", new LocalStrategy({
passReqToCallback: true
},
function(req,username,password,done) {
var queryParams = {
TableName: "users",
KeyConditionExpression: "username = :user",
ExpressionAttributeValues: {
//username entered in jade form
":user":{"S":username}
}
};
queryFile.query(queryParams,function(err,data){
if(err) console.log(data);
//console.log(data);
//does this come before "information found"?
console.error(data);
/* response can only be null or not null*/
if(data != null) {
console.error('error, more than one user with username: "' + username + '" in the db');
console.error("Entries :" + data.Count);
return done(null,false,req.flash("message", "more than version of the username in the db"));
} else {
//only one user exists in db, query for username was successful
var parsedQueryPw = data.Items[0].password.S;
userAfterQuery = data.Items[0];
//checking if entered password is wrong
if(!isValidPassword(parsedQueryPw, password)) {
console.error("invalid password");
return done(null,false,req.flash("message","invalid user-password combination"));
}
//successful login - user and password match
console.log("login successful");
//return user object for serialization
return done(null,data);
}
I'm using Node.js and the tedious connector to get data from MS SQL Server. In the documentation, I only see this one way to retrieve data:
var request = new Request("select Name, Value, Article_Id from [tableone] where Id = '1'", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', function (rows) {
...
bigArrat.push(JSON.stringify(rows));
});
But in my example I want all the rows, and more than one property. Currently it returns one cell at a time, e.g. rows[0].value returns Name, rows[1].value returns Value ... for me that is rubbish.
I want to get all the information as a JSON array of objects, without all the metadata. Is there a way to do this, or is there a better connector for Node.js and SQL Server?
The rows value sent to your initial callback is the array of rows being sent back:
var request = new Request("select Name, Value, Article_Id from [tableone] where Id = '1'", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
console.log(rows) // this is the full array of row objects
// it just needs some manipulating
var jsonArray = [];
rows.forEach(function (columns) {
var rowObject ={};
columns.forEach(function(column) {
rowObject[column.metadata.colName] = column.value;
});
jsonArray.push(rowObject)
});
return callback(null, rowCount, jsonArray);
});
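Note that the rows argument of this completion callback is only populated when rowCollectionOnRequestCompletion is enabled in the connection config; the answer below shows exactly that setting.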
In SQL Server 2016 you can format query results as JSON text using the FOR JSON option; see https://msdn.microsoft.com/en-us/library/dn921882.aspx
You just need to read the JSON fragments returned by the query.
Add this to your config.
rowCollectionOnRequestCompletion: true
var config = {
userName: '', // update me
password: '', // update me
server: '', // update me
options: {
database: '', // update me
encrypt: true,
rowCollectionOnRequestCompletion: true
}
}
Then, in your query callback, you can access the data in rows.
var executeQuery = (res,query) => {
request = new Request(query, (err, rowCount, rows) => {
console.log("Rows: ", rows);
res.send(rows);
});
connection.execSql(request);
}
I learned it from:
http://tediousjs.github.io/tedious/api-request.html
EDIT
Update, so as not to include the metadata:
var data = []
request = new Request(query, (err, rowCount, rows) => {
if(err) {
console.log(err)
res.send({ status: 500, data: null, message: "internal server error."})
} else {
console.log(rowCount+' row(s) returned')
res.send({ status: 200, data: data, message: "OK"})
}
})
request.on('row', function(row){
data.push({
last_name: row[0].value,
first_name: row[1].value
})
})
connection.execSql(request)
If you are using Express on the server side, I can recommend express4-tedious (see https://www.npmjs.com/package/express4-tedious). It allows you to write APIs for SQL connections easily, with little code, and streams the JSON result into the response.
Connection:
var express = require('express');
var tediousExpress = require('express4-tedious');
var app = express();
app.use(function (req, res, next) {
req.sql = tediousExpress(req, {connection object});
next();
});
Example Api:
/* GET from tableone, streams json result into response */
router.get('/', function (req, res) {
req.sql("select Name, Value, Article_Id from [tableone] where Id = '1' for json path")
.into(res);
});
You can then call these APIs, e.g. from the frontend.
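For example, assuming the router above is mounted at /tableone (the mount path is hypothetical), a frontend call could be as simple as:
fetch('/tableone')
    .then((res) => res.json())
    .then((rows) => console.log(rows));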
I tried that way but it did not work for me; perhaps my knowledge of JS and callbacks is not good enough. So here is my solution. I had to add rowCollectionOnRequestCompletion to my connection config to make the rows argument of the new Request callback work (see the tedious Request documentation at http://tediousjs.github.io/tedious/api-request.html); you would have to do this too.
The second thing I did is pretty simple.
var jsonArray = [];

var request = new Request("SELECT TOP 5 * FROM tableName", function(err, rowCounts, rows) {
    if (err) {
        console.log(err);
    }
    else {
        console.log(rowCounts + " rows returned");
        // Now parse the data from each row and populate the array.
        for (var i = 0; i < rowCounts; i++) {
            var singleRowData = rows[i];
            // A fresh object per row; reusing one object would push the same
            // reference every time and every entry would end up identical.
            var rowObject = {};
            for (var j = 0; j < singleRowData.length; j++) {
                var tempColName = singleRowData[j].metadata.colName;
                var tempColData = singleRowData[j].value;
                rowObject[tempColName] = tempColData;
            }
            jsonArray.push(rowObject);
        }
        // This line will print the array of JSON objects.
        console.log(jsonArray);
    }
});
And to show you what my connection config looks like:
static config: any =
{
userName: 'username',
password: 'password',
server: 'something.some.some.com',
options: { encrypt: false, database: 'databaseName' ,
rowCollectionOnRequestCompletion: true }
};//End: config
And this is how I am passing it to the connection.
static connection = new Connection(Server.config);
Complementing the answer from #Jovan MSFT:
var request = new Request('select person_id, name from person for json path', function(err) {
if (err) {
console.log(err);
}
connection.close();
});
And, finally, in the row event:
request.on('row', function(columns) {
var obj = JSON.parse(columns[0].value);
console.log(obj[0].name);
});
P.S.: the code above does not iterate over the columns parameter because FOR JSON PATH returns a single array of objects in a single row and column.
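So for the query above, columns[0].value already holds the complete JSON text; with invented values it would look something like:
// columns[0].value === '[{"person_id":1,"name":"John"},{"person_id":2,"name":"Jane"}]'
var obj = JSON.parse(columns[0].value);
console.log(obj[0].name); // 'John'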
Applying a map/reduce transformation to the returned rows:
rows.map(r =>
    r.reduce((a, k) => {
        a[k.metadata.colName] = k.value;
        return a;
    }, {})
);
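Given the tedious row shape used elsewhere in this thread (each cell has metadata.colName and value), this turns the nested column arrays into plain objects, e.g.:
// input (rows as collected with rowCollectionOnRequestCompletion):
// [ [ { metadata: { colName: 'Name' }, value: 'Foo' },
//     { metadata: { colName: 'Value' }, value: 42 } ] ]
//
// output:
// [ { Name: 'Foo', Value: 42 } ]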
This is a combination of a few responses above. This uses FOR JSON AUTO in the SELECT statement and parses the "column" as JSON. The row/column nomenclature may be a bit misleading for folks unfamiliar with this API. In this case, the first "columns" value will be an array of the rows in your table:
var request = new Request("SELECT Name, Value, Article_Id FROM [tableone] WHERE Id = '1' FOR JSON AUTO", function (err, rowCount, rows) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', (columns) => {
const json = JSON.parse(columns[0].value);
});
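One caveat: SQL Server streams long FOR JSON results back in fragments of roughly 2 KB, so for larger result sets the JSON text arrives split across several row events. A safer sketch concatenates the fragments and parses once, in the completion callback:
let jsonText = '';

const request = new Request(
    "SELECT Name, Value, Article_Id FROM [tableone] WHERE Id = '1' FOR JSON AUTO",
    (err) => {
        if (err) {
            console.log(err);
            return;
        }
        // all fragments have arrived once this completion callback fires
        const result = JSON.parse(jsonText);
        console.log(result);
    }
);

request.on('row', (columns) => {
    jsonText += columns[0].value; // each row carries one fragment of the JSON text
});

connection.execSql(request);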