I'm working with the node-globaloffensive library and want to send multiple requests to the gc to get every rank from every user in my database. With my current code it is repeating all over again and I'm only getting one request back. Do you have andy ideas how I can send and receive every profile?
My current code:
const SteamUser = require('steam-user');
const SteamTotp = require('steam-totp');
const GlobalOffensive = require('globaloffensive');
const config = require('./config');
const db = require('./database');
var SteamID = require('steamid');
var mysql = require('mysql');
let user = new SteamUser();
let csgo = new GlobalOffensive(user);
csgo.on('debug', console.log);
user.on('error', console.error);
var pool = mysql.createPool({
supportBigNumbers: true,
bigNumberStrings: true,
connectionLimit: 10,
host: db.host,
user: db.user,
password: db.password,
database: db.dbname
});
const logInOptions = {
accountName: config.accountName,
password: config.password
}
user.logOn(logInOptions);
user.on('loggedOn', res => {
console.log("Logged into Steam as " + user.steamID.getSteam3RenderedID());
user.setPersona(SteamUser.EPersonaState.Online);
user.gamesPlayed(730);
});
csgo.on("connectedToGC", function() {
console.log("connectedToGC");
const checkData = setInterval(()=>{
//Check connection to game coordinator
if ( csgo.haveGCSession ) {
//Check Database
pool.getConnection(function(err, connection) {
if (err) throw err;
connection.query("SELECT * FROM Users WHERE (MainSteamGroup = '1' || CommunitySteamGroup = '1' || vip = '1') && BotInFriendlist = '1'", function (err, rows, fields) {
connection.release();
if (err) throw err;
rows.forEach( (row) => {
var account_id = new SteamID(`${row.SteamID64}`);
//Request Data from CS:GO
csgo.requestPlayersProfile(account_id, function(data) {
console.log(data);
console.log("before");
setTimeout(function(){
//do what you need here
}, 2000);
console.log("after");
});
});
});
});
};
}, 10000);
});
Log:
Logged into Steam as [U:1:xxxxxxxxxx]
Sending GC message ClientHello
Sending hello, setting timer for next attempt to 2000 ms
Sending GC message ClientHello
Sending hello, setting timer for next attempt to 4000 ms
Got handled GC message ClientWelcome
Unknown SO type 2 with 1 items
Unknown SO type 7 with 1 items
GC connection established
connectedToGC
Got handled GC message MatchmakingGC2ClientHello
Got unhandled GC message 9194
Sending GC message ClientRequestPlayersProfile
Sending GC message ClientRequestPlayersProfile
Got handled GC message PlayersProfile
{ my_current_event_teams: [],
my_current_event_stages: [],
account_id: xxxxxxxx,
ongoingmatch: null,
global_stats: null,
penalty_seconds: null,
penalty_reason: null,
vac_banned: null,
ranking:
{ account_id: xxxxxxxx,
rank_id: 16,
wins: 1073,
rank_change: null },
commendation: { cmd_friendly: 57, cmd_teaching: 50, cmd_leader: 50 },
medals:
{ display_items_defidx:
[ 1331, 4551, 1376, 970, 1367, 4356, 1358, 1337, 1341, 1329 ],
medal_team: null,
medal_combat: null,
medal_weapon: null,
medal_global: null,
medal_arms: null,
featured_display_item_defidx: 1331 },
my_current_event: null,
my_current_team: null,
survey_vote: null,
activity: null,
player_level: 24,
player_cur_xp: 327680185,
player_xp_bonus_flags: null }
before
after
Sending GC message ClientRequestPlayersProfile
Sending GC message ClientRequestPlayersProfile
Got handled GC message PlayersProfile
{ my_current_event_teams: [],
my_current_event_stages: [],
Couple of problems here -
Callback style functions in forEach which doesn't work the way you are expecting it to work. Either use Promise.all or for..of
mssql supports promise so no need to use callback style to make things complicated and hard to read.
Convert the callback style to promise.
I have modified the code. It will look something like this
function getRequestPlayer(account_id) {
return new Promise((resolve, reject) => {
csgo.requestPlayersProfile(account_id, function (data) {
console.log(data);
resolve(data);
});
});
}
csgo.on("connectedToGC", async function () {
console.log("connectedToGC");
// Check connection to game coordinator
if (csgo.haveGCSession) {
// Check Database
const connection = await pool.getConnection();
const rows = await connection.query("SELECT * FROM Users WHERE (MainSteamGroup = '1' || CommunitySteamGroup = '1' || vip = '1') && BotInFriendlist = '1'");
await Promise.all(rows.map(row => {
const account_id = new SteamID(`${row.SteamID64}`);
return getRequestPlayer(account_id);
}));
}
});
You might need to tweak few things here and there but you will get an idea. Hope this helps
Related
I am developing a REST API. One of the end points I have recieve a list of data like below.
[
{
"iduser": 3,
"title": "House in kandala",
"description": "Built a house in kandala area"
},
{
"iduser": 3,
"title": "House in NYC",
"description": "Built a house in greater NYC area"
}
]
I need to save the list into the database. Below is my code.
const mysql = require('mysql2');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createConnection({
host: prop.get('server.host'),
user: prop.get("server.username"),
password: prop.get("server.password"),
port: prop.get("server.port"),
database: prop.get("server.dbname")
});
exports.saveSellerPortfolioItem = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
if (event.body == null && event.body == undefined) {
var response = errorCodes.missing_parameters;
callback(null, response)
}
else {
let body = JSON.parse(event.body)
console.log("body", body);
let iduser = Number(body.iduser);
let title = body.title;
let description = body.description;
if (isNaN(iduser)) {
var response = errorCodes.invalid_parameter;
callback(null, response);
}
else {
// allows for using callbacks as finish/error-handlers
const sql = "INSERT INTO seller_portfolio_item (iduser, title, description) VALUES (?,?,?)";
con.execute(sql, [iduser, title, description], function (err, result) {
if (err) {
console.log(err.toString());
if (err.toString().indexOf('cannot be null') >= 0) {
var response = errorCodes.not_null_parameters;
callback(null, response);
}
var response = errorCodes.internal_server_error;
callback(null, response);
}
else {
var response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": JSON.stringify({ insertId: result.insertId }),
"isBase64Encoded": false
};
callback(null, response)
}
});
}
}
};
My code is capable of inserting just one record, not suitable to save multiple when I am sending a list. As a result, client program will have to call the same method again and again in a loop.
How can I read the list and insert multiple records ?
You are correct that going forward it is better to use mysql instead of mysql2. Below is one approach that can be used to batch insert multiple records.
Be sure to run npm install mysql --save to ensure you have to necessary package installed.
Working with multiple records requires some additional thinking and planning as well. You should consider:
does your table contain any unique keys other than the primary?
is it possible your API function will ever attempt to insert a duplicate?
in the event of a duplicate how should it be handled?
do you need to know the insert ID for every new record created?
will every object in your list always have the same number of entries, the same keys, and expected values?
Depending on your answers to the above considerations the example I provided below would require additional code and complications. This example is the simplest implementation of the idea.
// package changed, remember to npm install…
const mysql = require('mysql');
const errorCodes = require('source/error-codes');
const PropertiesReader = require('properties-reader');
const prop = PropertiesReader('properties.properties');
const con = mysql.createPool({
connectionLimit: 10,
host: prop.get('server.host') || '127.0.0.1',
user: prop.get("server.username") || 'local_user',
password: prop.get("server.password") || 'local_password',
database: prop.get("server.dbname") || 'local_database',
multipleStatements: true, // necessary to run chained queries
charset: 'utf8mb4' // necessary if you might need support for emoji characters - table charset must match
});
exports.saveSellerPortfolioItem = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false;
// It is better to check for the existence of your
// expected request body in the controller stage of
// your app but I've included this for consistency
// with your original code.
let query_object = event.body ? JSON.parse(event.body) : null;
console.log('query_object', query_object);
if (!query_object.length) {
let response = errorCodes.missing_parameters;
callback(null, response)
}
else {
// use the keys of the first object to define the field names.
// you don't have to use this approach but it provides flexibility
// if you will not always use the same fields
let keys = Object.keys(query_object[0]);
// map the values into a supported format
let values = query_object.map( obj => keys.map( key => obj[key]));
let sql = 'INSERT INTO seller_portfolio_item (' + keys.join(',') + ') ?;'
con.query(sql, values, function(error, results, fields) {
if (error) callback(null, error);
// when inserting multiples you will only get back the
// insert id of the first record. if there are updates
// due to duplicate keys, you won't even get that.
// results will look like this:
console.log(results);
// Expected output
// OkPacket {
// fieldCount: 0,
// affectedRows: 3,
// insertId: 1,
// serverStatus: 2,
// warningCount: 6,
// message: '&Records: 3 Duplicates: 0 Warnings: 6',
// protocol41: true,
// changedRows: 0
// }
let response = {
"statusCode": 200,
"headers": {
"Content-Type": "application/json"
},
"body": JSON.stringify({ records_inserted: results.affectedRows }),
"isBase64Encoded": false
};
callback(null, response)
});
}
};
I have the following function. I have a list of domains (very big list, more than 100000), I'm trying to put them in a foreach and resolveMx all of them and save the mx records in another database.
Edit, this is the complete function:
const dns = require('dns');
const {BigQuery} = require('#google-cloud/bigquery');
const bigquery = new BigQuery(project="smartiodomains");
const functions = require('firebase-functions');
exports.getMxRecords = functions.https.onRequest( async (req, res) => {
const query = "SELECT string_field_0 FROM smartiodomains.Domains.sk_domains_table";
const options = {
query: query,
location: 'US',
};
const [job] = await bigquery.createQueryJob(options);
const [rows] = await job.getQueryResults();
const datasetId = 'Domains';
const tableId = 'smartio_records';
var index = 0;
rows.forEach((row) => {
dns.resolveMx(row.string_field_0, function(error,addresses){
if(error){
const rows = [
{domain:row.string_field_0, mx_records: 'No data found.', priority: 'No data found.'}
];
// Insert data into a table
bigquery
.dataset(datasetId)
.table(tableId)
.insert(rows);
res.write("Something");
}else{
res.write("Something else");
addresses.forEach( address => {
const rows = [
{domain:row.string_field_0, mx_records: address.exchange, priority: address.priority}
];
// Insert data into a table
bigquery
.dataset(datasetId)
.table(tableId)
.insert(rows).then((foundErrors) => {
if (foundErrors && foundErrors.insertErrors != undefined) {
console.log('Error: ', err);
}
})
.catch((err) => {
console.error('ERROR:', err);
});
});
}
});
});
});
As #Doug Stevenson suggested i add a response (res.write("Something")). Now i have one error and a warning:
1.- Memory Limit exceeded
2.- TeenyStatisticsWarning: Possible excessive concurrent requests detected. 5000 requests in-flight, which exceeds the configured threshold of 5000. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment variable or the concurrentRequests option of teeny-request to increase or disable (0) this warning.
Old error:
With this implementation i got this error in the logs of GCF:
getMxRecordsp5ter5a8u17q { Error: queryMx ETIMEOUT marketingweb.sk
Sorry for my bad english. And thanks for any help.
An HTTP function requires that you send a response to the client after all of the asynchronous work is complete. The function terminates immediately after you send that response. Right now, you're not sending any response, so the function never terminates, and it will always time out. You should send a response after all the calls to dns.resolveMx are fully complete.
I have this function which is async and i'm trying to make a simple query from npm-mysql db.
let sortCategory = async (id) => {
try {
var sql = 'SELECT * FROM categories WHERE parent_id=?';
var results = await connection.query(sql, id);
// console.log(results);
return(results);
} catch(err) {
console.log(err);
return false;
}
}
But instead of results inside the results variable i just get the query object.
Query {
_events:
[Object: null prototype] {
error: [Function],
packet: [Function],
timeout: [Function],
end: [Function] },
_eventsCount: 4,
_maxListeners: undefined,
_callback: undefined,
_callSite:
Error
at Protocol._enqueue (C:\Users\fedesc\Sites\borsalino\node_modules\mysql\lib\protocol\Protocol.js:144:48)
at Connection.query (C:\Users\fedesc\Sites\borsalino\node_modules\mysql\lib\Connection.js:198:25)
at sortCategory (C:\Users\fedesc\Sites\borsalino\server\routes\categories.js:35:38)
at router.post (C:\Users\fedesc\Sites\borsalino\server\routes\categories.js:48:31)
at process._tickCallback (internal/process/next_tick.js:68:7),
_ended: false,
_timeout: undefined,
_timer: Timer { _object: [Circular], _timeout: null },
sql: 'SELECT * FROM categories WHERE parent_id=\'0\'',
values: '0',
.... }
The query as seen in object is
sql: 'SELECT * FROM categories WHERE parent_id=\'0\'',
values: '0',
EDIT#1
an async/await for INSERT query does works. it's only when i need to retrieve data back that i don't get it.
but i can't manage to get the results back even though i do have some in table that should return.
i feel like there is something i still not quite understand about mysql and async calls.
thanks guys.
I use async/await of mysql query like this:
var getCategories = function (id) {
return new Promise(function (resolve, reject) {
var sql = `SELECT * FROM categories WHERE parent_id=?`;
connection.query(sql, [id], function (err, result) {
if (!err) {
resolve(result);
} else {
resolve({
status: "error",
message: "Error Getting Data",
debug: err
});
}
});
});
};
try {
var categories = await getCategories();
} catch (error) {
console.log(error);
}
Above code is very different from yours but you can use the above method to use in further case
Thank you for your helpful posts fedesc. I’d been struggling with this for days. Based on your lead, I ended up with this which is elegant relative to my earlier attempts:
'use strict';
const mysql = require('mysql');
const config = require('./config.js');
const util = require('util'); // needed for async support
const ac = awaitableConnection( config );
demoAwait();
async function demoAwait() {
try {
const results1 = await ac.query( 'SELECT * FROM table' );
const results2 = await ac.query( 'SELECT * FROM table WHERE whatever' );
console.log(results1); // all above results are available
// throw 'test error'; uncomment to test an error
} catch ( err ) {
console.log(err);
} finally {
await ac.close();
}
}
function awaitableConnection( config ) { // wrapped in a promise
const connection = mysql.createConnection( config );
return {
query( sql, args ) {
return util.promisify( connection.query )
.call( connection, sql, args );
},
close() {
return util.promisify( connection.end ).call( connection );
}
};
}
The technique remains readable when queries are placed in a loop. I have to acknowledge Michał Męciński for the pattern of this technique. In Sept 2019 he updated the article fedesc linked above while taking advantage of node.js 8 or later. The article also demonstrates how to use a similar technique for transactions. Node.js, MySQL and async/await
As I can see in documentation https://www.npmjs.com/package/mysql
connection.query('SELECT * FROM `books` WHERE `author` = ?', ['David'], function (error, results, fields) {
// error will be an Error if one occurred during the query
// results will contain the results of the query
// fields will contain information about the returned results fields (if any)
});
You code should became
var sql = 'SELECT * FROM categories WHERE parent_id=?';
connection.query(sql, [id], function (error, results, fields) {
if(error){
return error;
}
return results;
});
1st of all thank you kind responders.
The answer of both of you was indeed the same and the correct one. So i just accepted the quickest responder.
NPM Mysql functions do operate in an old school callback style (and needs to be updated) What was really strange for me is that an INSERT statement did work out of the box - I guess this is because you don't really need a callback if you don't need data to be retrieved.
And async/await is part of node and not mysql.
So the call did indeed fired but without a callback.
Connection.prototype.query = function query(sql, values, cb) {
var query = Connection.createQuery(sql, values, cb);
query._connection = this;
if (!(typeof sql === 'object' && 'typeCast' in sql)) {
query.typeCast = this.config.typeCast;
}
if (query.sql) {
query.sql = this.format(query.sql, query.values);
}
if (query._callback) {
query._callback = wrapCallbackInDomain(this, query._callback);
}
this._implyConnect();
return this._protocol._enqueue(query);
};
Therefore your answers are accurate and correct.
Allow me to elaborate on a possible solution i found for my problem with the hope that maybe it'll help readers who face this approach issue as well.
There is a workaround i've found when still searching for a solution here - How to use classes in Node.js (with no pre-compilers), and why you should and here - Node.js, MySQL and promises
The solution was "promisifying" mysql functions with a class alike function that converts all mysql functions to promises.
Which than will give the option to work with database in an async/await approach.
Also there are tools that promisify functions that uses callbacks like this one here
//mysql
const mysql = require('mysql');
function Database() {
this.connection = mysql.createConnection({
host : 'localhost',
user : '*************',
password : '*************',
database : '*************',
multipleStatements: true
});
this.query = (sql, args) => {
return new Promise((resolve, reject) => {
this.connection.query(sql, args, (err, rows) => {
if (err)
return reject(err);
resolve(rows);
});
});
};
this.close = () => {
return async () => {
try {
this.connection.end(err => {
if (err) throw err;
return;
});
} catch(e) {
return e;
}
}
};
};
var connection = new Database();
Setting you db connection this way will allow you now to use async/await as in original question.
let sortCategory = async (id) => {
try {
var sql = 'SELECT * FROM categories WHERE parent_id=?';
var results = await connection.query(sql, id);
// console.log(results);
return results;
} catch(err) {
console.log(err);
return false;
}
}
Hope this helps anyone.
So, I have this POST request I made
$("#pacotes").on('click', ".produto", function () {
console.log(this.id);
$.post("http://localhost:3000/pacote?idPacote=" + this.id);
});
The log returns a number on the client side, as it should.
The post then goes through my route and arrives here
exports.Pacote = function (req, res) {
console.log("gato");
var pacote = req.idPacote;
console.log(pacote);
connection.connection();
global.connection.query('SELECT * FROM Pacote WHERE idPacotes = ? LIMIT 1', [pacote], function (err, result) {
if (result.length > 0) {
if (result) {
var object = JSON.parse(JSON.stringify(result));
var packObject = object[0];
if (result.length > 0) {
if (result) {
res.render('home', { title: 'pacote', layout: 'pacote', data: packObject });
}
}
} else if (err) {
console.log(err);
}
};
});
}
The first log is just a flag to see if it is reaching the point, which it is
But the second log should return a number, yet it is returning undefined
I'm not very experienced in this subject, but this has always worked for me.
I don't understand where I went differently as my login function is nearly the same thing and returns actual values as expected. Maybe because of bodyparser, but I dont know.
It just bothers me that the id returns properly on the client side but as undefined on the server side
I also tried the same thing but with GET and the results didnt change
You are passing "idPacote" in query string. You will get the the query string parameters in "req.query" if you are using Express with NodeJS. Try this
var pacote = req.query.idPacote;
instead of
var pacote = req.idPacote;
The var pacote = req.idPacote; should be replaced with (provided that you send it as GET parameter):
var pacote = req.params.idPacote;
A side note: you should be using connection pooling in order to improve performance in your app, for example:
var mysql = require("mysql");
//Database connection parameters
var config = {
connectionLimit: 10000,
host: "127.0.0.1",
user: "user",
password: "password",
database: "database",
charset: "utf8_general_ci",
connectTimeout: 4000
};
//Pool
var pool = mysql.createPool(config);
function connection(){
//Assign connection pool for further reuse
this.init = function () {
this.pool = pool;
};
//Get connection
this.acquire = function(callback){
this.pool.getConnection(function(error, con){
if (error) {
if (this.pool)
//Close all connections in pool
this.pool.end(function(err){});
console.log("\x1b[31m" + error, "\x1b[0m");
}
else {
callback(error, con);
}
});
};
}
Read more here.
I am attempting to use NodeJS with the Tedious (http://pekim.github.io/tedious/) sql server plugin to make multiple database calls. My intent is to:
1. Open a connection
2. Start a transaction
3. Make multiple database (stored procedure) calls, which will not return any data.
4. Commit transaction (or roll back on error).
5. Close connection
Here is an example .js file, (without using a transaction) for NodeJS where I am attempting to make multiple database calls and it is failing with the error "Requests can only be made in the LoggedIn state, not the SentClientRequest state." Nothing I try resolves this issue.
Does anyone know how to resolve this?
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;
var config = {
userName: 'login',
password: 'password',
server: '127.0.0.1',
options: { rowCollectionOnDone: true }
};
var max = 1;
for (var i = 0; i < max; i++) {
var connection = new Connection(config);
function executeStatement() {
request = new Request("select 42, 'hello world'", function (err, rowCount) {
if (err) {
console.log(err);
} else {
console.log(rowCount + ' rows');
}
});
request.on('row', function (columns) {
columns.forEach(function (column) {
console.log(column.value);
});
});
request.on('doneInProc', function (rowCount, more, rows) {
});
request.on('doneProc', function (rowCount, more, rows) {
console.log('statement completed!')
connection.execSql(request);
});
request.on('returnStatus', function (status) {
console.log('statement completed!')
});
connection.execSql(request);
}
connection.on('connect', function (err) {
// If no error, then good to go...
executeStatement();
});
}
console.log('Done!');
You're trying to execute a statement on a connection that is not established. You're missing an error handler before you call executeStatement.
connection.on('connect', function (err) {
if (err) {
console.log(err); // replace with your code
return;
};
// If no error, then good to go...
executeStatement();
});
Edit:
How to execute multiple statements in a transaction in serial:
var statements = ["select 1", "select 2", "select 3"];
var transaction = new sql.Transaction(connection);
transaction.begin(function(err) {
// ... error checks
async.mapSeries(statements, function(statement, next) {
var request = new sql.Request(transaction);
request.query(statement, next);
}, function(err, results) {
// ... error checks
transaction.commit(function(err, recordset) {
// ... error checks
console.log("Transaction commited.");
});
});
});
You should use tedious connection pools to create a pool of multiple connections.
For node js, a npm module is available at : https://www.npmjs.com/package/tedious-connection-pool
For every new value inside for loop you can acquire a new connection and use connection.reset on doneInProc event.
The case which you have been doing is performing 1st iteration of for loop correctly(LoggedIn State) and as you have proceeded without closing or releasing the connection you are using same connection object (SentClientRequest state).
Hence the same object is at final state when the code reaches second iteration of for loop.
Hope it resolves your issue
you can use Tedious Connection pools https://github.com/pekim/tedious-connection-pool
As #zevsuld and #mannutech said, tedious-connection-pool will enable multiple connections, and prevent erring out when simultaneous requests come into your server.
Below is a generic example that allows you to write multiple queries within one connection pool, and expose them for use in your api. I'm just adding this in case others come along who are trying to accomplish this type of implementation.
const ConnectionPool = require('tedious-connection-pool');
const path = require('path');
require('dotenv').config({
path: path.join(__dirname, '../../.env')
})
let Request = require('tedious').Request;
let poolConfig = {
min: 10,
max: 50,
log: true
}
let connectionConfig = {
userName: process.env.user,
password: process.env.password,
server: process.env.server
};
//create the pool
let pool = new ConnectionPool(poolConfig, connectionConfig);
pool.on('error', function(err) {
console.error(err);
});
// At this point in the code, we have established a connection pool. If you run node, you'll see it log out all then connections to your database.
// Let's add some methods which your server might use in fulfilling requests to various endpoints.
let query1 = (cb, res, query) => {
// acquire a connection:
pool.acquire(function(err, connection) {
if (err) {
console.error(err);
return;
} else {
// form your query
let sql_query = `SELECT column1, colum2 from TABLE WHERE column1 LIKE '${query.param}%%' ORDER BY column1 ASC`
// use the connection as usual:
request = new Request(sql_query, (err, rowCount) => {
if (err) {
console.log(err);
return;
} else {
// console.log('rowCount:', rowCount);
}
//release the connection back to the pool when finished
connection.release();
});
let records = [];
request.on("row", function(columns) {
let rowArray = [];
columns.forEach(function(column) {
rowArray.push(column.value);
});
records.push(rowArray);
});
request.on("doneInProc", function() {
cb(records, res);
});
// lastly exectue the request on the open connection.
connection.execSql(request);
}
});
};
let query2 = (cb, res, query) => {
// acquire a connection:
pool.acquire(function(err, connection) {
if (err) {
console.error(err);
return;
} else {
// form your query
let sql_query = `SELECT column3, colum4 from TABLE2 WHERE column3 LIKE '${query.param}%%' ORDER BY column3 ASC`;
// use the connection as usual:
request = new Request(sql_query, (err, rowCount) => {
if (err) {
console.log(err);
return;
} else {
// console.log('rowCount:', rowCount);
}
//release the connection back to the pool when finished
connection.release();
});
let records = [];
request.on("row", function(columns) {
let rowArray = [];
columns.forEach(function(column) {
rowArray.push(column.value);
});
records.push(rowArray);
});
request.on("doneInProc", function() {
cb(records, res);
});
// lastly exectue the request on the open connection.
connection.execSql(request);
}
});
};
// Let's expose these two functions to the rest of your API:
module.exports = {
query1,
query2
}