JavaScript: get the returned value of inner functions

I have the following function that takes a username and, by querying MSSQL, checks whether it already exists in the database. I want checkForUsername() to return true if recordSet.recordset.length >= 1. How can I do that? I am a bit confused about async/await. Thanks.
function checkForUsername(UserIn) {
    var dbConn = new sql.ConnectionPool(config);
    dbConn.connect().then(function () {
        var request = new sql.Request(dbConn);
        request.query("SELECT * from Admins WHERE username='" + UserIn + "';").then(function (recordSet) {
            console.log(recordSet.recordset.length);
            if (recordSet.recordset.length >= 1)
                // checkForUsername should return true here
            dbConn.close();
        }).catch(function (err) {
            dbConn.close();
        });
    }).catch(function (err) {
        console.log(err);
    });
}

Of course I can't check whether the following code works, but you can try rewriting your code along these lines:
const checkForUsername = async user => {
    const dbConn = new sql.ConnectionPool(config);
    try {
        await dbConn.connect();
        // Note that building the query string by hand is not recommended as it is vulnerable to SQL injection attack!
        const query = `SELECT * FROM Admins WHERE username='${user}';`;
        const recordSet = await new sql.Request(dbConn).query(query);
        const hasRecords = !!recordSet.recordset.length;
        return hasRecords;
    } catch (error) {
        console.log(error);
    } finally {
        dbConn.close();
    }
}
// Call it
(async () => {
    console.log(await checkForUsername('John Doe'));
})();

const checkForUsername = async UserIn => {
    try {
        var dbConn = new sql.ConnectionPool(config);
        const sqlObj = await dbConn.connect();
        const recordSet = await new sql.Request(dbConn).query(`SELECT * FROM Admins WHERE username='${UserIn}';`);
        const hasRecords = !!recordSet.recordset.length;
        return hasRecords;
    } catch (error) {
        console.log(error);
    } finally {
        dbConn.close();
    }
}
// Call it
(async () => {
    console.log(await checkForUsername('John'));
})();
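As the comment in the first rewrite notes, concatenating user input into the SQL string is open to SQL injection. Here is a minimal sketch of the same check using node-mssql's parameterized inputs (table and column names taken from the question; error handling kept simple):
const checkForUsername = async UserIn => {
    const dbConn = new sql.ConnectionPool(config);
    try {
        await dbConn.connect();
        const request = new sql.Request(dbConn);
        request.input('username', sql.VarChar, UserIn); // bound parameter instead of string concatenation
        const recordSet = await request.query('SELECT * FROM Admins WHERE username = @username;');
        return recordSet.recordset.length >= 1;
    } catch (error) {
        console.log(error);
        return false;
    } finally {
        dbConn.close();
    }
}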

Related

Callback with recursive functions

I am using the Google Translate API to translate data from a JSON file to the French locale and then write it back to a file. I am using a recursive function to iterate over the JSON file since it is deeply nested. However, the execution does not wait until the translation is completed before writing to the file. I have tried callback and promise approaches but couldn't get it right.
Just to get an output as a stopgap, I have set a timeout before the write method is called. It works, but I would like to learn the appropriate/correct approach to implement this.
const fs = require('fs')
const { Translate } = require('@google-cloud/translate').v2
require('dotenv').config()
process.env['NODE_TLS_REJECT_UNAUTHORIZED'] = 0
const credentials = JSON.parse(process.env.credentials)
const translate = new Translate({
    credentials,
    projectId: credentials.project_id,
})
let data = {}
// writeJSONToFile should be executed only after readJSONFile execution is completed
// read file
const readJSONFile = () => {
    try {
        data = JSON.parse(fs.readFileSync('...\\locale\\en.json'))
        iterateAndTranslate(data)
        setTimeout(() => {
            writeJSONToFile()
        }, 25000)
    } catch (error) {
        console.log(error)
    }
}
// iterate, translate, reassign
const iterateAndTranslate = async (data) => {
    for (key in data) {
        if (typeof data[key] === 'object' && data[key] !== null) {
            iterateAndTranslate(data[key])
        } else {
            data[key] = await translateText(data[key], 'fr')
        }
    }
}
// translate method
const translateText = async (text, targetLanguage) => {
    try {
        let [response] = await translate.translate(text, targetLanguage)
        return response
    } catch (error) {
        console.log(error)
        return 0
    }
}
const writeJSONToFile = async () => {
    var outputFileName = 'C:\\test\\test.json'
    await fs.writeFileSync(outputFileName, JSON.stringify(data, null, 4), (err) => {
        if (err) {
            console.log(err)
        } else {
            console.log('Done!')
        }
    })
}
// start from here
readJSONFile()
You have a few issues with your code.
Your functions use a global variable and mutate it instead of taking input and returning output.
The timeout will cause unexpected behavior in your case.
You are using var.
You have a redundant async/await on the writeJSONToFile function.
See my take on a possible solution below.
const fs = require("fs");
const { Translate } = require("#google-cloud/translate").v2;
require("dotenv").config();
process.env["NODE_TLS_REJECT_UNAUTHORIZED"] = 0;
const credentials = JSON.parse(process.env.credentials);
const translate = new Translate({
credentials,
projectId: credentials.project_id,
});
// writeJSONTofile should be executed only after readJSONFile execution is completed
//read file
const readJSONFile = async () => {
try {
const data = JSON.parse(fs.readFileSync("...\\locale\\en.json"));
return iterateAndTranslate(data);
} catch (error) {
console.log(error);
}
return {};
};
// iterate, translate, reassign
const iterateAndTranslate = async (data) => {
for (let key in data) {
if (typeof data[key] === "object" && data[key] !== null) {
await iterateAndTranslate(data[key]);
} else {
data[key] = await translateText(data[key], "fr");
}
}
return data;
};
//translate method
const translateText = async (text, targetLanguage) => {
try {
let [response] = await translate.translate(text, targetLanguage);
return response;
} catch (error) {
console.log(error);
}
return null;
};
const writeJSONToFile = (data) => {
let outputFileName = "C:\\test\\test.json";
fs.writeFileSync(outputFileName, JSON.stringify(data, null, 4), (err) => {
if (err) {
console.log(err);
} else {
console.log("Done!");
}
});
};
// start from here
const run = async () => {
const data = await readJSONFile();
writeJSONToFile(data);
};
run();
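As a side note, the sequential awaits above translate one value at a time. If the file is large, a variation that translates sibling values in parallel with Promise.all could look like this (a sketch only, reusing the translateText helper above; note it fires many API requests at once, so rate limits may apply):
const iterateAndTranslateParallel = async (data) => {
    await Promise.all(
        Object.keys(data).map(async (key) => {
            if (typeof data[key] === "object" && data[key] !== null) {
                await iterateAndTranslateParallel(data[key]);
            } else {
                data[key] = await translateText(data[key], "fr");
            }
        })
    );
    return data;
};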
See more:
Why not to use global variables
Why not to use var

What are the best practices to pass variable to outer scope while using Promise?

I am working on a scraper script using cheerio and puppeteer. Inside a cheerio each loop I want to resolve a redirected URL field. Using a Promise I can console.log it, but what is the best way to insert it into the metadata?
I am also confused about the control flow.
(async function main() {
    const browser = await puppeteer.launch({
        headless: true,
    });
    const page = await browser.newPage();
    await page.goto('https://www.example.com/?q=async+urls&s=s');
    const content = await page.content();
    const $ = cheerio.load(content);
    var parsedResults = [];
    const fetchRedirect = async (url) => {
        try {
            let response = await doRequest(url);
            return response;
        } catch (err) {
            return false;
        }
    };
    const videoBlocks = $('td[itemprop="subjectOf"]').first().each(function (i, element) {
        const url = ($(this).find('a.title').attr('href'));
        const fetchUrl = fetchRedirect(url);
        // ** I can console.log the redirected url **/
        fetchUrl.then(url => console.log(url));
        const title = ($(this).find('a.title').text());
        var metadata = {
            title
        };
        parsedResults.push(metadata);
    });
    function doRequest(url) {
        return new Promise(function (resolve, reject) {
            request(url, function (error, res, body) {
                if (!error) {
                    resolve(getPathFromUrl(res.request.uri.href));
                } else {
                    reject(error);
                }
            });
        });
    }
    function getPathFromUrl(url) {
        return url.split(/[?#]/)[0];
    }
    console.log(parsedResults);
    await page.close()
    await browser.close();
})();
// ...await instead:
const fetchUrl = await fetchRedirect(url);
// instead of: fetchUrl.then(url => console.log(url));
// just
console.log(fetchUrl)
const title = ($(this).find('a.title').text());
var metadata = {
    title
};
parsedResults.push(metadata);
Personally, I would go for a Node module. Code like this is hard to read because multiple functions are declared inside one function (main). It is up to you whether to apply OOP or functional programming there (the second is much more popular in the Node environment, but I prefer the first when building functionality around a specific entity). Avoiding nesting and abstract functions (functions declared inside other functions) is crucial for creating reusable and readable code.
Here is a working prototype (not tested). It makes the puppeteer instance reusable. The module exposes three methods: start, stop, and crawlWeb.
'use strict'
const puppeteer = require('puppeteer');
const cheerio = require('cheerio');
const request = require('request');
var browser;
async function crawlWeb(options) {
    // validate options and throw errors
    if (!options.url) {
        throw new Error('url is invalid');
    }
    if (!browser) {
        throw new Error('puppeteer is not started');
    }
    const page = await browser.newPage();
    await page.goto(options.url);
    const content = await page.content();
    await page.close();
    const $ = cheerio.load(content);
    const metas = extractMetadata($);
    for (let metadata of metas) {
        // you can verify if the site is valid
        // you can use await
        try {
            await doRequest(metadata.url);
        } catch (err) {
            // do something if not valid
        }
    }
    return metas;
}
async function start(options) {
    browser = await puppeteer.launch(options);
}
async function stop() {
    if (!browser) {
        throw new Error('puppeteer is not started');
    }
    await browser.close();
}
function extractMetadata($) {
    const metas = [];
    $('td[itemprop="subjectOf"]').first().each(function (i, element) {
        const url = ($(this).find('a.title').attr('href'));
        const title = ($(this).find('a.title').text());
        var metadata = {
            url,
            title
        };
        metas.push(metadata);
    });
    return metas;
}
function doRequest(url) {
    return new Promise(function (resolve, reject) {
        request(url, function (error, res, body) {
            if (!error) {
                resolve(getPathFromUrl(res.request.uri.href));
            } else {
                reject(error);
            }
        });
    });
}
function getPathFromUrl(url) {
    return url.split(/[?#]/)[0];
}
module.exports = {
    crawlWeb,
    start,
    stop
};
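A short usage sketch of the module above (the file name crawler.js is an assumption):
// assumes the module above is saved as crawler.js
const crawler = require('./crawler');
(async () => {
    await crawler.start({ headless: true });
    const metas = await crawler.crawlWeb({ url: 'https://www.example.com/?q=async+urls&s=s' });
    console.log(metas);
    await crawler.stop();
})();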

Passing multiple query objects with res.render

I want to pass multiple query objects with res.render() inside my route.js. I have a select.js which contains the SQL statements and delivers the objects to my route.js. This works fine until I want to pass multiple query objects with res.render().
Any ideas on how I can pass multiple objects at once?
Snippet of route.js (I need to pass get_PriceData here as well):
I already query get_KategorieData but I have no clue how to handle multiple queries in one route.
router.get('/edit', (req, res, next) => {
    var speisenEintragenData = {};
    db.get_KategorieData()
        .then(({ data: kategorie_data }) => {
            res.render('speiseEintragen', { kategorie_data }); // maybe putting res.render() after the db.get?
        })
        .catch((error) => {
            console.log(error);
        });
});
select.js
const db = require('./config');
// KATEGORIEN LADEN
const get_KategorieData = () => {
    var sql = 'SELECT * FROM Kategorie';
    return new Promise((resolve, reject) => {
        db.query(sql, function (err, data, fields) {
            if (err) reject(err);
            resolve({ data });
        });
    });
}
// PREISE LADEN
const get_PriceData = () => {
    var sql = 'SELECT * FROM preise';
    return new Promise((resolve, reject) => {
        db.query(sql, function (err, data, fields) {
            if (err) reject(err);
            resolve({ data });
        });
    });
}
module.exports = {
    get_KategorieData,
    get_PriceData
}
There are two ways to go about this. One is to stick with promises, and the other is to use async/await.
Using promises
Create a new function to query the database. This is useful if the module you are using does not support async/await and requires a callback.
const query = (sql) => {
    return new Promise((resolve, reject) => {
        db.query(sql, function (err, data, fields) {
            if (err) return reject(err);
            resolve(data);
        });
    });
}
// and then you can write an async/await function to call n queries like
const get_data = async () => {
    const sql1 = '...';
    const a = await query(sql1);
    const sql2 = '...';
    const b = await query(sql2);
    ....
    const sqln = '...';
    const n = await query(sqln);
    return { a, b, ..., n };
}
Or, with async/await, you can directly call db.query and use the response:
const get_data = async () => {
    const sql1 = '...';
    const res_1 = await db.query(sql1);
    const sql2 = '...';
    const res_2 = await db.query(sql2);
    return { a: res_1, b: res_2 };
}
router.js can be rewritten as:
router.get('/edit', async (req, res, next) => {
    const { a: rename_a, b: rename_b /* and so on */ } = await db.get_data();
    res.render('view', { rename_a, rename_b /* and so on */ });
});
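Alternatively, sticking with the existing get_KategorieData and get_PriceData helpers, the two queries can run in parallel with Promise.all directly in the route. A sketch (the preise_data name is an assumption):
router.get('/edit', (req, res, next) => {
    Promise.all([db.get_KategorieData(), db.get_PriceData()])
        .then(([{ data: kategorie_data }, { data: preise_data }]) => {
            res.render('speiseEintragen', { kategorie_data, preise_data });
        })
        .catch((error) => {
            console.log(error);
        });
});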

How to read files present in an array in Node.js

I would like to know how to read the files and search for the keyword "sample" in Node.js.
If the keyword is found, display the path.
const allfiles = [
    'C:\\Users\\public',
    'C:\\Users\\public\\images',
    'C:\\Users\\public\\javascripts\\index1.js',
    'C:\\Users\\public\\javascripts\\index2.js'
]
const readFile = (path, opts = 'utf8') =>
    new Promise((resolve, reject) => {
        try {
            let result = [];
            fs.readFile(path, opts, (err, data) => {
                if (err) reject(err)
                else {
                    if (data.indexOf("sample") >= 0) {
                        result.push(data);
                        resolve(result);
                    }
                }
            })
        } catch (e) {
            console.log("e", e);
        }
    })
const run = async () => {
    allfiles.forEach(e => {
        const s = await readFile(e);
        console.log(s);
    })
}
run();
Expected Output
[
'C:\\Users\\public\\javascripts\\index1.js',
'C:\\Users\\public\\javascripts\\index2.js'
]
Some tips:
What happens when "sample" isn't found in readFile?
You're currently pushing the data into result instead of the path.
Think about what you're trying to accomplish with readFile. To me, what you want to do is see if that file has the word "sample", and return true if so and if not return false. So I'd name the function checkIfFileHasSample and have it return a boolean. Then in your run function, in the forEach you have the path, so that is where I'd add the path to a list of results.
Maybe you already realized this, but run is never actually called in your code sample, i.e. run() doesn't happen.
Solution:
You had some syntax errors and a tricky gotcha with async/await in run. The syntax errors will come with experience, but I'd also recommend using ESLint to help catch them, as well as making sure your code is always properly indented.
const fs = require("fs");
const allfiles = [
"C:\\Users\\public",
"C:\\Users\\public\\images",
"C:\\Users\\public\\javascripts\\index1.js",
"C:\\Users\\public\\javascripts\\index2.js",
];
const checkIfFileHasSample = (path, opts = "utf8") =>
new Promise((resolve, reject) => {
fs.readFile(path, opts, (err, data) => {
if (err) {
reject(err);
} else {
if (data.includes("sample")) {
resolve(true);
} else {
resolve(false);
}
}
});
});
const run = async () => {
const results = [];
for (let i = 0; i < allFiles.length; i++) {
try {
const file = allFiles[i];
const hasSample = await checkIfFileHasSample(file);
if (hasSample) {
results.push(file);
}
} catch (e) {
console.log(e);
}
}
console.log(results);
};
run();
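If you want the reads to run concurrently instead of one at a time, a variation using fs.promises and Promise.all could look like this (a sketch; paths that cannot be read as files, such as the two directories, are simply skipped):
const { readFile } = require("fs").promises;
const run = async () => {
    const checks = allFiles.map(async (file) => {
        try {
            const data = await readFile(file, "utf8");
            return data.includes("sample") ? file : null;
        } catch (e) {
            return null; // directories or unreadable paths are skipped
        }
    });
    const results = (await Promise.all(checks)).filter(Boolean);
    console.log(results);
};
run();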

Doing async tasks in AWS Lambda inside a map loop

I need to fetch user details from the database, where the user list is provided in an Array.
await userList.map(async (usr, index) => {
    let sql = "SELECT `id`, `email`, `firstname`, `lastname` FROM `users` WHERE `id` = '" + usr.user_id + "'";
    let user_info = await getData(sql, 0);
    userData.push(userObj);
});
The getData function:
let getData = async (sql, params) => {
    return new Promise((resolve, reject) => {
        pool.getConnection((err, connection) => {
            if (err) {
                reject(err);
            }
            connection.query(sql, params, (err, results) => {
                if (err) {
                    reject(err);
                }
                connection.release();
                resolve(results);
            });
        });
    });
};
Now the problem is that the function exits before the results are pulled from the database.
If I remove the loop and pull a single record, everything works fine.
But I need to get the data for all users.
Try this and let me know if it helps:
await Promise.all(userList.map(async (usr, index) => {
    let sql = "SELECT `id`, `email`, `firstname`, `lastname` FROM `users` WHERE `id` = '" + usr.user_id + "'";
    let user_info = await getData(sql, 0);
    userData.push(user_info);
}));
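A slight variation on the same idea: since Promise.all resolves to the mapped results, the userData array can be built directly from its return value instead of being pushed to inside the callbacks (a sketch reusing the getData helper from the question):
const userData = await Promise.all(userList.map((usr) => {
    let sql = "SELECT `id`, `email`, `firstname`, `lastname` FROM `users` WHERE `id` = '" + usr.user_id + "'";
    return getData(sql, 0);
}));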
In this case you can also avoid map and use a for loop to achieve the result.
You can try the snippet below to see if this is the outcome you need.
const getData = (userId) => {
    return new Promise((resolve) => {
        setTimeout(() => {
            resolve("user" + userId);
        }, 500);
    });
};
const ids = [1, 2, 3];
// Final results
const results = [];
async function getAllDataAsync() {
    for (let i = 0; i < ids.length; i++) {
        let user_info = await getData(ids[i]);
        results.push(user_info);
    }
    return Promise.resolve(results);
}
async function start() {
    const res = await getAllDataAsync();
    console.log(res);
    console.log("Got the results. Put rest of the logic here");
}
start();
console.log("Function will exit first");
