I'm trying to execute a query and use some rows from the results.
When I tried this code:
const pg = require('pg')
var Log = require('fancy-log');
var jsonPath = require('jsonpath');
var validator = require('validator');
var isEmpty = require('is-empty-array')
const argv = require('yargs').argv
require('custom-env').env(argv.env)
var db = require('./db');
var checker = require('./checklist')
let txn_id;
var param = []
//////////////////////////////////// DB Connection Block & executeQuery ////////////////////////////////////
// Connection-pool settings. All credentials come from environment
// variables loaded by custom-env above (selected via the --env CLI flag).
var config = {
  user: process.env.DB_USER,
  password: process.env.DB_PASS,
  host: process.env.DB_HOST,
  port: process.env.DB_PORT,
  database: process.env.DB,
  max: 10, // max number of clients in the pool
  idleTimeoutMillis: 30000 // how long a client is allowed to remain idle before being closed
}
// One shared pool for the whole module; clients are checked out per query.
const pool = new pg.Pool(config)
/**
 * Runs a single parameterized statement inside its own transaction.
 * Commits on success, rolls back and rethrows on failure; the pooled
 * client is always released.
 * @param {string} q - SQL text.
 * @param {Array} param - positional query parameters.
 * @returns {Promise<Object>} the pg result (rows, rowCount, ...).
 */
async function query(q, param) {
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    try {
      const result = await client.query(q, param)
      await client.query('COMMIT')
      Log("Connection succeed.")
      return result
    } catch (txnErr) {
      await client.query('ROLLBACK')
      throw txnErr
    }
  } finally {
    client.release()
  }
}
//////////////////////////////////// DB Connection Block & executeQuery ////////////////////////////////////
//client.getConnection();
//db.main()
//db.print()
/**
 * Runs the query and logs the txn_id of one randomly chosen result row.
 * DB errors are caught and logged rather than propagated.
 * @param {string} queryParam - SQL text.
 * @param {Array} conditions - positional query parameters.
 */
async function executeQuery(queryParam, conditions) {
  try {
    const { rows, rowCount } = await query(queryParam, conditions)
    if (rowCount === 0) {
      Log("Query returned no rows; no txn_id to pick.")
      return
    }
    // Valid indices are 0 .. rowCount-1. The old "* rowCount + 1" produced
    // indices 1 .. rowCount, so rows[rowCount] was sometimes undefined
    // ("Cannot read property 'txn_id' of undefined"). Also, `await` on a
    // plain property access does nothing and was removed.
    txn_id = rows[Math.floor(Math.random() * rowCount)].txn_id
    Log("Related txn_id: " + txn_id)
  } catch (err) {
    console.log('Database ' + err)
  }
}
executeQuery("SELECT * from table_name",[])
I got this error all the time:
Database TypeError: Cannot read property 'txn_id' of undefined
My first usecase is working two different js file like db.js for connection and query function, export it and use it in main.js. Unfortunately, I couldn't get success. Probably, I don't understand fully of await usage.
Thanks for any idea in advance!
EDIT: This is main goal. I want to seperate all my logics.
db.js
const pg = require('pg')
var Log = require('fancy-log');
const argv = require('yargs').argv
require('custom-env').env(argv.env)
//var db = function(){};
let txn_id;
var param = []
var config = {
user: process.env.DB_USER,
password: process.env.DB_PASS,
host: process.env.DB_HOST,
port: process.env.DB_PORT,
database: process.env.DB,
max: 10, // max number of clients in the pool
idleTimeoutMillis: 30000 // how long a client is allowed to remain idle before being closed
}
const pool = new pg.Pool(config)
/**
 * Executes one parameterized statement in its own transaction on a pooled
 * client. COMMITs on success; on any failure it ROLLBACKs and rethrows.
 * The client is released in every case.
 * @param {string} q - SQL text.
 * @param {Array} param - positional query parameters.
 * @returns {Promise<Object>} the pg result object.
 */
async function query(q, param) {
  const client = await pool.connect()
  try {
    await client.query('BEGIN')
    try {
      const result = await client.query(q, param)
      await client.query('COMMIT')
      Log("Connection succeed.")
      return result
    } catch (txnErr) {
      await client.query('ROLLBACK')
      throw txnErr
    }
  } finally {
    client.release()
  }
}
/**
 * Runs the query and logs the txn_id of one randomly chosen result row.
 * DB errors are caught and logged rather than propagated.
 * @param {string} queryParam - SQL text.
 * @param {Array} conditions - positional query parameters.
 */
async function executeQuery(queryParam, conditions) {
  try {
    const { rows, rowCount } = await query(queryParam, conditions)
    if (rowCount === 0) {
      Log("Query returned no rows; no txn_id to pick.")
      return
    }
    // Valid indices are 0 .. rowCount-1. The old "* rowCount + 1" produced
    // indices 1 .. rowCount, so rows[rowCount] was sometimes undefined.
    // `await` on a plain property access does nothing and was removed.
    txn_id = rows[Math.floor(Math.random() * rowCount)].txn_id
    Log("Related txn_id: " + txn_id)
  } catch (err) {
    console.log('Database ' + err)
  }
}
module.exports = {
executeQuery
}
main.js
const pg = require('pg')
var Log = require('fancy-log');
var jsonPath = require('jsonpath');
var validator = require('validator');
var isEmpty = require('is-empty-array')
const argv = require('yargs').argv
require('custom-env').env(argv.env)
var db = require('./db');
var checker = require('./checklist')
/**
 * Entry point: runs the sample query through the db module.
 * executeQuery swallows DB errors internally, but the .catch() below
 * guards against anything thrown before that try block runs, so the
 * promise is never left floating.
 */
async function foo() {
  await db.executeQuery("SELECT * from table_name", [])
}
foo().catch((err) => console.log('Unexpected ' + err))
Also same error here:
Database TypeError: Cannot read property 'txn_id' of undefined
You probably solved this already, but since there are no answers, in case someone needs help with this in the future.
TL;DR: You have to remove +1 from txn_id = await rows[Math.floor(Math.random() * rowCount + 1)].txn_id.
rows is an array of length N (N rows found in table_name) with indices from 0 to N-1, and rowCount equals (in this case) rows.length; and since:
1) 0 <= Math.random() < 1
2) 0 <= Math.random() * rowCount < N
3) 1 <= Math.random() * rowCount + 1 < N+1
4) 1 <= Math.floor(Math.random() * rowCount + 1) < N+1 (only natural numbers now)
then Math.floor(Math.random() * rowCount + 1) produces indices from 1 to N, which is why sometimes (when the index is N) rows[Math.floor(Math.random() * rowCount + 1)] equals undefined.
Also, await is not needed here: await rows[Math.floor(Math.random() * rowCount)]; you already awaited for the query to finish here const { rows, rowCount } = await query(queryParam,conditions).
Only place await in front of an expression that returns a Promise; otherwise it does nothing.
Related
I have written a cloud function that runs every 5 minutes on my Firebase app. In essence, the function gathers trends data from the Google Trends website and parses the JSON into a variable.
After doing so I want to then connect to the Twitter API and search for tweets using the trending topics fetched in the first part.
My Issue seems to lie with the second part. It fetches the data but the remainder of the function does not wait for the result before writing to Firebase.
I have tried two different methods but both don't seem to work as intended. I am struggling to understand how the function should wait for the second part to gather and store the information before writing to Firebase.
Method 1
// Scheduled function: fetches Google daily trends, then (asynchronously)
// one tweet per trend, and writes both lists to Firebase.
// NOTE(review): dbRef.set() at the bottom runs synchronously right after
// the for loop — before any T.get() callback has fired — so twitterTrends
// is still empty when it is written. This is the bug described above.
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
  let searchTrends;
  // Build today's date parts (getMonth() is 0-based, hence the +1).
  const ts = Date.now();
  const dateOb = new Date(ts);
  const date = dateOb.getDate();
  const month = dateOb.getMonth() + 1;
  const year = dateOb.getFullYear();
  const twitterTrends = [];
  googleTrends.dailyTrends({
    trendDate: new Date(year + "-" + month + "-" + date),
    geo: "CA",
  }, function(err, res) {
    if (err) {
      functions.logger.error(err);
    } else {
      searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
        .trendingSearches;
      functions.logger.info(searchTrends);
      for (let i = 0; i < searchTrends.length; i++) {
        functions.logger.log(searchTrends[i].title.query);
        // T.get is asynchronous; its callback runs only after this loop
        // (and the dbRef.set below) has already executed.
        T.get("search/tweets", {q: searchTrends[i].title.query, count: 1},
          function(err, data, response) {
            if (err) {
              functions.logger.error(err);
            }
            // NOTE(review): on error, data may be undefined here, which
            // would make JSON.stringify(data.statuses) throw.
            functions.logger.info("Twitter data" +
              JSON.stringify(data.statuses));
            twitterTrends[i] = JSON.stringify(data.statuses);
          });
      }
      // Runs immediately — the Twitter callbacks above have not resolved yet.
      const dbRef = admin.database().ref("searchTrends");
      dbRef.set({google: searchTrends, twitter: twitterTrends});
    }
  });
});
Method 2
// Second attempt with async helpers.
// NOTE(review): getTrends() is defined but never called (nor awaited), so
// dbRef.set() at the bottom runs with searchTrends still undefined and
// twitterTrends empty. Even if it were awaited, dailyTrends uses a plain
// callback that the enclosing async function does not wait for, so
// getTwitterTrends() would still run before searchTrends is assigned.
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
  let searchTrends;
  // Build today's date parts (getMonth() is 0-based, hence the +1).
  const ts = Date.now();
  const dateOb = new Date(ts);
  const date = dateOb.getDate();
  const month = dateOb.getMonth() + 1;
  const year = dateOb.getFullYear();
  const twitterTrends = [];
  async function getTrends(){
    // The callback below is not awaited by this async function.
    googleTrends.dailyTrends({
      trendDate: new Date(year + "-" + month + "-" + date),
      geo: "CA",
    }, function(err, res) {
      if (err) {
        functions.logger.error(err);
      } else {
        searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
          .trendingSearches;
        functions.logger.info(searchTrends);
      }
    });
    await getTwitterTrends();
  }
  async function getTwitterTrends(){
    // NOTE(review): loops only once (i < 1) and reads searchTrends, which
    // may not be set yet when this runs.
    for (let i = 0; i < 1; i++) {
      functions.logger.log(searchTrends[i].title.query);
      T.get("search/tweets", {q: searchTrends[i].title.query, count: 1},
        function(err, data, response) {
          if (err) {
            functions.logger.error(err);
          } else {
            functions.logger.info("Twitter data" +
              JSON.stringify(data.statuses));
            twitterTrends[i] = JSON.stringify(data.statuses);
          }
        });
    }
    // Returns immediately; the T.get callback has not run yet.
    return "done";
  }
  // Executes as soon as the handler is invoked — nothing above has run.
  const dbRef = admin.database().ref("searchTrends");
  dbRef.set({google: searchTrends, twitter: twitterTrends});
});
After checking your function it looks like a Promises issue. The reason you are seeing only the searchTrends data in Firestore is because the Firestore reference and upload is being done inside the callback for the dailyTrends method (taking for reference the method 1 code). However this does not wait for each request to the Twitter API to be resolved before writing to Firestore.
Based on the documentation for twit (which seems to be the wrapper you are using), it also supports standard promises. You could add each promise to an array, and then use Promise.all() to wait until they are all resolved to then write the data into Firestore. It would look something like this (which I haven’t tested since I don’t have Twitter API access).
exports.callTo = functions.pubsub.schedule("5 * * * *").onRun((context) => {
const ts = Date.now();
const dateOb = new Date(ts);
const date = dateOb.getDate();
const month = dateOb.getMonth() + 1;
const year = dateOb.getFullYear();
let searchTrends;
const twitterTrends = [];
const twPromises = [];
googleTrends.dailyTrends({
trendDate: new Date(year + "-" + month + "-" + date),
geo: "CA",
}, function(err, res) {
if (err) {
functions.logger.error(err);
} else {
searchTrends = JSON.parse(res).default.trendingSearchesDays[0]
.trendingSearches;
functions.logger.info(searchTrends);
for (let i = 0; i < searchTrends.length; i++) {
functions.logger.log(searchTrends[i].title.query);
twPromises.push(T.get("search/tweets", {q: searchTrends[i].title.query, count: 1})); // adds promises to the array
}
Promise.all(twPromises).then((responses) => { // runs when all promises from the array are resolved
responses.forEach((response) => {
twitterTrends.push(JSON.stringify(response.statuses));
})
const dbRef = admin.database().ref("searchTrends");
dbRef.set({google: searchTrends, twitter: twitterTrends});
})
}
});
});
I am working with the OpenWeatherMap API to calculate the sum of precipitation for the previous 5 days. To do this I have 5 async functions with API calls using the Fetch API. The data received, that concerns me, is the hourly historic weather data spanning a 24 hour period. Full code below. The JSON response is stored in a constant (e.g. const histData1) where it is then iterated through to sum one of the values over that given 24 hour period. Note: humidity is used as a proof of concept because it hasn't rained here in a while.
for (var i in histData1.hourly){
total1 += histData1.hourly[i].humidity;
};
When I send the resulting value, total1, to the console I receive the expected results.
My trouble is coming when trying to add all of these results together i.e. total1 + total2 + total3 + total4 + total5.
One solution I thought might work was returning the total[1,2,3,4,5] variable at each function and then summing them. Ex.
var rainTotals = getData1() + getData2() + getData3() + getData4() + getData5()
console.log(rainTotals)
This resulted in the following output: [object Promise][object Promise][object Promise][object Promise][object Promise] so it appears to be a datatype issue.
Can anyone shed light as to adding all of the humidity values up from the five separate functions. Feel free to roast my code, I'm pretty new at this.
Thanks!
//WORKS Provies a json of hourly weather data for (1)24 hr period starting 5 days ago.
const fiveDaysAgo = Math.floor((Date.now() / 1000)-432000);
const fivedayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + fiveDaysAgo +"&appid="
// Fetches the hourly history for five days ago and returns the humidity
// values summed over that 24-hour window. Logs the total before returning.
async function getData5() {
  const res = await fetch(fivedayURL)
  const data = await res.json()
  let sum = 0
  for (const key of Object.keys(data.hourly)) {
    sum += data.hourly[key].humidity
  }
  console.log(sum)
  return sum
}
getData5();
const fourDaysAgo = Math.floor((Date.now() / 1000)-345600);
const fourdayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + fourDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData4(){
const response4 = await fetch(fourdayURL)
const histData4 = await response4.json();
var total4 = 0;
for (var i in histData4.hourly){
total4 += histData4.hourly[i].humidity
};
console.log(total4);
return total4;
}
getData4();
const threeDaysAgo = Math.floor((Date.now() / 1000)-259200);
const threedayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + threeDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData3(){
const response3 = await fetch(threedayURL);
const histData3 = await response3.json();
var total3 = 0;
for (var i in histData3.hourly){
total3 += histData3.hourly[i].humidity;
};
console.log(total3);
return total3;
}
getData3();
const twoDaysAgo = Math.floor((Date.now() / 1000)-172800);
const twodayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + twoDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData2(){
const response2 = await fetch(twodayURL);
const histData2 = await response2.json();
var total2 = 0;
for (var i in histData2.hourly){
total2 += histData2.hourly[i].humidity;
};
console.log(total2);
return total2;
}
getData2();
const oneDaysAgo = Math.floor((Date.now() / 1000)-86400);
const onedayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + oneDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData1(){
const response1 = await fetch(onedayURL);
const histData1 = await response1.json();
var total1 = 0;
for (var i in histData1.hourly){
total1 += histData1.hourly[i].humidity;
};
console.log(total1);
return total1;
}
getData1();
// Each getDataN() returns a Promise, so "+" concatenated their string
// forms ("[object Promise]..." in the console). Wait for all five totals
// to resolve, then add the resulting numbers.
Promise.all([getData1(), getData2(), getData3(), getData4(), getData5()])
  .then((totals) => {
    const rainTotals = totals.reduce((sum, t) => sum + t, 0)
    console.log(rainTotals)
  })
  .catch((err) => console.error(err))
There are a couple things happening here. Firstly, to answer your question, because your getDataX functions are declared asynchronous, they return Promises that will eventually either resolve or reject with the actual values that you're looking for.
When working with Promises, you need to sum the total values after all of these promises have resolved.
Second, you have a bit of duplication in your code. You'll notice that there is very little difference between the innards of your getDataX function. To simplify this, you can extract the differences as function parameters, and encapsulate all the same code within one function.
This would result in an implementation like below:
// Unix timestamp (whole seconds) for the moment `days` days before now.
function getDaysAgo(days) {
  const SECONDS_PER_DAY = 86400;
  const nowSeconds = Date.now() / 1000;
  return Math.floor(nowSeconds - days * SECONDS_PER_DAY);
}
/**
 * Fetches the hourly weather history for `days` days ago and returns the
 * humidity values summed over that 24-hour window.
 * @param {number} days - how many days back to look.
 * @returns {Promise<number>} the summed humidity.
 */
async function getDataForDaysAgo(days) {
  const daysAgo = getDaysAgo(days)
  const apiURL = `http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=${daysAgo}&appid=5ffab1cda2c6b2750c78515f41421805`
  const apiResponse = await fetch(apiURL)
  const responseJson = await apiResponse.json()
  const total = responseJson.hourly.reduce((sum, hour) => sum + hour.humidity, 0)
  console.log(`getDataForDaysAgo(${days}) returns ${total}`)
  return total
}
/**
 * Sums the humidity totals for the previous five days.
 * The five requests are independent of each other, so they are issued in
 * parallel with Promise.all instead of being awaited one at a time —
 * same result, roughly 5x less wall-clock waiting.
 * @returns {Promise<number>} the combined five-day total.
 */
async function getDataSums() {
  const totals = await Promise.all(
    [5, 4, 3, 2, 1].map((daysAgo) => getDataForDaysAgo(daysAgo))
  )
  return totals.reduce((sum, t) => sum + t, 0)
}
// Without the .catch, a failed fetch would become an unhandled rejection.
getDataSums().then(result => {
  console.log(result)
}).catch(err => {
  console.error(err)
})
Async functions always returns a promise.
What you can do is to use Promise.all to aggregate them into one array.
// Wait for all five totals to resolve, then add the numbers together.
Promise.all([getData1(), getData2(), getData3(), getData4(), getData5()])
  .then((values) => {
    const sum = values.reduce((acc, value) => acc + value, 0);
    console.log(sum);
  });
Source : Async Functions
You can use await to get the result of async function.
var rainTotals = await getData1() + await getData2() + await getData3() + await getData4() + await getData5();
An async function always returns a Promise, so logging getData1() directly prints a pending Promise (an "empty object") instead of the total — the total has to be awaited. In the version below, each getData function pushes its total into a shared array, and asyncAll() awaits every getData call in turn before logging the whole array at once.
// Data array
let arr = [];
//WORKS Provies a json of hourly weather data for (1)24 hr period starting 5 days ago.
const fiveDaysAgo = Math.floor((Date.now() / 1000)-432000);
const fivedayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + fiveDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData5(){
const response5 = await fetch(fivedayURL)
const histData5 = await response5.json();
var total5 = 0
for (var i in histData5.hourly){
total5 += histData5.hourly[i].humidity;
};
console.log(total5);
await arr.push(total5);
return total5;
}
getData5();
const fourDaysAgo = Math.floor((Date.now() / 1000)-345600);
const fourdayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + fourDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData4(){
const response4 = await fetch(fourdayURL)
const histData4 = await response4.json();
var total4 = 0;
for (var i in histData4.hourly){
total4 += histData4.hourly[i].humidity
};
console.log(total4);
await arr.push(total4);
return total4;
}
getData4();
const threeDaysAgo = Math.floor((Date.now() / 1000)-259200);
const threedayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + threeDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData3(){
const response3 = await fetch(threedayURL);
const histData3 = await response3.json();
var total3 = 0;
for (var i in histData3.hourly){
total3 += histData3.hourly[i].humidity;
};
console.log(total3);
await arr.push(total3);
return total3;
}
getData3();
const twoDaysAgo = Math.floor((Date.now() / 1000)-172800);
const twodayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + twoDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
async function getData2(){
const response2 = await fetch(twodayURL);
const histData2 = await response2.json();
var total2 = 0;
for (var i in histData2.hourly){
total2 += histData2.hourly[i].humidity;
};
console.log(total2);
await arr.push(total2);
return total2;
}
const oneDaysAgo = Math.floor((Date.now() / 1000)-86400);
const onedayURL = "http://api.openweathermap.org/data/2.5/onecall/timemachine?lat=29.8833&lon=-97.9414&dt=" + oneDaysAgo +"&appid=5ffab1cda2c6b2750c78515f41421805"
// Fetches the 1-day-ago hourly history, sums the humidity, records the
// total in the shared `arr`, and returns it.
async function getData1(){
  const response1 = await fetch(onedayURL);
  const histData1 = await response1.json();
  let total1 = 0;
  for (const hour of histData1.hourly) {
    total1 += hour.humidity;
  }
  console.log(total1);
  // Array.prototype.push is synchronous; the old `await arr.push(...)`
  // awaited a plain number and did nothing.
  arr.push(total1);
  return total1;
}
//Moved getData function to asyncAll function.
var rainTotals = [];
// Awaited for getData functions, logged total data.
// Runs the five fetches sequentially so `arr` fills in a fixed order
// (1..5), then logs the collected totals once.
async function asyncAll() {
  await getData1();
  await getData2();
  await getData3();
  await getData4();
  await getData5();
  // console.log is synchronous; awaiting it did nothing.
  console.log(arr.join(', '), 'async');
}
// Catch rejected fetches instead of leaving the promise floating.
asyncAll().catch((err) => console.error(err));
I want to access shopify api using Node.js with request method. I get first 50 items but i need to send the last id of the products i get as a response so it can loop through all the products until we don't have another id (i check that if the last array is not 50 in length.)
So when i get the response of lastID i want to feed that again to the same function until the Parraylength is not 50 or not 0.
Thing is request works asynchronously and i don't know how to feed the same function with the result lastID in node.js.
Here is my code
let importedData = JSON.parse(body);
//for ( const product in importedData.products ){
// console.log(`${importedData.products[product].id}`);
//}
lastID = importedData.products[importedData.products.length-1].id;
let lastIDD = lastID;
console.log(`This is ${lastID}`);
importedData ? console.log('true') : console.log('false');
let Prarraylength = importedData.products.length;
console.log(Prarraylength);
//console.log(JSON.stringify(req.headers));
return lastIDD;
});```
You can use a for loop and await to control the flow of your script in this case.
I'd suggest using the request-native-promise module to get items, since it has a promise based interface, but you could use node-fetch or axios (or any other http client) too.
In this case, to show you the logic, I've created a mock rp which normally you'd create as follows:
const rp = require("request-promise-native");
You can see we're looping through the items, 50 at a time. We're passing the last id as a url parameter to the next rp call. Now this is obviously going to be different in reality, but I believe you can easily change the logic as you require.
const totalItems = 155;
const itemsPerCall = 50;
// Mock items array...
const items = Array.from({ length: totalItems}, (v,n) => { return { id: n+1, name: `item #${n+1}` } });
// Mock of request-promise (to show logic..)
// Replace with const rp = require("request-promise-native");
const rp = function(url) {
let itemPointer = parseInt(url.split("/").slice(-1)[0]);
return new Promise((resolve, reject) => {
setTimeout(() => {
let slice = items.slice(itemPointer, itemPointer + itemsPerCall);
itemPointer += itemsPerCall;
resolve( { products: slice });
}, 500);
})
}
// Pages through products 50 at a time, passing the last seen id into each
// follow-up request, until a short page (or the call cap) signals the end.
async function getMultipleRequests() {
  const MAX_CALLS = 20;
  const EXPECTED_ARRAY_LENGTH = 50;
  let callIndex = 0;
  let lastID = 0;
  for (let callCount = 1; callCount < MAX_CALLS; callCount++) {
    // Replace with the actual url..
    const url = "/products/" + lastID;
    const importedData = await rp(url);
    const products = importedData.products;
    lastID = products[products.length - 1].id;
    callIndex += 1;
    console.log("Call #: " + callIndex + ", Item count: " + products.length + ", lastID: " + lastID);
    if (products.length < EXPECTED_ARRAY_LENGTH) {
      console.log("Reached the end of products...exiting loop...");
      break;
    }
  }
}
getMultipleRequests();
Naive to NodeJS and trying to figure out a way to add results from second table to result set of first table in node js.
How can I access results from first query inside the second query?
Following is my code snippet with comments
function getTasks(callback) {
//first query gives result set
connection.query('SELECT * FROM ' + properties.get('database.Table') +' order by timestamp desc', function(err, rows){
if(!err){
//for each result from result set, match on Id and get values from table_2
for (var i = rows.length - 1; i >= 0; i--) {
connection.query('SELECT * FROM table_2 where taskId = "' + rows[i].taskId + '"', function(err, sets){
if(!err){
//if we have any results from table_2 then create an object
if(sets.length > 0){
var setStatus = [];
for (var i = sets.length - 1; i >= 0; i--) {
setStatus[i] = {Status : sets[i].type+'-'+sets[i].status};
}
//add the setStaus object to results from first table (to rows)
//ISSUE: accessing rows[i] here is alwyas undefined??
}
}
});
}
//need to send the rows with updates from nested block
callback(rows);
}
});
UPDATE: async/await solution worked and by changing i to j for inner iterator!
You can handle using async/await
/**
 * Promisifies connection.query for use with async/await.
 * @param {string} query - SQL text to execute.
 * @returns {Promise<Array>} resolves with the result rows, rejects on error.
 */
const execute = (query) => {
  return new Promise((resolve, reject) => {
    connection.query(query, function (err, rows) {
      if (err) {
        // Reject and stop: the original also called resolve() after
        // reject() (a silent no-op, but misleading) and misspelled the
        // resolver as "resove".
        reject(err);
        return;
      }
      resolve(rows);
    });
  });
}
/**
 * Loads all tasks ordered by timestamp, then for each task loads its
 * table_2 rows and builds a setStatus list.
 * @returns {Promise<Array>} the task rows from the first query.
 */
const getTasks = async () => {
  const query = 'SELECT * FROM ' + properties.get('database.Table') + ' order by timestamp desc';
  const rows = await execute(query);
  for (let i = rows.length - 1; i >= 0; i--) {
    // NOTE(review): string-built SQL is injection-prone; prefer a
    // parameterized query (e.g. '... WHERE taskId = ?' with [taskId]).
    const innerQuery = 'SELECT * FROM table_2 where taskId = "' + rows[i].taskId + '"';
    const sets = await execute(innerQuery);
    //Do some stuff
    if (sets.length > 0) {
      const setStatus = [];
      // Use a distinct index: the original reused `var i` here, which
      // (var being function-scoped) clobbered the outer loop counter.
      for (let j = sets.length - 1; j >= 0; j--) {
        setStatus[j] = {
          Status: sets[j].type + '-' + sets[j].status
        };
      }
    }
  }
  return rows;
};
Yau can call either inside await or given below
getTasks().then((rows) => {
console.log(rows);
}).catch((err) => {
console.log(err);
})
The declaration of i in the second for loop overwrites the value of i from the first loop.
You can fix this by changing the variable declared as part of the second loop to something other than i.
Try to use let or const instead of var. Use a different variable name than i for all your iterators. This is probably messing up with your first i, thus leading to rows[i] as undefined.
You also probably want to look for a MySQL node module that supports ES6 Promises like this one : https://www.npmjs.com/package/mysql2
I'm trying to generate tokens on the fly and save them to the database as I do so.
This is the code to generate a token.
/**
 * Generates a random 12-digit token, retries on collision, and saves it
 * with its allowed use count. Fire-and-forget: the token value is only
 * available inside the save callback, not to the caller.
 * @param {number} maxUse - how many times the token may be used.
 */
const generateToken = function (maxUse) {
  // 12 digit token numbers. 9e+11 possibilities
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  // ensure token doesn't exist in db before saving
  Token.count({ token }, function (err, count) {
    // NOTE(review): `err` from count() is ignored here — on a query
    // failure `count` is undefined and the token is saved unchecked.
    if (count > 0) {
      // BUG FIX: the retry must keep the requested maxUse; the original
      // called generateToken() with no argument, saving maxUse=undefined.
      generateToken(maxUse);
    } else {
      let newToken = new Token({ token, maxUse });
      newToken.save(function (err, savedToken) {
        if (err) {
          console.log(err);
          return;
        } else {
          generateSerial(savedToken._id);
          console.log("saved token is =>", savedToken.token);
          return savedToken.token;
        }
      })
    }
  })
}
How do I write a function that calls this function an arbitrary number of times, appending the the tokens to a file as they are being saved to the database. I realized a while loop wouldn't wouldn't work because of the asynchronous nature of the process.
All the answers I've seen assume that I have the bulk data ahead of time e.g using bulkwrite (mongoose).
An alternative approach is welcomed
Thank you.
The way I see this, you are probably best off keeping a "local list" of the generated tokens and "bulk" inserting via .insertMany(). Mileage may vary on the actual implementation, so we will discuss both that approach as well as handling your recursive function in a sane way with async methods.
Async Loop
You have created a problem where you need to test the values present in order to determine that they are "unique" for insertion. This of course requires async calls in order to look at the database, and therefore rules out "bulk" actions such as "upserts" because you don't know if the item exists before you send it in a loop. So recursion does work in this case.
So the very first thing you should do is to make the "function" asynchronous itself, either returning a callback or a promise.
In essence:
/**
 * Generates a unique random 12-digit token and persists it.
 * @param {number} maxUse - allowed use count stored with the token.
 * @returns {Promise<Object>} the created Token document.
 */
function generateToken(maxUse) {
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  return Token.count({ token }).then( count => {
    if ( count > 0 ) {
      // BUG FIX: the recursive promise must be returned; otherwise a
      // collision makes the outer promise resolve with undefined.
      return generateToken(maxUse);
    } else {
      return Token.create({ token, maxUse });
    }
  })
}
Or in more modern terms with async/await
/**
 * async/await variant: generates a unique random 12-digit token and
 * persists it.
 * @param {number} maxUse - allowed use count stored with the token.
 * @returns {Promise<Object>} the created Token document.
 */
async function generateToken(maxUse) {
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  let count = await Token.count({ token });
  if ( count > 0 ) {
    // BUG FIX: the recursive result must be returned; otherwise a
    // collision makes this function resolve with undefined.
    return generateToken(maxUse);
  } else {
    return Token.create({ token, maxUse });
  }
}
Then it's really just a matter of calling in a loop, either in modern terms as:
let count = 0;
while (count < 500) {
// Random usage 1-5
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = await generateToken(maxUse);
log(token.token);
count++;
}
Or using async.whilst if running under a node version that does not support async/await:
asyncWhilst(
() => count < 500,
(callback) => {
const maxUse = Math.floor(Math.random() * 5 ) + 1;
generateToken(maxUse).then(token => {
log(token.token);
count++;
callback();
}).catch(err => callback(err));
},
(err) => {
if (err) throw err;
// Loop complete, issue callback or promise
}
);
So it's all relatively simple.
Hold Unique Local and "Bulk Insert"
The "alternate" way to handle this is instead to keep an array of the generated tokens "on the client". Then all you need do on each random generation is see if the token was "already seen" and only create an insert operation when a "unique" value is obtained.
This should be much faster than going back and forth to the database with recursive calls since it's all "cached" locally.
In essence, make your generator function very basic:
// Builds an unsaved token record: a random 12-digit number plus the
// allowed use count. Pure function — no database access.
function generateToken(maxUse) {
  const MIN_TOKEN = 100000000000;
  const MAX_TOKEN = 999999999999;
  const token = MIN_TOKEN + Math.floor(Math.random() * (MAX_TOKEN - MIN_TOKEN));
  return { token, maxUse };
}
And then during the loop, make two arrays for the seenTokens and the ops, where the latter denotes the items to later insert in "bulk batches" instead of individual writes:
let count = 0,
seenTokens = [],
ops = [];
while ( count < 500 ) {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++
if ( count % 500 === 0 ) {
await Token.insertMany(ops);
ops = [];
}
} else {
continue
}
}
if ( count % 500 !== 0 ) {
await Token.insertMany(ops);
ops = [];
}
Of course we are applying the async/await methods there, but that's only for the .insertMany() method being async, and if you are not actually inserting "tens of thousands" then it should be easily handled without even needing to "await" such a call, and then only issue "once".
But the demonstration here accounts for what the code should look like when it "is tens of thousands" with no other alteration. Again you can use other library functions to "await" such calls as required.
And again we can employ async.series and async.whilst for such control:
let count = 0,
seenTokens = [],
ops = [];
asyncSeries(
[
(callback) =>
asyncWhilst(
() => count < 500,
(callback) => {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++;
if ( count % 500 === 0 ) {
Token.insertMany(ops,(err,response) => {
console.log(count);
ops = [];
callback(err);
});
} else {
callback();
}
} else {
console.log("trying again: seen token %s", token.token);
callback();
}
},
callback
),
(callback) => {
if ( count % 500 !== 0 ) {
Token.insertMany(ops,callback)
} else {
callback()
}
}
],
(err) => {
if (err) throw err;
ops = [];
// Operations complete, so callback to continue
}
);
All very much the same, and again the "flow control" is really only there to cater for "larger batches", and you could simply use the regular loop to build the ops entries and make one call only to .insertMany(), much as the 500 limit here actually does.
So the simplest form basically is:
let count = 0,
seenTokens = [],
ops = [];
// Regular loop
while ( count < 500 ) {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++;
}
}
// Insert all at once
Token.insertMany(ops,(err,result) => {
if (err) throw err;
// now it's complete
})
Of course this whole alternate approach "hinges on" the fact that you never actually maintain "persistence" of the "tokens" in the database, and would not call this function again until those existing entries are cleared. We could "slurp" in all the "taken tokens" and exclude by the same "local cache". But over time this would grow significantly, so there is that point to consider in your overall choice.
As a full listing scaffolded for latest nodejs release, but the general usage is applied inside:
const asyncWhilst = require('async').whilst,
mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const uri = 'mongodb://localhost/test',
options = { useMongoClient: true };
const tokenSchema = new Schema({
token: { type: Number, unique: true },
maxUse: Number
});
const Token = mongoose.model('Token', tokenSchema);
// Logger helper
function log(data) {
console.log(JSON.stringify(data,undefined,2))
}
// Function implementation
// Function implementation
/**
 * Generates a unique random 12-digit token and persists it.
 * @param {number} maxUse - allowed use count stored with the token.
 * @returns {Promise<Object>} the created Token document.
 */
function generateToken(maxUse) {
  const min = 100000000000;
  const max = 999999999999;
  const token = Math.floor(Math.random() * (max - min) + min);
  return Token.count({ token }).then( count => {
    if ( count > 0 ) {
      // BUG FIX: return the recursive promise; otherwise a collision
      // makes the outer promise resolve with undefined and the demo's
      // token.token access below would throw.
      return generateToken(maxUse);
    } else {
      return Token.create({ token, maxUse });
    }
  })
}
// Main program
(async function() {
try {
const conn = await mongoose.connect(uri,options);
console.log("using async/await");
// clean data
await Promise.all(
Object.keys(conn.models).map(m => conn.models[m].remove({}))
);
let count = 0;
while (count < 500) {
// Random usage 1-5
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = await generateToken(maxUse);
log(token.token);
count++;
}
let totalCount = await Token.count();
console.log("Count is: %s", totalCount);
// Or using async.whilst
console.log("Using async.whilst");
// clean data
await Promise.all(
Object.keys(conn.models).map(m => conn.models[m].remove({}))
);
count = 0;
await new Promise((resolve,reject) => {
asyncWhilst(
() => count < 500,
(callback) => {
const maxUse = Math.floor(Math.random() * 5 ) + 1;
generateToken(maxUse).then(token => {
log(token.token);
count++;
callback();
}).catch(err => callback(err));
},
(err) => {
if (err) reject(err);
resolve();
}
);
});
totalCount = await Token.count();
console.log("Count is: %s", totalCount);
} catch (e) {
console.error(e);
} finally {
mongoose.disconnect();
}
})();
Or as an "alternate" process:
const asyncSeries = require('async').series,
asyncWhilst = require('async').whilst,
mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const uri = 'mongodb://localhost/test',
options = { useMongoClient: true };
const tokenSchema = new Schema({
token: { type: Number, unique: true },
maxUse: Number
});
const Token = mongoose.model('Token', tokenSchema);
// Logger helper
function log(data) {
console.log(JSON.stringify(data,undefined,2))
}
// Function implementation
// Function implementation
// Builds an unsaved token record: a random 12-digit number plus the
// allowed use count. Pure function — no database access.
function generateToken(maxUse) {
  const MIN_TOKEN = 100000000000;
  const MAX_TOKEN = 999999999999;
  const token = MIN_TOKEN + Math.floor(Math.random() * (MAX_TOKEN - MIN_TOKEN));
  return { token, maxUse };
}
// Main program
(async function() {
try {
const conn = await mongoose.connect(uri,options);
console.log("Using async/await");
// clean data
await Promise.all(
Object.keys(conn.models).map(m => conn.models[m].remove({}))
);
let count = 0,
seenTokens = [],
ops = [];
while ( count < 500 ) {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++
if ( count % 500 === 0 ) {
await Token.insertMany(ops);
ops = [];
}
} else {
continue
}
}
if ( count % 500 !== 0 ) {
await Token.insertMany(ops);
ops = [];
}
totalCount = await Token.count();
console.log("Count is: %s", totalCount);
// using async.whilst and indeed async.series for control
console.log("using asyc.whilst");
await Promise.all(
Object.keys(conn.models).map(m => conn.models[m].remove({}))
);
await new Promise((resolve,reject) => {
count = 0,
seenTokens = [],
ops = [];
asyncSeries(
[
(callback) =>
asyncWhilst(
() => count < 500,
(callback) => {
const maxUse = Math.floor(Math.random() * 5) + 1;
let token = generateToken(maxUse);
if ( seenTokens.indexOf(token.token) === -1 ) {
seenTokens.push(token.token);
ops.push(token);
count++;
if ( count % 500 === 0 ) {
Token.insertMany(ops,(err,response) => {
console.log(count);
ops = [];
callback(err);
});
} else {
callback();
}
} else {
console.log("trying again: seen token %s", token.token);
callback();
}
},
callback
),
(callback) => {
if ( count % 500 !== 0 ) {
Token.insertMany(ops,callback)
} else {
callback()
}
}
],
(err) => {
if (err) reject(err);
ops = [];
resolve();
}
);
});
totalCount = await Token.count();
console.log("Count is: %s", totalCount);
} catch (e) {
console.error(e);
} finally {
mongoose.disconnect();
}
})();