How to make a synchronous delay inside a for loop? - javascript

I'm writing a NodeJS script that calls a bunch of APIs via GET (using request from npm) and saves the responses in a JSON file. I'm using a for to loop through IDs to pass to the APIs, but I'm having trouble putting in a delay between call bursts so I don't spam the API server and make it mad at me (rate limiting). Does anyone know how to do this?
My current code (without any delay):
var fs = require('fs');
var request = require('request');

// Kick off one burst of API calls per ID. With `let`, the loop variable is
// block-scoped per iteration, so no IIFE is needed to capture its value.
for (let id = 1; id <= 4; id++) {
  callAPIs(id); // call our APIs
}
// Fetches one API response for `id` and persists it to
// ./result/<id>/<id>_example.json.
// NOTE(review): `throw err` inside an async callback cannot be caught by any
// surrounding try/catch — it becomes an uncaught exception and crashes the
// process. Prefer propagating the error via a callback or promise rejection.
function callAPIs(id)
{
// call some APIs and store the responses asynchronously, for example:
request.get("https://example.com/api/?id=" + id, (err, response, body) =>
{
if (err)
{throw err;}
// assumes ./result/<id>/ already exists — fs.writeFile does not create directories
fs.writeFile("./result/" + id + '/' + id + '_example.json', body, function (err)
{
if (err)
{throw err;}
});
});
}
I'm looking for this behavior:
callAPIs(1); // start a burst of calls for ID# 1
// wait a bit...
callAPIs(2); // start a burst of calls for ID# 2
// wait a bit...
// etc

You can use async/await (introduced in ES2017):
// Run the four bursts strictly one after another: each iteration suspends
// until callAPIs(i) resolves before the next one starts.
(async function run() {
  for (let i = 1; i <= 4; i++) {
    console.log(`Calling API(${i})`)
    await callAPIs(i);
    console.log(`Done API(${i})`)
  }
})();
/**
 * Simulates one burst of API calls for `id`.
 * @param {number} id - the ID being processed (unused by the simulation).
 * @param {number} [delayMs=2000] - simulated network latency. Parameterized
 *   (was a hard-coded `2 * 1000`) so callers and tests can shorten it;
 *   the default keeps the original behavior.
 * @returns {Promise<number>} resolves with 1 once the simulated work is done.
 */
function callAPIs(id, delayMs = 2 * 1000)
{
return new Promise(resolve => {
// Simulating your network request delay
setTimeout(() => {
// Do your network success handler function or other stuff
resolve(1)
}, delayMs)
});
}
A working demo: https://runkit.com/5d054715c94464001a79259a/5d0547154028940013de9e3c

In Node.js you don't do pauses; you use its asynchronous nature to await the result of each preceding task before resuming with the next one.
/**
 * Fetches one API response for `id` and writes it to
 * ./result/<id>/<id>_example.json.
 * @param {number} id
 * @returns {Promise<void>} resolves once the file is on disk; rejects on the
 *   first request or write error.
 */
function callAPIs(id) {
  return new Promise((resolve, reject) => {
    // call some APIs and store the responses asynchronously, for example:
    request.get("https://example.com/api/?id=" + id, (err, response, body) => {
      if (err) {
        // Bail out here: without this `return` the original fell through and
        // tried to write an undefined `body` after already rejecting.
        return reject(err);
      }
      fs.writeFile(`./result/${id}/${id}_example.json`, body, err => {
        if (err) {
          // `return` prevents the resolve() below from also firing.
          return reject(err);
        }
        resolve();
      });
    });
  });
}
// Process IDs 1..4 strictly in sequence. (The original awaited
// `callAPIs(array[index], index, array)`, but neither `array` nor `index`
// exists in this scope — the call must use the loop counter.)
// Note: top-level `await` requires an ES module or an enclosing async function.
for (let i = 1; i <= 4; i++) {
  await callAPIs(i);
}
This code, will do request, write the file, and once it is written to disk, it will process the next file.
Waiting a fixed time before processing the next task is fragile: what if the task takes a bit longer than expected? And what if it finishes early — you would be wasting 3 seconds just to be sure it was done.

You might also want to have a look at the async module. It consists of async.times method which will help you achieve the results you need.
var fs = require('fs');
var request = require('request');
var async = require('async');
// Run through the IDs one at a time. `timesSeries` (rather than `times`)
// waits for each iteration to finish before starting the next, which matches
// the rate-limiting requirement; plain `times` fires all calls in parallel.
// The iteration counter `n` is zero-based, so shift it to the question's 1..4.
async.timesSeries(4, (n, next) => {
  var id = n + 1;
  // call some APIs and store the responses asynchronously, for example:
  request.get("https://example.com/api/?id=" + id, (err, response, body) => {
    if (err) {
      next(err, null);
    } else {
      fs.writeFile("./result/" + id + '/' + id + '_example.json', body, function (err) {
        if (err) {
          next(err, null);
        } else {
          next(null, null);
        }
      });
    }
  });
}, (err) => {
  if (err)
    throw err
});
You can read about it from the below shared url:
https://caolan.github.io/async/v3/docs.html#times

Related

Issues with Async/Await during SOAP API call Javascript

Hopefully someone can point me to the right direction. I read up on waiting for functions to complete before continuing and I resolved myself to using await/async but I am just stuck now.
I tried to get the Async/Await process to work, tried to inject the await in various locations, with adjusting the functions to be async, but i can not get the PSA_Resultbody to return to the original request. Any pointers would be appreciated.
Thank you,
CE
// NOTE(review): ProcessPSAAPI is async, so this assigns a pending Promise,
// not the response body — the call must be awaited (see the answers below).
PSA_Resultbody = ProcessPSAAPI(xmlpackage, PSA_Action);
console.log("3 - Returned data:" + PSA_Resultbody);
calls the below:
// Question code as posted (truncated — the function's closing `}` is not
// shown, and `{...}` is a placeholder for the real request options).
// NOTE(review): PSA_Resultbody is assigned only inside the async "end"
// handler, which fires after the response arrives; the `return` at the
// bottom runs first — hence the 3,2,0,1 log order described below.
async function ProcessPSAAPI(xmlpackage, PSA_Action) { //psa action is part of the options
var options = {...};
var req = https.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function (chunk) {
var body = Buffer.concat(chunks);
console.log('0 - Start '+body.toString());
if(res.statusCode != 200) {
PSA_Resultcode = "Error: " +res.statusCode +" - "+ res.statusMessage;
} else {
PSA_Resultcode = "Success: " +res.statusCode +" - "+ res.statusMessage;
PSA_Resultbody = ParseResults(body.toString()); //parse the results for later use --SCRIPT NEEDS TO WAIT FOR RESULTBODY TO COMPLETE
console.log("1 -PSA_Resultbody as part of RES = "+PSA_Resultbody);
}
});
res.on("error", function (error) {
console.error(error);
PSA_Resultcode = res.statusCode +" - "+ res.statusMessage;
});
});
// Runs synchronously, before any response event — PSA_Resultbody is still undefined here.
console.log('2 -RESULT BODY BEFORE SENDING BACK TO INITIATING FUNCTION: '+PSA_Resultbody);
req.write(xmlpackage);
req.end();
return PSA_Resultbody;
Based on the above, my console log order is: 3,2,0,1 in stead of 0,1,2,3.
0 and 1 will have the correct data, so the API Call does work, but 2 will be "undefined" and should have the same data that is in 1.
There's no way to await an event emitter, so using async in this case isn't going to be useful. You cannot "return" from inside an event either.
The solution here is to return a new custom promise and to use resolve() inside of the "end" event of your emitter.
It will look something like this:
// Skeleton of the fix: wrap the https request in a promise and settle it
// from the response events — "end" resolves with the parsed body, "error"
// rejects. (`// other code` marks the parts carried over from the question.)
function ProcessPSAAPI(xmlpackage, PSA_Action) {
return new Promise( (resolve, reject) => {
// other code
res.on("end", function (chunk) {
// other code
resolve(PSA_Resultbody);
});
res.on("error", function (error) {
// other code
reject(error);
});
});
}
Here's a quick tutorial on creating your own promises, which I've written to simplify comprehension of the subject (official docs are somewhat dry and complex imho).
I did not change your code. I just put the appropriate promise structure in to get you started. This should really be a lesson in promises. async await is a shorthand promise structure. A Promise is one way you wait on code. It can be thought of as an array of callbacks that will be executed when the Promise is resolved.
A simple promise works like this:
// A promise adapts a callback API: call resolve on success, reject on
// failure; consumers attach handlers with then/catch.
const myPromise = new Promise(function(resolve, reject) {
/* Your logic goes in here. It can be anything.
* But the important part to remember is that when you have success, resolve it.
* When you have a failure, reject it.
*/
someCallBackPattern(function(error, data) {
if(error) {
reject(error);
} else {
resolve(data);
}
});
});
// To get the data out you use 'then', and 'catch'. then has two arguments.
myPromise.then(function(data) {
// The first argument is the result from resolve.
}, function(err) {
// The second argument is the result from reject.
}).catch((err) => {
// you can also get to the error from the catch callback
});
This is kinda messy and complex. So there is async await.
// Named the function: `async function() { ... }` as a statement is a syntax
// error — a function declaration requires a name.
async function example() {
  try {
    const result = await myFunctionThatReturnsAPromise();
    // result is the resolved data
  } catch (err) {
    // err is the rejected Error
  }
}
// Reserve `new Promise` for adapting callback-style APIs like this one.
function myFunctionThatReturnsAPromise() {
  return new Promise((resolve, reject) => {
    // your code
  })
}
And thats how it works.
// Full answer code (note `{...}` on the options line is a placeholder).
// someFunction awaits the promisified request; ProcessPSAAPI settles the
// promise from inside the response events.
async function someFunction () { // You can not wait on results unless you are in an await function
PSA_Resultbody = await ProcessPSAAPI(xmlpackage, PSA_Action); // await on your results.
console.log("3 - Returned data:" + PSA_Resultbody);
}
function ProcessPSAAPI(xmlpackage, PSA_Action) { // This does not need to be async. Unless you are awaiting in it.
return new Promise((resolve, reject) => { // async await is a shorthand promise structure. Although you do not need to use promises. It really helps to get the structure correct.
var options = {...};
var req = https.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function (chunk) {
var body = Buffer.concat(chunks);
console.log('0 - Start '+body.toString());
if(res.statusCode != 200) {
PSA_Resultcode = "Error: " +res.statusCode +" - "+ res.statusMessage;
reject(new Error(PSA_Resultcode)); // Reject your errors
} else {
PSA_Resultcode = "Success: " +res.statusCode +" - "+ res.statusMessage;
PSA_Resultbody = ParseResults(body.toString()); //parse the results for later use --SCRIPT NEEDS TO WAIT FOR RESULTBODY TO COMPLETE
console.log("1 -PSA_Resultbody as part of RES = "+PSA_Resultbody);
resolve(PSA_Resultbody); // Resolve your result
}
});
res.on("error", function (error) {
console.error(error);
PSA_Resultcode = res.statusCode +" - "+ res.statusMessage;
reject(new Error(PSA_Resultcode)); // Reject your errors
});
});
// Still runs before the response — kept from the question to show the 2-before-0/1 ordering.
console.log('2 -RESULT BODY BEFORE SENDING BACK TO INITIATING FUNCTION: '+PSA_Resultbody);
req.write(xmlpackage);
req.end();
})
}

Perform asynchronous actions semi-synchronously in Axios

I have the following code:
/**
 * Fetch stats from api
 */
fetchStats() {
this._isFetching = true;
// fetch stats after building url and replacing invalid characters
return new Promise(async (resolve, reject) => {
await API.fetchStats(this.rsn)
.then(jres => {
this.skills = jres.main.skills;
this._isFetching = false;
resolve('success');
})
.catch(err => {
console.log(err);
console.log('error retreiving stats');
this._isFetching = false;
reject('Failed to retreive stats');
})
.finally(() => {
this._isFetching = false;
});
});
}
I thought making it async with await would make it wait until it got the response before continuing. Returning the promise is something I added in testing to see if I could make it synchronous.
Then my code that consumes this method:
// Question code — two problems, which together explain the observed output:
// 1. forEach discards the promise returned by its async callback, so every
//    iteration starts at once and nothing is actually waited on.
// 2. `.catch(console.log(...))` CALLS console.log immediately and passes its
//    return value (undefined) to .catch — so the "Failed to refresh" line
//    prints for every member regardless of success.
memberCollection.forEach(async el => {
await el.player.fetchStats()
.then(() => {
console.log(`Refreshed ${el.player.rsn}'s account`);
})
.catch(console.log(`Failed to refresh ${el.player.rsn}'s account`));
});
My thinking was that it would wait till it got a response then console.log either a successful refresh or a failed refresh. What I am instead seeing is a whole bunch of "success" messages followed by a string of failed messages indicating that it is running both the then and the catch in the forEach. Does anyone know how I can make this work?
My issue is that Axios keeps timing out (my speculation is that it is due to the number of requests being sent off and the fact that there is a 5-10sec delay as it pulls from the db), if I navigate to the API URL manually it works as well as if I just do one member (as opposed to forEach) it works fine. So I'm trying to limit the number of requests fired off at once. I have tried setting my axios timeout to 10, 20, and 60 seconds, but it made no improvement.
Solution code:
/**
 * Applies async callback `cb` to each element of `arr` strictly in
 * sequence, awaiting each call before starting the next. Errors thrown by
 * `cb` are logged and swallowed so the remaining elements still run.
 * @param {Array} arr
 * @param {function(*): Promise} cb
 */
const asyncForEach = async (arr, cb) => {
  for (const el of arr) {
    try {
      await cb(el); // result intentionally unused (was assigned to an unused `res`)
    } catch (err) { console.log(err) };
    if (el.player && el.player.rsn) console.log(`Processed ${el.player.rsn}`);
  }
  console.log('done processing in asyncForEach');
}
not linked to axios but to async await.
consider
/**
 * Resolves with `i` after a simulated delay.
 * @param {*} i - value to echo back.
 * @param {number} [ms=1000] - delay in milliseconds (was hard-coded 1000;
 *   the default preserves the original timing).
 * @returns {Promise<*>}
 */
function slow(i, ms = 1000){
  return new Promise((ok, ko) => {
    setTimeout(() => ok(i), ms);
  });
}
// Awaits cb on each element in turn — iterations are strictly sequential,
// which is what the timestamps in the demo output show.
async function asyncForEach(arr, cb){
  for (const el of arr) {
    const res = await cb(el);
    console.log('async', res, new Date)
  }
}
/*
#foreach does not wait, but async and reduce are spaced by one second
foreach 4 2019-10-14T13:43:47.059Z
foreach 5 2019-10-14T13:43:47.071Z
foreach 6 2019-10-14T13:43:47.071Z
async 1 2019-10-14T13:43:47.071Z
async 2 2019-10-14T13:43:48.073Z
async 3 2019-10-14T13:43:49.074Z
reduce 7 2019-10-14T13:43:50.076Z
reduce 8 2019-10-14T13:43:51.078Z
reduce 9 2019-10-14T13:43:52.080Z
*/
// Demo driver — intentionally contrasts the three approaches.
// NOTE: `await [4,5,6].forEach(...)` awaits forEach's return value, which is
// undefined — it does NOT wait for the async callbacks. That is exactly what
// the timestamps above demonstrate; do not "fix" this line.
async function main(){
await [4,5,6].forEach(async el=>{
let res = await slow(el);
console.log('foreach', res, new Date)
})
await asyncForEach([1,2,3], slow);
// reduce chains each iteration onto the previous promise, so these three
// are spaced one second apart.
await [7,8,9].reduce((acc, el)=>acc.then(async _=>{
let res = await slow(el);
console.log('reduce', res, new Date);
return;
}), Promise.resolve())
}
main();
As you can see from timestamps, forEach does not wait for slow to finish
however, asyncForEach in its iteration does wait
What you may want to do is either
write a for loop as done with asyncForEach
use standard promises (stacking them):
// Stacking promises with reduce: each slow(el) starts only after the
// previous one resolves; the chain's final promise is the overall result.
[1,2,3].reduce((acc, el)=>acc.then(_=>{
return slow(el);
}), Promise.resolve())

How to run two functions synchronously with NodeJS?

I am pretty new with NodeJS and got lost with the asynchronous mechanism.
I have a code that should send a HTTP post request to the first URL (for example - https://example.com/first), and then when it got answered with status code 200, send another request to the same server that checks if the server is done with processing the first request (for example - https://example.com/statusCheck).
The server should return a text/plain response contains "true" if it's busy, and "false" if it's ready to use.
I wrote it with a while loop that queries the server every 2 seconds, up to maximum of 10 iterates.
var request = require('request');
var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";
// Sends up to 10 requests to the server
// NOTE(review): this loop is synchronous — it schedules all 10 timers at
// once and then falls through; `serverReady` can never change before the
// loop has already finished.
function checkServerStatus(){
var serverReady = false;
var count = 0;
while (!serverReady && count < 10) {
count++;
setTimeout(function(){
request.get(serverCheck, function(err, resp, body){
if (err){
console.log(err);
} else if (body == "false") {
generatorReady = true; // NOTE(review): typo — creates an undeclared global; `serverReady` was intended
}
})
}, 2000);
}
return generatorReady; // NOTE(review): undeclared here (ReferenceError); also returns before any request completes
}
// Sends the first request and return True if the response equals to 200
function sendFirstRequest(){
var req = request.post(firstURL, function (err, resp, body) {
if (err) {
console.log(err);
return false; // NOTE(review): returns only from the callback — the value is discarded
} else if (resp.statusCode === 200){
return true;
} else {
return false;
}
});
};
module.exports = function (){
// Sends the first request
var firstRequestStatus = sendFirstRequest();
if (firstRequestStatus) { // NOTE(review): sendFirstRequest returns undefined, so this branch never runs
return checkServerStatus();
}
};
In other words, I want to run sendFirstRequest first, wait for the response, and in case the response is true, run checkServerStatus and get its returned value. If it's possible to do it with a sleep between each while iteration, that would be great (the setTimeout approach does not work for me either).
Edit: I've heard that I can use function* with yield, or async-await in order to avoid callback hell - how can I implement them in this case?
You should use a Promise to do this. Below is some code using bluebird which will do what you want. The Promise.any method will return the first successful call from the Array out of 10 tries.
const Promise = require('bluebird');
var request = Promise.promisifyAll(require('request'));
var firstURL = "https://example.com/";
var serverCheck = "https://example.com/statusCheck";
// POST the first request; a 200 lets the chain continue, anything else aborts.
request.postAsync(firstURL).then(res => {
if (res.statusCode === 200) return true;
throw new Error('server not ready');
}).then(() =>
// Build 10 *independent* status requests. The original used
// `new Array(10).fill(request.getAsync(serverCheck))`, which calls getAsync
// exactly once and fills the array with ten copies of the same promise —
// Promise.any then has only one real attempt to race.
Promise.any(Array.from({ length: 10 }, () => request.getAsync(serverCheck)))
).then(res => {
console.log(res);
}).catch(err => console.log(err));
You have to understand that the asynchronous operations can not return a result right after their call. They trigger some handler when they have executed. You can/should use that entry point to initiate or continue your logic flow.
// Pseudocode (not valid JS as written — `handler(err, resp, body){` stands in
// for a callback). The point: the continuation lives inside the callback;
// results flow to successFlow/failFlow and are never `return`ed to the caller.
http.post(params, handler(err, resp, body){
if(err){
failFlow(err);
}else if(resp.statusCode === 200) {
successFlow(resp);
}
});
and you can chain as many such asynchronous calls as you need but you can not return a response in this manner.
Also you might be interested in the concept of a Promise.
var request = require('request');
var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";
var count = 0;
// Sends up to 10 requests to the server
function checkServerStatus() {
if (count++ > 10) return;
request.get(serverCheck, function(err, resp, body) {
if (err) {
console.log(err);
checkServerStatus();
} else if (body == "false") {
// go further
}
});
}
// Sends the first request and return True if the response equals to 200
function sendFirstRequest(cb) {
var req = request.post(firstURL, function(err, resp, body) {
if (err) {
console.log(err);
return false;
} else if (resp.statusCode === 200) {
cb();
} else {
return false;
}
});
};
module.exports = function() {
// Sends the first request
sendFirstRequest(checkServerStatus);
};
You can use the async library.
You don't need a setInterval or any timer for that matter — just wait for the response.
specifically you can use async.waterfall for this, something like:
var async = require('async')
var request = require('request')

// Each step hands its result to the next through the waterfall callback;
// passing an error (or a non-200 status) short-circuits to the final handler.
async.waterfall([
  (done) => {
    // send the first request
    request.post("https://example.com/first", (err, resp) => done(err, resp))
  },
  (resp, done) => {
    // check for the response: 200 continues, any other code aborts the chain
    if (resp.statusCode !== 200) return done(resp.statusCode)
    done()
  },
  (done) => {
    // send the verify
    request.get("https://example.com/statusCheck", (err, resp, body) => done(err, body))
  }
], (err, result) => {
  // check if there was an error along the way
  if (err) {
    console.log("there was an error", err)
  } else {
    // all is good print the result
    console.log("result:", result)
  }
})
// Demo: two AJAX calls executed strictly in sequence — the second request
// does not start until the first has resolved.
async function main() {
  console.log('First call started');
  const first = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"});
  console.log('First call finished', first);
  console.log('Second call started');
  const second = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"});
  console.log('Second call finished', second);
}
main();
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
In newer versions of nodejs you can use async await like the example above
Notice that $.ajax is not a node function. It is just for demonstration
You can use await on any function that return a promise.
For the next example you need to install request package and use Node >= 8 for using promisify
const {promisify} = require('util');
const request = require('request')

// util.promisify turns request's (err, response) callback API into a
// promise-returning function we can await.
async function main() {
  const get = promisify(request);
  const first = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
  console.log('first random: ', first.body)
  const second = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
  console.log('second random: ', second.body)
}
main();
http://2ality.com/2017/05/util-promisify.html
https://github.com/request/request

How do I check when multiple chai-http requests are really done in a mocha before block?

I'm using a before block in a set of mocha unit tests and within them I'm iterating over a set of calls to get information from a REST API. I'm using chai-http to do this. However I am running into the problem that the done() method is being called before the series of n requests I make have completed. Calling done in the end block results in multiple done() calls but putting outside of the block means it's called before I'm really done! Here is an example of before block:
var flags = [];
var groups = [];
// Correct pattern: the single done() sits inside the .end callback, so the
// hook finishes only after /groups has responded and `groups` is populated.
before((done) => {
  chai.request(server)
    .get('/groups')
    .end((err, res) => {
      groups = JSON.parse(res.text);
      done();
    });
});
// Question code (truncated — the closing `});` for this before() is missing).
before(function(done) {
groups.forEach(function(rec) {
chai.request(server)
.get('/groups/' + rec.KEYWORD_GROUP_ID + '/groupflags')
// NOTE(review): chai-http's .end callback signature is (err, res) — the
// parameter names appear swapped here; presumably it works only while err is null.
.end(function(res, err) {
Array.prototype.push.apply(flags, JSON.parse(res.text));
// A done() here gets called n times
});
// But here it's called before the requests all end
done();
});
You could try with async.whilst(). Count up a counter to groups.length and then hit done() in the callback. Link to the function documentation: (http://caolan.github.io/async/docs.html#whilst)
Something like...
let processed = 0;
// Serially walk `groups` with async.whilst: the test function gates each
// iteration, and done() fires once in the final callback.
async.whilst(
  function () {
    // Test if we have processed all records
    return processed < groups.length;
  },
  function (callback) {
    const rec = groups[processed++]; // Sorry Douglas
    chai.request(server)
      .get('/groups/' + rec.KEYWORD_GROUP_ID + '/groupflags')
      .end(function (res, err) {
        Array.prototype.push.apply(flags, JSON.parse(res.text));
        callback(null, processed);
      });
  },
  function (err) {
    assert(!err, err);
    done();
  }
);
As Alex requested here is what I had initially as a solution:
// Deletes every keyword in `groups`, calling done() exactly once — after all
// parallel deletes have responded, or immediately on the first error.
before('delete keywords in a group', function(done) {
  var count = 0;
  var length = groups.length;
  if (length === 0) { return done(); } // `return` added: exit instead of falling through
  groups.forEach(function (rec) {
    chai.request(server)
      .delete('/keywords/' + rec.id)
      .end(function (err, res) {
        if (err) {
          console.error('Delete keywords err: ' + err.message);
          // The original called `this.skip()` here, but inside .end's callback
          // `this` is not the mocha test context, and done() was never invoked
          // — the hook would simply time out. Fail the hook explicitly.
          return done(err);
        }
        count++;
        if (count === length) { done(); }
      });
  });
});
This seems to be working but I think for any more complex cases (for example a cascade style delete) the async library provides a more elegant and reliable solution. Hence it is a better fit for the general case.

Correct procedure to call multiple asynchronous functions in node.js

I have a requirement where I need to get the records from table 1 and store in redis cache and once redis cache is finished storing, get table 2 records and store in redis cache. So there are 4 asynchronous functions.
Steps:
Get table 1 records
Store in redis cache
Get table 2 records
Store in redis cache
What is the correct procedure to handle it.
Below is the code which I have written to handle it. Please confirm whether its the right procedure or any other ways to handle it as per node.js
var redis = require("redis");
var client = redis.createClient(6379, 'path', {
auth_pass: 'key'
});
// NOTE(review): placeholder — because the next line follows an unfinished
// assignment, it actually parses as `mysqlConnection = get_Sections1()`.
var mysqlConnection = // get the connection from MySQL database
get_Sections1()
/**
 * Steps 1+2: load the employee table and cache it in redis under
 * "settings1"; on success, chain to get_Sections2().
 */
function get_Sections1() {
  var sql = "select *from employee";
  mysqlConnection.query(sql, function (error, results) {
    if (error) {
      console.log("Error while Sections 1 : " + error);
    } else {
      // was JSON.stringify(summaryResult) — `summaryResult` is undefined;
      // the query callback delivers the rows as `results`
      client.set("settings1", JSON.stringify(results), function (err, reply) {
        if (err) {
          console.log("Error during Update of Election : " + err);
        } else {
          get_Sections2();
        }
      });
    }
  });
}
/**
 * Steps 3+4: load the student table and cache it in redis under "settings2".
 */
function get_Sections2() {
  var sql = "select *from student";
  mysqlConnection.query(sql, function (error, results) {
    if (error) {
      console.log("Error while Sections 2 : " + error);
    } else {
      // was JSON.stringify(summaryResult) — `summaryResult` is not defined
      // anywhere; the rows arrive in the `results` parameter
      client.set("settings2", JSON.stringify(results), function (err, reply) {
        if (err) {
          console.log("Error during Update of Election : " + err);
        } else {
          console.log("Finished the task...");
        }
      });
    }
  });
}
Create two parameterised functions. One for retrieval, one for storing.
Then promisify them both.
Then write:
// Sequential pipeline: each .then waits for the previous step's promise.
// (Assumes getTableRecords and storeInRedisCache have been promisified as
// the surrounding text describes.)
return getTableRecords(1)
.then(storeInRedisCache)
.then(getTableRecords.bind(null,2))
.then(storeInRedisCache)
.then(done);
To promisify a function, something like this might work:
// Must be a *function returning a promise*: the original assigned
// `new Promise(...)` directly, which fires the query immediately (once) and
// cannot be re-run — contradicting the "two parameterised functions" advice
// it illustrates.
function getEmployees() {
  return new Promise(function(resolve, reject) {
    var sql = "select *from employee";
    mysqlConnection.query(sql, function (error, results) {
      if (error) {
        return reject(error); // propagate the error (was `reject()` with no reason)
      } else {
        return resolve(results);
      }
    });
  });
}
If you are using an old version of NodeJS you will need a polyfill for Promise.
Here is an alternative to Ben Aston's solution using Promise.coroutine assuming promises:
// Bluebird's Promise.coroutine drives a generator: each `yield`ed promise
// suspends the coroutine until it settles (the pre-async/await pattern).
const doStuff = Promise.coroutine(function*(){
const records = yield getTableRecords(1);
yield storeRecordsInCache(records);
const otherRecords = yield getTableRecords(2);
yield storeRecordsInCache(otherRecords); // you can use loops here too, and try/catch
});
doStuff(); // do all the above, assumes promisification
Alternatively, if you want to use syntax not yet supposed in Node (and use Babel to get support) you can do:
// Modern-Node / Babel variant of the same sequential flow using async/await.
async function doStuff(){
  const records = await getTableRecords(1);
  await storeRecordsInCache(records);
  const otherRecords = await getTableRecords(2);
  await storeRecordsInCache(otherRecords); // you can use loops here too, and try/catch
} // was `})` — the stray paren made the snippet a syntax error

Categories