How to run two functions synchronously with NodeJS? - javascript

I am pretty new to NodeJS and got lost with the asynchronous mechanism.
I have code that should send an HTTP POST request to a first URL (for example, https://example.com/first), and then, once it is answered with status code 200, send another request to the same server that checks whether the server is done processing the first request (for example, https://example.com/statusCheck).
The server returns a text/plain response containing "true" if it's busy and "false" if it's ready to use.
I wrote it with a while loop that queries the server every 2 seconds, up to a maximum of 10 iterations.
var request = require('request');
var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";

// Sends up to 10 requests to the server
function checkServerStatus(){
    var serverReady = false;
    var count = 0;
    while (!serverReady && count < 10) {
        count++;
        setTimeout(function(){
            request.get(serverCheck, function(err, resp, body){
                if (err){
                    console.log(err);
                } else if (body == "false") {
                    generatorReady = true;
                }
            })
        }, 2000);
    }
    return generatorReady;
}
// Sends the first request and returns true if the response status is 200
function sendFirstRequest(){
    var req = request.post(firstURL, function (err, resp, body) {
        if (err) {
            console.log(err);
            return false;
        } else if (resp.statusCode === 200){
            return true;
        } else {
            return false;
        }
    });
};

module.exports = function (){
    // Sends the first request
    var firstRequestStatus = sendFirstRequest();
    if (firstRequestStatus) {
        return checkServerStatus();
    }
};
In other words, I want to run sendFirstRequest first, wait for the response, and if the response is true, run checkServerStatus and get its return value. If it's possible to do it with a sleep between each while iteration, that would be great (the setTimeout approach doesn't work for me either).
Edit: I've heard that I can use function* with yield, or async/await, in order to avoid callback hell - how can I implement them in this case?

You should use a Promise to do this. Below is some code using bluebird which will do what you want. The Promise.any method will resolve with the first successful call from the array of 10 tries.
const Promise = require('bluebird');
var request = Promise.promisifyAll(require('request'));
var firstURL = "https://example.com/";
var serverCheck = "https://example.com/statusCheck";

request.postAsync(firstURL).then(res => {
    if (res.statusCode === 200) return true;
    throw new Error('server not ready');
}).then(() =>
    Promise.any(new Array(10).fill(request.getAsync(serverCheck)))
).then(res => {
    console.log(res);
}).catch(err => console.log(err));
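If you also want the 2-second gap between checks from the question, a minimal polling sketch using bluebird's Promise.delay could look like this (the retry count, interval, and the readiness check on the body are assumptions taken from the question); it could then replace the Promise.any step in the chain above:

function waitUntilReady(attempt) {
    attempt = attempt || 0;
    if (attempt >= 10) {
        return Promise.reject(new Error('server still busy after 10 checks'));
    }
    return request.getAsync(serverCheck).then(res => {
        if (res.body === "false") return true; // "false" means the server is ready
        // still busy: wait 2 seconds, then check again
        return Promise.delay(2000).then(() => waitUntilReady(attempt + 1));
    });
}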

You have to understand that asynchronous operations cannot return a result right after they are called. They trigger a handler once they have executed, and you can/should use that entry point to initiate or continue your logic flow.
http.post(params, function (err, resp, body) {
    if (err) {
        failFlow(err);
    } else if (resp.statusCode === 200) {
        successFlow(resp);
    }
});
You can chain as many such asynchronous calls as you need, but you cannot return a response in this manner.
Also you might be interested in the concept of a Promise.
var request = require('request');
var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";
var count = 0;

// Sends up to 10 requests to the server
function checkServerStatus() {
    if (count++ > 10) return;
    request.get(serverCheck, function(err, resp, body) {
        if (err) {
            console.log(err);
            checkServerStatus();
        } else if (body == "false") {
            // go further
        }
    });
}

// Sends the first request and calls back when the response status is 200
function sendFirstRequest(cb) {
    var req = request.post(firstURL, function(err, resp, body) {
        if (err) {
            console.log(err);
            return false;
        } else if (resp.statusCode === 200) {
            cb();
        } else {
            return false;
        }
    });
};

module.exports = function() {
    // Sends the first request
    sendFirstRequest(checkServerStatus);
};
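If you also want the 2-second pause between checks from the original question, the retry can be wrapped in setTimeout. The sketch below builds on the answer's code; the done callback and the busy/ready handling are assumptions for illustration:

function checkServerStatus(done) {
    if (count++ > 10) return done(new Error('server still busy after 10 checks'));
    request.get(serverCheck, function(err, resp, body) {
        if (!err && body == "false") {
            return done(null); // server reported it is ready
        }
        // not ready yet (or the check failed): try again in 2 seconds
        setTimeout(function() { checkServerStatus(done); }, 2000);
    });
}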

You can use the async library.
You don't need a setInterval or any timer for that matter; just wait for the response.
Specifically, you can use async.waterfall for this, something like:
var async = require('async')
var request = require('request')

async.waterfall([
    function(cb) {
        // send the first request
        request.post("https://example.com/first", function (err, resp) {
            // send the response to the next function or break in case there was an error
            cb(err, resp)
        })
    },
    function(resp, cb) {
        // check the response
        if (resp.statusCode === 200) {
            // in case the response code is 200 continue to the next function
            return cb()
        }
        // if it's not 200 break with the response code as an error
        return cb(resp.statusCode)
    },
    function(cb) {
        // send the status check
        request.get("https://example.com/statusCheck", function (err, resp, body) {
            // send the body of the response to the next function or break in case of an error
            cb(err, body)
        })
    }
], function (err, result) {
    // check if there was an error along the way
    if (err) {
        console.log("there was an error", err)
    } else {
        // all is good, print the result
        console.log("result:", result)
    }
})

async function main() {
    console.log('First call started');
    let response1 = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"})
    console.log('First call finished', response1);
    console.log('Second call started');
    let response2 = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"})
    console.log('Second call finished', response2);
}
main();
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
In newer versions of Node.js you can use async/await like the example above.
Notice that $.ajax is not a Node function; it is just for demonstration.
You can use await on any function that returns a promise.
For the next example you need to install the request package and use Node >= 8 in order to use promisify.
const {promisify} = require('util');
const request = require('request');

async function main() {
    let get = promisify(request);
    let response1 = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
    console.log('first random: ', response1.body);
    let response2 = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
    console.log('second random: ', response2.body);
}
main();
http://2ality.com/2017/05/util-promisify.html
https://github.com/request/request

Related

Issues with Async/Await during SOAP API call Javascript

Hopefully someone can point me in the right direction. I read up on waiting for functions to complete before continuing, and I resolved myself to using async/await, but I am just stuck now.
I tried to get the async/await process to work and tried to inject the await in various locations, adjusting the functions to be async, but I cannot get PSA_Resultbody to return to the original request. Any pointers would be appreciated.
Thank you,
CE
PSA_Resultbody = ProcessPSAAPI(xmlpackage, PSA_Action);
console.log("3 - Returned data:" + PSA_Resultbody);
This calls the function below:
async function ProcessPSAAPI(xmlpackage, PSA_Action) { //psa action is part of the options
    var options = {...};
    var req = https.request(options, function (res) {
        var chunks = [];
        res.on("data", function (chunk) {
            chunks.push(chunk);
        });
        res.on("end", function (chunk) {
            var body = Buffer.concat(chunks);
            console.log('0 - Start ' + body.toString());
            if (res.statusCode != 200) {
                PSA_Resultcode = "Error: " + res.statusCode + " - " + res.statusMessage;
            } else {
                PSA_Resultcode = "Success: " + res.statusCode + " - " + res.statusMessage;
                PSA_Resultbody = ParseResults(body.toString()); //parse the results for later use --SCRIPT NEEDS TO WAIT FOR RESULTBODY TO COMPLETE
                console.log("1 -PSA_Resultbody as part of RES = " + PSA_Resultbody);
            }
        });
        res.on("error", function (error) {
            console.error(error);
            PSA_Resultcode = res.statusCode + " - " + res.statusMessage;
        });
    });
    console.log('2 -RESULT BODY BEFORE SENDING BACK TO INITIATING FUNCTION: ' + PSA_Resultbody);
    req.write(xmlpackage);
    req.end();
    return PSA_Resultbody;
}
Based on the above, my console log order is 3, 2, 0, 1 instead of 0, 1, 2, 3.
0 and 1 will have the correct data, so the API call does work, but 2 will be "undefined" and should have the same data that is in 1.
There's no way to await an event emitter, so using async in this case isn't going to be useful. You cannot "return" from inside an event either.
The solution here is to return a new custom promise and to use resolve() inside of the "end" event of your emitter.
It will look something like this:
function ProcessPSAAPI(xmlpackage, PSA_Action) {
    return new Promise( (resolve, reject) => {
        // other code

        res.on("end", function (chunk) {
            // other code
            resolve(PSA_Resultbody);
        });

        res.on("error", function (error) {
            // other code
            reject(error);
        });
    });
}
Here's a quick tutorial on creating your own promises, which I've written to simplify comprehension of the subject (official docs are somewhat dry and complex imho).
I did not change your code; I just put the appropriate promise structure in to get you started. This should really be a lesson in promises. async/await is shorthand for the promise structure. A Promise is one way you wait on code. It can be thought of as an array of callbacks that will be executed when the Promise is resolved.
A simple promise works like this:
const myPromise = new Promise(function(resolve, reject) {
    /* Your logic goes in here. It can be anything.
     * But the important part to remember is that when you have success, resolve it.
     * When you have a failure, reject it.
     */
    someCallBackPattern(function(error, data) {
        if (error) {
            reject(error);
        } else {
            resolve(data);
        }
    });
});

// To get the data out you use 'then' and 'catch'. then has two arguments.
myPromise.then(function(data) {
    // The first argument is the result from resolve.
}, function(err) {
    // The second argument is the result from reject.
}).catch((err) => {
    // you can also get to the error from the catch callback
});
This is kind of messy and complex, so there is async/await.
async function() {
    try {
        const result = await myFunctionThatReturnsAPromise();
        // result is the resolved data
    } catch (err) {
        // err is the rejected Error
    }
}

function myFunctionThatReturnsAPromise() {
    return new Promise((resolve, reject) => {
        // your code
    })
}
And that's how it works.
async function someFunction() { // You can not wait on results unless you are in an async function
    PSA_Resultbody = await ProcessPSAAPI(xmlpackage, PSA_Action); // await on your results.
    console.log("3 - Returned data:" + PSA_Resultbody);
}

function ProcessPSAAPI(xmlpackage, PSA_Action) { // This does not need to be async, unless you are awaiting in it.
    return new Promise((resolve, reject) => { // async await is a shorthand promise structure. Although you do not need to use promises, it really helps to get the structure correct.
        var options = {...};
        var req = https.request(options, function (res) {
            var chunks = [];
            res.on("data", function (chunk) {
                chunks.push(chunk);
            });
            res.on("end", function (chunk) {
                var body = Buffer.concat(chunks);
                console.log('0 - Start ' + body.toString());
                if (res.statusCode != 200) {
                    PSA_Resultcode = "Error: " + res.statusCode + " - " + res.statusMessage;
                    reject(new Error(PSA_Resultcode)); // Reject your errors
                } else {
                    PSA_Resultcode = "Success: " + res.statusCode + " - " + res.statusMessage;
                    PSA_Resultbody = ParseResults(body.toString()); //parse the results for later use --SCRIPT NEEDS TO WAIT FOR RESULTBODY TO COMPLETE
                    console.log("1 -PSA_Resultbody as part of RES = " + PSA_Resultbody);
                    resolve(PSA_Resultbody); // Resolve your result
                }
            });
            res.on("error", function (error) {
                console.error(error);
                PSA_Resultcode = res.statusCode + " - " + res.statusMessage;
                reject(new Error(PSA_Resultcode)); // Reject your errors
            });
        });
        console.log('2 -RESULT BODY BEFORE SENDING BACK TO INITIATING FUNCTION: ' + PSA_Resultbody);
        req.write(xmlpackage);
        req.end();
    })
}

How to make a synchronous delay inside a for loop?

I'm writing a NodeJS script that calls a bunch of APIs via GET (using request from npm) and saves the responses in a JSON file. I'm using a for loop to iterate over IDs to pass to the APIs, but I'm having trouble putting a delay between call bursts so I don't spam the API server and make it mad at me (rate limiting). Does anyone know how to do this?
My current code (without any delay):
var fs = require('fs');
var request = require('request');

// run through the IDs
for (var i = 1; i <= 4; i++)
{
    (function(i)
    {
        callAPIs(i); // call our APIs
    })(i);
}

function callAPIs(id)
{
    // call some APIs and store the responses asynchronously, for example:
    request.get("https://example.com/api/?id=" + id, (err, response, body) =>
    {
        if (err)
            { throw err; }

        fs.writeFile("./result/" + id + '/' + id + '_example.json', body, function (err)
        {
            if (err)
                { throw err; }
        });
    });
}

I'm looking for this behavior:

callAPIs(1); // start a burst of calls for ID# 1
// wait a bit...
callAPIs(2); // start a burst of calls for ID# 2
// wait a bit...
// etc
You can use async/await (ES2017):
(async () => {
    for (var i = 1; i <= 4; i++)
    {
        console.log(`Calling API(${i})`)
        await callAPIs(i);
        console.log(`Done API(${i})`)
    }
})();

function callAPIs(id)
{
    return new Promise(resolve => {
        // Simulating your network request delay
        setTimeout(() => {
            // Do your network success handler function or other stuff
            return resolve(1)
        }, 2 * 1000)
    });
}
A working demo: https://runkit.com/5d054715c94464001a79259a/5d0547154028940013de9e3c
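If you want an explicit pause between the real bursts as well (the "wait a bit..." in the question's pseudo-code), a small sleep helper can be awaited inside the same loop; the 2-second interval below is just an example value:

const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

(async () => {
    for (let i = 1; i <= 4; i++) {
        await callAPIs(i);  // wait for the burst for this ID to finish
        await sleep(2000);  // then pause before starting the next one
    }
})();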
In Node.js you don't do pauses; you use its asynchronous nature to await the result of preceding tasks before resuming with the next task.
function callAPIs(id) {
    return new Promise((resolve, reject) => {
        // call some APIs and store the responses asynchronously, for example:
        request.get("https://example.com/api/?id=" + id, (err, response, body) => {
            if (err) {
                return reject(err);
            }
            fs.writeFile(`./result/${id}/${id}_example.json`, body, err => {
                if (err) {
                    return reject(err);
                }
                resolve();
            });
        });
    });
}

// await is only valid inside an async function
(async () => {
    for (let i = 1; i <= 4; i++) {
        await callAPIs(i);
    }
})();
This code will make the request, write the file, and once it is written to disk, it will process the next ID.
Why wait a fixed time before the next task is processed? What if it takes a bit more time? And what if you're wasting 3 seconds just to be sure it was done?
You might also want to have a look at the async module. It provides an async.times method which will help you achieve the results you need.
var fs = require('fs');
var request = require('request');
var async = require('async');

// run through the IDs
async.times(4, (id, next) => {
    // call some APIs and store the responses asynchronously, for example:
    request.get("https://example.com/api/?id=" + id, (err, response, body) => {
        if (err) {
            next(err, null);
        } else {
            fs.writeFile("./result/" + id + '/' + id + '_example.json', body, function (err) {
                if (err) {
                    next(err, null);
                } else {
                    next(null, null);
                }
            });
        }
    });
}, (err) => {
    if (err)
        throw err;
});
You can read about it at the URL below:
https://caolan.github.io/async/v3/docs.html#times

node js value return

I am a beginner with Node.js. I am trying to call an API, and the result should be passed to another function. Because of the callback behaviour, the second function [Task2()] executes right after calling the first function [Task1()]. How can I handle this asynchronous behaviour of Node.js code? I have googled for yield, but without success. I have provided sample code below for your reference. Please provide your comments/suggestions.
var result = '';

function Task1() { //2 --> Executing task1
    Task_Id = '';
    var options = {
        uri: 'http://url/post', //url to call
        method: 'POST',
        auth: {
            'user': 'user1',
            'pass': 'paswd1'
        },
        json: {
            "key": "value"
        }
    };

    function get_createdtaskId(options, callback) {
        var res = '';
        request(options, function (error, response, body) {
            var data = JSON.stringify(body);
            var parsedResponse = JSON.parse(data);
            if (!error && response.statusCode == 200) {
                res = parsedResponse.TaskID;
            }
            else {
                console.log(error);
                res = error;
            }
            callback(res);
        });
    }

    //to call
    Task_Id = get_createdtaskId(options, function(resp) {
        return resp;
    });
    return Task_Id;
}

result = Task1(); //1 --> initial function calling

function Task2(result) { //3 --> use result from task1 as input parameter for function Task2
    //do logic on result received from Task1
}
You have to add a callback function to Task1, which will be called when it is done:
function Task1(callback) { //2 --> Executing task1
    ....
    callback(result); //get the result this way
};

and then call it like this:

Task1(function(result) {
    Task2(result);
});
This is a very generic approach. Check this to learn more on the subject:
Node.js event-driven
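Applied to the code in the question, a sketch of the callback version might look like this (it assumes the same options object and that the response body already contains TaskID, as in the question):

function Task1(callback) {
    request(options, function (error, response, body) {
        if (!error && response.statusCode == 200) {
            callback(null, body.TaskID); // pass the TaskID on once the request has finished
        } else {
            callback(error || new Error('request failed'));
        }
    });
}

Task1(function (err, taskId) {
    if (err) return console.log(err);
    Task2(taskId); // Task2 only runs after Task1's response has arrived
});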
You can use Promises in JavaScript to solve this.
For example:
Task1 = new Promise((resolve, reject) => {
    resolve("Success");
})

Task1.then(result => {
    // result is the return value from task 1
    Task2(result);
}).catch(error => {
    // handle the error
})
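A sketch of how this could look wrapped around the actual request from the question (the Promise wrapper and the error object are assumptions; the options object is the one defined in the question):

function getCreatedTaskId(options) {
    return new Promise((resolve, reject) => {
        request(options, (error, response, body) => {
            if (!error && response.statusCode == 200) {
                resolve(body.TaskID); // the API's TaskID field, as used in the question
            } else {
                reject(error || new Error('unexpected status ' + response.statusCode));
            }
        });
    });
}

getCreatedTaskId(options)
    .then(taskId => Task2(taskId))
    .catch(err => console.log(err));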

NodeJs - Making 'GET' request conditionally in a while loop with request module

I am accessing an API endpoint that is a little iffy. About 70% of the time it sends back the correct JSON response, but a few times it will crap out and send an XML that says "values /values". I want to make a while loop that requests until it gets the correct response back. In my case, I am guaranteed to get back the correct response eventually, so that is why I am looping instead of figuring out why the endpoint is crapping out.
Here is my code:
var gotValidResponse = false;

while (!gotValidResponse) {
    request(options, function(err, res, body){
        if (err) {
            console.log(err);
            return;
        }
        try {
            data = JSON.parse(body);
            console.log(data);
            gotValidResponse = true;
        } catch(e) {
            console.log('trying again');
        }
    });
}

processJSON(data);
Obviously the code above does not work, but hopefully it shows what I am trying to do. Thanks for any help!
EDIT: Like this?
var myData = getStuff(options);

function getStuff(options) {
    request(options, function (err, res, body) {
        if (err) {
            console.log(err);
            return;
        }
        try {
            data = JSON.parse(body);
            return data;
        } catch (e) {
            return getStuff(options);
        }
    })
}
You almost got it right in your edit. What you need to do is keep calling the function until it returns what you want. Like this (my conditionals are merely illustrative):
var attempts = 1;

var fake = function(data, cb) {
    console.log('Attempt no.', attempts);
    if (attempts < 5) {
        attempts += 1;
        return fake(data, cb);
    } else {
        return cb(null, 'completed');
    }
}

fake('whatever', function(err, res) {
    console.log(res)
})
https://jsfiddle.net/eysu2amp/
If you check the console, you will see that the fake function gets called 5 times and then returns the data. The recursive calling of the function keeps passing the same callback function.
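Applied to the request in the question, the same pattern could look like the sketch below; the retry limit is an added assumption so that a permanently broken endpoint cannot recurse forever:

function getStuff(options, attempt, callback) {
    if (attempt > 10) return callback(new Error('no valid JSON after 10 tries'));
    request(options, function (err, res, body) {
        if (err) return callback(err);
        try {
            callback(null, JSON.parse(body)); // valid JSON - we are done
        } catch (e) {
            getStuff(options, attempt + 1, callback); // got the XML - try again
        }
    });
}

getStuff(options, 1, function (err, data) {
    if (err) return console.log(err);
    processJSON(data);
});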

How to wait for N number of async functions to finish in node.js so that I can do some work on all their combined results?

Please look at the code below. The request module is this one: https://www.npmjs.com/package/request
var urlArray = []; // URL in this level

foo("xyz.com", false);

function crawl(url, finished) {
    request(url, function (error, response, body) {
        if (finished == true) { return; }
        // do some work on body (including getting n number of new URLs from
        // the body) and set finished = true if we find what we are looking for.
        // for each new url: urlArray.push(newURL);
        // for each new url: call crawl(newurl, finished);
        // Now how can I know when ALL these requests have finished,
        // so that I can have a urlArray corresponding to this level of the crawling tree and
        // do some work before starting the next level of the crawl?
    });
}
Use Promises.
Check out the Q library (specifically, I've pointed to the methods you need).
Promise creation:
https://github.com/kriskowal/q/wiki/API-Reference#qdefer
var promise = Q.defer();
doAsyncStuff(callbackOfAsync);
return promise.promise;

function callbackOfAsync(isSuccess) {
    if (isSuccess) {
        promise.resolve();
    }
    else {
        promise.reject();
    }
}
Wait for multiple promises:
https://github.com/kriskowal/q/wiki/API-Reference#promise-for-array-methods
Q.all([getFromDisk(), getFromCloud()]).done(function (values) {
    assert(values[0] === values[1]); // values[0] is fromDisk and values[1] is fromCloud
});
I don't really understand your question, but I guess you will need Promises. I assume you are using NodeJS.
function makeRequest(url) {
    return new Promise(function (resolve, reject) {
        request(url, function (err, response, body) {
            if (err || response.statusCode !== 200)
                reject(err || body);
            else
                resolve(body);
        });
    });
}
This function returns a promise. You can use it this way:
var request = makeRequest('url.com');

request.then(function (urls) {
    // this is called if no error occurred
    urls.forEach(function (url) {
        console.log(url);
    });
}, function (error) {
    // this is called on error
    console.log(error);
});
If you want to wait for multiple requests to be answered to perform an action, use Promise.all:
var requests = [makeRequest(url1), makeRequest(url2), makeRequest(url3)];

Promise.all(requests).then(function (data) {
    // everything is done
    console.log(data);
});
I didn't test the code, but I hope you get the idea.
To answer your question specifically, the following flow of logic should work for you; I have added comments to help it make sense:
var urlArray = []; // URL in this level
var finished = false;

foo("xyz.com", false);

function start() {
    while (urlArray.length) {
        crawl(urlArray.pop());
    }
}

function crawl(url) {
    request(url, function (error, response, body) {
        if (finished) {
            return;
        }
        // 1. at this point, a given batch of crawls have all started
        //    AND urlArray is empty.
        // 2. do some work on body (including getting n number of new URLs from
        //    the body) and set finished = true if we find what we are looking for.
        // 3. for each new url: urlArray.push(newURL);
        // 4. start crawling on the new batch of URLs
        start();
    });
}
All the request callbacks will be executed after start() completes, which guarantees that urlArray will be empty at that point.
If processing of one crawl response indicates (by setting finished = true;) that what you're looking for has been found, all other processing of responses will terminate as soon as it begins.
Otherwise, the response is processed and a new batch of urls is set up for crawling. You call start() to begin crawling each batch.
It would help you also (as suggested in the other answer) if you acquainted yourself with the concept of Promises.
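If you do go the Promise route, one level of the crawl can also be awaited with Promise.all before the next level starts. The sketch below assumes a hypothetical requestBody helper wrapping request in a Promise, and leaves the URL extraction (which depends on your body parsing) as a placeholder:

function requestBody(url) {
    return new Promise((resolve, reject) => {
        request(url, (err, res, body) => err ? reject(err) : resolve(body));
    });
}

async function crawlLevel(urls) {
    // fetch every URL of the current level in parallel and wait for all of them
    const bodies = await Promise.all(urls.map(requestBody));
    const nextLevel = [];
    // ...extract new URLs from each body here and push them into nextLevel...
    if (nextLevel.length) {
        await crawlLevel(nextLevel); // then move on to the next level of the tree
    }
}

crawlLevel(["http://xyz.com"]);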
