I am a beginner to Node.js. I'm trying to call an API and pass the result to another function. Because of the callback behavior, the second function [Task2()] executes immediately after the first function [Task1()] is called. How can I handle this asynchronous behavior in Node.js? I have googled for yield, but without success. I have provided sample code below for reference. Please provide your comments/suggestions.
var request = require('request'); // the request module is needed for the HTTP call
var result = '';

function Task1() { //2 --> Executing task1
    var Task_Id = '';
    var options = {
        uri: 'http://url/post', //url to call
        method: 'POST',
        auth: {
            'user': 'user1',
            'pass': 'paswd1'
        },
        json: {
            "key": "value"
        }
    };

    function get_createdtaskId(options, callback) {
        var res = '';
        request(options, function (error, response, body) {
            var data = JSON.stringify(body);
            var parsedResponse = JSON.parse(data);
            if (!error && response.statusCode == 200) {
                res = parsedResponse.TaskID;
            } else {
                console.log(error);
                res = error;
            }
            callback(res);
        });
    }

    //to call
    Task_Id = get_createdtaskId(options, function (resp) {
        return resp;
    });
    return Task_Id;
}
result = Task1(); //1 --> initial function calling

function Task2(result) { //3 --> use result from Task1 as input parameter for Task2
    //do logic on result received from Task1
}
You have to add a callback function to Task1, which will be called when it is done:
function Task1(callback) { //2 --> Executing task1
    ....
    callback(result); //get the result this way
}
and then call it like this:
Task1(function (result) {
    Task2(result);
});
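Applied to the code in the question, a minimal sketch could look like this (it reuses the options object from the question, assumes the response is JSON with a TaskID field as shown there, and follows the Node convention of an error-first callback):

var request = require('request');

function Task1(callback) {
    // options is the same object defined in the question
    request(options, function (error, response, body) {
        if (!error && response.statusCode == 200) {
            callback(null, body.TaskID); // hand the result to the caller
        } else {
            callback(error || new Error('request failed'));
        }
    });
}

Task1(function (err, taskId) {
    if (err) return console.log(err);
    Task2(taskId); // runs only after Task1 has finished
});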
This is a very generic approach. Check this to learn more on the subject:
Node.js event-driven
You can use Promises in JavaScript to solve this.
For example:
Task1 = new Promise((resolve, reject) => {
    resolve("Success");
});

Task1.then(result => {
    //result is the return value from Task1
    Task2(result);
}).catch(error => {
    //handle the error
});
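Applied to the question's code, Task1 would wrap the request call in a Promise and resolve with the task id. A sketch, assuming the same options object and TaskID response field as above:

function Task1() {
    return new Promise(function (resolve, reject) {
        request(options, function (error, response, body) {
            if (!error && response.statusCode == 200) {
                resolve(body.TaskID);
            } else {
                reject(error || new Error('request failed'));
            }
        });
    });
}

Task1().then(function (taskId) {
    Task2(taskId); // runs only once the API has responded
}).catch(function (error) {
    console.log(error);
});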
I am pretty new to NodeJS and got lost with the asynchronous mechanism.
I have code that should send an HTTP POST request to a first URL (for example - https://example.com/first), and then, once it is answered with status code 200, send another request to the same server that checks whether the server is done processing the first request (for example - https://example.com/statusCheck).
The server returns a text/plain response containing "true" if it's busy, and "false" if it's ready to use.
I wrote it with a while loop that queries the server every 2 seconds, up to a maximum of 10 iterations.
var request = require('request');

var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";

// Sends up to 10 requests to the server
function checkServerStatus() {
    var serverReady = false;
    var count = 0;
    while (!serverReady && count < 10) {
        count++;
        setTimeout(function () {
            request.get(serverCheck, function (err, resp, body) {
                if (err) {
                    console.log(err);
                } else if (body == "false") {
                    serverReady = true;
                }
            });
        }, 2000);
    }
    return serverReady;
}
// Sends the first request and returns true if the response status is 200
function sendFirstRequest() {
    var req = request.post(firstURL, function (err, resp, body) {
        if (err) {
            console.log(err);
            return false;
        } else if (resp.statusCode === 200) {
            return true;
        } else {
            return false;
        }
    });
}
module.exports = function () {
    // Sends the first request
    var firstRequestStatus = sendFirstRequest();
    if (firstRequestStatus) {
        return checkServerStatus();
    }
};
In other words, I want to run sendFirstRequest first, wait for the response, and if the response is true, run checkServerStatus and get its returned value. A sleep between each while iteration would be great (the setTimeout does not work for me either).
Edit: I've heard that I can use function* with yield, or async/await, to avoid callback hell - how can I implement them in this case?
You should use a Promise to do this. Below is some code using bluebird which will do what you want. The Promise.any method will return the first successful call from the Array out of 10 tries.
const Promise = require('bluebird');
var request = Promise.promisifyAll(require('request'));
var firstURL = "https://example.com/";
var serverCheck = "https://example.com/statusCheck";

request.postAsync(firstURL).then(res => {
    if (res.statusCode === 200) return true;
    throw new Error('server not ready');
}).then(() =>
    // map, not fill: fill would reuse one single already-started request
    Promise.any(new Array(10).fill(null).map(() => request.getAsync(serverCheck)))
).then(res => {
    console.log(res);
}).catch(err => console.log(err));
You have to understand that asynchronous operations cannot return a result right after their call. They trigger a handler when they have finished executing. You can/should use that entry point to initiate or continue your logic flow.
http.post(params, function handler(err, resp, body) {
    if (err) {
        failFlow(err);
    } else if (resp.statusCode === 200) {
        successFlow(resp);
    }
});
and you can chain as many such asynchronous calls as you need, but you cannot return a response in this manner.
Also you might be interested in the concept of a Promise.
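For example, here is a minimal sketch that wraps the first call from the question in a hand-rolled Promise (request and firstURL as defined in the question):

function postFirst(url) {
    return new Promise(function (resolve, reject) {
        request.post(url, function (err, resp) {
            if (err || resp.statusCode !== 200) {
                reject(err || new Error('bad status: ' + resp.statusCode));
            } else {
                resolve(resp);
            }
        });
    });
}

postFirst(firstURL)
    .then(function () { /* server accepted the job; start polling here */ })
    .catch(function (err) { console.log(err); });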
var request = require('request');
var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";
var count = 0;

// Sends up to 10 requests to the server
function checkServerStatus() {
    if (count++ > 10) return;
    request.get(serverCheck, function(err, resp, body) {
        if (err) {
            console.log(err);
            checkServerStatus();
        } else if (body == "false") {
            // go further
        } else {
            // server is still busy: try again in 2 seconds
            setTimeout(checkServerStatus, 2000);
        }
    });
}
// Sends the first request and invokes cb if the response status is 200
function sendFirstRequest(cb) {
    var req = request.post(firstURL, function(err, resp, body) {
        if (err) {
            console.log(err);
        } else if (resp.statusCode === 200) {
            cb();
        }
        // returning a value from this callback would have no effect,
        // so errors are only logged here
    });
};
module.exports = function() {
    // Sends the first request
    sendFirstRequest(checkServerStatus);
};
You can use the async library.
You don't need setInterval or any timer for that matter; just wait for the response.
Specifically, you can use async.waterfall for this, something like:
var async = require('async')
var request = require('request')
async.waterfall([
function(cb) {
// send the first request
request.post("https://example.com/first", function (err, resp) {
// send the response to the next function or break in case there was an error
cb(err, resp)
})
},
function(resp, cb) {
// check for the response
if (resp.statusCode === 200) {
// in case the response code is 200 continue to the next function
return cb()
}
// if its not 200 break with the response code as an error
return cb(resp.statusCode)
},
function(cb) {
// send the verify
request.get("https://example.com/statusCheck", function (err, resp, body) {
// send the body of the response to the next function or break in case of an error
cb(err, body)
})
}
], function (err, result) {
// check if there was an error along the way
if (err) {
console.log("there was an error", err)
} else {
// all is good print the result
console.log("result:", result)
}
})
async function main() {
console.log('First call started');
let response1 = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"})
console.log('First call finished', response1);
console.log('Second call started');
let response2 = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"})
console.log('Second call finished',response2);
}
main();
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
In newer versions of Node.js you can use async/await, as in the example above.
Notice that $.ajax is not a Node function; it is just for demonstration.
You can use await on any function that returns a promise.
For the next example you need to install the request package and use Node >= 8 for promisify:
const {promisify} = require('util');
const request = require('request')
async function main() {
let get = promisify(request);
let response1 = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
console.log('first random: ',response1.body)
let response2 = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
console.log('second random: ',response2.body)
}
main();
http://2ality.com/2017/05/util-promisify.html
https://github.com/request/request
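Tying this back to the original question: with promisify and a small sleep helper, the polling loop with a 2-second wait becomes a plain for loop. This is a sketch assuming the firstURL/serverCheck endpoints and the "true"/"false" body semantics from the question:

const {promisify} = require('util');
const request = require('request');

const req = promisify(request);                     // same trick as above
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

const firstURL = "https://example.com/first";
const serverCheck = "https://example.com/statusCheck";

async function main() {
    const first = await req({ url: firstURL, method: 'POST' });
    if (first.statusCode !== 200) throw new Error('first request failed');
    for (let i = 0; i < 10; i++) {
        const check = await req(serverCheck);       // response object; the body is on .body
        if (check.body === 'false') return true;    // "false" means the server is ready
        await sleep(2000);                          // wait 2 seconds between checks
    }
    return false; // still busy after 10 checks
}

main().then(ready => console.log('server ready:', ready))
    .catch(err => console.log(err));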
I've written a program that makes an HTTP GET request to three distinct URLs. The program is supposed to output the message bodies in the order the URLs are provided; however, it's not doing so, even though I'm making the callbacks in exactly that order.
The final program is supposed to require the user to input the URLs via the command line; however, I've simply made variable assignments for ease of testing.
I realize this code could be more object-oriented; however, I'm new to JavaScript and it's not my focus to learn how at the moment.
var http = require('http')
// var url_1 = process.argv[2]
// var url_2 = process.argv[3]
// var url_3 = process.argv[4]
var url_1 = 'http://youvegotmail.warnerbros.com/cmp/0frameset.html'
var url_2 = 'http://www.3riversstadium.com/index2.html'
var url_3 = 'http://toastytech.com/evil/'
var output_1 = ''
var output_2 = ''
var output_3 = ''
function getHttp_1 (callback) {
    http.get(url_1, function getResponse (response1) {
        response1.setEncoding('utf8')
        response1.on('data', function (data) {
            output_1 = output_1 + data
        })
        response1.on('end', function processData() {
            console.log("Printing Result 1:")
            callback(output_1)
        })
    })
}

function getHttp_2 (callback) {
    http.get(url_2, function getResponse (response2) {
        response2.setEncoding('utf8')
        response2.on('data', function (data) {
            output_2 = output_2 + data
        })
        response2.on('end', function processData() {
            console.log("Printing Result 2:")
            callback(output_2)
        })
    })
}

function getHttp_3 (callback) {
    http.get(url_3, function getResponse (response3) {
        response3.setEncoding('utf8')
        response3.on('data', function (data) {
            output_3 = output_3 + data
        })
        response3.on('end', function processData() {
            console.log("Printing Result 3:")
            callback(output_3)
        })
    })
}

function printResults(output) {
    console.log("Result")
    // console.log(output)
}

getHttp_1(printResults)
getHttp_2(printResults)
getHttp_3(printResults)
EDIT:
Results I'm generally getting:
Printing Result 3:
Result
Printing Result 2:
Result
Printing Result 1:
Result
Results I'm expecting:
Printing Result 1:
Result
Printing Result 2:
Result
Printing Result 3:
Result
In contrast to the sequential callback approach proposed by some answers, using Promises will make this both more efficient (the requests will be made in parallel) and simpler:
var http = require('http'),
urls = [
'http://youvegotmail.warnerbros.com/cmp/0frameset.html',
'http://www.3riversstadium.com/index2.html',
'http://toastytech.com/evil/'
];
Promise.all(urls.map(getUrl))
.then(function (results) {
results.forEach(function (output, i) {
console.log("Result #" + (i + 1) +
" with length: " + output.length);
});
});
function getUrl(url, i) {
    return new Promise(function (resolve, reject) {
        http.get(url, function getResponse(resp) {
            var output = '';
            resp.setEncoding('utf8');
            resp.on('data', function (data) {
                output += data;
            });
            resp.on('end', function processData() {
                console.log("Resolving Result " + (i + 1) + ":");
                resolve(output);
            });
        }).on('error', reject); // surface request errors instead of hanging
    });
}
Welcome to the asynchronous life of Node.js! As you fire off those HTTP requests, none of them waits for the one before it to finish. You are seeing this odd behavior because you are practically sending all 3 requests at once and simply printing the responses as they arrive.
Edit: If you do want to see them in the correct order, fire off the second HTTP request inside the callback of the first, and then the third inside the callback of the second. That guarantees you won't get the data until each request before it has finished. Below, getHttp_1 kicks off getHttp_2 once it is done; getHttp_2 would chain getHttp_3 the same way.
function getHttp_1 (callback) {
    http.get(url_1, function getResponse (response1) {
        response1.setEncoding('utf8')
        response1.on('data', function (data) {
            output_1 = output_1 + data
        })
        response1.on('end', function processData() {
            console.log("Printing Result 1:")
            callback(output_1)
            getHttp_2(callback)
        })
    })
}
The async module can really help for controlling how async tasks are executed. For example, if you want your requests to happen one after the other:
async.series([
function (next) { makeRequest(url_1, next); },
function (next) { makeRequest(url_2, next); },
function (next) { makeRequest(url_3, next); },
], function (err, result) {
// All done
});
// Or you can get fancy
//async.series([
// makeRequest.bind(null, url_1),
// makeRequest.bind(null, url_2),
// makeRequest.bind(null, url_3),
//]);
function makeRequest(url, callback) {
    http.get(url, function getResponse (res) {
        var output = '';
        res.setEncoding('utf8')
        res.on('data', function (data) {
            output += data
        })
        res.on('end', function processData() {
            // node-style callback: error first, result second
            callback(null, output)
        })
    })
}
If you don't care what order they occur in but want to output them in order:
async.parallel([
function (next) { makeRequest(url_1, next); },
function (next) { makeRequest(url_2, next); },
function (next) { makeRequest(url_3, next); },
], function (err, results) {
if (err) {
return void console.error('Got an error:', err.stack);
}
console.log(results); // Will output array of every result in order
});
If the requests are dependent on each other, async.auto is useful to tie the result of one request to the request of another.
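For instance, a minimal async.auto sketch where the second request depends on the first (the task names are made up for illustration, and this assumes async v2, where dependent tasks receive (results, callback)):

async.auto({
    first: function (next) { makeRequest(url_1, next); },
    second: ['first', function (results, next) {
        // results.first holds the output of the first request
        makeRequest(url_2, next);
    }]
}, function (err, results) {
    if (err) return console.error(err);
    console.log(results.first, results.second);
});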
JavaScript/AJAX calls are async, so they don't complete in the order you call them. To call them in sequence/in a specific order, do something like:
$(function () {
    //setup an array of AJAX options; each object specifies information for a single AJAX request
    var ajaxes = [{ url : '<url>', dataType : 'json' }, { url : '<url2>', dataType : 'text' }],
        current = 0;

    //declare your function to run AJAX requests
    function do_ajax() {
        //check to make sure there are more requests to make
        if (current < ajaxes.length) {
            //make the AJAX request with the given data from the `ajaxes` array of objects
            $.ajax({
                url : ajaxes[current].url,
                dataType : ajaxes[current].dataType,
                success : function (serverResponse) {
                    ...
                    //increment the `current` counter and recursively call this function again
                    current++;
                    do_ajax();
                }
            });
        }
    }

    //run the AJAX function for the first time once `document.ready` fires
    do_ajax();
});
Another option could be:
function callA() {
    $.ajax({
        ...
        success: function() {
            //do stuff
            callB();
        }
    });
}

function callB() {
    $.ajax({
        ...
        success: function() {
            //do stuff
            callC();
        }
    });
}

function callC() {
    $.ajax({
        ...
    });
}

callA();
Ref: Multiple Calls in Order
I am new to JavaScript, and currently I'm trying to learn callbacks in my scripts. This script should return the reduced words in an array of objects:
var fs = require('fs')
var dict = ['corpus.txt','corpus1.txt','corpus2.txt'];
mapping(dict, function(error,data){
if(error) throw error
console.log(data)
})
function mapping(list, callback){
    var txtObj = []
    list.forEach(function (val) {
        readFile(val, function(error,data){
            txtObj.push(data)
        })
    })

    function readFile(src, cb){
        fs.readFile(src,'utf8', function (error,data) {
            if (error) return callback(error,null)
            return mapred(data)
        })
    }

    return callback(null,txtObj)
}
But it returns empty array. Any help would be appreciated.
Thanks!
`fs.readFile` is an asynchronous function; before it is done and the result callback is invoked, you are returning the empty txtObj array.
How to fix it?
Call `return callback(null,txtObj)` after `fs.readFile` has finished running.
Also, as you are running an asynchronous function on an array of items one by one, it might still not work the way you want; you might want to use modules like async in Node.js.
Here comes an asynchronous version using the async module. Synchronous file operations are strongly discouraged :)
var fs = require('fs')
var dict = ['corpus.txt','corpus1.txt','corpus2.txt'];
mapping(dict, function(error,data){
if(error) throw error
console.log(data)
})
function mapping(list, callback){
var txtObj = [],
async = require('async');
async.each(list, readFile, function(err) {
callback(err,txtObj)
});
function readFile(src, cb) {
fs.readFile(src,'utf8', function (error,data) {
if (error) {
cb(error);
}
else {
txtObj.push(mapred(data));
cb(null);
}
})
}
}
EDIT: You can do this without async, but it is a little bit dirty, isn't it? Also, it's OK if you remove the self-invoking function inside the forEach; I included it so that you can access val even after the callback is done.
var fs = require('fs')
var dict = ['corpus.txt','corpus1.txt','corpus2.txt'];
mapping(dict, function(error,data){
if(error) throw error
console.log(data)
})
function mapping(list, callback){
    var txtObj = [],
        counter = list.length,
        start = 0;
    list.forEach(function (val) {
        (function(val) {
            readFile(val, function(error,data) {
                txtObj.push(data);
                start++;
                if(error || (start === counter)) {
                    callback(error,txtObj);
                }
            });
        })(val);
    });

    function readFile(src, cb) {
        fs.readFile(src,'utf8', function (error,data) {
            if (error) {
                cb(error);
            }
            else {
                // pass the processed data up instead of pushing it here,
                // so each file is only added to txtObj once
                cb(null, mapred(data));
            }
        });
    }
}
The reason you are getting an empty array as the result is that you are performing the callback before the readFile function has a chance to populate the array. You are performing multiple asynchronous actions but not letting them complete before continuing.
If there were only one async action, you would call callback() in the callback function of readFile, but as you need to perform multiple async actions before calling callback(), you should consider using fs.readFileSync().
Sometimes sync cannot be avoided.
function mapping(list, callback)
{
var txtObj = []
list.forEach(function(val)
{
try { txtObj.push(mapred(fs.readFileSync(val, 'utf8'))) }
catch(err) { callback(err) }
})
callback(null, txtObj)
}
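That said, if blocking is a concern, a Promise-based sketch achieves the same result without sync calls (assuming Node 10+ for fs.promises; mapred is the function from the question):

const fs = require('fs').promises;

function mapping(list, callback) {
    // read all files in parallel, then apply mapred to each body in order
    Promise.all(list.map(src => fs.readFile(src, 'utf8')))
        .then(contents => callback(null, contents.map(mapred)))
        .catch(err => callback(err));
}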
Please look at the code below. The request module is this one: https://www.npmjs.com/package/request
var urlArray = []; // URL in this level

crawl("xyz.com", false);

function crawl (url, finished) {
    request(url, function (error, response, body) {
        if (finished == true) { return; }
        // do some work on body (including getting n number of new URLs from
        // the body) and set finished = true if we find what we are looking for.
        // for each new url: urlArray.push(newURL);
        // for each new url: call crawl(newurl, finished);
        // Now how can I know when ALL these requests have finished?
        // so that I can have a urlArray corresponding to this level of the crawling tree and
        // do some work before starting the next level of the crawl.
    });
}
Use Promises.
Check out the Q library (specifically I pointed to the methods you need):
Promise creation:
https://github.com/kriskowal/q/wiki/API-Reference#qdefer
function doStuff() {
    var deferred = Q.defer();
    doAsyncStuff(callbackOfAsync);

    function callbackOfAsync(isSuccess) {
        if (isSuccess) {
            deferred.resolve();
        } else {
            deferred.reject();
        }
    }

    return deferred.promise;
}
Wait for multiple promises:
https://github.com/kriskowal/q/wiki/API-Reference#promise-for-array-methods
Q.all([getFromDisk(), getFromCloud()]).done(function (values) {
assert(values[0] === values[1]); // values[0] is fromDisk and values[1] is fromCloud
});
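Applied to the crawl in the question, you would collect one promise per URL in the current level and wait for the whole batch. In this sketch, crawlOne is a hypothetical helper that performs one request and returns a promise, built with Q.defer as above:

var promises = urlArray.map(function (url) {
    return crawlOne(url); // hypothetical: performs one request and returns a promise
});

Q.all(promises).done(function (results) {
    // every request in this level has finished;
    // build the next level's urlArray from results and start the next batch here
});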
I don't really understand your question, but I guess you will need Promises. I assume you are using Node.js.
function makeRequest (url) {
    return new Promise(function (resolve, reject) {
        request(url, function (err, response, body) {
            if (err || response.statusCode !== 200)
                reject(err || body);
            else
                resolve(body);
        });
    });
}
this function returns a promise. You can use it this way:
var promise = makeRequest('url.com'); // renamed so it doesn't shadow the request module
promise.then(function (urls) {
    // this is called if no error occurred
    urls.forEach(function (url) {
        console.log(url);
    });
}, function (error) {
    // this is called on error
    console.log(error);
});
If you want to wait for multiple requests to be answered to perform an action, use Promise.all:
var requests = [makeRequest(url1), makeRequest(url2), makeRequest(url3)];
Promise.all(requests).then(function (data) {
// everything is done
console.log(data);
});
I didn't test the code, but I hope you get the idea.
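For the crawling question specifically, the same idea extends to one batch per level. A sketch, where extractUrls is a hypothetical stand-in for whatever body parsing you do:

function crawlLevel(urls) {
    return Promise.all(urls.map(makeRequest)).then(function (bodies) {
        var nextLevel = [];
        bodies.forEach(function (body) {
            nextLevel = nextLevel.concat(extractUrls(body)); // hypothetical parser
        });
        // this level is fully done; recurse into the next one, if any
        if (nextLevel.length > 0) return crawlLevel(nextLevel);
    });
}

crawlLevel(['xyz.com']).then(function () {
    console.log('crawl finished');
});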
To answer your question specifically, the following flow of logic should work for you; I have added comments to help it make sense to you:
var urlArray = []; // URL in this level
var finished = false;

urlArray.push("xyz.com"); // seed the first level
start();

function start() {
    while (urlArray.length) {
        crawl(urlArray.pop());
    }
}

function crawl(url) {
    request(url, function (error, response, body) {
        if (finished) {
            return;
        }
        // 1. at this point, a given batch of crawls have all started
        //    AND urlArray is empty.
        // 2. do some work on body (including getting n number of new URLs from
        //    the body) and set finished = true if we find what we are looking for.
        // 3. for each new url: urlArray.push(newURL);
        // 4. start crawling on the new batch of URLs
        start();
    });
}
All the request callbacks will be executed after start() completes; this guarantees that urlArray will be empty then.
If processing of one crawl response indicates (by setting finished = true;) that what you're looking for has been found, all other processing of responses will terminate as soon as it begins.
Otherwise, the response is processed and a new batch of URLs is set up for crawling. You call start() to begin crawling each.
It would help you also (as suggested in the other answer) if you acquainted yourself with the concept of Promises.
I'm trying to write a small XHR abstraction as well as learn how to create chainable methods. I am nearly there (I think), but am at a loss as to what to do next; I think my setup is wrong.
What I want to do:
$http.get('file.txt')
    .success(function () {
        console.log('Success');
    })
    .error(function () {
        console.log('Error');
    });
What I've got:
window.$http = {};

$http.get = function (url, cb, data) {
    var xhr = {
        success: function (callback) {
            callback();
            return this;
        },
        error: function (callback) {
            callback();
            return this;
        }
    };
    // just a test to call the success message
    if (window) {
        xhr.success.call(xhr);
    }
    return xhr;
};
I'm having trouble 'wiring' up the success/error messages, can anybody help point me in the right direction? Thanks in advance.
jsFiddle
Your chaining is OK, but you have an error at this line:
if (window) {
    xhr.success.call(xhr); // Uncaught TypeError: undefined is not a function
}
So JavaScript breaks and doesn't return xhr. Delete those lines and it will work.
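For reference, the get function then reduces to this (the same code from the question, minus the test call):

$http.get = function (url, cb, data) {
    var xhr = {
        success: function (callback) {
            callback();
            return this;
        },
        error: function (callback) {
            callback();
            return this;
        }
    };
    return xhr; // nothing throws before this, so the chain works
};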
success and error are simply functions that store the passed functions into an internal storage. Once the XHR responds, your code should execute all callbacks accordingly, depending on the response status.
Now what you need is an object instance per request that stores its own set of success and error callbacks. Also, success and error methods should return the same instance to allow chaining.
This should set you to the right track:
(function (window) {
    window.$http = {};

    // An object constructor for your XHR object
    function XHRObject(url, data) {
        // store the request data
        this.url = url;
        this.data = data;
        // The callback storage
        this.callbacks = {};
        this.init();
    }

    // Methods
    XHRObject.prototype = {
        init : function () {
            // Actual call here
            // Depending on the result, execute callbacks
            var callbacksToExecute;
            if (readyState === 4 && response.status === 200) {
                callbacksToExecute = this.callbacks.success || [];
            } else {
                callbacksToExecute = this.callbacks.error || [];
            }
            callbacksToExecute.forEach(function (callback) {
                callback.call(null);
            });
        },
        success : function (cb) {
            // Create a success callback array and store the callback
            if (!this.callbacks.hasOwnProperty('success')) this.callbacks.success = [];
            this.callbacks.success.push(cb);
            // You also need a flag to tell future callbacks to execute immediately
            // if the current object has already responded
            return this;
        },
        ...
    }

    // A call to get basically returns an object
    $http.get = function (url, data) {
        return new XHRObject(url, data);
    };
})(this);
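Usage would then look like the chain from the question (a sketch; the init method above still needs a real XMLHttpRequest behind it):

$http.get('file.txt')
    .success(function () { console.log('Success'); })
    .error(function () { console.log('Error'); });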
I hope you can make something out of this:
window.$http = {};

$http.get = function (url, cb, data) {
    var xhr = function () {
        return {
            success: function (callback) {
                callback();
                return this;
            },
            error: function (callback) {
                callback();
                return this;
            }
        };
    };
    return new xhr();
};
$http.get('url', 'cb', 'data')
    .success(function () {
        console.log('Success');
    })
    .error(function () {
        console.log('Error');
    });
Edit: I just realized this is basically the same code you wrote, except I'm missing the if(). It seems that test was causing the code to break.