Using imported asynchronous method with promises in main entry point - javascript

I'm creating a Node.js module with an asynchronous method - a simple HTTP GET request. Here is the code:
//mymodule.js
var https = require('https');
function getSomething(url_str)
{
var callback_fn = function(response){
var body = '';
response.on('data', function (data) {
body += data;
});
response.on('end', function () {
//console.log(body);
return body;
});
};
return https.request(url_str, callback_fn).end();
}
var module_obj = {
getSome: getSomething
};
module.exports = module_obj;
This module is called by my app.js - a web server - like so:
//app.js
var myModule = require('./mymodule');
var http = require('http');
var qs = require('querystring');
var server_fn = function(request, response){
response.setHeader('Access-Control-Allow-Origin', '*');
response.setHeader('Access-Control-Request-Method', '*');
response.setHeader('Access-Control-Allow-Methods', 'OPTIONS, GET');
response.setHeader('Access-Control-Allow-Headers', '*');
if ( request.method === 'OPTIONS' ) {
response.writeHead(200);
response.end();
return;
}
if (request.method == 'POST') {
var body = '';
request.on('data', function (data) {
body += data;
// Too much POST data, kill the connection!
// 1e6 === 1 * Math.pow(10, 6) === 1 * 1000000 ~~~ 1MB
if (body.length > 1e6)
request.connection.destroy();
});
request.on('end', function () {
var post = qs.parse(body),
post_url = post.url,
post_method = post.method;
var promise_flow = new Promise(function(resolve, reject){
if(post_method === 'get_me_something')
{
response_str = myModule.getSome(post_url);
resolve(response_str);
}
else
{
resolve('nothing');
}
});
promise_flow
.then(function(response){
response.write(response);
response.end();
return;
}).catch(function(error){
response.write(error);
response.end();
return;
})
});
}
};
var http_server = http.createServer(server_fn);
http_server.listen(2270);
console.log("server listening on 2270");
So basically, I start things up via node app.js, POST the URL to it, and the module should fetch the web page and return its content.
Unfortunately, I'm getting the following error:
UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 1): TypeError: First argument must be a string or Buffer
I believe this is because the response I'm getting from my module's getSomething method is false, as opposed to the content of the requested web page.
I know I can fix this by moving the https.get operation out of mymodule.js and putting it inline in app.js, then calling resolve on end, but I'd like to keep the current module setup.
Is there a workaround to get the asynchronous method in the imported module to work with the existing promise chain as set up?
UPDATE
After further review, I noticed that I wasn't quite running things the right way. I updated the code as follows:
//...
var promise_flow = new Promise(function(resolve, reject){
if(post_method === 'get_me_something')
{
myModule.getSome(post_url)
.then(function(data){
resolve(data);
})
.catch(function(err){
reject(err);
});
}
else
{
resolve('nothing');
}
});
//...
This way, I think it fits with the true spirit of Promises.

Your getSomething function doesn't return a promise. Make it return a promise, and fulfill that promise in response.on('end').
function getSomething(url_str) {
    return new Promise(function(resolve, reject) {
        var callback_fn = function(response) {
            var body = '';
            response.on('data', function (data) {
                body += data;
            });
            response.on('end', function () {
                //console.log(body);
                resolve(body);
            });
        };
        https.request(url_str, callback_fn)
            .on('error', reject) // settle the promise if the request itself fails
            .end();
    });
}
Then in your main file, call it like this: myModule.getSome(post_url).then(resolve); (getSome is the name the function is exported under).
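In fact, once getSome returns a promise, the extra new Promise wrapper in app.js becomes unnecessary; the end handler can chain on it directly. A minimal sketch of that handler, reusing the names from the question (the fetched page body gets its own name here so it no longer shadows the server's response object):

request.on('end', function () {
    var post = qs.parse(body);
    var flow = post.method === 'get_me_something'
        ? myModule.getSome(post.url)
        : Promise.resolve('nothing');
    flow.then(function (page_body) {
        response.end(page_body);
    }).catch(function (error) {
        response.end(String(error));
    });
});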

Related

Issues with Async/Await during SOAP API call Javascript

Hopefully someone can point me in the right direction. I read up on waiting for functions to complete before continuing and resolved to use async/await, but I am just stuck now.
I tried to get the async/await process to work, tried injecting the await in various locations and adjusting the functions to be async, but I cannot get the PSA_Resultbody to return to the original request. Any pointers would be appreciated.
Thank you,
CE
PSA_Resultbody = ProcessPSAAPI(xmlpackage, PSA_Action);
console.log("3 - Returned data:" + PSA_Resultbody);
calls the below:
async function ProcessPSAAPI(xmlpackage, PSA_Action) { //psa action is part of the options
var options = {...};
var req = https.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function (chunk) {
var body = Buffer.concat(chunks);
console.log('0 - Start '+body.toString());
if(res.statusCode != 200) {
PSA_Resultcode = "Error: " +res.statusCode +" - "+ res.statusMessage;
} else {
PSA_Resultcode = "Success: " +res.statusCode +" - "+ res.statusMessage;
PSA_Resultbody = ParseResults(body.toString()); //parse the results for later use --SCRIPT NEEDS TO WAIT FOR RESULTBODY TO COMPLETE
console.log("1 -PSA_Resultbody as part of RES = "+PSA_Resultbody);
}
});
res.on("error", function (error) {
console.error(error);
PSA_Resultcode = res.statusCode +" - "+ res.statusMessage;
});
});
console.log('2 -RESULT BODY BEFORE SENDING BACK TO INITIATING FUNCTION: '+PSA_Resultbody);
req.write(xmlpackage);
req.end();
return PSA_Resultbody;
Based on the above, my console log order is 3, 2, 0, 1 instead of 0, 1, 2, 3.
0 and 1 have the correct data, so the API call does work, but 2 is "undefined" when it should have the same data as 1.
There's no way to await an event emitter, so using async in this case isn't going to be useful. You cannot "return" from inside an event either.
The solution here is to return a new custom promise and to use resolve() inside of the "end" event of your emitter.
It will look something like this:
function ProcessPSAAPI(xmlpackage, PSA_Action) {
return new Promise( (resolve, reject) => {
// other code
res.on("end", function (chunk) {
// other code
resolve(PSA_Resultbody);
});
res.on("error", function (error) {
// other code
reject(error);
});
});
}
Here's a quick tutorial on creating your own promises, which I've written to simplify comprehension of the subject (official docs are somewhat dry and complex imho).
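One detail worth adding to the skeleton above: network-level failures (DNS errors, refused connections) are emitted as 'error' events on the request object returned by https.request, and in that case no response is produced at all, so it is worth rejecting from there too. A small addition to the same sketch, inside the same new Promise wrapper:

var req = https.request(options, function (res) {
    // the res.on("data") / res.on("end") handlers from above go here
});
req.on("error", function (error) {
    reject(error); // a failed connection never reaches the "end" handler
});
req.write(xmlpackage);
req.end();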
I did not change your code; I just put the appropriate promise structure in to get you started. This should really be a lesson in promises: async/await is shorthand for the promise structure. A Promise is one way to wait on code. It can be thought of as a list of callbacks that will be executed when the Promise is resolved.
A simple promise works like this:
const myPromise = new Promise(function(resolve, reject) {
/* Your logic goes in here. It can be anything.
* But the important part to remember is that when you have success, resolve it.
* When you have a failure, reject it.
*/
someCallBackPattern(function(error, data) {
if(error) {
reject(error);
} else {
resolve(data);
}
});
});
// To get the data out you use 'then', and 'catch'. then has two arguments.
myPromise.then(function(data) {
// The first argument is the result from resolve.
}, function(err) {
// The second argument is the result from reject.
}).catch((err) => {
// you can also get to the error from the catch callback
});
This is kind of messy and complex, which is why there is async/await.
async function() {
try {
const result = await myFunctionThatReturnsAPromise();
// result is the resolved data
} catch (err) {
// err is the rejected Error
}
}
function myFunctionThatReturnsAPromise() {
return new Promise((resolve, reject) => {
// your code
})
}
And that's how it works.
async function someFunction () { // You cannot await results unless you are in an async function
PSA_Resultbody = await ProcessPSAAPI(xmlpackage, PSA_Action); // await on your results.
console.log("3 - Returned data:" + PSA_Resultbody);
}
function ProcessPSAAPI(xmlpackage, PSA_Action) { // This does not need to be async unless you are awaiting inside it.
return new Promise((resolve, reject) => { // async await is a shorthand promise structure. Although you do not need to use promises. It really helps to get the structure correct.
var options = {...};
var req = https.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function (chunk) {
var body = Buffer.concat(chunks);
console.log('0 - Start '+body.toString());
if(res.statusCode != 200) {
PSA_Resultcode = "Error: " +res.statusCode +" - "+ res.statusMessage;
reject(new Error(PSA_Resultcode)); // Reject your errors
} else {
PSA_Resultcode = "Success: " +res.statusCode +" - "+ res.statusMessage;
PSA_Resultbody = ParseResults(body.toString()); //parse the results for later use --SCRIPT NEEDS TO WAIT FOR RESULTBODY TO COMPLETE
console.log("1 -PSA_Resultbody as part of RES = "+PSA_Resultbody);
resolve(PSA_Resultbody); // Resolve your result
}
});
res.on("error", function (error) {
console.error(error);
PSA_Resultcode = res.statusCode +" - "+ res.statusMessage;
reject(new Error(PSA_Resultcode)); // Reject your errors
});
});
console.log('2 -RESULT BODY BEFORE SENDING BACK TO INITIATING FUNCTION: '+PSA_Resultbody);
req.write(xmlpackage);
req.end();
})
}

How to run two functions synchronously with NodeJS?

I am pretty new to NodeJS and got lost with the asynchronous mechanism.
I have code that should send an HTTP POST request to a first URL (for example https://example.com/first) and then, once it is answered with status code 200, send another request to the same server to check whether the server is done processing the first request (for example https://example.com/statusCheck).
The server returns a text/plain response containing "true" if it's busy and "false" if it's ready to use.
I wrote it with a while loop that queries the server every 2 seconds, up to a maximum of 10 iterations.
var request = require('request');
var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";
// Sends up to 10 requests to the server
function checkServerStatus(){
var serverReady = false;
var count = 0;
while (!serverReady && count < 10) {
count++;
setTimeout(function(){
request.get(serverCheck, function(err, resp, body){
if (err){
console.log(err);
} else if (body == "false") {
generatorReady = true;
}
})
}, 2000);
}
return generatorReady;
}
// Sends the first request and return True if the response equals to 200
function sendFirstRequest(){
var req = request.post(firstURL, function (err, resp, body) {
if (err) {
console.log(err);
return false;
} else if (resp.statusCode === 200){
return true;
} else {
return false;
}
});
};
module.exports = function (){
// Sends the first request
var firstRequestStatus = sendFirstRequest();
if (firstRequestStatus) {
return checkServerStatus();
}
};
In other words, I want to run sendFirstRequest first, wait for the response, and if the response is true, run checkServerStatus and get its return value. If it's possible to do this with a sleep between each while iteration, that would be great (the setTimeout approach does not work for me either).
Edit: I've heard that I can use function* with yield, or async-await in order to avoid callback hell - how can I implement them in this case?
You should use a Promise to do this. Below is some code using bluebird which will do what you want. The Promise.any method will return the first successful call from the Array out of 10 tries.
const Promise = require('bluebird');
var request = Promise.promisifyAll(require('request'));
var firstURL = "https://example.com/";
var serverCheck = "https://example.com/statusCheck";
request.postAsync(firstURL).then(res => {
if (res.statusCode === 200) return true;
throw new Error('server not ready');
}).then(() =>
Promise.any(new Array(10).fill(request.getAsync(serverCheck)))
).then(res => {
console.log(res);
}).catch(err => console.log(err));
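Note that new Array(10).fill(request.getAsync(serverCheck)) fills the array with the same single request promise ten times, so Promise.any does not actually retry the status check. If you want real retries with the 2-second pause the question asks for, a small polling helper in the same bluebird setup might look like this (a sketch, reusing serverCheck and the promisified request from above):

function waitForServer(attempt) {
    attempt = attempt || 0;
    if (attempt >= 10) return Promise.reject(new Error('server never became ready'));
    return request.getAsync(serverCheck).then(function (res) {
        if (res.body === "false") return true; // "false" means the server is ready
        return Promise.delay(2000).then(function () {
            return waitForServer(attempt + 1); // still busy, check again in 2 seconds
        });
    });
}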
You have to understand that asynchronous operations cannot return a result right after their call. They trigger a handler when they have finished, and you should use that entry point to initiate or continue your logic flow.
request.post(params, function handler(err, resp, body) {
if(err){
failFlow(err);
}else if(resp.statusCode === 200) {
successFlow(resp);
}
});
and you can chain as many such asynchronous calls as you need, but you cannot return a response in this manner.
Also you might be interested in the concept of a Promise.
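For example, the two-step flow from the question can be wrapped so that the module exposes a single promise. A sketch (the helper name sendAndCheck is made up for illustration; firstURL, serverCheck and request are the ones from the question):

function sendAndCheck() {
    return new Promise(function (resolve, reject) {
        request.post(firstURL, function (err, resp) {
            if (err) return reject(err);
            if (resp.statusCode !== 200) return reject(new Error('unexpected status ' + resp.statusCode));
            request.get(serverCheck, function (err, resp, body) {
                if (err) return reject(err);
                resolve(body === "false"); // "false" means the server is ready
            });
        });
    });
}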
var request = require('request');
var firstURL = "https://example.com/first";
var serverCheck = "https://example.com/statusCheck";
var count = 0;
// Sends up to 10 requests to the server
function checkServerStatus() {
if (count++ > 10) return;
request.get(serverCheck, function(err, resp, body) {
if (err) {
console.log(err);
checkServerStatus();
} else if (body == "false") {
// go further
}
});
}
// Sends the first request and return True if the response equals to 200
function sendFirstRequest(cb) {
var req = request.post(firstURL, function(err, resp, body) {
if (err) {
console.log(err);
return false;
} else if (resp.statusCode === 200) {
cb();
} else {
return false;
}
});
};
module.exports = function() {
// Sends the first request
sendFirstRequest(checkServerStatus);
};
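The "// go further" branch above covers the ready case, but the question also wants a 2-second retry while the body is "true" (server busy). The same function with that branch filled in might look like this (a sketch, reusing count, request and serverCheck from above):

function checkServerStatus() {
    if (count++ > 10) return;
    request.get(serverCheck, function(err, resp, body) {
        if (err) {
            console.log(err);
            checkServerStatus();
        } else if (body == "false") {
            // server is ready: go further
        } else {
            // server is still busy ("true"): check again in 2 seconds
            setTimeout(checkServerStatus, 2000);
        }
    });
}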
You can use the async library.
You don't need to do a setInterval or any timer for that matter; just wait for the response.
Specifically, you can use async.waterfall for this, something like:
var async = require('async')
var request = require('request')
async.waterfall([
function(cb) {
// send the first request
request.post("https://example.com/first", function (err, resp) {
// send the response to the next function or break in case there was an error
cb(err, resp)
})
},
function(resp, cb) {
// check for the response
if (resp.statusCode === 200) {
// in case the response code is 200 continue to the next function
return cb()
}
// if its not 200 break with the response code as an error
return cb(resp.statusCode)
},
function(cb) {
// send the verify
request.get("https://example.com/statusCheck", function (err, resp, body) {
// send the body of the response to the next function or break in case of an error
cb(err, body)
})
}
], function (err, result) {
// check if there was an error along the way
if (err) {
console.log("there was an error", err)
} else {
// all is good print the result
console.log("result:", result)
}
})
async function main() {
console.log('First call started');
let response1 = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"})
console.log('First call finished', response1);
console.log('Second call started');
let response2 = await $.ajax({url: "https://api.stackexchange.com/2.2/questions/269754/answers/?order=desc&site=meta.stackoverflow&client_id=3519&callback=?"})
console.log('Second call finished',response2);
}
main();
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
In newer versions of Node.js you can use async/await like the example above.
Notice that $.ajax is not a Node function; it is used here just for demonstration.
You can use await on any function that returns a promise.
For the next example you need to install the request package and use Node >= 8 in order to use promisify.
const {promisify} = require('util');
const request = require('request')
async function main() {
let get = promisify(request);
let response1 = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
console.log('first random: ',response1.body)
let response2 = await get('https://www.random.org/integers/?num=1&min=1&max=100&col=1&base=10&format=plain&rnd=new');
console.log('second random: ',response2.body)
}
main();
http://2ality.com/2017/05/util-promisify.html
https://github.com/request/request

how do I get this promise back to the controller?

So I have a series of functions I've defined in the service that upload an image to my Amazon s3 bucket, and I can console.log and alert in the service itself and everything is coming back correct.
However, now I want to return that promise to the controller so I can let the user know the upload is finished. I'm just not sure how I would do that. I've tried putting returns at the filereader.onload but then I get errors saying that what I've given back isn't a promise and such. Here's my code:
angular.module("testApp", [])
.controller("testCtrl", function($scope, amazonService) {
$scope.test = "leeroy jenkins";
$scope.upload = function() {
amazonService.uploadImage($('#file'));
}
})
.service("amazonService", function($http) {
var url = "/api/"
var uploadImageFilestream = function(obj) {
return $http({
method: "PUT",
url: url + "media/images",
data: obj
}).then(function(res) {
if (res.status === 200) {
alert("upload successful!");
}
return res;
});
}
var formatFileName = function(filename, newBase) {
//first, get the file extension
var extension = filename.substring(filename.lastIndexOf("."));
return newBase + extension;
}
this.uploadImage = function(obj) {
var file = obj[0].files[0];
var fileReader = new FileReader();
fileReader.onload = function(loaded) {
uploadImageFilestream({fileName: formatFileName(file.name, "test1"), fileBody: loaded.target.result});
}
fileReader.readAsDataURL(file);
}
})
I know that if I combined the uploadImageFilestream function with the uploadImage function it would work, but I'm not sure how to structure it with the promise in a separate function.
Use $q:
.service("amazonService", function($http, $q) {
var url = "/api/"
var uploadImageFilestream = function(obj) {
return $http({
method: "PUT",
url: url + "media/images",
data: obj
});
}
var formatFileName = function(filename, newBase) {
//first, get the file extension
var extension = filename.substring(filename.lastIndexOf("."));
return newBase + extension;
}
this.uploadImage = function(obj) {
var file = obj[0].files[0];
var fileReader = new FileReader();
var deferer = $q.defer();
fileReader.onload = function(loaded) {
uploadImageFilestream({fileName: formatFileName(file.name, "test1"), fileBody: loaded.target.result})
.then(function(res) {
    if (res.status === 200) {
        deferer.resolve();
        alert("upload successful!");
    } else {
        deferer.reject(res); // settle the promise on a non-200 response as well
    }
    return res;
}, function(err) {
    deferer.reject(err); // and when the request itself fails
});
}
fileReader.readAsDataURL(file);
return deferer.promise;
}
})
You should be using the $q service of AngularJS to create a deferred object and return a promise.
I have modified the following code to demonstrate a way to use promises.
angular.module("testApp", [])
.controller("testCtrl", function($scope, amazonService) {
$scope.test = "leeroy jenkins";
$scope.upload = function() {
var promise = amazonService.uploadImage($('#file')); // call to function returns promise
promise.then(function(){ // when promise is resolved, desired data is passed
alert("success");
}).catch(function(error){ // when promise is rejected, related error object is passed
alert("failure");
});
}
})
.service("amazonService", function($http, $q) { // added $q service to handle promises
var url = "/api/"
var uploadImageFilestream = function(obj) {
return $http({
method: "PUT",
url: url + "media/images",
data: obj
}).then(function(res) {
if (res.status === 200) {
alert("upload successful!");
}
return res;
});
}
var formatFileName = function(filename, newBase) {
//first, get the file extension
var extension = filename.substring(filename.lastIndexOf("."));
return newBase + extension;
}
this.uploadImage = function(obj) {
var file = obj[0].files[0];
var fileReader = new FileReader();
var deferredObject = $q.defer(); // added deferred object which will be used to return promise and resolve or reject is as shown below
fileReader.onload = function(loaded) {
uploadImageFilestream({fileName: formatFileName(file.name, "test1"), fileBody: loaded.target.result}).then(function(response){
deferredObject.resolve(response); // when resolve function of deferred object is called success callback in controller will be called with the data you pass here
}).catch(function(errorObj){
deferredObject.reject(errorObj); // when reject function of deferred object is called error callback is controller will be called with the error object you pass here
});
}
fileReader.readAsDataURL(file);
return deferredObject.promise; // return promise object which will be resolve or reject and accordingly success callback and error callback will be called with then and catch respectively
}
});
Link to AngularJs Reference.
There are other ways to create and return a promise as well; you can see them in the reference.
One other way, as given in the reference, is to use the $q object as a function and pass the callback directly, as shown below:
// for the purpose of this example let's assume that variables `$q` and `okToGreet`
// are available in the current lexical scope (they could have been injected or passed in).
function asyncGreet(name) {
// perform some asynchronous operation, resolve or reject the promise when appropriate.
return $q(function(resolve, reject) {
setTimeout(function() {
if (okToGreet(name)) {
resolve('Hello, ' + name + '!');
} else {
reject('Greeting ' + name + ' is not allowed.');
}
}, 1000);
});
}
var promise = asyncGreet('Robin Hood');
promise.then(function(greeting) {
alert('Success: ' + greeting);
}, function(reason) {
alert('Failed: ' + reason);
});
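Applied to the upload case above, that style would look roughly like this (a sketch only, reusing uploadImageFilestream and formatFileName from the question's service):

this.uploadImage = function(obj) {
    var file = obj[0].files[0];
    return $q(function(resolve, reject) {
        var fileReader = new FileReader();
        fileReader.onload = function(loaded) {
            uploadImageFilestream({fileName: formatFileName(file.name, "test1"), fileBody: loaded.target.result})
                .then(resolve, reject); // settle with the $http result or its error
        };
        fileReader.readAsDataURL(file);
    });
};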

Make a blocking call to a function in Node.js required in this case?

I am starting to learn node.js. I am stuck with a problem here. I am calling a weather service which returns a JSON(url below).
http://api.wunderground.com/api/Your_key/conditions/q/CA/San_Francisco.json
I want to display the results from the api on an HTML page.
I have written the following code(getInfo module) to retrieve and return the JSON.
var fbResponse;
module.exports = function (url) {
var http=require('http');
http.get(url, function(res) {
var body = '';
res.on('data', function(chunk) {
body += chunk;
});
res.on('end', function() {
fbResponse = JSON.parse(body);
console.log("Got response: ", fbResponse.response);
});
}).on('error', function(e) {
console.log("Got error: ", e);
});
return fbResponse;
};
So to use this module I created a test.js file as follows:
var relay = require('./getInfo');
var url = 'http://api.wunderground.com/api/Your_key/conditions/q/CA/San_Francisco.json';
var response;
var x=relay(url);
console.log (x);
Output is as follows:
undefined // console.log(x) from test.js
Got response: { version: '0.1',
termsofService: 'http://www.wunderground.com/weather/api/d/terms.html',
features: { conditions: 1 } }
The console output in the test code runs first with no data in it. The HTTP GET completes later and displays the actual output I need.
How can I modify the test code to make a blocking call such that var x in the test code actually have the JSON output instead of undefined?
Can I achieve the desired result without a blocking call to the getInfo module?
As you know, Node is asynchronous, so the callbacks of http.get and res.on('end', ...) will fire after the relay function has already executed and returned. So normally you can't return the result from it.
You have a couple of choices:
Pass a callback to relay and use that:
var http = require('http');
module.exports = function (url, cb) {
http.get(url, function(res) {
var body = '';
res.on('data', function(chunk) {
body += chunk;
});
res.on('end', function() {
cb(null, JSON.parse(body));
});
}).on('error', cb);
};
Then use it like this:
var relay = require('./getInfo');
relay(url, function (err, x) {
if (err) {
console.error('oohh i got a error: ', err)
}
console.log('oohh i got a response: ', x)
});
Use promises. This is almost the same as passing callbacks and a little less lightweight, but when combining different asynchronous operations you will understand how awesome they are. For just one asynchronous call there might not be any difference. Here I use q. You can also use bluebird, which is more performant but lacks some of the sugar of q. You can read this article to understand why promises are cleaner than callbacks in some cases.
var Q = require('q');
var http = require('http');
module.exports = function (url) {
var deferred = Q.defer();
http.get(url, function(res) {
var body = '';
res.on('data', function(chunk) {
body += chunk;
});
res.on('end', function() {
deferred.resolve(JSON.parse(body));
});
}).on('error', function(e) {
deferred.reject(e);
});
return deferred.promise;
};
var relay = require('./getInfo');
relay(url).then(function responseHandler(x) {
console.log('my awesome response')
}, function errorHandler(err) {
console.error('got an error', err);
});
Use generators. They are part of the ECMAScript 6 specification and only exist in Node v0.11.x and later, but they give you almost exactly what you want.
With that past promise example we can do this:
Q.async(function *() {
var relay = require('./getInfo');
var x = yield relay(url);
console.log('my awesome response', x)
});
This is almost what you want. You can also achieve it using the callback solution with co library:
co(function *() {
var relay = require('./getInfo');
var x = yield relay.bind(null, url);
console.log('my awesome response', x);
});
You can also use node-fibers in the above example, which is a similar tool to generators.
If you want to use bleeding edge Javascript, you can use Async/Await instead of generators with promises.
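With async/await that reads roughly like this (a sketch, assuming the promise-returning version of ./getInfo above and Node 8+):

var relay = require('./getInfo');
var url = 'http://api.wunderground.com/api/Your_key/conditions/q/CA/San_Francisco.json';

async function main() {
    try {
        var x = await relay(url); // waits for the promise returned by getInfo
        console.log('my awesome response', x);
    } catch (err) {
        console.error('got an error', err);
    }
}

main();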
You need to pass in a callback instead:
var http = require('http');
module.exports = function(url, cb) {
http.get(url, function(res) {
var body = '';
res.on('data', function(chunk) {
body += chunk;
});
res.on('end', function() {
var resp, err;
try {
resp = JSON.parse(body);
} catch (ex) {
err = ex;
}
cb(err, resp);
});
}).on('error', function(e) {
console.log("Got error: ", e);
cb(e);
});
};
Then use it like:
var relay = require('./getInfo');
var url = 'http://api.wunderground.com/api/Your_key/conditions/q/CA/San_Francisco.json';
var response;
relay(url, function(err, resp) {
if (err) throw err; // TODO: handle better
console.dir(resp);
});
you can take a callback in your module function to return the result.
module.exports = function (url, onsuccess) {
...
res.on('end', function() {
fbResponse = JSON.parse(body);
if(onsuccess){
onsuccess(null, fbResponse);
}
Then in your caller code:
relay(url, function(err, result){
console.log(result);
});
Another option is to use the httpsync module, which provides synchronous APIs for the same functionality that the 'http' module provides. But in Node.js programming you should always avoid synchronous calls.

Web scraping and promise with node

I am using cheerio and Node to do web scraping. I thought it would be a good idea to use promises to make it easier to deal with the asynchronous code. So I tried to chain the promises but could not make it work. I am pasting my code here so that somebody can help me figure out what exactly I have been doing wrong.
http.createServer(function(req, res){
res.writeHead(200, {"Content-Type": "application/json"})
loadPage().then(parseLoadedData);
}).listen(3000, function(error){
console.log(error);
});
function fetchMainPage(){
var deferred = q.defer();
http.get('http://www.google.com?q=node', function(response){
var responseString = '';
response.on('data', function(data){
responseString += data.toString('utf8');
});
response.on('error', function(error){
deferred.reject(error);
});
response.on('end', function(){
deferred.resolve(responseString);
});
});
return deferred.promise;
}
function parseMainContent(responseString){
var deferred = q.defer();
var $ = cheerio.load(responseString);
var rightCol = $('#right-col');
var children = rightCol.children();
var keys = Object.keys(children);
var results = [];
keys.forEach(function(key){
var div = children[key];
div.children.forEach(function(aChild){
if(aChild.name == 'h3' && aChild.children[0].data == "Some title"){
lis = aChild.next.children;
var results = lis.map(function(li){
var anchor = $(li).find('a');
if(anchor != undefined && anchor.attr('href') != undefined)
return [anchor.text(), anchor.attr('href')]
});
results = results.filter(function(result){
return result != undefined;
});
deferred.resolve(results);
}
});
});
return deferred.promise;
}
var loadPage = function(){
return fetchMainPage().then(function(data){
return data;
})
},
parseLoadedData = function(data){
return parseMainContent(data).then(function(results){
console.log(results);
});
}
The problem here is that I can't get parseLoadedData to be called. The response is fetched from the server, but the second step of the chain does not seem to be working. I would like to thank you all in advance for helping me out.
Note: The url I am using is different and so the parsing function deal with that specific url only.
You don't really need the loadPage function since fetchMainPage already returns a promise, so this should work:
var loadPage = function(){
return fetchMainPage();
}
To chain promises, every then callback should return another promise, whereas you were returning plain data.
Eg.:
var loadPage = function(){
var deferred = q.defer();
fetchMainPage().then(function(data){
return someOtherPromise(data);
}).then(function(someOtherData) {
return myThirdPromise(someOtherData);
}).then(function(myThirdData) {
return deferred.resolve(myThirdData);
});
return deferred.promise; // without this the caller would get undefined instead of a promise
}
// IS THE SAME AS
var loadPage2 = function(){
return fetchMainPage().then(function(data){
return someOtherPromise(data);
}).then(function(someOtherData) {
return myThirdPromise(someOtherData);
});
}
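To tie this back to the server handler in the question, the parsed results still need to be written to the HTTP response; chaining parseMainContent directly gets them there (a sketch based on the question's handler):

http.createServer(function(req, res){
    res.writeHead(200, {"Content-Type": "application/json"});
    loadPage()
        .then(parseMainContent)
        .then(function(results){
            res.end(JSON.stringify(results));
        })
        .catch(function(error){
            res.end(JSON.stringify({ error: String(error) }));
        });
}).listen(3000);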
