Best way to make an HTTP request and then potentially another one - JavaScript

The title isn't so clear, but to elaborate: I need to make an HTTP request to an API endpoint, and so far I'm using a function that looks something like this:
function getPostsFromAPI(argOne, argTwo) {
    const apiUrl = `https://www.exampleapi.com/v1/userposts`
    apiGet(`${apiUrl}?name=${argOne}&something=${argTwo}`).then(userPosts => {
        // do stuff with userPosts
        return userPosts
    }).catch(handleError)
}
However, the API response can include the following:
{
    //...
    "has_more": true,
    "next_offset": 10
}
In which case, I'd need to send the API call a second time, this time with the &offset=10 argument.
The promise would need to continue making API calls until has_more: true is no longer present. My initial thought was to just re-run getPostsFromAPI() from inside itself based on an if statement, but I can't figure out how to make that work cleanly inside a promise. Ultimately, the promise should keep making requests until the API says it has run out of data to give (I'll implement my own limit).
What would be the best way to achieve this?

The algorithm to achieve this is much more obvious if you use async/await. You can just create an empty array, and gradually append to it in a loop until the server indicates there are no more results.
async function getPostsFromAPI(argOne, argTwo) {
    const apiUrl = `https://www.exampleapi.com/v1/userposts`
    let results = [];
    let offset = 0;
    while (true) {
        let response = await apiGet(`${apiUrl}?name=${argOne}&something=${argTwo}&offset=${offset}`);
        results = results.concat(response.records);
        if (response.has_more) {
            offset = response.next_offset;
        } else {
            return results;
        }
    }
}
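Since the rewritten function is async, callers get back a promise for the complete result set; for example (the argument values are placeholders, and handleError is the question's own handler):
getPostsFromAPI('someName', 'someValue')
    .then(posts => console.log(`Fetched ${posts.length} posts`))
    .catch(handleError)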
If you can't use async/await and have to stick to promises, you can use recursion to have a method invoke itself each time a response indicates there are more records:
function getPostsFromAPI(argOne, argTwo) {
    return new Promise((resolve, reject) => {
        const apiUrl = `https://www.exampleapi.com/v1/userposts`;
        let results = [];
        const getNextPage = (offset = 0) => {
            apiGet(`${apiUrl}?name=${argOne}&something=${argTwo}&offset=${offset}`).then((response) => {
                results = results.concat(response.records);
                if (response.has_more) {
                    getNextPage(response.next_offset);
                } else {
                    resolve(results);
                }
            }).catch(reject);
        };
        getNextPage(0);
    });
}
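As an aside, the explicit new Promise wrapper isn't strictly needed: since apiGet already returns a promise, you can return the chain itself and recurse by returning the inner call. A sketch under the same assumptions (apiGet, records, has_more, next_offset):
function getPostsFromAPI(argOne, argTwo) {
    const apiUrl = `https://www.exampleapi.com/v1/userposts`;
    const getPage = (offset, results) =>
        apiGet(`${apiUrl}?name=${argOne}&something=${argTwo}&offset=${offset}`)
            .then(response => {
                const all = results.concat(response.records);
                // Returning the recursive call chains the promises together.
                return response.has_more ? getPage(response.next_offset, all) : all;
            });
    return getPage(0, []);
}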
Note that as a matter of general good practice you should never construct a query string through concatenation or template strings. You should use URLSearchParams.toString() to ensure your query string is properly encoded. You can do so indirectly by creating a new URL:
const url = new URL(`https://www.exampleapi.com/v1/userposts`);
url.searchParams.append("name", argOne);
url.searchParams.append("something", argTwo);
url.searchParams.append("offset", offset);
url.toString();
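Applied to the paginated loop above, that might look like this (a sketch; apiGet remains the assumed request helper):
async function getPostsFromAPI(argOne, argTwo) {
    const url = new URL("https://www.exampleapi.com/v1/userposts");
    url.searchParams.set("name", argOne);
    url.searchParams.set("something", argTwo);
    let results = [];
    let offset = 0;
    while (true) {
        // set() overwrites the previous offset on each iteration
        url.searchParams.set("offset", offset);
        const response = await apiGet(url.toString());
        results = results.concat(response.records);
        if (!response.has_more) return results;
        offset = response.next_offset;
    }
}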

This is a great use case for an async generator.
It would look something like the following:
async function* getPostsFromAPI(arg1, arg2) {
    const apiUrl = `https://www.exampleapi.com/v1/userposts`
    let response = { next_offset: 0 };
    do {
        response = await apiGet(`${apiUrl}?name=${arg1}&something=${arg2}&offset=${response.next_offset}`)
        // yield* delegates to the array's iterator; note that a plain
        // forEach callback can't contain yield, since yield is only
        // valid directly inside the generator function itself.
        yield* response.items
    } while (response.has_more)
}
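Consuming the generator is then a for await...of loop. A sketch, keeping the assumed apiGet helper and placeholder arguments:
async function main() {
    for await (const post of getPostsFromAPI('someName', 'someValue')) {
        console.log(post) // handle each post as it arrives, page by page
    }
}

main()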

Related

Am I using Promise.all correctly?

I am using it in many places in my code and it works, but in one place it didn't give any error and also didn't give me the desired result. When I showed my code to the support forum, they suggested that "You are using the JS object/class “Promise” incorrectly."
Can anyone guide me on what's wrong with my code?
Here is my code sample:
let charity = {};
await Promise.all(
    charity = charityData.map(function( data ) {
        let address = data.zipCode
        let url = "https://maps.googleapis.com/maps/api/geocode/json?&address="+`'${address}'`+"&key=***Google geocoding Key***"; //client's Key
        let urlResponse = Backendless.Request.get(url)
        // let latitude = urlResponse.results[0].geometry.location.lat;
        // let longitude = urlResponse.results[0].geometry.location.lng;
        //let updateCharitiesData = {'objectId': data.objectId, 'latitude':latitude, 'longitude':longitude};
        return urlResponse;
    })
);
return charity;
Almost. Assuming Backendless.Request.[method] returns a promise, it would be more correct to do something along the lines of:
async function getCharityData() {
    const charity = await Promise.all(charityData.map(async function (data) {
        const address = data.zipCode;
        const url = `https://maps.googleapis.com/maps/api/geocode/json?&address=${address}&key=***Google geocoding Key***`; //client's Key
        const urlResponse = await Backendless.Request.get(url);
        return urlResponse;
    }));
    return charity;
}
Promise.all requires an array (or other iterable) of promises as its argument. Passing the result of charityData.map(...) here ensures Promise.all receives that array, and awaiting it means charity is assigned an array of resolved values rather than pending promises.
I would do it like this:
function getCharityData() {
    // `charity` is an array of Promises that will each resolve to
    // a response.
    const charity = charityData.map((data) => {
        let address = data.zipCode;
        let url = `https://maps.googleapis.com/maps/api/geocode/json?&address=${address}&key=***Google geocoding Key***`;
        let urlResponse = Backendless.Request.get(url);
        return urlResponse;
    });
    return Promise.all(charity);
}
// (inside an async function)
try {
    const charityData = await getCharityData();
} catch (e) {
    console.error(e);
}
This way, charityData will be an array of fetched responses.
In your code, the result of Promise.all() is never assigned to charity before it's returned, and that is the value you want.
If you have access to Async/Await I'd simply do the following:
async function getCharityData(charityData) {
    let results = [];
    for (let i = 0; i < charityData.length; i++) {
        let url = `https://maps.googleapis.com/maps/api/geocode/json?&address=${charityData[i].zipCode}&key=***Google geocoding Key***`;
        try {
            let result = await Backendless.Request.get(url);
            results.push(result);
        } catch (err) {
            console.log("Oh dear!");
        }
    }
    return results;
}
For your use case, there's no need for any promise libraries when you have async/await and a good old-fashioned for loop. I'd personally prefer to make this sort of call sequentially rather than in parallel as Promise.all does when querying external APIs; it also means we don't fail fast the way Promise.all does.
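If you do want the parallelism of Promise.all but without its fail-fast behaviour, Promise.allSettled (standard since ES2020) collects every outcome. A sketch under the same assumptions as above (buildUrl is a hypothetical helper standing in for the geocoding URL construction):
async function getCharityData(charityData) {
    const settled = await Promise.allSettled(
        charityData.map(data => Backendless.Request.get(buildUrl(data.zipCode)))
    );
    // Keep the successful responses and log the failures,
    // instead of rejecting on the first error as Promise.all would.
    settled.filter(r => r.status === "rejected")
        .forEach(r => console.log("Oh dear!", r.reason));
    return settled.filter(r => r.status === "fulfilled")
        .map(r => r.value);
}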

Recursion with an API, using Vanilla JS

I'm playing with the Rick and Morty API and I want to get all of the universe's characters into an array so I don't have to make more API calls to work the rest of my code.
The endpoint https://rickandmortyapi.com/api/character/ returns the results in pages, so I have to use recursion to get all the data in one API call.
I can get it to spit out results into HTML but I can't seem to get a complete array of JSON objects.
I'm using some ideas from
Axios recursion for paginating an api with a cursor
I translated the concept for my problem, and I have it posted on my Codepen
This is the code:
async function populatePeople(info, universePeople){ // Retrieve the data from the API
    let allPeople = []
    let check = ''
    try {
        return await axios.get(info)
            .then((res)=>{
                // here the current page results is in res.data.results
                for (let i=0; i < res.data.results.length; i++){
                    item.textContent = JSON.stringify(res.data.results[i])
                    allPeople.push(res.data.results[i])
                }
                if (res.data.info.next){
                    check = res.data.info.next
                    return allPeople.push(populatePeople(res.data.info.next, allPeople))
                }
            })
    } catch (error) {
        console.log(`Error: ${error}`)
    } finally {
        return allPeople
    }
}

populatePeople(allCharacters)
    .then(data => console.log(`Final data length: ${data.length}`))
Some sharp eyes and brains would be helpful.
It's probably something really simple and I'm just missing it.
The following line has problems:
return allPeople.push(populatePeople(res.data.info.next, allPeople))
Here you push a promise object into allPeople, and as .push() returns a number, you are returning a number, not allPeople.
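A two-line illustration of why that return value is wrong:
const allPeople = [];
const returned = allPeople.push(Promise.resolve([])); // .push() returns the new length
console.log(returned); // 1 -- a number, not the allPeople array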
Using a for loop to push individual items from one array to another is really a verbose way of copying an array. The loop is only needed for the HTML part.
Also, you are mixing .then() with await, which is making things complex. Just use await only. When using await, there is no need for recursion any more. Just replace the if with a loop:
while (info) {
    // ...
    info = res.data.info.next;
}
You never assign anything to universePeople. You can drop this parameter.
Instead of the plain for loop, you can use the for...of syntax.
As from res you only use the data property, use a variable for that property only.
So taking all that together, you get this:
async function populatePeople(info) {
    let allPeople = [];
    try {
        while (info) {
            let {data} = await axios.get(info);
            // `denizens` and `section` are existing DOM elements from the
            // question's page; each result is rendered as a list item.
            for (let content of data.results) {
                const item = document.createElement('li');
                item.textContent = JSON.stringify(content);
                denizens.append(item);
            }
            allPeople.push(...data.results);
            info = data.info.next;
        }
    } catch (error) {
        console.log(`Error: ${error}`)
    } finally {
        section.append(denizens);
        return allPeople;
    }
}
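It is invoked the same way as in the question:
populatePeople('https://rickandmortyapi.com/api/character/')
    .then(data => console.log(`Final data length: ${data.length}`))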
Here is a working example with a recursive function:
async function getAllCharectersRecursively(URL, results) {
    try {
        const { data } = await axios.get(URL);
        // concat the current page's results
        results = results.concat(data.results)
        if (data.info.next) {
            // if there is a next page, call recursively
            return await getAllCharectersRecursively(data.info.next, results)
        } else {
            // at the last page there is no next page, so return the collected results
            return results
        }
    } catch (e) {
        console.log(e)
    }
}

async function main() {
    let results = await getAllCharectersRecursively("https://rickandmortyapi.com/api/character/", [])
    console.log(results.length)
}

main()
I hesitate to offer another answer because Trincot's analysis and answer is spot-on.
But I think a recursive answer here can be quite elegant. And as the question was tagged with "recursion", it seems worth presenting.
const populatePeople = async (url) => {
    const {info: {next}, results} = await axios .get (url)
    return [...results, ...(next ? await populatePeople (next) : [])]
}
populatePeople ('https://rickandmortyapi.com/api/character/')
    // or wrap in an `async` main, or wait for global async...
    .then (people => console .log (people .map (p => p .name)))
    .catch (console .warn)
<script>/* dummy */ const axios = {get: (url) => fetch (url) .then (r => r .json ())} </script>
This is only concerned with fetching the data. Adding it to your DOM should be a separate step, and it shouldn't be difficult.
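For instance, a minimal sketch of that separate DOM step, assuming a list element with id "denizens" as in the question's Codepen:
populatePeople ('https://rickandmortyapi.com/api/character/')
    .then (people => {
        const list = document .getElementById ('denizens')
        for (const person of people) {
            const item = document .createElement ('li')
            item .textContent = person .name
            list .append (item)
        }
    })
    .catch (console .warn)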
Update: Explanation
A comment indicated that this is hard to parse. There are two things that I imagine might be tricky here:
First is the object destructuring in {info: {next}, results} = <...>. This is just a nice way to avoid using intermediate variables to calculate the ones we actually want to use.
The second is the spread syntax in return [...results, ...<more>]. This is a simpler way to build an array than using .concat or .push. (There's a similar feature for objects.)
Here's another version with some intermediate variables and an array concatenation instead. It does the same thing:
const populatePeople = async (url) => {
    const response = await axios .get (url)
    const next = response .info && response .info .next
    const results = response .results || []
    const subsequents = next ? await populatePeople (next) : []
    return results .concat (subsequents)
}
I prefer the original version. But perhaps you would find this one more clear.

Run HTTP requests in chunks

I want to run a hundred HTTP requests in configurable chunks, and set a configurable timeout between chunk requests. The requests are based on the data provided in a .csv file.
It doesn't work because I am getting a TypeError, but when I remove the () after f, it doesn't work either.
I would be very grateful for a little help. Probably the biggest problem is that I don't really understand exactly how promises work, but I've tried multiple solutions and wasn't able to achieve what I want.
The timeout feature will probably give me even more headache, so I would appreciate any tips for that too.
Can you please help me understand why it doesn't work?
Here is the snippet:
const rp = require('request-promise');
const fs = require('fs');
const { chunk } = require('lodash');

const BATCH_SIZE = 2;
const QUERY_PARAMS = ['clientId', 'time', 'changeTime', 'newValue'];

async function update(id, time, query) {
    const options = {
        method: 'POST',
        uri: `https://requesturl/${id}?query=${query}`,
        body: {
            "prop": {
                "time": time
            }
        },
        headers: {
            "Content-Type": "application/json"
        },
        json: true
    }
    return async () => { return await rp(options) };
}

async function batchRequestRunner(data) {
    const promises = [];
    for (row of data) {
        row = row.split(',');
        promises.push(update(row[0], row[1], QUERY_PARAMS.join(',')));
    }
    const batches = chunk(promises, BATCH_SIZE);
    for (let batch of batches) {
        try {
            Promise.all(
                batch.map(async f => { return await f(); })
            ).then((resp) => console.log(resp));
        } catch (e) {
            console.log(e);
        }
    }
}

async function main() {
    const input = fs.readFileSync('./input.test.csv').toString().split("\n");
    const requestData = input.slice(1);
    await batchRequestRunner(requestData);
}

main();
Clarification for the first comment:
I have a csv file which looks like below:
clientId,startTime
123,13:40:00
321,13:50:00
the file size is ~100k rows
the file contains information on how to update the time for a particular clientId in the database. I don't have access to the database, but I have access to an API which allows updating entries in the database.
I cannot make 100k calls at once because: my network is limited (I work remotely because of coronavirus), it consumes a lot of memory, and the API can also be limited and can crash if I make all the requests at once.
What I want to achieve:
Load csv into memory, convert it to an Array
Handle API requests in chunks: take the first two rows from the array, make an API call based on them, wait 1000 ms, take the next two rows, and continue processing until the end of the array (csv file)
Well, this is a somewhat classic case: you want to process an array of values with some asynchronous operation and, to avoid consuming too many resources or overwhelming the target server, you want no more than N requests in flight at the same time. This is a common problem for which there are pre-built solutions. My go-to solution is a small piece of code called mapConcurrent(). It's analogous to array.map(), but it assumes a promise-returning asynchronous callback, and you pass it the max number of items that should ever be in flight at the same time. It then returns a promise that resolves to an array of results.
Here's mapConcurrent():
// takes an array of items and a function that returns a promise
// returns a promise that resolves to an array of results
function mapConcurrent(items, maxConcurrent, fn) {
    let index = 0;
    let inFlightCntr = 0;
    let doneCntr = 0;
    let results = new Array(items.length);
    let stop = false;

    return new Promise(function(resolve, reject) {
        function runNext() {
            let i = index;
            ++inFlightCntr;
            fn(items[index], index++).then(function(val) {
                ++doneCntr;
                --inFlightCntr;
                results[i] = val;
                run();
            }, function(err) {
                // set flag so we don't launch any more requests
                stop = true;
                reject(err);
            });
        }

        function run() {
            // launch as many as we're allowed to
            while (!stop && inFlightCntr < maxConcurrent && index < items.length) {
                runNext();
            }
            // if all are done, then resolve parent promise with results
            if (doneCntr === items.length) {
                resolve(results);
            }
        }

        run();
    });
}
Your code can then be structured to use it like this:
function update(id, time, query) {
    const options = {
        method: 'POST',
        uri: `https://requesturl/${id}?query=${query}`,
        body: {
            "prop": {
                "time": time
            }
        },
        headers: {
            "Content-Type": "application/json"
        },
        json: true
    }
    return rp(options);
}

function processRow(row) {
    let rowData = row.split(",");
    return update(rowData[0], rowData[1], rowData[2]);
}

function main() {
    const input = fs.readFileSync('./input.test.csv').toString().split("\n");
    const requestData = input.slice(1);
    // process this entire array with up to 5 requests "in-flight" at the same time
    mapConcurrent(requestData, 5, processRow).then(results => {
        console.log(results);
    }).catch(err => {
        console.log(err);
    });
}
You can obviously adjust the number of concurrent requests to whatever number you want. I set it to 5 here in this example.
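The question also asked about a configurable pause between chunks. If you do want fixed-size batches with a delay between them rather than a rolling concurrency limit, a minimal sketch could look like this (the delay helper below is plain JavaScript, not from any library; processRow and BATCH_SIZE are from the code above):
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

async function runInBatches(items, batchSize, pauseMs, fn) {
    const results = [];
    for (let i = 0; i < items.length; i += batchSize) {
        const batch = items.slice(i, i + batchSize);
        // Run one batch in parallel, then pause before starting the next.
        results.push(...await Promise.all(batch.map(fn)));
        if (i + batchSize < items.length) await delay(pauseMs);
    }
    return results;
}

// e.g. runInBatches(requestData, BATCH_SIZE, 1000, processRow)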

Using promise in loop results in Promise failure

I'd like to reuse the same code in a loop. This code contains promises. However, when iterating, this code results in an error.
I've tried using for and while loops. There seems to be no issue when I use the for loop for a single iteration.
Here is a minimal version of my code:
var search_url = /* Some initial URL */
var glued = "";

for (var i = 0; i < 2; i++) {
    const prom = request(search_url)
        .then(function success(response /* An array from a XMLHTTPRequest*/) {
            if (/* Some condition */) {
                search_url = /* Gets next URL */
                glued += processQuery(response[0]);
            } else {
                console.log("Done.")
            }
        })
        .catch(function failure(err) {
            console.error(err.message); // TODO: do something w error
        })
}

document.getElementById('api-content').textContent = glued;
I expect the results to be appended to the variable glued, but instead I get an error (failure Promise.catch (async) (anonymous)) after the first iteration of the loop.
Answer:
You can use a custom Symbol.iterator together with for await to execute your promises one at a time. This can be packaged up into a constructor; in the example below it's called Serial (because we're going through the promises one by one, in order):
function Serial(promises = []) {
    return {
        promises,
        resolved: [],
        addPromise: function(fn) {
            promises.push(fn);
        },
        resolve: async function(cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
            try {
                for await (let p of this[Symbol.iterator]()) {}
                return this.resolved.map(cb);
            } catch (e) {
                err(e);
            }
        },
        [Symbol.iterator]: async function*() {
            this.resolved = [];
            for (let promise of this.promises) {
                let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
                this.resolved.push(p);
                yield p;
            }
        }
    }
}
What is the above?
It's a constructor called Serial.
It takes as an argument an array of Functions that return Promises.
The functions are stored in Serial.promises
It has an empty array stored in Serial.resolved - this will store the resolved promise requests.
It has two methods:
addPromise: Takes a Function that returns a Promise and adds it to Serial.promises
resolve: Asynchronously calls the custom Symbol.iterator. This iterator goes through every single promise, waits for it to be completed, and adds it to Serial.resolved. Once this is completed, it maps an optional callback over the populated Serial.resolved array and returns the result. This allows you to simply call resolve and then provide a callback for the array of responses, e.g. .resolve().then(resolved_requests => /* do something with resolved_requests */)
Why does it work?
Although many people don't realize it, Symbol.iterator is much more powerful than standard for loops, for two big reasons.
The first reason, and the one that applies in this situation, is that it allows for asynchronous calls that can affect the state of the applied object.
The second reason is that it can be used to provide two different views of the same object's data. For example, you may have an array whose contents you'd like to read:
let arr = [1,2,3,4];
You can use a for loop or forEach to get the data:
arr.forEach(v => console.log(v));
// 1, 2, 3, 4
But if you adjust the iterator:
arr[Symbol.iterator] = function* () {
    yield* this.map(v => v + 1);
};
You get this:
arr.forEach(v => console.log(v));
// 1, 2, 3, 4
for(let v of arr) console.log(v);
// 2, 3, 4, 5
This is useful for many different reasons, including timestamping requests/mapping references, etc. If you'd like to know more please take a look at the ECMAScript Documentation: For in and For Of Statements
Use:
It can be used by calling the constructor with an Array of functions that return Promises. You can also add Function Promises to the Object by using
new Serial([])
.addPromise(() => fetch(url))
It doesn't run the Function Promises until you use the .resolve method.
This means that you can add promises ad hoc before you do anything with the asynchronous calls. E.g., these two are the same:
With addPromise:
let promises = new Serial([() => fetch(url), () => fetch(url2), () => fetch(url3)]);
promises.addPromise(() => fetch(url4));
promises.resolve().then((responses) => responses)
Without addPromise:
let promises = new Serial([() => fetch(url), () => fetch(url2), () => fetch(url3), () => fetch(url4)])
.resolve().then((responses) => responses)
Data:
Since I can't really replicate your data calls, I opted for JSONPlaceholder (a fake online rest api) to show the promise requests in action.
The data looks like this:
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1",
"https://jsonplaceholder.typicode.com/todos/2",
"https://jsonplaceholder.typicode.com/todos/3"]
//since our constructor takes functions that return promises, I map over the URLS:
.map(url => () => fetch(url));
To get the responses, we can pass the above data to our constructor:
let promises = new Serial(searchURLs)
    .resolve()
    .then((resolved_array) => console.log(resolved_array));
Our resolved_array gives us an array of fetch Response objects. You can see that here:
// (Serial constructor as defined above)
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2", "https://jsonplaceholder.typicode.com/todos/3"].map(url => () => fetch(url));
let promises = new Serial(searchURLs).resolve().then((resolved_array) => console.log(resolved_array));
Getting Results to Screen:
I opted to use a closure function to simply add text to an output HTMLElement.
This is added like this:
HTML:
<output></output>
JS:
let output = ((selector) => (text) => document.querySelector(selector).textContent += text)("output");
Putting it together:
If we use the output snippet along with our Serial object the final functional code looks like this:
let promises = new Serial(searchURLs).resolve()
    .then((resolved) => resolved.map(response =>
        response.json()
            .then(obj => output(obj.title))));
What's happening above is this:
we input all our functions that return promises: new Serial(searchURLs)
we tell it to resolve all the requests: .resolve()
after it resolves all the requests, we map over the array of responses: .then(resolved => resolved.map(...))
we turn each response into an object using the .json() method. This is necessary for JSON, but may not be necessary for you
after this is done, we use .then(obj => ...) to do something with each computed response
we output the title to the screen using output(obj.title)
Result:
(The result snippet simply combines the output helper, the Serial constructor, and the promise chain shown above, run against an <output></output> element.)
Why go this route?
It's reusable, functional, and if you import the Serial Constructor you can keep your code slim and comprehensible. If this is a cornerstone of your code, it'll be easy to maintain and use.
Using it with your code:
Here is how to use this with your code specifically, to fully answer your question and help you understand further.
NOTE: glued will be populated with the requested data, but it's not strictly necessary. I left it in because you may have wanted it stored for a reason outside the scope of your question, and I don't want to make assumptions.
//setup urls:
var search_urls = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2"];
var request = (url) => () => fetch(url);
let my_requests = new Serial(search_urls.map(request));
//setup glued (you don't really need to, but if for some reason you want the info stored...
var glued = "";
//setup helper function to grab title(this is necessary for my specific data)
var addTitle = (req) => req.json().then(obj => (glued += obj.title, document.getElementById('api-content').textContent = glued));
// put it all together:
my_requests.resolve().then(requests => requests.map(addTitle));
Using it with your code - Working Example:
(The working example combines the Serial constructor with the snippet above, run against the <div id="api-content"></div> element.)
Final Note
It's likely that we will be seeing a prototypal change to the Promise object in the future that allows for easy serialization of Promises. Currently (7/15/19) there is a TC39 Proposal that does add a lot of functionality to the Promise object but it hasn't been fully vetted yet, and as with many ideas trapped within the Proposal stage, it's almost impossible to tell when they will be implemented into Browsers, or even if the idea will stagnate and fall off the radar.
Until then, workarounds like this are necessary and useful (the reason I even went through the motions of constructing this Serializer object was for a transpiler I wrote in Node, but it's been very helpful beyond that!), but do keep an eye out for changes, because you never know!
Hope this helps! Happy Coding!
Your best bet is probably going to be building up that glued variable with recursion.
Here's an example using recursion with a callback function:
var glued = "";

requestRecursively(/* Some initial URL string */, function () {
    document.getElementById('api-content').textContent = glued;
});

function requestRecursively(url, cb) {
    request(url).then(function (response) {
        if (/* Some condition */) {
            glued += processQuery(response[0]);
            var next = /* Gets next URL string */;
            if (next) {
                // There's another URL. Make another request.
                requestRecursively(next, cb);
            } else {
                // We're done. Invoke the callback.
                cb();
            }
        } else {
            console.log("Done.");
        }
    }).catch(function (err) {
        console.error(err.message);
    });
}
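The same recursion can also return a promise instead of taking a callback, which chains more naturally with the surrounding code. A sketch using the question's placeholders (still pseudocode in the same spots):
function requestRecursively(url) {
    return request(url).then(function (response) {
        if (/* Some condition */) {
            var piece = processQuery(response[0]);
            var next = /* Gets next URL string */;
            // Returning the recursive call chains the promises together.
            return next ? requestRecursively(next).then(function (rest) {
                return piece + rest;
            }) : piece;
        }
        return "";
    });
}

requestRecursively(/* Some initial URL string */).then(function (glued) {
    document.getElementById('api-content').textContent = glued;
}).catch(function (err) {
    console.error(err.message);
});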

How to use promises to wait for async API calls

I am creating an API where, on a GET, a series of calls to the News API are made, news article titles are extracted into a giant string, and that string is processed into an object to be delivered to a wordcloud on the front-end.
So far, I've been able to use underscore's _.after and request-promise to make my app wait till all API calls have completed before calling processWordBank(), which takes the giant string and cleans it up into an object. However, once processWordBank() is called, I don't understand where the flow of the program is. Ideally, processWordBank() returns obj to cloudObj in the router, so that the obj can be passed to res.json() and spit out as the response. I believe my use of _.after has put me in a weird situation, but it's the only way I've been able to get async calls to finish before proceeding to the next desired action. Any suggestions?
(I've tried to leave out all unnecessary code but let me know if this is insufficient)
// includes...
var sourceString = ""

// router
export default ({ config }) => {
    let news = Router()
    news.get('/', function(req, res){
        var cloudObj = getSources()
        res.json({ cloudObj })
    })
    return news
}

// create list of words (sourceString) by pulling news data from various sources
function getSources() {
    return getNewsApi()
}

// NEWS API
// GET top 10 news article titles from News API (news sources are determined by the values of newsApiSource array)
function getNewsApi() {
    var finished = _.after(newsApiSource.length, processWordBank)
    for(var i = 0; i < newsApiSource.length; i++) {
        let options = {
            uri: 'https://newsapi.org/v1/articles?source=' + newsApiSource[i] + '&sortBy=' + rank + '&apiKey=' + apiKey,
            json: true
        }
        rp(options)
            .then(function (res) {
                let articles = res.articles // grab article objects from the response
                let articleTitles = " " + _.pluck(articles, 'title') // extract title of each news article
                sourceString += " " + articleTitles // add all titles to the word bank
                finished() // this async task has finished
            })
            .catch(function (err) {
                console.log(err)
            })
    }
}

// analyse word bank for patterns/trends
function processWordBank(){
    var sourceArray = refineSource(sourceString)
    sourceArray = combineCommon(sourceArray)
    sourceArray = getWordFreq(sourceArray)
    var obj = sortToObject(sourceArray[0], sourceArray[1])
    console.log(obj)
    return obj
}
A big issue in your asynchronous flow is that you use a shared variable sourceString to handle the results. When you have multiple calls to getNewsApi() your result is not predictable and will not always be the same, because there is no predefined order in which the asynchronous calls are executed. Not only that, but you never reset it, so all subsequent calls will also include the results of the previous calls. Avoid modifying shared variables in asynchronous calls and instead use the results directly.
I've been able to use underscore's _.after and request-promise to make my app wait till all API calls have completed before calling processWordBank()
Although it would be possible to use _.after, this can be done very nicely with promises, and since you're already using promises for your requests, it's just a matter of collecting the results from them. Because you want to wait until all API calls are completed, you can use Promise.all, which returns a promise that resolves with an array of the values of all the promises once all of them are fulfilled. Let's have a look at a very simple example to see how Promise.all works:
// Promise.resolve() creates a promise that is fulfilled with the given value
const p1 = Promise.resolve('a promise')
// A promise that completes after 1 second
const p2 = new Promise(resolve => setTimeout(() => resolve('after 1 second'), 1000))
const p3 = Promise.resolve('hello').then(s => s + ' world')
const promises = [p1, p2, p3]
console.log('Waiting for all promises')
Promise.all(promises).then(results => console.log('All promises finished', results))
console.log('Promise.all does not block execution')
Now we can modify getNewsApi() to use Promise.all. The array of promises that is given to Promise.all are all the API request you're doing in your loop. This will be created with Array.protoype.map. And also instead of creating a string out of the array returned from _.pluck, we can just use the array directly, so you don't need to parse the string back to an array at the end.
function getNewsApi() {
    // Each element is a request promise
    const apiCalls = newsApiSource.map(function (source) {
        let options = {
            uri: 'https://newsapi.org/v1/articles?source=' + source + '&sortBy=' + rank + '&apiKey=' + apiKey,
            json: true
        }
        return rp(options)
            .then(function (res) {
                let articles = res.articles
                let articleTitles = _.pluck(articles, 'title')
                // The promise is fulfilled with the articleTitles
                return articleTitles
            })
            .catch(function (err) {
                console.log(err)
            })
    })
    // Return the promise that is fulfilled with all request values
    return Promise.all(apiCalls)
}
Then we need to use the values in the router. We know that the promise returned from getNewsApi() fulfils with an array of all the requests, which by themselves return an array of articles. That is a 2d array, but presumably you would want a 1d array with all the articles for your processWordBank() function, so we can flatten it first.
export default ({ config }) => {
    let news = Router()
    news.get('/', (req, res) => {
        const cloudObj = getSources()
        cloudObj.then(function (apiResponses) {
            // Flatten the array
            // From: [['source1article1', 'source1article2'], ['source2article1'], ...]
            // To: ['source1article1', 'source1article2', 'source2article1', ...]
            const articles = [].concat.apply([], apiResponses)
            // Pass the articles as parameter
            const processedArticles = processWordBank(articles)
            // Respond with the processed object
            res.json({ processedArticles })
        })
    })
    return news
}
And finally processWordBank() needs to be changed to use an input parameter instead of using the shared variable. refineSource is no longer needed, because you're already passing an array (unless you do some other modifications in it).
function processWordBank(articles) {
    let sourceArray = combineCommon(articles)
    sourceArray = getWordFreq(sourceArray)
    var obj = sortToObject(sourceArray[0], sourceArray[1])
    console.log(obj)
    return obj
}
As a bonus the router and getNewsApi() can be cleaned up with some ES6 features (without the comments from the snippets above):
export default ({ config }) => {
    const news = Router()
    news.get('/', (req, res) => {
        getSources().then(apiResponses => {
            const articles = [].concat(...apiResponses)
            const processedArticles = processWordBank(articles)
            res.json({ processedArticles })
        })
    })
    return news
}

function getNewsApi() {
    const apiCalls = newsApiSource.map(source => {
        const options = {
            uri: `https://newsapi.org/v1/articles?source=${source}&sortBy=${rank}&apiKey=${apiKey}`,
            json: true
        }
        return rp(options)
            .then(res => _.pluck(res.articles, 'title'))
            .catch(err => console.log(err))
    })
    return Promise.all(apiCalls)
}
