Return paginated output recursively with Fetch API - javascript

Summary
I'd like to collate paginated output into an array using JavaScript's Fetch API recursively. Having started out with promises, I thought an async/await function would be more suitable.
Attempt
Here's my approach:
global.fetch = require("node-fetch");

async function fetchRequest(url) {
  try {
    // Fetch request and parse as JSON
    const response = await fetch(url);
    let data = await response.json();
    // Extract the url of the response's "next" relational Link header
    let next_page = /<([^>]+)>; rel="next"/g.exec(response.headers.get("link"))[1];
    // If another page exists, merge it into the array
    // Else return the complete array of paginated output
    if (next_page) {
      data = data.concat(fetchRequest(next_page));
    } else {
      console.log(data);
      return data;
    }
  } catch (err) {
    return console.error(err);
  }
}

// Live demo endpoint to experiment with
fetchRequest("https://jsonplaceholder.cypress.io/posts?_page=9");
For this demo, it should result in 2 requests which yield a single array of 20 objects. Although the data is returned, I can't fathom how to collate it together into an array. Any guidance would be really appreciated. Thanks for your time.
Solution #1
Thanks to @ankit-gupta:
async function fetchRequest(url) {
  try {
    // Fetch request and parse as JSON
    const response = await fetch(url);
    let data = await response.json();
    // Extract the url of the response's "next" relational Link header
    let next_page;
    if (/<([^>]+)>; rel="next"/g.test(response.headers.get("link"))) {
      next_page = /<([^>]+)>; rel="next"/g.exec(response.headers.get("link"))[1];
    }
    // If another page exists, merge its output into the array recursively
    if (next_page) {
      data = data.concat(await fetchRequest(next_page));
    }
    return data;
  } catch (err) {
    return console.error(err);
  }
}

fetchRequest("https://jsonplaceholder.cypress.io/posts?_page=9").then(data =>
  console.log(data)
);
For each page, subsequent calls are made recursively and concatenated into one array. Would it be possible to chain these calls in parallel, using Promise.all, similar to this answer?
On a side note, any ideas why Stack Overflow Snippets fail on the second fetch?

You need to wrap next_page in a condition, otherwise it will lead to a TypeError on the last call (since /<([^>]+)>; rel="next"/g.exec(response.headers.get("link")) will be null).
Before concatenating data, you need the promise to be resolved.
Making some minor changes to your code can result in the correct output:
global.fetch = require("node-fetch");

async function fetchRequest(url) {
  try {
    // Fetch request and parse as JSON
    const response = await fetch(url);
    let data = await response.json();
    // Extract the url of the response's "next" relational Link header
    let next_page;
    if (/<([^>]+)>; rel="next"/g.exec(response.headers.get("link")))
      next_page = /<([^>]+)>; rel="next"/g.exec(response.headers.get("link"))[1];
    // If another page exists, merge it into the array
    // Else return the complete array of paginated output
    if (next_page) {
      let temp_data = await fetchRequest(next_page);
      data = data.concat(temp_data);
    }
    return data;
  } catch (err) {
    return console.error(err);
  }
}

// Live demo endpoint to experiment with
fetchRequest("https://jsonplaceholder.cypress.io/posts?_page=9").then(data => {
  console.log(data);
});
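For comparison, here is a non-recursive sketch of the same idea: a while loop that keeps following the Link header's rel="next" URL and accumulates every page into one array. This is not from the answer above; it assumes the same node-fetch setup and Link-header format as the question.

global.fetch = require("node-fetch");

async function fetchAllPages(url) {
  const results = [];
  let next = url;
  while (next) {
    const response = await fetch(next);
    const data = await response.json();
    // Accumulate this page's items
    results.push(...data);
    // Follow the "next" relation in the Link header, if any; stop when absent
    const match = /<([^>]+)>; rel="next"/.exec(response.headers.get("link"));
    next = match ? match[1] : null;
  }
  return results;
}

// For the demo endpoint this should log 20 (two requests of 10 items each)
fetchAllPages("https://jsonplaceholder.cypress.io/posts?_page=9").then(data =>
  console.log(data.length)
);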

Related

How to retrieve object from JSON in nodejs?

Having this code:
const fs = require('fs')
const file = 'books.json';

class Book {
  constructor(code) {
    this._code = code;
  }
  get code() {
    return this._code;
  }
  set code(value) {
    this._code = value;
  }
}

async function writeBooks() {
  const data = JSON.stringify([new Book('c1'), new Book('c2')]);
  await fs.promises.writeFile(file, data, 'utf8');
}

async function getBook(code) {
  try {
    const data = await fs.promises.readFile(file);
    const array = JSON.parse(data);
    return array.find(b => b.code === code);
  } catch (err) {
    console.log(err)
  }
}

writeBooks();
getBook('c1').then(b => console.log(b));
I am getting undefined (instead of the expected book object).
1. How do I get the object (the problem above)?
2. If an async function always returns a promise, how can I return the object itself to the client, instead of making them call then() on getBook(code)?
3. Do I need to await fs.promises.writeFile(), as I am doing in writeBooks()? As far as I understand async/await, the value of an awaited call is the data or an error. But since writeFile() does not return anything (an error at most, as opposed to readFile()), why would I want to await for no data?
Actually, the root of the problem is not async/await or promises. The problem is trying to write a bare array to a JSON file. If you write your JSON data like the snippet below (as a key-value pair), your problem is solved.
{"1": [new Book('c1').code, new Book('c2').code]} //as a key-value pair
const fs = require('fs')
const file = 'books.json';

class Book {
  constructor(code) {
    this._code = code;
  }
  get code() {
    return this._code;
  }
  set code(value) {
    this._code = value;
  }
}

async function writeBooks() {
  const data = JSON.stringify({"1": [new Book('c1').code, new Book('c2').code]});
  await fs.promises.writeFile(file, data, 'utf8');
}

async function getBook(code) {
  try {
    const data = await fs.promises.readFile(file);
    const dataOnJsonFile = JSON.parse(data);
    return dataOnJsonFile["1"];
  } catch (err) {
    console.log(err)
  }
}

writeBooks();
getBook('c1').then(b => console.log(b));
The problem above is that the Books returned from JSON.parse only have data, not methods, so I cannot read the code via the get code() {} getter, only as a public property such as book._code. That, however, breaks encapsulation (the convention is that _[property] is private and should have appropriate getters/setters). So I made the properties public (and broke encapsulation), because I still don't know how to assign methods to an object created from JSON.
Regarding question 2: no, the result of an async function is always a Promise. You cannot unwrap it inside the async function; the client will always have to unwrap it. (So await fs.promises.writeFile() unwraps it, but it is immediately wrapped back before the async function returns.)
Regarding question 3: as explained above.
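As an aside on the methods issue: one common way to get class methods back after JSON.parse is to copy the plain parsed data onto a fresh instance. A minimal sketch, not part of the original answer, assuming the file still holds the original array of serialized Book objects (as written by the question's writeBooks):

async function getBook(code) {
  const data = await fs.promises.readFile(file, 'utf8');
  // Rehydrate plain objects ({ _code: 'c1' }) into Book instances so the `code` getter works
  const books = JSON.parse(data).map(plain => Object.assign(new Book(), plain));
  return books.find(b => b.code === code);
}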

Handle dynamic number of promises in Node Js

I'm fetching product's data from a bunch of URLs. The result of these promises (along with product details) may contain a key called nextPage, which has a URL as value. I need to fetch data from that URL too. How can I wait for all the original set of promises + dynamic promises attached based on nextPage key ?
Currently I'm trying the following:
const allUrls = ['url1', 'url2', 'url3'];
const promiseArray = [];

const getData = (url) => {
  const p = axios.get(url).then((data) => {
    if (data.nextPage) {
      // Push this into promises so that the data is not lost
      promiseArray.push(Promise.resolve(data));
      // There are more pages to fetch data from
      return getData(data.nextPage);
    }
    // No further pages, so return the result
    return data;
  });
  return p;
}

allUrls.forEach((url) => {
  const p = getData(url);
  promiseArray.push(p);
});

Promise.all(promiseArray).then((data) => {
  // Error: data only has the value of allUrls, not dynamically added data
})
As I have put a comment, when the Promise.all resolves, I only get the data of original URLs put in allUrls, not the dynamically added promises.
The problem is that by the time Promise.all is executed, promiseArray only contains the promises for the original URLs; the ones pushed later for the nextPage data are added after Promise.all has already been handed its array. You'd need to await each URL's data first, and then push the result to the array (note that I'm using async/await, as it allows for cleaner code):
for (const url of allUrls) {
  const p = await getData(url);
  promiseArray.push(p);
}

const result = await Promise.all(promiseArray);
console.log(result);
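An alternative sketch, not from the answer above: let each top-level URL run in parallel, while each chain follows its own nextPage links and accumulates every page it visits. It assumes, as in the question's code, that axios.get resolves to an object with an optional nextPage property.

// Each call resolves to an array of every page reached from `url`
const getAllPages = (url) =>
  axios.get(url).then((data) =>
    data.nextPage
      ? getAllPages(data.nextPage).then((rest) => [data, ...rest])
      : [data]
  );

Promise.all(allUrls.map(getAllPages))
  .then((perUrl) => perUrl.flat()) // flatten into one array of all pages (Array.prototype.flat, Node 11+)
  .then((allData) => console.log(allData.length));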

Embedding multiple videos using oEmbed and Javascript

I'm trying to embed multiple videos into a web page using Vimeo's oEmbed. The idea is to simply enter the URL in the CMS, which generates a div for each item containing the code below.
This JavaScript does what I want but only works for the first item. When I check the console, there's only one response, which contains the JSON metadata for the first item/video.
This is probably not the best method, but it gets the job done; all I need is to make it work for multiple items. Any ideas how I can do that?
Thank you
<div class="vimeo-video" id="[[+ID]]-video"></div>
<div class="vimeo-info" id="[[+ID]]-info"></div>
<script>
const getJSON = async url => {
try {
const response = await fetch(url);
if (!response.ok) // check if response worked (no 404 errors etc...)
throw new Error(response.statusText);
const data = await response.json(); // get JSON from the response
return data; // returns a promise, which resolves to this data value
} catch (error) {
return error;
}
}
console.log("Fetching data...");
getJSON("https://vimeo.com/api/oembed.json?url=[[+myVideoURL]]").then(data => {
document.getElementById("[[+ID]]-video").innerHTML = data.html;
document.getElementById("[[+ID]]-info").innerHTML = '<h2>' + data.title + '</h2>' + data.description;
console.log(data);
}).catch(error => {
console.error(error);
});
</script>
In case somebody with basic JavaScript skills like me runs into something similar: the problem was a rookie mistake, I had to use var instead of const.
The reason is that var variables can be updated and re-declared, while const variables can be neither updated nor re-declared, and each generated <script> block declares getJSON again. So here's the working code:
var getJSON = async (url) => {
  try {
    var response = await fetch(url);
    if (!response.ok)
      // check if response worked (no 404 errors etc...)
      throw new Error(response.statusText);
    var data = await response.json(); // get JSON from the response
    return data; // returns a promise, which resolves to this data value
  } catch (error) {
    return error;
  }
};
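Another way to sidestep the collision, sketched here as an alternative rather than the accepted fix: keep const but give each generated snippet its own scope, for example by wrapping it in an immediately invoked async function, so repeated <script> blocks never re-declare the same global name. The [[+ID]] and [[+myVideoURL]] placeholders are the same CMS tokens as in the question.

<script>
  (async () => {
    // Each generated block runs in its own function scope, so `const` never collides
    const response = await fetch("https://vimeo.com/api/oembed.json?url=[[+myVideoURL]]");
    if (!response.ok) throw new Error(response.statusText);
    const data = await response.json();
    document.getElementById("[[+ID]]-video").innerHTML = data.html;
    document.getElementById("[[+ID]]-info").innerHTML = '<h2>' + data.title + '</h2>' + data.description;
  })().catch(console.error);
</script>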

Recursion with an API, using Vanilla JS

I'm playing with the Rick and Morty API and I want to get all of the universe's characters into an array so I don't have to make more API calls for the rest of my code to work.
The endpoint https://rickandmortyapi.com/api/character/ returns the results in pages, so I have to use recursion to get all the data in one go.
I can get it to spit out results into HTML, but I can't seem to get a complete array of JSON objects.
I'm using some ideas from Axios recursion for paginating an api with a cursor.
I translated the concept for my problem, and I have it posted on my Codepen.
This is the code:
async function populatePeople(info, universePeople){ // Retrieve the data from the API
  let allPeople = []
  let check = ''
  try {
    return await axios.get(info)
      .then((res)=>{
        // here the current page results is in res.data.results
        for (let i=0; i < res.data.results.length; i++){
          item.textContent = JSON.stringify(res.data.results[i])
          allPeople.push(res.data.results[i])
        }
        if (res.data.info.next){
          check = res.data.info.next
          return allPeople.push(populatePeople(res.data.info.next, allPeople))
        }
      })
  } catch (error) {
    console.log(`Error: ${error}`)
  } finally {
    return allPeople
  }
}

populatePeople(allCharacters)
  .then(data => console.log(`Final data length: ${data.length}`))
Some sharp eyes and brains would be helpful.
It's probably something really simple and I'm just missing it.
The following line has problems:
return allPeople.push(populatePeople(res.data.info.next, allPeople))
Here you push a promise object into allPeople, and as .push() returns a number, you are returning a number, not allPeople.
Using a for loop to push individual items from one array to another is really a verbose way of copying an array. The loop is only needed for the HTML part.
Also, you are mixing .then() with await, which is making things complex. Just use await only. When using await, there is no need for recursion any more. Just replace the if with a loop:
while (info) {
  ....
  info = res.data.info.next;
}
You never assign anything to universePeople. You can drop this parameter.
Instead of the plain for loop, you can use the for...of syntax.
As from res you only use the data property, use a variable for that property only.
So taking all that together, you get this:
async function populatePeople(info) {
  let allPeople = [];
  try {
    while (info) {
      let {data} = await axios.get(info);
      for (let content of data.results) {
        const item = document.createElement('li');
        item.textContent = JSON.stringify(content);
        denizens.append(item);
      }
      allPeople.push(...data.results);
      info = data.info.next;
    }
  } catch (error) {
    console.log(`Error: ${error}`)
  } finally {
    section.append(denizens);
    return allPeople;
  }
}
Here is a working example of a recursive function:
async function getAllCharectersRecursively(URL, results){
  try {
    const {data} = await axios.get(URL);
    // concat current page results
    results = results.concat(data.results)
    if (data.info.next){
      // if there is next page call recursively
      return await getAllCharectersRecursively(data.info.next, results)
    }
    else {
      // at last page there is no next page so return collected results
      return results
    }
  }
  catch(e){
    console.log(e)
  }
}

async function main(){
  let results = await getAllCharectersRecursively("https://rickandmortyapi.com/api/character/", [])
  console.log(results.length)
}

main()
I hesitate to offer another answer because Trincot's analysis and answer are spot-on.
But I think a recursive answer here can be quite elegant. And as the question was tagged with "recursion", it seems worth presenting.
const populatePeople = async (url) => {
  const {info: {next}, results} = await axios .get (url)
  return [...results, ...(next ? await populatePeople (next) : [])]
}

populatePeople ('https://rickandmortyapi.com/api/character/')
  // or wrap in an `async` main, or wait for global async...
  .then (people => console .log (people .map (p => p .name)))
  .catch (console .warn)
<script>/* dummy */ const axios = {get: (url) => fetch (url) .then (r => r .json ())} </script>
This is only concerned with fetching the data. Adding it to your DOM should be a separate step, and it shouldn't be difficult.
Update: Explanation
A comment indicated that this is hard to parse. There are two things that I imagine might be tricky here:
First is the object destructuring in {info: {next}, results} = <...>. This is just a nice way to avoid using intermediate variables to calculate the ones we actually want to use.
The second is the spread syntax in return [...results, ...<more>]. This is a simpler way to build an array than using .concat or .push. (There's a similar feature for objects.)
Here's another version doing the same thing, but with some intermediate variables and an array concatenation instead. It does the same thing:
const populatePeople = async (url) => {
  const response = await axios .get (url)
  const next = response .info && response .info .next
  const results = response .results || []
  const subsequents = next ? await populatePeople (next) : []
  return results .concat (subsequents)
}
I prefer the original version. But perhaps you would find this one more clear.

Return each response from a loop

I have a function using axios where I am deleting multiple records, based on the number of ids returned for a specific user.
async function DeleteAllRecords (emailAddress) {
  try {
    var accessToken = await setup.getAccessToken(emailAddress);
    var userId = await user.getUserId(emailAddress);
    var recordIds = await getAllRecordID(emailAddress);
    console.log(`Deleting all records for `+emailAddress+``);
    for (const rId of recordIds) {
      const response = await axios.delete(`${process.env.API_URL}/`+userId+`/records/`+recordIds+``, {'headers': {Authorization: 'Bearer '+accessToken+''}});
    }
    return response;
  }
  catch(e) {
    console.error(``+emailAddress+` produced the Record Delete Error = ` + e);
  }
}
This isn't working, and I'm unsure why. I would like to see the response for each axios.delete call, but I'm not sure how to get that. Currently it's returning response as undefined.
Why your code didn't work:
const response is declared inside the loop's block scope, and is not accessible outside of it.
Even if it was declared before the loop (using let instead of const) and assigned inside the loop, you would still only be able to return the last response.
You can push each response to an array (responses), and return the array:
async function DeleteAllRecords (emailAddress) {
  try {
    var accessToken = await setup.getAccessToken(emailAddress);
    var userId = await user.getUserId(emailAddress);
    var recordIds = await getAllRecordID(emailAddress);
    console.log(`Deleting all records for `+emailAddress+``);
    const responses = [];
    for (const rId of recordIds) {
      // Delete one record (rId) per iteration and keep its response
      const response = await axios.delete(`${process.env.API_URL}/`+userId+`/records/`+rId+``, {'headers': {Authorization: 'Bearer '+accessToken+''}});
      responses.push(response);
    }
    return responses;
  }
  catch(e) {
    console.error(``+emailAddress+` produced the Record Delete Error = ` + e);
  }
}
However, in this case multiple parallel requests would be better, since you don't actually need to delete one by one. I would use Array.map() to iterate the recordIds array, and return a promise for each one, then wait for all responses using Promise.all(), which would also return an array of responses:
async function DeleteAllRecords (emailAddress) {
  try {
    var accessToken = await setup.getAccessToken(emailAddress);
    var userId = await user.getUserId(emailAddress);
    var recordIds = await getAllRecordID(emailAddress);
    console.log(`Deleting all records for `+emailAddress+``);
    // Fire one delete per record id and wait for all of them
    return Promise.all(recordIds.map(rId =>
      axios.delete(`${process.env.API_URL}/`+userId+`/records/`+rId+``, {'headers': {Authorization: 'Bearer '+accessToken+''}})
    ));
  }
  catch(e) {
    console.error(``+emailAddress+` produced the Record Delete Error = ` + e);
  }
}
This isn't working, and I'm unsure why. I would like to see the response for each axios.delete call, but I'm not sure how to get that. Currently it's returning response as undefined.
There are several issues in your code:
const response is declared within the loop block, but you try to return it after the loop. const and let are strictly block-scoped, so referring to response after the loop block means you are returning a variable that isn't in scope there.
You write to response several times. Even if the issue above weren't present, it would still not work correctly, since you would only end up with the response of the last loop run. You'd have to collect the responses from all runs, e.g. into a list.