I'm working on a small JavaScript application that uses the API from pokeapi.co. Basically, it is supposed to fetch some data from the API and then display it on an HTML page. Here is the main part of the code:
const searchInput = document.querySelector(".recherche-poke input");
let allPokemon = [];
let tableauFin = [];
const listePoke = document.querySelector('.liste-poke');

function fetchPokemonBase(){
    fetch("https://pokeapi.co/api/v2/pokemon?limit=75")
        .then(reponse => reponse.json())
        .then((allPoke) => {
            allPoke.results.forEach((pokemon) => {
                fetchPokemonComplet(pokemon);
            })
        })
}
fetchPokemonBase();
function fetchPokemonComplet(pokemon){
    let objPokemonFull = {};
    let url = pokemon.url;
    let nameP = pokemon.name;

    fetch(url)
        .then(reponse => reponse.json())
        .then((pokeData) => {
            objPokemonFull.pic = pokeData.sprites.front_default;
            objPokemonFull.type = pokeData.types[0].type.name;
            objPokemonFull.id = pokeData.id;

            fetch(`https://pokeapi.co/api/v2/pokemon-species/${nameP}`)
                .then(reponse => reponse.json())
                .then((pokeData) => {
                    objPokemonFull.name = pokeData.names[4].name;
                    allPokemon.push(objPokemonFull);
                    if(allPokemon.length === 75){
                        tableauFin = allPokemon.sort((a, b) => {
                            return a.id - b.id;
                        }).slice(0, 21);
                        createCard(tableauFin);
                    }
                })
        });
}
function createCard(arr){
    for(let i = 0; i < arr.length; i++){
        console.log(i + '\n');
        const carte = document.createElement("li");
        const txtCarte = document.createElement('h5');
        txtCarte.innerText = arr[i].name;
        const idCarte = document.createElement('p');
        idCarte.innerText = `ID# ${arr[i].id}`;
        const imgCarte = document.createElement('img');
        imgCarte.src = arr[i].pic;
        carte.appendChild(imgCarte);
        carte.appendChild(txtCarte);
        carte.appendChild(idCarte);
        listePoke.appendChild(carte);
    }
}
Here's what the code does: it gathers a list of 75 Pokémon from the PokéAPI. Then it fetches the data about each Pokémon from the same site and stores it in one element of the allPokemon array; when the length of that array reaches 75, we start creating the HTML elements to display the data.
The ambiguous part is this:
if(allPokemon.length === 75){
    tableauFin = allPokemon.sort((a, b) => {
        return a.id - b.id;
    }).slice(0, 21);
    createCard(tableauFin);
}
This code works, but only when none of the requests fail. Otherwise, the length of the allPokemon array never reaches 75 and the rest of the code is never executed. When I run the code, I run into XHR GET errors and the script stops before displaying the data; I think one of those errors is what causes one of the promises to fail. I tried things like if(allPokemon.length === 74) (if I have one error, for example) and the code works just fine, but that is surely not the solution.
Is there a way for me to "count" the errors I get from my requests so that I can do something like if(allPokemon.length === 75 - errorsCount), or maybe there is a smarter way to write my code?
Thanks in advance.
I think you can use Promise.all for this.
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/all
function fetchPokemonBase(){
    const promises = []
    fetch("https://pokeapi.co/api/v2/pokemon?limit=75")
        .then(reponse => reponse.json())
        .then((allPoke) => {
            allPoke.results.forEach((pokemon) => {
                promises.push(fetchPokemonComplet(pokemon).catch(error => console.error(error)));
            })
        })
        .then(() => {
            Promise.all(promises)
                .then(() => {
                    tableauFin = allPokemon.sort((a, b) => {
                        return a.id - b.id;
                    }).slice(0, 21);
                    createCard(tableauFin);
                })
        })
}
fetchPokemonBase();
function fetchPokemonComplet(pokemon){
    let objPokemonFull = {};
    let url = pokemon.url;
    let nameP = pokemon.name;
    return fetch(url)
        .then(reponse => reponse.json())
        .then((pokeData) => {
            objPokemonFull.pic = pokeData.sprites.front_default;
            objPokemonFull.type = pokeData.types[0].type.name;
            objPokemonFull.id = pokeData.id;
            return fetch(`https://pokeapi.co/api/v2/pokemon-species/${nameP}`)
                .then(reponse => reponse.json())
                .then((pokeData) => {
                    objPokemonFull.name = pokeData.names[4].name;
                    allPokemon.push(objPokemonFull);
                })
        });
}
So what you do is: instead of executing the fetch directly in the forEach, we push each fetch to an array of promises. Then we use Promise.all to wait for them all, and we catch errors on each individual promise, so if one fails, the rest will just continue.
With this code we push every fetch for the individual Pokémon to an array:
promises.push(fetchPokemonComplet(pokemon).catch(error => console.error(error)));
Then we have an array of promises, which are the fetches to the server.
With the following code we wait for all of these promises to finish:
Promise.all(promises)
    .then(() => {
        tableauFin = allPokemon.sort((a, b) => {
            return a.id - b.id;
        }).slice(0, 21);
        createCard(tableauFin);
    })
The .then on Promise.all will be executed when all the promises are done. And since we catch errors on every individual promise, we don't have to worry about one failure aborting the rest. We also no longer care about the length and don't have to keep count.
Let me know if it works, or if you need any help.
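For what it's worth, a more recent alternative is Promise.allSettled, which waits for every promise to settle and never short-circuits on a rejection, so the per-promise .catch becomes unnecessary. A minimal sketch, reusing fetchPokemonComplet, allPokemon, tableauFin and createCard from above:

// Sketch: let Promise.allSettled absorb any rejected fetches
function fetchPokemonBase(){
    fetch("https://pokeapi.co/api/v2/pokemon?limit=75")
        .then(reponse => reponse.json())
        .then((allPoke) => {
            const promises = allPoke.results.map((pokemon) => fetchPokemonComplet(pokemon));
            // resolves once every promise has either fulfilled or rejected
            return Promise.allSettled(promises);
        })
        .then(() => {
            tableauFin = allPokemon.sort((a, b) => a.id - b.id).slice(0, 21);
            createCard(tableauFin);
        });
}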
From what I see, you will receive a list of between 0 and 75 Pokémon from the first API call. Then you fetch each one, however many it returned, up to 75 entries.
So, I think you want to just make sure your list isn't empty:
if(allPokemon.length > 0)
Related
I'm trying to fetch data using the Spotify API endpoint, this is my code:
//gets around 300+ items from my MongoDB
let artistsArray = []
const artists = await Artist.find({ createdBy: userId })

const sleep = m => new Promise(r => setTimeout(r, m))

const fetchData = async (artist, groupType) => {
    try {
        const data = await axios.get(
            `/artists/${artist.artistSpotifyId}/albums?include_groups=${groupType}&limit=5`,
        )
        // working with data here, putting it into the artistsArray, not important
    } catch (error) {
        if (error.response.status === 429) {
            //rate limit error: if received, sleep for the suggested time and retry
            const retryAfter = parseInt(error.response.headers['retry-after']) * 1000
            await sleep(retryAfter)
            await fetchData(artist, groupType)
        }
    }
}

//should go through each artist and get albums for each artist
await Promise.all(
    artists.map(async artist => {
        await fetchData(artist, 'album')
        await fetchData(artist, 'single')
    }),
)
When I tested this with a few items it worked fine; however, with a higher number of items (the 300+ mentioned above) I started getting 429 rate limit errors from Spotify. They always suggest a retry-after period, so I tried to implement it in the catch so that a sleep is triggered every time the error is caught, but I can't figure out why it doesn't work when using .map. When error 429 is encountered, the loop just continues.
When using a for loop it works just fine, but it's kind of slow, which is presumably why I haven't encountered any rate limit errors.
for (let artist of artists) {
    await fetchData(artist, 'album')
    await fetchData(artist, 'single')
}
Any suggestions as to why .map is ignoring the sleep function?
Array.map() is not going to wait for the completion of the previous function call before calling the next one; that's just not how it works.
As you can see in this example, all the console statements print at the same time.
const sleep = m => new Promise(r => setTimeout(r, m));

const arr = [1, 2, 3, 4];

arr.map(async item => {
    await sleep(2000);
    console.log(item);
});
They do so after a 2-second delay, but they all print at roughly the same time (after the same 2-second wait).
I would recommend using a for...of loop, since that will cause each pass through the loop to await the completion of the previous one.
async function main(){
    const sleep = m => new Promise(r => setTimeout(r, m));
    const arr = [1, 2, 3, 4];
    let newArray = []; //to simulate the behavior of Array.map();
    for(const item of arr){
        await sleep(2000);
        console.log(item);
        newArray.push(item); //to simulate the behavior of Array.map();
    }
    console.log(newArray);
}

main();
I don't think the sleep is ignored (unless the delay ends up being, e.g., zero); you could verify that by adding suitable console.logs.
The issue is that by using artists.map() like that you're shooting off 300 requests in a single instant, some of which may succeed, some of which may not, and some may have "overlapping" retry-after hints.
You should probably limit the overall concurrency of your requests using e.g. p-queue or p-limit.
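For illustration, a minimal sketch with p-limit, reusing fetchData and artists from the question (the concurrency of 5 is a guessed value to tune against the API):

const pLimit = require('p-limit');

// at most 5 requests in flight at once, so retry-after hints stay meaningful
const limit = pLimit(5);

await Promise.all(
    artists.flatMap(artist => [
        limit(() => fetchData(artist, 'album')),
        limit(() => fetchData(artist, 'single')),
    ])
);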
Others have explained that the failing requests are those that are started before the rate limit is hit. A simple fix is to perform the requests in chunks with delays in between.
// thanks to https://stackoverflow.com/a/60779547/294949
const chunk = (array, size) =>
    array.reduce((acc, _, i) => {
        if (i % size === 0) acc.push(array.slice(i, i + size))
        return acc
    }, []);
// like the OP's but nothing fancy in the catch
const oneFetch = async (artist, groupType) => {
    try {
        const data = await axios.get(
            `/artists/${artist.artistSpotifyId}/albums?include_groups=${groupType}&limit=5`,
        )
        // working with data here
    } catch (error) {
        console.log(error)
    }
}
// the OP's sleep
const sleep = m => new Promise(r => setTimeout(r, m))
Doing a chunk is just like the original logic with a sleep at the end.
const doAChunk = async (array, groupType, thenDelay) => {
    const promises = array.map(a => oneFetch(a, groupType))
    const results = await Promise.all(promises)
    await sleep(thenDelay);
    return results;
}
// the OP's function modified. find the right chunk size
// and retry time from the api docs (and/or experimentation)
const fetchData = async (groupType, chunkSize, delayBetween) => {
    const artists = await Artist.find({ createdBy: userId })
    const chunks = chunk(artists, chunkSize);
    const results = [];
    // process the chunks one after another so the delay actually throttles
    for (const c of chunks) {
        results.push(await doAChunk(c, groupType, delayBetween));
    }
    // notice the flat(), since chunking produces arrays of arrays
    return results.flat();
}
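Hypothetical usage, with made-up numbers for the chunk size and delay:

// e.g. chunks of 10 requests with a 1-second pause in between (made-up values)
fetchData('album', 10, 1000)
    .then(results => console.log(results.length, 'responses'))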
var https = require("https");

const arr = [];

for (let i = 1; i < 26; i++) {
    https.get(
        `https://jsonmock.hackerrank.com/api/countries?page=${i}`,
        (res) => {
            res.on("data", (data) => {
                JSON.parse(data).data.map((info, i) => {
                    let { name } = info;
                    arr.push(name);
                    console.log(name);
                });
            });
        }
    );
}

console.log(arr);
When I'm just logging JSON.parse(data) I'm getting the required data on my console, but when I'm trying to push it into an array it doesn't happen; instead it logs an empty array to the console.
I really need to know the reason, as I've been stuck on this for 3 days now.
Your issue is that the callback to https.get, i.e. (res) =>, is called asynchronously, so console.log(arr); is executed before the 25 https.get requests are even made. You'll see that arr.push does actually work if you console.log(arr); where you console.log(name); so your assumption that you are not able to push is incorrect.
You are pushing to the array, you just never console.log the array when it has data in it
I guess one way I can suggest doing this with the rather old version of Node.js you are using (14.16.0) is as follows.
var https = require("https");

function fn(callback) {
    const result = [];
    let done = 0;
    for (let i = 1; i < 26; i++) {
        // note: this assumes each response arrives in a single "data" chunk,
        // which holds for these small payloads
        https.get(`https://jsonmock.hackerrank.com/api/countries?page=${i}`, (res) => {
            res.on("data", (data) => {
                JSON.parse(data).data.map((info) => {
                    let { name } = info;
                    result.push(name);
                    console.log(name);
                });
                // keep track of how many requests have "completed"
                done = done + 1;
                // when all are done, call the callback
                if (done === 25) {
                    callback(result);
                }
            });
        });
    }
}

fn(arr => { // here is where arr is populated, nowhere else
    console.log(arr);
});
Note: arr will only be accessible inside the callback; you will NOT be able to access arr at the top level like you want. The only way you could is to use a version of Node.js that supports top-level await and convert the code to use Promises (which is a trivial task).
Not sure it's important, but there's no guarantee that the names in arr will be in the correct order, i.e. the results from iteration 3 may be pushed after those from iteration 4; that's the nature of network requests, there is no guarantee when they will finish.
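If order does matter, one possible tweak (a sketch, staying with the callback style above) is to store each page's names at an index derived from the loop variable instead of pushing:

// Sketch: slot each page's names by page index, then flatten at the end,
// so output order matches page order regardless of arrival order
function fnOrdered(callback) {
    const pages = [];
    let done = 0;
    for (let i = 1; i < 26; i++) {
        https.get(`https://jsonmock.hackerrank.com/api/countries?page=${i}`, (res) => {
            res.on("data", (data) => {
                pages[i - 1] = JSON.parse(data).data.map(({ name }) => name);
                done = done + 1;
                if (done === 25) callback(pages.flat());
            });
        });
    }
}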
As an aside, if you were to use the latest (18.1 at the time of writing) version of Node.js, the above can be written like this:
const promises = Array.from({ length: 25 }, async (_, i) => {
    const res = await fetch(`https://jsonmock.hackerrank.com/api/countries?page=${i + 1}`);
    const data = await res.json();
    return data.data.map(({ name }) => name);
});

const arr = (await Promise.all(promises)).flat();
console.log(arr);
Things of note:
native fetch, which makes network requests simple compared to the regular Node.js methods
top-level await, so you can use arr at the top level of the code
6 lines of code vs over 20
I am trying to create a function that will take an array of promise-returning functions and execute them in chunks.
Here is an example of a function I wrote that should have accomplished what I wanted,
but what is happening is that all the promises are already executed after the first Promise.all.
The Node.js debugger shows it working as I intended, but I can see in Wireshark that all the requests are sent after the first chunk.
const reqs = [...Array(10)].map(() => () => axios.post('http://localhost:7000/test', {}))

const handleChunks = (reqs) => {
    const chunks = []
    const chunkSize = 5
    for (let i = 0; i < reqs.length; i += chunkSize) {
        chunks.push(reqs.slice(i, i + chunkSize))
    }
    chunks.reduce(async (acc, chunk) => {
        const chunkToPromise = chunk.map((chunkFunc) => chunkFunc())
        return Promise.all(chunkToPromise).then(async (result) => {
            acc = await acc
            acc.push(result)
            return acc
        })
    }, Promise.resolve([]))
}
It seems to me that all the Promises I am creating schedule themselves on the task queue right away, and that Promise.all doesn't really take just what you hand it, but all the promises that are already on the task queue.
Does anyone have any clue how to handle this kind of problem?
Edit: I found out that it works the same way on the client...
There are three facets here: chunking the Promises, running each chunk, and waiting for them all to finish. Let's create some helpers:
// NOTE: works but probably not optimal. I fired this off the top of my head;
// for something like this in production I'd use a library function from
// lodash or ramda or something.
const partition = (n, xs) => {
    const results = [];
    let i = 0, k = 0;
    while (i < xs.length) {
        results.push([]);
        while (k < n) {
            if (i === xs.length) break;
            results[results.length - 1].push(xs[i]);
            i++;
            k++;
        }
        k = 0;
    }
    return results;
};
// This will make the requests from the passed-in array and
// wait for them all to finish. Note that unlike Promise.all,
// allSettled will not bail early if you get a rejection.
const runPromiseFns = async (ps) => Promise.allSettled(ps.map(x => x()));

// This will take an array of arrays of Promise-generating functions
// and wait for each sub-array to settle before moving on
// to the next one.
const runChunks = async (chunks) => {
    for (let chunk of chunks) {
        await runPromiseFns(chunk);
    }
    return;
};
Great. And now:
// Array of request thunks from your original code
const reqs = [...Array(10)].map(() => () => axios.post('http://localhost:7000/test', {}));

// Break into chunks of 5
const chunks = partition(5, reqs);

// Run through the execution pipe. Here allDone is a
// Promise you can .then or await somewhere else.
const allDone = runChunks(chunks);
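If you also want the settled results back, as the reduce in the question attempted, a small variant of runChunks (assuming the same helpers as above) could collect them:

// Variant that accumulates each chunk's settled outcomes
const runChunksCollect = async (chunks) => {
    const all = [];
    for (let chunk of chunks) {
        all.push(...await runPromiseFns(chunk));
    }
    return all; // array of { status, value } or { status, reason } objects
};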
// assumes a chunk(array, size) helper (e.g. lodash's _.chunk); subRegister
// and callApi come from my own code
const promiseAllFn = (pollingRequests) =>
    Promise.all(pollingRequests.map((pollingRequest) => callApi(pollingRequest)));

// run each group of 4 only after the previous group's Promise.all resolves
chunk(Object.values(subRegister), 4).reduce(
    (prev, pollingRequests) => prev.then(() => promiseAllFn(pollingRequests)),
    Promise.resolve('start')
);
My question is basically a combination of
What is the best way to limit concurrency?
Wait until all promises complete even if some rejected
I'm aware of Promise.allSettled, but I'm failing to find a good way to also limit concurrency.
What I have so far:
Idea 1 using p-limit:
const pLimit = require('p-limit');
const limit = pLimit(10);

let promises = files.map(pair => {
    var formData = {
        'file1': fs.createReadStream(pair[0]),
        'file2': fs.createReadStream(pair[1])
    };
    return limit(() => uploadForm(formData));
});

(async () => {
    const result = await Promise.allSettled(promises).then(body => {
        body.forEach(value => {
            if(value.status == "rejected")
                file.write(value.reason + '\n---\n');
        });
    });
})();
My problem with this solution is that I have to create all the promises up front, and in doing so open two file streams for each promise, so I'll hit the limit of open files.
Idea 2 using p-queue:
I experimented with generator functions to create and add new promises in the queue.on('next') event, but I couldn't get it to work properly, and this is probably not the right tool for this job.
Idea 3 using a PromisePool:
This looked very promising in the beginning. Some of them support a generator function to create the promises for the pool, but I couldn't find one that explicitly stated it behaves like Promise.allSettled.
I implemented es6-promise-pool only to find out that it will stop after the first promise rejection.
It's simple enough to implement it yourself: make an array of functions that, when called, return a Promise. Then implement a limiter function that takes functions from that array and calls them, and once one has finished, recursively calls the limiter again until the array is empty:
const request = (file) => new Promise((res, rej) => {
    console.log('requesting', file);
    setTimeout(() => {
        if (Math.random() < 0.5) {
            console.log('resolving', file);
            res(file);
        } else {
            console.log('rejecting', file);
            rej(file);
        }
    }, 1000 + Math.random() * 1000);
});

const files = [1, 2, 3, 4, 5, 6];
const makeRequests = files.map(file => () => request(file));

const results = [];
let started = 0;

const recurse = () => {
    const i = started++;
    const makeRequest = makeRequests.shift();
    return !makeRequest ? null : Promise.allSettled([makeRequest()])
        .then(result => {
            results[i] = result[0];
            return recurse();
        })
};

const limit = 2;
Promise.all(Array.from({ length: limit }, recurse))
    .then(() => {
        console.log(results);
    });
If the order of the results doesn't matter, it can be simplified by removing the started and i variables.
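For instance, a sketch of that simplification, assuming a fresh makeRequests array built the same way as above:

// Simplified: results arrive in completion order, not file order
const results = [];

const recurse = () => {
    const makeRequest = makeRequests.shift();
    return !makeRequest ? null : Promise.allSettled([makeRequest()])
        .then(result => {
            results.push(result[0]);
            return recurse();
        });
};

Promise.all(Array.from({ length: 2 }, recurse))
    .then(() => console.log(results));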
The accepted answer works more or less like p-limit.
You were having issues with p-limit because the streams were declared outside the limit callback.
This would have solved your problem:
let promises = files.map(pair => {
    return limit(() => uploadForm({
        'file1': fs.createReadStream(pair[0]),
        'file2': fs.createReadStream(pair[1])
    }));
});
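The reason this helps: p-limit invokes the callback only when a slot frees up, so the streams are created lazily; with a limit of 10, only around 10 pairs of files are open at any one time.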
I'm trying to write a callback function that returns an array of all the posts of a given category from a WordPress blog, so I can pull this data into a static site compiler.
The API only returns 100 at a time, so I have to loop through them page by page and add them to an array.
I've managed to loop through them and log them to the console, but I can't work out how to add them to an array while using promises. I'm not sure what argument I should pass to push().
Any pointers would be appreciated.
const getData = (category, number = 0, page = 1) =>
    fetch(`https://public-api.wordpress.com/rest/v1/sites/www.accessaa.co.uk/posts?category=${category}&number=${number}&page=${page}&order_by=date`)
        .then(res => res.json())

const found = (category) =>
    getData(category)
        .then(json => json.found)

var total = new Promise(function(resolve, reject) {
    resolve(found('news'))
})

var times = total.then(function(value) {
    return Math.ceil(value/100)
})

var calls = times
    .then(function(callsToMake) {
        items = []
        for (i = 1; i < callsToMake; i++) {
            getData('news', 100, i)
                .then(json => json.posts)
                .then(items.push(posts))
        }
        return items
    })
I changed some of the code structure for readability purposes.
The solution to your problem is to build a pool of asynchronous tasks and then run them in parallel. Promise.all([promises]) is perfect for the latter because it resolves with an array of the resolved values once all of the promises have been fulfilled, or rejects as soon as one of them rejects.
const getData = (category, number = 0, page = 1) =>
    fetch(`https://public-api.wordpress.com/rest/v1/sites/www.accessaa.co.uk/posts?category=${category}&number=${number}&page=${page}&order_by=date`)
        .then(res => res.json())

const found = (category) => getData(category).then(json => json.found);

found('news')
    .then((value) => {
        return Math.ceil(value/100);
    })
    .then((callsToMake) => {
        let tasks = [];
        for (let i = 1; i <= callsToMake; i++) {
            tasks.push(getData('news', 100, i)) //<--- Fill the tasks array with promises that will eventually return a value
        }
        return Promise.all(tasks); //<-- Run these tasks in parallel and return an array of the resolved values of the N promises.
    })
    .then((arrOfPosts) => {
        let allPosts = [];
        for (var elem of arrOfPosts)
            allPosts = allPosts.concat(elem.posts);
        console.log(allPosts);
    })
    .catch((err) => {
        console.log(err);
    })
You need to declare "posts":
.then(json => json.posts)
.then(posts => items.push(posts))
Or simply:
.then(json => items.push(json.posts))
BTW: 'var total' is weird; you're wrapping a promise with a promise.
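A minimal sketch of that last point:

// No need to wrap: found('news') already returns a promise
var total = found('news')

var times = total.then(function(value) {
    return Math.ceil(value/100)
})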