I would like to fetch data from this API: "https://swapi.co/api/planets". The data is paginated like
https://swapi.co/api/planets/?page=1, https://swapi.co/api/planets/?page=2....
I want to build a function which accepts url, pages, and callback as arguments. It will fetch all of the data page by page, until it reaches the page count specified in the arguments.
E.g.
loadData('https://swapi.co/api/planets', 5, cb)
This will load data from page 1, page 2, page 3, page 4 and page 5.
Here is my attempt, however it doesn't work. Can someone please point me in the right direction?
function fn(url, page, pages, cb) {
  return new Promise((resolve, reject) => {
    const endpoint = `${url}/?page=${page}`;
    fetch(endpoint).then(response => {
      if (response.status !== 200) {
        throw `${response.status} ${response.statusText}`;
      }
      response.json().then(data => {
        cb(data);
        if (page <= pages) {
          fn(url, page + 1, pages, cb)
            .then(resolve)
            .catch(reject);
        } else {
          resolve();
        }
      });
    });
  });
}

function cb(data) {
  console.log(data);
}

fn('https://swapi.co/api/planets', 1, 3, cb).then(() => {});
Follow up question: if instead of passing in a callback as the argument, I want to pass in an array to collect the data loaded, how should I tweak this API?
As has been said before, you probably don't need to wait for each call to finish before starting the next, but if you really want to, I find async / await much easier to parse (as a human).
I've done a quick, basic version in this snippet for you to see, with no error handling or checking that the data is right. Maybe this approach might suit you better?
async function getPage(page) {
  let response = await fetch(`https://swapi.co/api/planets/?page=${page}`);
  let data = await response.json();
  return data;
}

async function getPages(startPage, endPage) {
  let currentPage = startPage;
  let finalArr = [];
  while (currentPage <= endPage) {
    const pageData = await getPage(currentPage);
    console.log('adding page', currentPage);
    finalArr.push(...pageData.results);
    currentPage++;
  }
  console.log(finalArr);
}

getPages(1, 3);
Since each request does not depend on the previous one, I would use the Promise.all method instead of requesting each page only after the previous request has completed.
You can do something like:
function loadData(url, pages, cb) {
  const urls = [];
  for (let i = 1; i <= pages; i++) {
    urls.push(`${url}/?page=${i}`);
  }
  return Promise.all(urls.map(url => fetch(url).then(response => response.json())))
    .then(responses => {
      var results = responses.reduce((accum, response) => [...accum, ...response.results], []);
      cb(results);
      return results;
    });
}
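Regarding the follow-up question: loadData above already returns a promise that resolves to the combined results, so the simplest tweak is to drop the callback and use that. But if you specifically want to pass in an array to collect the data, here is a sketch (loadDataInto is a hypothetical name):
function loadDataInto(url, pages, collector) {
  const urls = [];
  for (let i = 1; i <= pages; i++) {
    urls.push(`${url}/?page=${i}`);
  }
  return Promise.all(urls.map(u => fetch(u).then(response => response.json())))
    .then(responses => {
      // push each page's results into the caller-supplied array
      responses.forEach(response => collector.push(...response.results));
      return collector;
    });
}

// const planets = [];
// loadDataInto('https://swapi.co/api/planets', 3, planets).then(() => console.log(planets));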
Related
Below I have a Node.js function that makes a series of requests to different urls; for each url I use the Cheerio web-scraping library to loop through elements on the DOM and build a sub-array. At the end of each request (after the sub-array is full) I'd like to push the contents of that array to a larger array, which is outside of the request scope.
The approach I'm trying doesn't seem to be working. It looks like I don't have access to allPlayers from inside the .then block.
function readPlayers(teamUrls) {
  const allPlayers = [];
  teamUrls.forEach((teamUrl, i) => {
    const options = {
      gzip: true,
      uri: teamUrl,
      Connection: 'keep-alive',
      transform: function (body) {
        return cheerio.load(body);
      }
    };
    request(options)
      .then(($) => {
        const team = [];
        $('tbody').children('tr').each(function (j, element) {
          const playerName = $(element).children('td').eq(1).children('span').eq(1).find('a').text().trim();
          const player = { 'playerName': playerName };
          team.push(player);
        });
        allPlayers.push(team);
      })
      .catch(err => console.log("error: " + err));
  });
}
So I'm wondering the best way to re-write this code to make the requests work and populate the outer array (allPlayers) with the results.
I've looked into trying to push the entire request directly into the outer array, to no avail.
In this example I'm using request-promise to make the request.
I've looked into using Promise.map, which I think is suited to this situation. Then I would return the entire request (I think), but I don't exactly understand what I'm doing in that case, or whether it will work.
Could anyone explain the scoping in this case, and why I can't do it the way I'm trying?
Many thanks
You have to remember that when you are using asynchronous functions you cannot go back to synchronous code execution.
This is one of the ways you can do it. It will fetch all the players in parallel:
async function readPlayers(teamUrls) {
  const playerPromises = teamUrls.map((teamUrl, i) => {
    const options = {
      gzip: true,
      uri: teamUrl,
      Connection: 'keep-alive',
      transform: function (body) {
        return cheerio.load(body);
      }
    };
    return request(options);
  });
  const players = await Promise.all(playerPromises);
  return players.reduce((allPlayers, $) => {
    const team = [];
    $('tbody').children('tr').each(function (j, element) {
      const playerName = $(element).children('td').eq(1).children('span').eq(1).find('a').text().trim();
      const player = { playerName: playerName };
      team.push(player);
    });
    allPlayers.push(team);
    return allPlayers;
  }, []);
}
And you can use it with await readPlayers(array) or readPlayers(array).then(allTeamPlayers => {...}).
Note: with the current code the result will be a 2D array, [[{p1: p1}..], [{p2: p2}..]] etc.
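If you'd rather have a flat list of players instead of the 2D array, one hypothetical tweak (assuming an environment with Array.prototype.flat, i.e. Node 11+ or a modern browser):
const teams = await readPlayers(teamUrls); // [[{...}, {...}], [{...}]]
const players = teams.flat();              // [{...}, {...}, {...}]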
If you use a forEach, every callback will run asynchronously and you won't be able to await them. You could swap it to a for loop, collect your promises in an array and then await the completion of all of them:
async function readPlayers(teamUrls) {
  const allPlayers = [];
  const allPromises = [];
  for (var i = 0; i < teamUrls.length; i++) {
    var teamUrl = teamUrls[i];
    const options = {
      gzip: true,
      uri: teamUrl,
      Connection: "keep-alive",
      transform: function (body) {
        return cheerio.load(body);
      }
    };
    allPromises.push(
      request(options)
        .then($ => {
          const team = [];
          $("tbody")
            .children("tr")
            .each(function (j, element) {
              const playerName = $(element)
                .children("td")
                .eq(1)
                .children("span")
                .eq(1)
                .find("a")
                .text()
                .trim();
              const player = { playerName: playerName };
              team.push(player);
            });
          allPlayers.push(team);
        })
        .catch(err => console.log("error: " + err))
    );
  }
  // wait until all the promises resolve
  await Promise.all(allPromises);
  console.log(allPlayers);
  return allPlayers;
}
Then you can get all the players by awaiting your function:
var allPlayers = await readPlayers(teamUrls);
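Keep in mind that a top-level await like this only works inside an async function (or an ES module with top-level await support), so a minimal sketch of the call site would be:
(async () => {
  const allPlayers = await readPlayers(teamUrls);
  console.log(allPlayers);
})();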
I want to run a large number (~100k) of HTTP requests in configurable chunks, with a configurable timeout between chunk requests. The requests are based on the data provided in a some.csv file.
It doesn't work because I am getting a TypeError, but when I remove the () after f, it doesn't work either.
I would be very grateful for a little help. Probably the biggest problem is that I don't really understand exactly how promises work; I tried multiple solutions but wasn't able to achieve what I want.
The timeout feature will probably give me even more headaches, so I would appreciate any tips on that too.
Can you please help me understand why it doesn't work?
Here is the snippet:
const rp = require('request-promise');
const fs = require('fs');
const { chunk } = require('lodash');

const BATCH_SIZE = 2;
const QUERY_PARAMS = ['clientId', 'time', 'changeTime', 'newValue'];

async function update(id, time, query) {
  const options = {
    method: 'POST',
    uri: `https://requesturl/${id}?query=${query}`,
    body: {
      "prop": {
        "time": time
      }
    },
    headers: {
      "Content-Type": "application/json"
    },
    json: true
  };
  return async () => { return await rp(options); };
}

async function batchRequestRunner(data) {
  const promises = [];
  for (row of data) {
    row = row.split(',');
    promises.push(update(row[0], row[1], QUERY_PARAMS.join(',')));
  }
  const batches = chunk(promises, BATCH_SIZE);
  for (let batch of batches) {
    try {
      Promise.all(
        batch.map(async f => { return await f(); })
      ).then((resp) => console.log(resp));
    } catch (e) {
      console.log(e);
    }
  }
}

async function main() {
  const input = fs.readFileSync('./input.test.csv').toString().split("\n");
  const requestData = input.slice(1);
  await batchRequestRunner(requestData);
}

main();
Clarification for the first comment:
I have a csv file which looks like below:
clientId,startTime
123,13:40:00
321,13:50:00
the file size is ~100k rows
the file contains information on how to update the time for a particular clientId in the database. I don't have access to the database, but I have access to an API which allows updating entries in the database.
I cannot make 100k calls at once, because: my network is limited (I work remotely because of coronavirus), it consumes a lot of memory, and the API may also be rate-limited and could crash if I make all the requests at once.
What I want to achieve:
Load csv into memory, convert it to an Array
Handle API requests in chunks: take the first two rows from the array, make an API call based on them, wait 1000 ms, take the next two rows, and continue processing until the end of the array (csv file)
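As to why the posted snippet throws: update is declared async, so it returns a promise that resolves to a function rather than a function itself; by the time batch.map(async f => { return await f(); }) runs, each f is a promise, and calling a promise is a TypeError. A minimal sketch of that one fix (note the Promise.all inside the loop would also need an await for the batches to actually run one at a time):
// Sketch: make update a plain function that returns the deferred request,
// so promises[] holds callable functions instead of promises of functions.
function update(id, time, query) {
  const options = {
    method: 'POST',
    uri: `https://requesturl/${id}?query=${query}`,
    body: { prop: { time: time } },
    headers: { "Content-Type": "application/json" },
    json: true
  };
  // a function to call later, not an already-started request
  return () => rp(options);
}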
Well, it seems like this is a somewhat classic case where you want to process an array of values with some asynchronous operation and, to avoid consuming too many resources or overwhelming the target server, you want no more than N requests in flight at the same time. This is a common problem for which there are pre-built solutions. My go-to solution is a small piece of code called mapConcurrent(). It's analogous to array.map(), but it assumes a promise-returning asynchronous callback, and you pass it the max number of items that should ever be in flight at the same time. It then returns a promise that resolves to an array of results.
Here's mapConcurrent():
// takes an array of items and a function that returns a promise
// returns a promise that resolves to an array of results
function mapConcurrent(items, maxConcurrent, fn) {
  let index = 0;
  let inFlightCntr = 0;
  let doneCntr = 0;
  let results = new Array(items.length);
  let stop = false;

  return new Promise(function (resolve, reject) {
    function runNext() {
      let i = index;
      ++inFlightCntr;
      fn(items[index], index++).then(function (val) {
        ++doneCntr;
        --inFlightCntr;
        results[i] = val;
        run();
      }, function (err) {
        // set flag so we don't launch any more requests
        stop = true;
        reject(err);
      });
    }

    function run() {
      // launch as many as we're allowed to
      while (!stop && inFlightCntr < maxConcurrent && index < items.length) {
        runNext();
      }
      // if all are done, then resolve parent promise with results
      if (doneCntr === items.length) {
        resolve(results);
      }
    }

    run();
  });
}
Your code can then be structured to use it like this:
function update(id, time, query) {
  const options = {
    method: 'POST',
    uri: `https://requesturl/${id}?query=${query}`,
    body: {
      "prop": {
        "time": time
      }
    },
    headers: {
      "Content-Type": "application/json"
    },
    json: true
  };
  return rp(options);
}

function processRow(row) {
  let rowData = row.split(",");
  return update(rowData[0], rowData[1], rowData[2]);
}

function main() {
  const input = fs.readFileSync('./input.test.csv').toString().split("\n");
  const requestData = input.slice(1);
  // process this entire array with up to 5 requests "in-flight" at the same time
  mapConcurrent(requestData, 5, processRow).then(results => {
    console.log(results);
  }).catch(err => {
    console.log(err);
  });
}
You can obviously adjust the number of concurrent requests to whatever number you want. I set it to 5 here in this example.
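mapConcurrent caps how many requests are in flight, but it doesn't pause between them. If you also want the configurable delay between chunks from the question, here's a simple sketch (fixed-size batches rather than a concurrency cap; BATCH_SIZE, processRow and the 1000 ms value are taken from the question):
// Sketch: run fixed-size batches with a configurable pause between them.
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

async function processInBatches(rows, batchSize, pauseMs) {
  const results = [];
  for (let i = 0; i < rows.length; i += batchSize) {
    const batch = rows.slice(i, i + batchSize);
    // run one batch, wait for all of it, then pause before the next
    results.push(...await Promise.all(batch.map(processRow)));
    if (i + batchSize < rows.length) await delay(pauseMs);
  }
  return results;
}

// e.g. processInBatches(requestData, BATCH_SIZE, 1000).then(console.log);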
I have the following code:
/**
 * Fetch stats from api
 */
fetchStats() {
  this._isFetching = true;
  // fetch stats after building url and replacing invalid characters
  return new Promise(async (resolve, reject) => {
    await API.fetchStats(this.rsn)
      .then(jres => {
        this.skills = jres.main.skills;
        this._isFetching = false;
        resolve('success');
      })
      .catch(err => {
        console.log(err);
        console.log('error retreiving stats');
        this._isFetching = false;
        reject('Failed to retreive stats');
      })
      .finally(() => {
        this._isFetching = false;
      });
  });
}
I thought making it async with await would make it wait until it got the response before continuing. Returning the promise is something I added in testing to see if I could make it synchronous.
Then my code that consumes this method:
memberCollection.forEach(async el => {
  await el.player.fetchStats()
    .then(() => {
      console.log(`Refreshed ${el.player.rsn}'s account`);
    })
    .catch(console.log(`Failed to refresh ${el.player.rsn}'s account`));
});
My thinking was that it would wait until it got a response, then console.log either a successful refresh or a failed refresh. What I am instead seeing is a whole bunch of "success" messages followed by a string of failed messages, indicating that it is running both the then and the catch in the forEach. Does anyone know how I can make this work?
My issue is that Axios keeps timing out (my speculation is that it is due to the number of requests being sent off at once, and the fact that there is a 5-10 sec delay as it pulls from the db). If I navigate to the API URL manually it works, and if I just do one member (as opposed to forEach) it works fine. So I'm trying to limit the number of requests fired off at once. I have tried setting my axios timeout to 10, 20, and 60 seconds, but it made no improvement.
Solution code:
const asyncForEach = async (arr, cb) => {
  for (let i = 0; i < arr.length; i++) {
    let el = arr[i];
    try {
      let res = await cb(el);
    } catch (err) { console.log(err); }
    if (el.player && el.player.rsn) console.log(`Processed ${el.player.rsn}`);
  }
  console.log('done processing in asyncForEach');
}
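For context, a hedged usage sketch with the memberCollection from the question; each player's stats are fetched one at a time, in order:
asyncForEach(memberCollection, el => el.player.fetchStats())
  .then(() => console.log('all accounts refreshed'));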
This is not linked to axios but to async / await.
Consider:
function slow(i) {
  return new Promise((ok, ko) => {
    return setTimeout(_ => ok(i), 1000);
  });
}

async function asyncForEach(arr, cb) {
  for (var i = 0; i < arr.length; ++i) {
    let el = arr[i];
    let res = await cb(el);
    console.log('async', res, new Date);
  }
}
/*
#foreach does not wait, but async and reduce are spaced by one second
foreach 4 2019-10-14T13:43:47.059Z
foreach 5 2019-10-14T13:43:47.071Z
foreach 6 2019-10-14T13:43:47.071Z
async 1 2019-10-14T13:43:47.071Z
async 2 2019-10-14T13:43:48.073Z
async 3 2019-10-14T13:43:49.074Z
reduce 7 2019-10-14T13:43:50.076Z
reduce 8 2019-10-14T13:43:51.078Z
reduce 9 2019-10-14T13:43:52.080Z
*/
async function main() {
  await [4,5,6].forEach(async el => {
    let res = await slow(el);
    console.log('foreach', res, new Date);
  });
  await asyncForEach([1,2,3], slow);
  await [7,8,9].reduce((acc, el) => acc.then(async _ => {
    let res = await slow(el);
    console.log('reduce', res, new Date);
    return;
  }), Promise.resolve());
}

main();
As you can see from the timestamps, forEach does not wait for slow to finish; however, asyncForEach does wait in each iteration.
What you may want to do is either:
write a for loop, as done with asyncForEach, or
use standard promises (stacking them):
[1,2,3].reduce((acc, el) => acc.then(_ => {
  return slow(el);
}), Promise.resolve());
I'd like to reuse the same code in a loop. This code contains promises. However, when iterating, this code results in an error.
I've tried using for and while loops. There seems to be no issue when I use the for loop for a single iteration.
Here is a minimal version of my code:
var search_url = /* Some initial URL */
var glued = "";

for (var i = 0; i < 2; i++) {
  const prom = request(search_url)
    .then(function success(response /* An array from a XMLHTTPRequest */) {
      if (/* Some condition */) {
        search_url = /* Gets next URL */
        glued += processQuery(response[0]);
      } else {
        console.log("Done.");
      }
    })
    .catch(function failure(err) {
      console.error(err.message); // TODO: do something w error
    });
}

document.getElementById('api-content').textContent = glued;
I expect the results to append to the variable glued but instead, I get an error: failure Promise.catch (async) (anonymous) after the first iteration of the loop.
Answer:
You can use Symbol.iterator together with for await to execute your promises one after another. This can be packaged up into a constructor; in the example below it's called Serial (because we're going through promises one by one, in order).
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function (fn) {
      promises.push(fn);
    },
    resolve: async function (cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function* () {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  };
}
What is the above?
It's a constructor called Serial.
It takes as an argument an array of Functions that return Promises.
The functions are stored in Serial.promises
It has an empty array stored in Serial.resolved - this will store the resolved promise requests.
It has two methods:
addPromise: Takes a Function that returns a Promise and adds it to Serial.promises
resolve: Asynchronously calls the custom Symbol.iterator. The iterator goes through every single promise, waits for it to complete, and adds the result to Serial.resolved. Once this is done, it returns the populated Serial.resolved array mapped through an optional callback. This allows you to simply call resolve and then decide what to do with the array of responses, e.g. .resolve().then((resolved_requests) => /* do something with resolved_requests */)
Why does it work?
Although many people don't realize it, Symbol.iterator is much more powerful than standard for loops. This is for two big reasons.
The first reason, and the one that is applicable in this situation, is that it allows for asynchronous calls that can affect the state of the applied object.
The second reason is that it can be used to provide two different types of data from the same object. E.g. you may have an array whose contents you would like to read:
let arr = [1,2,3,4];
You can use a for loop or forEach to get the data:
arr.forEach(v => console.log(v));
// 1, 2, 3, 4
But if you adjust the iterator:
arr[Symbol.iterator] = function* () {
  yield* this.map(v => v+1);
};
You get this:
arr.forEach(v => console.log(v));
// 1, 2, 3, 4
for(let v of arr) console.log(v);
// 2, 3, 4, 5
This is useful for many different reasons, including timestamping requests/mapping references, etc. If you'd like to know more please take a look at the ECMAScript Documentation: For in and For Of Statements
Use:
It can be used by calling the constructor with an Array of functions that return Promises. You can also add Function Promises to the Object by using
new Serial([])
  .addPromise(() => fetch(url))
It doesn't run the Function Promises until you use the .resolve method.
This means that you can add promises ad hoc if you'd like before you do anything with the asynchronous calls, e.g. these two are the same:
With addPromise:
let promises = new Serial([() => fetch(url), () => fetch(url2), () => fetch(url3)]);
promises.addPromise(() => fetch(url4));
promises.resolve().then((responses) => responses)
Without addPromise:
let promises = new Serial([() => fetch(url), () => fetch(url2), () => fetch(url3), () => fetch(url4)])
  .resolve().then((responses) => responses)
Data:
Since I can't really replicate your data calls, I opted for JSONPlaceholder (a fake online rest api) to show the promise requests in action.
The data looks like this:
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1",
                  "https://jsonplaceholder.typicode.com/todos/2",
                  "https://jsonplaceholder.typicode.com/todos/3"]
  //since our constructor takes functions that return promises, I map over the URLs:
  .map(url => () => fetch(url));
To get the responses we can call the above data using our constructor:
let promises = new Serial(searchURLs)
  .resolve()
  .then((resolved_array) => console.log(resolved_array));
Our resolved_array gives us an array of fetch Response objects. You can see that here:
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function (fn) {
      promises.push(fn);
    },
    resolve: async function (cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function* () {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  };
}
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2", "https://jsonplaceholder.typicode.com/todos/3"].map(url => () => fetch(url));
let promises = new Serial(searchURLs).resolve().then((resolved_array) => console.log(resolved_array));
Getting Results to Screen:
I opted to use a closure function to simply add text to an output HTMLElement.
This is added like this:
HTML:
<output></output>
JS:
let output = ((selector) => (text) => document.querySelector(selector).textContent += text)("output");
Putting it together:
If we use the output snippet along with our Serial object the final functional code looks like this:
let promises = new Serial(searchURLs).resolve()
  .then((resolved) => resolved.map(response =>
    response.json()
      .then(obj => output(obj.title))));
What's happening above is this:
we input all our functions that return promises: new Serial(searchURLs)
we tell it to resolve all the requests: .resolve()
after it resolves all the requests, we tell it to take them and map the array: .then(resolved => resolved.map
we turn each response into an object using the .json method. This is necessary for JSON, but may not be necessary for you
after this is done, we use .then(obj => to tell it to do something with each computed response
we output the title to the screen using output(obj.title)
Result:
let output = ((selector) => (text) => document.querySelector(selector).textContent += text)("output");
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function (fn) {
      promises.push(fn);
    },
    resolve: async function (cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function* () {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  };
}
let searchURLs = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2", "https://jsonplaceholder.typicode.com/todos/3"].map(url => () => fetch(url));
let promises = new Serial(searchURLs).resolve()
  .then((resolved) => resolved.map(response =>
    response.json()
      .then(obj => output(obj.title))));
<output></output>
Why go this route?
It's reusable, functional, and if you import the Serial Constructor you can keep your code slim and comprehensible. If this is a cornerstone of your code, it'll be easy to maintain and use.
Using it with your code:
I will add how to specifically use this with your code to fully answer your question and so that you may understand further.
NOTE: glued will be populated with the requested data, but it's unnecessary. I left it in because you may have wanted it stored for a reason outside the scope of your question, and I don't want to make assumptions.
//setup urls:
var search_urls = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2"];
var request = (url) => () => fetch(url);
let my_requests = new Serial(search_urls.map(request));
//setup glued (you don't really need to, but if for some reason you want the info stored...)
var glued = "";
//setup helper function to grab title (this is necessary for my specific data)
var addTitle = (req) => req.json().then(obj => (glued += obj.title, document.getElementById('api-content').textContent = glued));
// put it all together:
my_requests.resolve().then(requests => requests.map(addTitle));
Using it with your code - Working Example:
function Serial(promises = []) {
  return {
    promises,
    resolved: [],
    addPromise: function (fn) {
      promises.push(fn);
    },
    resolve: async function (cb = i => i, err = (e) => console.log("trace: Serial.resolve " + e)) {
      try {
        for await (let p of this[Symbol.iterator]()) {}
        return this.resolved.map(cb);
      } catch (e) {
        err(e);
      }
    },
    [Symbol.iterator]: async function* () {
      this.resolved = [];
      for (let promise of this.promises) {
        let p = await promise().catch(e => console.log("trace: Serial[Symbol.iterator] ::" + e));
        this.resolved.push(p);
        yield p;
      }
    }
  };
}
//setup urls:
var search_urls = ["https://jsonplaceholder.typicode.com/todos/1", "https://jsonplaceholder.typicode.com/todos/2"];
var request = (url) => () => fetch(url);
let my_requests = new Serial(search_urls.map(request));
//setup glued (you don't really need to, but if for some reason you want the info stored...)
var glued = "";
//setup helper function to grab title (this is necessary for my specific data)
var addTitle = (req) => req.json().then(obj => (glued += obj.title, document.getElementById('api-content').textContent = glued));
// put it all together:
my_requests.resolve().then(requests => requests.map(addTitle));
<div id="api-content"></div>
Final Note
It's likely that we will see a prototypal change to the Promise object in the future that allows for easy serialization of Promises. Currently (7/15/19) there is a TC39 Proposal that adds a lot of functionality to the Promise object, but it hasn't been fully vetted yet, and as with many ideas trapped within the Proposal stage, it's almost impossible to tell when they will be implemented in browsers, or whether the idea will stagnate and fall off the radar.
Until then, workarounds like this are necessary and useful (the reason why I even went through the motions of constructing this Serializer object was for a transpiler I wrote in Node, but it's been very helpful beyond that!), but do keep an eye out for any changes, because you never know!
Hope this helps! Happy Coding!
Your best bet is probably going to be building up that glued variable with recursion.
Here's an example using recursion with a callback function:
var glued = "";

requestRecursively(/* Some initial URL string */, function () {
  document.getElementById('api-content').textContent = glued;
});

function requestRecursively(url, cb) {
  request(url).then(function (response) {
    if (/* Some condition */) {
      glued += processQuery(response[0]);
      var next = /* Gets next URL string */;
      if (next) {
        // There's another URL. Make another request.
        requestRecursively(next, cb);
      } else {
        // We're done. Invoke the callback;
        cb();
      }
    } else {
      console.log("Done.");
    }
  }).catch(function (err) {
    console.error(err.message);
  });
}
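If you'd rather avoid the callback, the same recursion can return a promise instead; here's a sketch, where getNextUrl stands in for the elided next-URL logic from the question:
function requestRecursivelyP(url) {
  return request(url).then(function (response) {
    const piece = processQuery(response[0]);
    const next = getNextUrl(response); // hypothetical: returns null when done
    if (next) {
      // append the rest of the pages to this one, in order
      return requestRecursivelyP(next).then(rest => piece + rest);
    }
    return piece;
  });
}

// requestRecursivelyP(initialUrl).then(glued => {
//   document.getElementById('api-content').textContent = glued;
// });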
I need to create a function that runs a 'getFile' function on each item in an array. The getFile function logs 'File contents of x' x being whatever element is in the array.
Currently, I have a working function that runs getFile on the array and waits for the final response before logging the results.
However, I now need to log the responses as I receive them, in order. For example, if my array is [1, 2, 3, 4, 5], currently it logs 'File contents of x' in a random order, so it might log 3, then 4, then 1. As soon as I receive 1 I need to log it, then once I receive 2 log that, and so on.
I will insert my current code below. The problem I'm having is that I need to know when the 'empty space' in my array becomes populated so I can log it in real time, allowing my user to see the result build up rather than having to wait until all the responses have come back.
function fetchContentOfFiles(fileNames, testCB) {
  const fileContent = [];
  let counter = 0;
  fileNames.forEach((file, i) => {
    getFile(file, (err, fileName) => {
      if (err) console.log(err);
      else {
        fileContent[i] = fileName;
        counter++;
        if (counter === fileNames.length) {
          testCB(null, fileContent);
        }
        console.log(fileContent);
      }
    });
  });
}
The cleanest way to write this would be to use a for loop inside an async function. Promisify getFile so that it returns a Promise, then await it in every iteration of the loop. At the end of the loop, call the callback:
const getFileProm = file => new Promise((resolve, reject) => {
  getFile(file, (err, fileName) => {
    if (err) reject(err);
    else resolve(fileName);
  });
});

async function fetchContentOfFiles(fileNames, testCB) {
  const fileContent = [];
  try {
    for (let i = 0; i < fileNames.length; i++) {
      fileContent.push(
        await getFileProm(fileNames[i])
      );
    }
  } catch (e) {
    // handle errors, if you want, maybe call testCB with an error and return?
  }
  testCB(null, fileContent);
}
It would probably be even better if fetchContentOfFiles was called and handled as a Promise rather than with callbacks, and then the errors can be handled in the consumer:
async function fetchContentOfFiles(fileNames) {
  const fileContent = [];
  for (let i = 0; i < fileNames.length; i++) {
    fileContent.push(
      await getFileProm(fileNames[i])
    );
  }
  return fileContent;
}

fetchContentOfFiles(arr)
  .then((fileContent) => {
    // do stuff with fileContent
  })
  .catch((err) => {
    // something went wrong
  });
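One more hedged option, since the question asks to log results as they arrive but in order: start all the requests at once and await them in sequence, so each file is logged the moment both it and every earlier file are ready:
// Sketch: fire all requests in parallel, but log in array order.
async function fetchAndLogInOrder(fileNames) {
  const promises = fileNames.map(file => getFileProm(file)); // all start now
  const fileContent = [];
  for (const p of promises) {
    const content = await p; // waits only if this one isn't ready yet
    console.log(content);    // earlier files are guaranteed to log first
    fileContent.push(content);
  }
  return fileContent;
}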