Object property coming up as undefined after assigning with an async fetch - javascript

I'm having some trouble adding a key to an object, as seen here:
const recursiveFetchAndWait = useCallback(
  (url) => {
    setLoading(true);
    fetch(url)
      .then(async response => {
        if (response.status === 200) { // Checking for response code 200
          const xml = await response.text();
          setLoading(false);
          return XML2JS.parseString(xml, (err, result) => { // xml2js: converts XML to JSON
            if (result.items.$.totalitems !== '0') { // Only processing further if there are returned results
              result.items.item.forEach(game => {
                /* Fetching the statistics from a separate API, because the base API doesn't include these */
                const gameId = game.$.objectid;
                fetch('https://cors-anywhere.herokuapp.com/https://www.boardgamegeek.com/xmlapi2/thing?id=' + gameId + '&stats=1')
                  .then(async response => {
                    const xml = await response.text();
                    return XML2JS.parseString(xml, (err, result) => {
                      console.log('result', result); // This returns data.
                      game.statistics = result.items.item[0].statistics[0].ratings[0];
                      // setStatistics(...{statistics}, ...{gameId: result.items.item[0].statistics[0].ratings[0]})
                    })
                  })
                console.log('game', game); // This returns the object with the newly added statistics key.
                console.log('STATS!', game.statistics); // This doesn't recognize the statistics key?!
                /* Going through the array and changing default values and converting string numbers to actual numbers */
                if (game.stats[0].rating[0].ranks[0].rank[0].$.value === 'Not Ranked')
                  game.stats[0].rating[0].ranks[0].rank[0].$.value = 'N/A';
                else {
                  game.stats[0].rating[0].ranks[0].rank[0].$.value = Number(game.stats[0].rating[0].ranks[0].rank[0].$.value);
                }
                game.stats[0].$.minplayers = Number(game.stats[0].$.minplayers);
                if (isNaN(game.stats[0].$.minplayers))
                  game.stats[0].$.minplayers = '--';
                game.stats[0].$.maxplayers = Number(game.stats[0].$.maxplayers);
                if (isNaN(game.stats[0].$.maxplayers))
                  game.stats[0].$.maxplayers = '--';
                game.stats[0].$.maxplaytime = Number(game.stats[0].$.maxplaytime);
                if (isNaN(game.stats[0].$.maxplaytime))
                  game.stats[0].$.maxplaytime = '--';
                if (game.yearpublished === undefined)
                  game.yearpublished = ['--'];
              });
              setGameList(result.items.item)
            }
          });
        } else if (response.status === 202) { // If the status response was 202 (API still retrieving data), call the fetch again after a set timeout
          setTimeoutAsCallback(() => recursiveFetchAndWait(url));
        } else
          console.log(response.status);
      })
  },
  [],
);
Here are the results from the console.logs:
[screenshot of console output omitted]
I fear the issue relates to the async call, but I'm confused as to why the first console.log() works fine then. If it is an async issue, how do I go about resolving it?

Your first console.log works because the "game" variable already exists and contains data before you even make the async fetch request. You could call it before the fetch and it would still be fine.
Your second console.log, which tries to output "game.statistics", runs before the fetch has returned any data. Async calls do not stop and wait for the async task to complete before moving on to the next lines of code; that is the intended purpose of an asynchronous code block. The callback runs with the response once it arrives, so anything that relies on the returned data belongs inside it, but the browser is not blocked from continuing through the rest of the code in the meantime.
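For instance, in this stripped-down sketch (the URL is just a placeholder), the line after the fetch call logs before the callback does:

fetch('/some-url')
  .then(() => console.log('B: inside the callback')); // runs later, once the response arrives
console.log('A: after the fetch call'); // runs first; the request is still in flight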
To achieve what you seem to be attempting, you can place the work that needs to run after the data arrives in a separate function and call it with the response:
games.forEach(game => {
  fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1')
    .then(response => {
      processData(response, game);
    })
});

const processData = async (response, game) => {
  const xml = await response.text(); // text() returns a promise, so it must be awaited
  XML2JS.parseString(xml, (err, result) => {
    game.statistics = result.items.item[0].statistics[0].ratings[0];
  })
  console.log('game', game);
  console.log('STATS!', game.statistics);
}
Or you can explicitly wait for the async task to complete before moving on. This requires you to either use promises or wrap the entire games forEach loop in an async function, because only an async function can await another async function called inside of itself; a sketch of that follows.
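As a minimal sketch of that second option, assuming your games array and XML2JS are in scope, an async IIFE with a for...of loop works because, unlike forEach, the loop body can be awaited:

(async () => {
  for (const game of games) {
    const response = await fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1');
    const xml = await response.text();
    // wrap the callback-style parser in a promise so it can be awaited
    await new Promise(resolve => XML2JS.parseString(xml, (err, result) => {
      game.statistics = result.items.item[0].statistics[0].ratings[0];
      resolve();
    }));
    console.log('STATS!', game.statistics); // defined here, after the awaits
  }
})();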
Code for the updated question
The editor won't let me format code properly anymore, but essentially the simplest solution is that all your data-handling logic should be executed within the XML callback. From your shared code I can't see any requirement for it to exist outside of the callback, where the data is handled after it has been retrieved.
const recursiveFetchAndWait = useCallback(
  (url) => {
    setLoading(true);
    fetch(url)
      .then(async response => {
        if (response.status === 200) { // Checking for response code 200
          const xml = await response.text();
          setLoading(false);
          return XML2JS.parseString(xml, (err, result) => { // xml2js: converts XML to JSON
            if (result.items.$.totalitems !== '0') { // Only processing further if there are returned results
              result.items.item.forEach(game => {
                /* Fetching the statistics from a separate API, because the base API doesn't include these */
                const gameId = game.$.objectid;
                fetch('https://cors-anywhere.herokuapp.com/https://www.boardgamegeek.com/xmlapi2/thing?id=' + gameId + '&stats=1')
                  .then(async response => {
                    const xml = await response.text();
                    return XML2JS.parseString(xml, (err, result) => {
                      // BEGINNING OF "XML2JS.parseString"
                      console.log('result', result); // This returns data.
                      game.statistics = result.items.item[0].statistics[0].ratings[0];
                      // setStatistics(...{statistics}, ...{gameId: result.items.item[0].statistics[0].ratings[0]})
                      console.log('game', game); // This returns the object with the newly added statistics key.
                      console.log('STATS!', game.statistics); // This doesn't recognize the statistics key?!
                      /* Going through the array and changing default values and converting string numbers to actual numbers */
                      if (game.stats[0].rating[0].ranks[0].rank[0].$.value === 'Not Ranked')
                        game.stats[0].rating[0].ranks[0].rank[0].$.value = 'N/A';
                      else {
                        game.stats[0].rating[0].ranks[0].rank[0].$.value = Number(game.stats[0].rating[0].ranks[0].rank[0].$.value);
                      }
                      game.stats[0].$.minplayers = Number(game.stats[0].$.minplayers);
                      if (isNaN(game.stats[0].$.minplayers))
                        game.stats[0].$.minplayers = '--';
                      game.stats[0].$.maxplayers = Number(game.stats[0].$.maxplayers);
                      if (isNaN(game.stats[0].$.maxplayers))
                        game.stats[0].$.maxplayers = '--';
                      game.stats[0].$.maxplaytime = Number(game.stats[0].$.maxplaytime);
                      if (isNaN(game.stats[0].$.maxplaytime))
                        game.stats[0].$.maxplaytime = '--';
                      if (game.yearpublished === undefined)
                        game.yearpublished = ['--'];
                      setGameList(game); // The forEach means that result.items.item == game
                      // END OF "XML2JS.parseString"
                    })
                  })
              });
            }
          });
        } else if (response.status === 202) { // If the status response was 202 (API still retrieving data), call the fetch again after a set timeout
          setTimeoutAsCallback(() => recursiveFetchAndWait(url));
        } else
          console.log(response.status);
      })
  },
  [],
);

Move console.log('STATS!', game.statistics); immediately below game.statistics =.
Or, do everything inside an async function:
(async () => {
  for (const game of games) {
    const response = await fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1');
    const xml = await response.text();
    await new Promise(resolve => {
      XML2JS.parseString(xml, (err, result) => {
        game.statistics = result.items.item[0].statistics[0].ratings[0];
        resolve();
      });
    });
  }
  games.forEach(game => {
    console.log('game', game);
    console.log('STATS!', game.statistics);
  });
})();

If you want to create a sequential flow, you should follow as below:
(async () => {
  await Promise.all(games.map(async game => {
    await new Promise((resolve) => {
      fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1')
        .then(async response => {
          const xml = await response.text(); // XML2JS boilerplate
          xml2js.parseString(xml, (err, result) => { // XML2JS boilerplate
            console.log('result', result); // This returns data.
            game.statistics = result.items.item[0].statistics[0].ratings[0]; // Creating a new statistics key on the game object and assigning it the statistics from the API call
            resolve();
          });
        });
    });
  }));
  games.forEach(game => {
    console.log('game', game);
    console.log('STATS!', game.statistics);
  });
})();

Related

Merging various backend requests in the express res.send()

I'm trying to make several asynchronous backend calls to generate a JSON response in my express API. Because of the nature of the API, I have 3 requests that are being made that are dependent on each other in some way.
Request 1: Returns an Array of values that are used to make request 2. Each value will be used as a mapping for the remaining requests. That is to say, it will be a unique identifier used to map the response from the requests in Request 3.
Request 2 (Parallel Batch): A request is made using each value from the Array returned in request 1. Each of these returns a value to be used in each of the Request 3s. That is to say, it's a 1-to-1 mapping.
Request 3 (Parallel Batch): This request takes the response from Request 2, and makes a 1-to-1 follow up request to get more data on that specific mapping (the id from request 1)
I would like the final data I send to the consumer to look like this:
{
  id1: details1,
  id2: details2,
  id3: details3,
  ...
}
Here is the code I have so far...
app.get("/artists/:artist/albums", (req, res) => {
console.log("#############")
const artistName = req.params.artist
let response = {};
let s3Promise = s3.listAlbums(artistName)
let albumDetailsPromises = []
s3Promise
.then((data) => {
data.map((album) => {
// Each album name here will actually be used as the unique identifier for
// the final response
// Build an Array of promises that will first fetch the albumId, then use
// that album id to fetch the details on the album
albumDetailsPromises.push(
discogs.getAlbumId(artistName, album).then( // Returns a promise
({ data }) => {
let masterId = data.results[0].id
let recordName = data.results[0].title
// Storing the album name to carry as a unique id alongside the promise
return [album, discogs.getAlbumDetails(masterId) // Returns a promise ]
}
)
)
})
})
.then(() => {
// When all the albumIds have been fetched, there will still exist a promise in the
// second index of each element in the albumDetailsPromises array
Promise.all(albumDetailsPromises)
.then((namedPromises) => {
namedPromises.map(
(album) => {
let albumName = album[0] // Unique Id
let albumDetailPromise = album[1]
// Resolving the albumDetailsPromise here, and storing the value on
// a response object that we intend to send as the express response
albumDetailPromise
.then(
({ data }) => {
response[albumName] = data
})
.catch(err => response[albumName] = err)
})
})
})
.catch((err) => console.log(err))
})
As of now, everything seems to be working as expected, I just can't seem to figure out how to "await" the response object being updated at the end of all these Promises. I've omitted res.send(response) from this example because it's not working, but that's of course my desired outcome.
Any advice is appreciated! New to javascript...
I would recommend rewriting this using async/await, as it helps to reduce nesting. You can also extract the logic to get the album details into a separate function, as this also increases the readability of the code. Something like this (it still needs error handling, but it should give you a start):
app.get("/artists/:artist/albums", async (req, res) => {
const artistName = req.params.artist;
const albumNames = await s3.listAlbums(artistName);
const result = {};
const albumDetailPromises = albumNames.map(albumName => requestAlbumDetails(discogs, artistName, albumName));
const resolvedAlbumDetails = await Promise.all(albumDetailPromises);
// map desired response structure
for(const albumDetail of resolvedAlbumDetails) {
result[albumDetail.albumName] = albumDetail.albumDetails;
}
res.json(result);
});
async function requestAlbumDetails(service, artistName, albumName) {
const albumInfo = await service.getAlbumId(artistName, albumName);
const masterId = albumInfo.results[0].id;
const albumDetails = await service.getAlbumDetails(masterId);
return { albumName, albumDetails };
}
To answer your question about how you could do it with your existing code: you'd need to wait for all details to be fulfilled using another Promise.all call and then send the response in the then-handler:
Promise.all(albumDetailsPromises)
  .then((namedPromises) => {
    const detailsPromises = namedPromises.map(
      (album) => {
        let albumName = album[0];
        let albumDetailPromise = album[1];
        return albumDetailPromise
          .then(({ data }) => {
            response[albumName] = data;
          })
          .catch(err => response[albumName] = err);
      });
    return Promise.all(detailsPromises)
      .then(() => res.json(response));
  })
Refactored using async/await...
app.get("/artists/:artist/albums", async (req, res) => {
const artistName = req.params.artist
let response = {};
let albums = await s3.listAlbums(artistName)
const promises = albums.map(async (album) => {
let result = await discogs.getAlbumId(artistName, album)
try {
let masterId = result.data.results[0].id
let tempRes = await discogs.getAlbumDetails(masterId)
return [album, tempRes.data]
} catch (error) {
return [album, { "msg": error.message }]
}
})
responses = await Promise.all(promises)
responses.map(data => { response[data[0]] = data[1] })
res.send(response)
})

How do I make a long list of http calls in serial?

I'm trying to make only one http call at a time, but when I log the responses from getUrl they are piling up and I start to get 429s (Too Many Requests).
function getUrl(url, i, cb) {
  const fetchUrl = `https://api.scraperapi.com?api_key=xxx&url=${url.url}`;
  fetch(fetchUrl).then(async res => {
    console.log(fetchUrl, 'fetched!');
    if (!res.ok) {
      const err = await res.text();
      throw err.message || res.statusText;
    }
    url.data = await res.text();
    cb(url);
  });
}
let requests = urls.map((url, i) => {
  return new Promise(resolve => {
    getUrl(url, i, resolve);
  });
});
const all = await requests.reduce((promiseChain, currentTask) => {
  return promiseChain.then(chainResults =>
    currentTask.then(currentResult => [...chainResults, currentResult]),
  );
}, Promise.resolve([]));
Basically I don't want the next http call to start until the previous one has finished; otherwise I hammer their server.
BONUS POINTS: Make this work with 5 at a time in parallel.
Since you're using await, it would be a lot easier to use that everywhere instead of using confusing .thens with reduce. It'd also be good to avoid the explicit Promise construction antipattern. This should do what you want:
const results = [];
for (const url of urls) {
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(response); // or whatever logic you need with errors
  }
  results.push(await response.text());
}
Then your results variable will contain an array of response texts (or an error will have been thrown, and the code won't reach the bottom).
The syntax for an async function is an async keyword before the argument list, just like you're doing in your original code:
const fn = async () => {
  const results = [];
  for (const url of urls) {
    const response = await fetch(url);
    if (!response.ok) {
      throw new Error(response); // or whatever logic you need with errors
    }
    results.push(await response.text());
  }
  // do something with results
};
To have a limited number of requests at a time, make a queue system - when a request completes, recursively call a function that makes another request, something like:
const results = [];
const queueNext = async () => {
  if (!urls.length) return;
  const url = urls.shift();
  const response = await fetch(url);
  if (!response.ok) {
    throw new Error(response); // or whatever logic you need with errors
  }
  results.push(await response.text());
  await queueNext();
}
await Promise.all(Array.from({ length: 5 }, queueNext));
// do something with results
You cannot use Array methods to sequentially run async operations, because array methods are all synchronous.
The easiest way to achieve sequential async tasks is through a loop. Otherwise, you will need to write a custom function to imitate a loop and run .then after an async task ends, which is quite troublesome and unnecessary.
Also, fetch is already returning a Promise, so you don't have to create a Promise yourself to contain that promise returned by fetch.
The code below is a working example, with small changes to your original code (see comments).
// Fake urls for example purpose
const urls = [{ url: 'abc' }, { url: 'def' }, { url: 'ghi' }];
// To imitate actual fetching
const fetch = (url) => new Promise(resolve => {
  setTimeout(() => {
    resolve({
      ok: true,
      text: () => new Promise(res => setTimeout(() => res(url), 500))
    });
  }, 1000);
});
function getUrl(url, i, cb) {
  const fetchUrl = `https://api.scraperapi.com?api_key=xxx&url=${url.url}`;
  return fetch(fetchUrl).then(async res => { // <-- changes here
    console.log(fetchUrl, 'fetched!');
    if (!res.ok) {
      const err = await res.text();
      throw err.message || res.statusText;
    }
    url.data = await res.text();
    return url; // <--- changes here
  });
}
async function getAllUrls(urls) {
  const result = [];
  for (const url of urls) {
    const response = await getUrl(url);
    result.push(response);
  }
  return result;
}
getAllUrls(urls)
  .then(console.log);
async/await is perfect for this.
Assuming you have an array of URLs as strings:
let urls = ["https://example.org/", "https://google.com/", "https://stackoverflow.com/"];
You simply need to do:
for (let u of urls) {
  await fetch(u).then(res => {
    // Handle response
  }).catch(e => {
    // Handle error
  });
}
The loop will not iterate until the current fetch() has resolved, which will serialise things.
The reason array.map doesn't work is as follows:
async function doFetch(url) {
  return await fetch(url).then(res => {
    // Handle response
  }).catch(e => {
    // Handle error
  });
}
let mapped = urls.map(doFetch);
is equivalent to:
let mapped = [];
for (u of urls) {
  mapped.push(doFetch(u));
}
This will populate mapped with a bunch of Promises immediately, which is not what you want. The following is what you want:
let mapped = [];
for (u of urls) {
  mapped.push(await doFetch(u));
}
But this is not what array.map() does. Therefore using an explicit for loop is necessary.
Many people have provided answers using a for loop, but in some situations await in a for loop is not welcome, for example if you are using the Airbnb style guide.
Here is a solution using recursion.
// Fake urls for example purpose
const urls = [{ url: 'abc' }, { url: 'def' }, { url: 'ghi' }];
async function serialFetch(urls) {
  return await doSerialRecursion(
    async (url) => {
      return await fetch(url)
        .then((response) => {
          // handle response
        })
        .catch((err) => {
          // handle error
        });
    },
    urls,
    0
  );
}
async function doSerialRecursion(fn, array, startIndex) {
  if (!array[startIndex]) return [];
  const currResult = await fn(array[startIndex]);
  return [currResult, ...(await doSerialRecursion(fn, array, startIndex + 1))];
}
const yourResult = await serialFetch(urls);
The doSerialRecursion function will serially execute the function you passed in, which is fetch(url) in this example.

How to cache data from async function that uses fetch in Node

I was trying to see if there was a way to cache a json response from a fetch async call, possibly using LRU.
I've tried using several packages, such as node-cache and lru-cache, but I don't think they worked because my function is asynchronous.
This is what my fetch function basically looks like:
const jsonFetch = async (url) => {
  try {
    const response = await fetch(url)
    const json = await response.json();
    return json
  }
  catch (error) {
    console.log(error)
  }
}
For example, if someone hits my route 20 times in a minute, I'd like to fetch the data once and return the response within 0.03 ms instead of 0.3 ms. Currently, it always uses the URL to fetch the data.
This has been here for a while, but I agree with the comment from @sleepy012. If I wanted to avoid parallel calls, the trick is to cache the promise, not only the value. So something like this should work:
let cache = {}
function cacheAsync(loader) {
  return async (url) => {
    if (url in cache) { // return cached result if available
      console.log("cache hit")
      return cache[url]
    }
    try {
      const responsePromise = loader(url)
      cache[url] = responsePromise
      return responsePromise
    }
    catch (error) {
      console.log('Error', error.message)
    }
  };
}
function delayedLoader(url) {
  console.log('Loading url: ' + url)
  return new Promise((r) => setTimeout(r, 1000, 'Returning ' + url));
}
const cachedLoader = cacheAsync(delayedLoader);
cachedLoader('url1').then((d) => console.log('First load got: ' + d));
cachedLoader('url1').then((d) => console.log('Second load got: ' + d));
cachedLoader('url2').then((d) => console.log('Third load got: ' + d));
cachedLoader('url2').then((d) => console.log('Fourth load got: ' + d));
console.log('Waiting for load to complete');
There's nothing about async functions that will prevent caching results. It's possible the libraries you're looking at can't handle the promises, but here's a basic proof of concept that might help to get things started:
let cache = {}
const jsonFetch = async (url) => {
  if (url in cache) { // return cached result if available
    console.log("cache hit")
    return cache[url]
  }
  try {
    const response = await fetch(url)
    const json = response.json();
    cache[url] = json // cache response keyed to url
    return json
  }
  catch (error) {
    console.log(error)
  }
}
jsonFetch("https://jsonplaceholder.typicode.com/todos/1").then((user) => console.log(user.id))
// should be cached -- same url
setTimeout(() => jsonFetch("https://jsonplaceholder.typicode.com/todos/1").then((user) => console.log(user.id)), 2000)
// not in cache
setTimeout(() => jsonFetch("https://jsonplaceholder.typicode.com/todos/2").then((user) => console.log(user.id)), 2000)
You will only get cache hits on requests made after the first request has returned a value to cache.
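For example, with the jsonFetch above, a timeline like this (with a hypothetical url variable) shows the limitation:

jsonFetch(url) // miss: starts a fetch; cache[url] is not set yet
jsonFetch(url) // also a miss: the first fetch hasn't resolved, so a second request goes out
setTimeout(() => jsonFetch(url), 5000) // hit, assuming the first request resolved within 5 seconds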

Node js lost in asynchronous behaviour: undefined

Objective
Disclaimer: I am new to the Node world and having a tough time wrapping my head around Node's asynchronous behaviour.
I am trying to write a wrapper function that does an https.get on a given url and returns JSON output.
Code
const https = require('https');
// Get the user details
var myUrl = <valid-url>;
const getJson = function(url) {
  // https get request
  const req = https.get(url, (res) => {
    // get the status code
    const { statusCode } = res;
    const contentType = res.headers['content-type'];
    // check for the errors
    let error;
    if (statusCode !== 200) {
      error = new Error('Request Failed.\n' +
        `Status Code: ${statusCode}`);
    } else if (!/^application\/json/.test(contentType)) {
      error = new Error('Invalid content-type.\n' +
        `Expected application/json but received ${contentType}`);
    }
    if (error) {
      console.error(error.message);
      // consume response data to free up memory
      res.resume();
      return;
    }
    // parse json
    res.setEncoding('utf8');
    let rawData = '';
    res.on('data', (chunk) => { rawData += chunk; });
    res.on('end', () => {
      try {
        const parsedData = JSON.parse(rawData);
        console.log(parsedData);
      } catch (e) {
        console.error(e.message);
      }
    });
  }).on('error', (e) => {
    console.error(`Got error: ${e.message}`);
  });
}
console.log(getJson(myUrl));
Output
undefined
{ user_id: <user-id>,
name: 'Ajay Krishna Teja',
email: <my-email> }
Issue
So the https.get is able to hit the endpoint and get data, but it is not able to return the JSON; it constantly returns undefined.
Things I tried
Returning parsedData in the res.on('end') block
Defining a var and copying parsedData
Copying to a global variable (although I knew it's very bad practice)
Places I looked up
Node.js variable declaration and scope
How to get data out of a Node.js http get request
Javascript function returning undefined value in node js
Updated: Working code
const getJson = function(url, callback) {
  // https get request
  const req = https.get(url, (res) => {
    // get the status code
    const { statusCode } = res;
    const contentType = res.headers['content-type'];
    // check for the errors
    let error;
    if (statusCode !== 200) {
      error = new Error('Request Failed.\n' +
        `Status Code: ${statusCode}`);
    } else if (!/^application\/json/.test(contentType)) {
      error = new Error('Invalid content-type.\n' +
        `Expected application/json but received ${contentType}`);
    }
    if (error) {
      console.error(error.message);
      // consume response data to free up memory
      res.resume();
      return;
    }
    // parse json
    res.setEncoding('utf8');
    let rawData = '';
    res.on('data', (chunk) => { rawData += chunk; });
    res.on('end', () => {
      try {
        const parsedData = JSON.parse(rawData);
        callback(parsedData);
      } catch (e) {
        callback(false);
        console.error(e.message);
      }
    });
  }).on('error', (e) => {
    console.error(`Got error: ${e.message}`);
  });
  return req;
}
// calling
getJson(amznProfileURL, (res) => {
  console.log(res);
});
Short answer: you are not returning anything from your getJson function, and undefined is the default return value in JavaScript/Node.
function getJson(){
  callAsyncFunction(param1, param2, param3)
  // there is no return value!
}
Longer answer: Javascript (and Node as a result) is a single-threaded language that uses callbacks as its mechanism to return async results back to the callee. To do this, you pass a function into an asynchronous function as a parameter, and that function gets called at some point in the future whenever the asynchronous function is ready to send back its result. Calling return from this "anonymous function" is actually just returning from the "callback" function you are sending into the async function.
function getJson(){
  console.log('A')
  // request is started, but getJson continues execution!
  http.get(url, (res) => {
    console.log('C') // by the time I'm called, 'B' has already been printed and the function has returned!
    return true // this won't return from getJson! It only returns from the callback function, which doesn't do anything here!
  })
  console.log('B')
  // end of function without return value, return undefined!
}
// Will print 'A', 'B', 'C'
There are a couple different ways you can handle this. Callbacks have been used traditionally but Javascript also natively supports Promises which are a little easier to manage and are used in many popular frameworks by default.
You can implement your function with callbacks by providing your own callback parameter and calling it as soon as http.get returns a result.
// define getJson with second callback parameter
const getJson = function(url, callback) {
  http.get(url, (res) => {
    if (res) {
      callback(res) // result came back, send it to your own callback function
    } else {
      callback(false) // request failed, send back false to signify failure
    }
  })
}
// now I can use getJson and get the result!
getJson('http://getjson.com', (res) => {
  console.log('got result!', res)
})
This is a pretty common hump to get over with async functions in node (and javascript in general).
What's happening is that your console.log(getJson(myUrl)) is called before the http request has returned anything. Basically, things like this won't work with async functions.
If you put your console.log() inside res.on('end') it will work. The way to deal with this is to either put all your logic in res.on('end') (which kind of sucks), pass a callback to your getJson() function and call it in res.on('end'), or wrap everything in a promise, which you can return from getJson() (sketched below, after the callback example).
To use a callback you would do something like this:
const getJson = function(url, callback) {
  // a bunch of code
  res.on('end', () => {
    try {
      const parsedData = JSON.parse(rawData);
      callback(null, parsedData) // callbacks in node traditionally pass an error as the first arg
    }
    // finish
  })
}
Then you call it with a function:
getJson(url, function(err, return_val) {
  if (err) // handle error
  console.log(return_val)
})
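The promise option mentioned above could look roughly like this; a minimal sketch that assumes the same status checks and rawData accumulation as your working code:

const getJsonPromise = (url) => new Promise((resolve, reject) => {
  https.get(url, (res) => {
    // ... the same status-code and content-type checks as above ...
    res.setEncoding('utf8');
    let rawData = '';
    res.on('data', (chunk) => { rawData += chunk; });
    res.on('end', () => {
      try {
        resolve(JSON.parse(rawData)); // fulfil the promise instead of invoking a callback
      } catch (e) {
        reject(e);
      }
    });
  }).on('error', reject);
});

getJsonPromise(myUrl).then(console.log).catch(console.error);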
You can also look at other HTTP libraries like Axios that will return a promise without much work. With axios and similar libraries you can simply:
axios.get(url)
  .then(response => {
    console.log(response);
  })
  .catch(function (error) {
    console.log(error);
  });
Which is one of the reasons people use these libraries. More here: https://github.com/axios/axios
Because it runs asynchronously, the code does not wait for the function call to finish. You can fix it with the promise pattern.
Try something like this:
/**
 * Created by bagjeongtae on 2017. 10. 2..
 */
function parseData(url) {
  return new Promise((resolve, reject) => {
    https.get(url, (res) => {
      // get the status code
      const { statusCode } = res;
      const contentType = res.headers['content-type'];
      // check for the errors
      let error;
      if (statusCode !== 200) {
        reject('Request Failed.\n' + `Status Code: ${statusCode}`);
      } else if (!/^application\/json/.test(contentType)) {
        reject('Invalid content-type.\n' +
          `Expected application/json but received ${contentType}`);
      }
      if (error) {
        console.error(error.message);
        reject(error.message);
      }
      res.resume();
      // parse json
      res.setEncoding('utf8');
      let rawData = '';
      res.on('data', (chunk) => {
        rawData += chunk;
      });
      res.on('end', () => {
        try {
          const parsedData = JSON.parse(rawData);
          console.log(parsedData);
          resolve(parsedData);
        } catch (e) {
          console.error(e.message);
          reject(e.message);
        }
      });
    });
  });
};
parseData('http://www.example.com').then(result => {
  console.log(result);
}, err => {
  console.log(err);
})
parseData('http://www.example.com').then( result =>{
console.log(result);
}, err => {
console.log(err);
})
Running console.log(getJson(myUrl)) does not wait for getJson to finish, because getJson is asynchronous. With async/await, asynchronous code can be written in a synchronous-looking style.
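A minimal sketch, assuming getJson is rewritten to return a promise as in the answers above:

(async () => {
  const json = await getJson(myUrl); // waits until the promise resolves
  console.log(json); // logs the parsed JSON instead of undefined
})();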
I think the output is correct. getJson(myUrl) returns undefined since you do not set a return value in the getJson function (JavaScript returns undefined by default), and the
{ user_id: <user-id>,
  name: 'Ajay Krishna Teja',
  email: <my-email> }
is the output of console.log(parsedData) in your code.

How to retrieve all posts of a user via Facebook Graph API using promises and recursion?

I am currently developing a web app which uses the Facebook Graph API.
What I would like to achieve is to get all posts of a user.
However, this is not that easy since I have to paginate the results.
At the moment I am struggling with promises.
What I try to achieve is to fill an array with the post objects.
Therefore I use promises and recursion, which does not work as expected.
My code currently looks as follows:
// Here I retrieve the user with his or her posts,
// just the first 25 due to pagination
if (accessToken) {
  return new Promise(resolve => {
    FB.api('/me?fields=id,name,posts&access_token=' + accessToken, response => {
      this.get('currentUser').set('content', response);
      resolve()
    })
  })
}
// Returns all posts of a given user
function getAllPostsOfUser(posts, postsArr) {
  // Process each post of the current pagination level
  for (var post of posts.data) {
    // Only store meaningful posts
    if (post !== undefined && post.message !== undefined) {
      postsArr.push(post)
    }
  }
  // Further posts are retrievable via paging.next, which is a url
  if (posts.data.length !== 0 && posts.paging.next !== undefined) {
    FB.api(posts.paging.next, response => {
      getAllPostsOfUser(response, postsArr)
      resolve()
    })
  }
  return postsArr
}
var posts = getAllPostsOfUser(this.get('currentUser').content.posts, [])
// I want to use all the posts here
console.log(posts)
The problem I have is that I want to use the posts where the console.log is placed, but when I log the posts array a lot of posts are missing.
I am sure that I did something wrong with the promises but I do not know what.
I would be glad if anyone could guide me to a solution.
Thank you in advance.
Try this:
function getAllPosts() {
  return new Promise((resolve, reject) => {
    let postsArr = [];
    function recursiveAPICall(apiURL) {
      FB.api(apiURL, (response) => {
        if (response && response.data) {
          // add response to posts array (merge arrays), check if there is more data via paging
          postsArr = postsArr.concat(response.data);
          if (response.paging && response.paging.next) {
            recursiveAPICall(response.paging.next);
          } else {
            resolve(postsArr);
          }
        } else {
          reject();
        }
      });
    }
    recursiveAPICall("/me/posts?fields=message&limit=100");
  });
}
getAllPosts()
  .then((response) => {
    console.log(response);
  })
  .catch((e) => {
    console.log(e);
  });
Not tested, just a quick example I came up with. It returns a promise and uses a recursive function to get all entries. BTW, you don't need to add the Access Token. If you are logged in, the SDK will use it internally.
This is an old question that is already answered but I thought it could use a more modern answer, considering how many lines of code could be saved. This code has not been tested with the real API but it should work.
This function returns a promise of an array of posts.
async function getPosts(url = "/me/posts?fields=message&limit=100") {
  const { error, paging, data } = (await new Promise(r => FB.api(url, r))) || {}
  if (error || !data) throw new Error(error || "Could not get posts")
  return data.concat(paging?.next ? await getPosts(paging.next) : [])
}
With comments:
async function getPosts(url = "/me/posts?fields=message&limit=100") {
  // get response data out of callback
  const { error, paging, data } = (await new Promise(r => FB.api(url, r))) || {}
  // if there was an error or there wasn't any data, throw
  if (error || !data) throw new Error(error || "Could not get posts")
  // return this page's data + if there's a next page, recursively get its data
  return data.concat(paging?.next ? await getPosts(paging.next) : [])
}
The function can then be consumed like so:
async function main() {
  try {
    const posts = await getPosts(); // the array of posts
    console.log(posts);
  } catch (error) {
    console.error(error);
  }
}
main();
Below is a snippet demonstrating the function using a fake API.
// fake api for testing purposes
const FB = {
  api(url, callback) {
    const pages = [
      {
        data: ["post1", "post2", "post3"],
        paging: { next: 1 },
      },
      {
        data: ["post4", "post5", "post6"],
        paging: { next: 2 },
      },
      {
        data: ["post7", "post8", "post9"],
      },
    ];
    if (typeof url !== "number") return callback(pages[0]);
    return callback(pages[url]);
  },
};
async function getPosts(url = "/me/posts?fields=message&limit=100") {
  const { error, paging, data } = (await new Promise(r => FB.api(url, r))) || {}
  if (error || !data) throw new Error(error || "Could not get posts")
  return data.concat(paging?.next ? await getPosts(paging.next) : [])
}
async function main() {
  try {
    const posts = await getPosts(); // the array of posts
    console.log(posts);
  } catch (error) {
    console.error(error);
  }
}
main();
Also see erikhagreis's ESM wrapper on GitHub.
