How to cache data from async function that uses fetch in Node - javascript

I was trying to see if there was a way to cache a json response from a fetch async call, possibly using LRU.
I've tried using several packages, such as node-cache and lru-cache, but I don't think they worked because my function is asynchronous.
This is what my fetch function basically looks like:
// Fetches a URL and parses the response body as JSON.
// Resolves with the parsed value, or with undefined if the request or the
// parsing fails (the error is logged, not rethrown).
const jsonFetch = async (url) => {
  try {
    const response = await fetch(url);
    return await response.json();
  } catch (error) {
    console.log(error);
  }
};
For example, if I get someone to hit my route 20 times in a minute, I'd like to easily fetch the data and return the response within 0.03 ms instead of 0.3 ms. Currently, it is always using the URL to fetch the data instead of serving it from a cache.

This has been here for a while, but I agree with the comment from #sleepy012. If I wanted to avoid parallel calls, the trick should be to cache the promise, not only the value. So something like this should work:
let cache = {}
// Wraps an async loader so that repeat and concurrent calls for the same url
// share one in-flight promise (the promise is cached, not the value, which is
// what prevents parallel duplicate requests).
// Fixed: a rejected promise is now evicted from the cache, so a transient
// failure is retried on the next call instead of being cached forever.
function cacheAsync(loader) {
  return async (url) => {
    if (url in cache) { // return cached result if available
      console.log("cache hit")
      return cache[url]
    }
    try {
      const responsePromise = Promise.resolve(loader(url))
      // Evict failed loads; the rejection still propagates to the caller.
      responsePromise.catch(() => { delete cache[url] })
      cache[url] = responsePromise
      return responsePromise
    }
    catch (error) {
      // Only reached if loader throws synchronously.
      console.log('Error', error.message)
    }
  };
}
// Simulates a slow fetch: logs the url, then resolves with a message after
// one second.
function delayedLoader(url) {
  console.log('Loading url: ' + url);
  return new Promise((resolve) => {
    setTimeout(() => resolve('Returning ' + url), 1000);
  });
}
// Demo: the same wrapped loader is reused for four calls over two urls.
const cachedLoader = cacheAsync(delayedLoader);
// The first call per url invokes delayedLoader; the repeat call for the same
// url shares the cached in-flight promise ("cache hit").
cachedLoader('url1').then((d) => console.log('First load got: ' + d));
cachedLoader('url1').then((d) => console.log('Second load got: ' + d));
cachedLoader('url2').then((d) => console.log('Third load got: ' + d));
cachedLoader('url2').then((d) => console.log('Fourth load got: ' + d));
// Logs first: the loads above resolve asynchronously.
console.log('Waiting for load to complete');

There's nothing about async functions that will prevent caching results. It's possible the libraries you're looking at can't handle the promises, but here's a basic proof of concept that might help to get things started:
let cache = {}
// Fetches url and parses the JSON body, memoising the parsed value per url.
// Subsequent calls for the same url resolve from the in-memory cache without
// touching the network. Errors are logged and yield undefined (not cached).
const jsonFetch = async (url) => {
  if (url in cache) { // return cached result if available
    console.log("cache hit")
    return cache[url]
  }
  try {
    const response = await fetch(url)
    // Fixed: response.json() returns a Promise — await it so the cache holds
    // the parsed value rather than a pending promise.
    const json = await response.json();
    cache[url] = json // cache response keyed to url
    return json
  }
  catch (error) {
    console.log(error)
  }
}
// Demo: the second request for todos/1 lands after the first has cached its
// value, so it is served from memory; todos/2 is a cache miss.
jsonFetch("https://jsonplaceholder.typicode.com/todos/1").then((user) => console.log(user.id))
// should be cached -- same url
setTimeout(() => jsonFetch("https://jsonplaceholder.typicode.com/todos/1").then((user) => console.log(user.id)), 2000)
// not in cache
setTimeout(() => jsonFetch("https://jsonplaceholder.typicode.com/todos/2").then((user) => console.log(user.id)), 2000)
You will only get cache hits on requests made after the first request returns a value to cache

Related

Promise wont return valid value

I have this test I made just to check an API, but then I tried to add a URL for a second fetch, using as a parameter a value obtained in the first fetch, and then return a value to add to the first fetch. The idea is to add the image URL to the link. Thanks in advance.
// Question code. Builds a list of pokemon links from the paged API.
// NOTE(review): getImageURL is async and returns a Promise, so the template
// interpolation in the loop below renders "[object Promise]" as the href —
// this is the bug the answer explains.
function script() {
const url = 'https://pokeapi.co/api/v2/pokemon/?offset=20&limit=20'
const result = fetch(url)
.then( (res)=>{
if(res.ok) {
return res.json()
} else {
console.log("Error!!")
}
}).then( data => {
console.log(data)
const main = document.getElementById('main');
main.innerHTML=`<p><a href='${data.next}'>Next</a></p>`;
for(let i=0; i<data.results.length;i++){
main.innerHTML=main.innerHTML+`<p><a href=${getImageURL(data.results[i].url)}>${data.results[i].name}</a></p>`;
}
})
}
// Question code. Intended to resolve with the dream_world sprite URL.
// NOTE(review): the final .then callback only logs and returns nothing, so
// the awaited chain yields undefined and the property access on the last
// line throws — exactly what the accepted answer points out.
async function getImageURL(imgUrl) {
const resultImg = await fetch(imgUrl)
.then( (res)=> {
return res.json()
})
.then (data => {
console.log(data.sprites.other.dream_world.front_default);
})
return resultImg.sprites.other.dream_world.front_default;
}
In general, don't mix .then/.catch handlers with async/await. There's usually no need, and it can trip you up like this.
The problem is that your fulfillment handler (the .then callback) doesn't return anything, so the promise it creates is fulfilled with undefined.
You could return data, but really just don't use .then/.catch at all:
// Resolves with the dream_world sprite URL for the pokemon at imgUrl.
// Rejects with an Error on a non-2xx HTTP status — fetch only rejects on
// network failure, so res.ok must be checked explicitly.
async function getImageURL(imgUrl) {
  const res = await fetch(imgUrl);
  if (!res.ok) {
    throw new Error(`HTTP error ${res.status}`);
  }
  const payload = await res.json();
  return payload.sprites.other.dream_world.front_default;
}
[Note I added a check of res.ok. This is (IMHO) a footgun in the fetch API, it doesn't reject its promise on HTTP errors (like 404 or 500), only on network errors. You have to check explicitly for HTTP errors. (I wrote it up on my anemic old blog here.)]
There's also a problem where you use getImageURL:
// Incorrect — getImageURL returns a Promise, so the interpolation below
// renders "[object Promise]" instead of the resolved URL.
for (let i = 0; i < data.results.length; i++) {
main.innerHTML=main.innerHTML+`<p><a href=${getImageURL(data.results[i].url)}>${data.results[i].name}</a></p>`;
}
The problem here is that getImageURL, like all async functions, returns a promise. You're trying to use it as though it returned the fulfillment value you're expecting, but it can't — it doesn't have that value yet.
Instead, you need to wait for the promise(s) you're creating in that loop to be fulfilled. Since that loop is in synchronous code (not an async function), we'd go back to .then/.catch, and since we want to wait for a group of things to finish that can be done in parallel, we'd do that with Promise.all:
// ...
const main = document.getElementById('main');
// Fixed: `html` is appended to below, so it must be declared with `let` —
// assigning to a `const` binding throws a TypeError at runtime.
let html = `<p><a href='${data.next}'>Next</a></p>`;
// Resolve every sprite URL in parallel, then render once they are all done.
Promise.all(data.results.map(async ({url, name}) => {
  const realUrl = await getImageURL(url);
  return `<p><a href=${realUrl}>${name}</a></p>`;
}))
  .then(paragraphs => {
    html += paragraphs.join("");
    main.innerHTML = html;
  })
  .catch(error => {
    // ...handle/report error...
  });
For one, your
.then (data => {
console.log(//...
at the end of the promise chain returns undefined. Just remove it, and if you want to console.log it, do console.log(resultImg) in the next statement/next line, after await.
This is the final version that accomplishes my goal. I just want to leave this here in case someone finds it useful. Thanks to those who answered!
// Final version: renders the "next page" link immediately, then fills in one
// entry per pokemon once every getImageURL promise has resolved.
function script() {
  const url = 'https://pokeapi.co/api/v2/pokemon/?offset=20&limit=20'
  const result = fetch(url)
    .then((res) => {
      if (res.ok) {
        return res.json()
      } else {
        console.log("Error!!")
      }
    }).then(data => {
      console.log(data)
      const main = document.getElementById('main');
      main.innerHTML = `<p><a href='${data.next}'>Proxima Página</a></p>`;
      Promise.all(data.results.map(async ({url, name}) => {
        const realUrl = await getImageURL(url);
        return `<div><a href=${realUrl}>${name}</a></div>`;
      }))
        .then(paragraphs => {
          // Fixed: `paragraphs` is an array — concatenating it directly
          // stringifies it with commas between the <div>s; join('') produces
          // clean markup.
          main.innerHTML = main.innerHTML + paragraphs.join('');
        })
        .catch(error => {
          console.log(error);
        });
    })
}
// Resolves with the dream_world sprite URL for the pokemon at imgUrl;
// rejects with an Error for any non-2xx HTTP status.
async function getImageURL(imgUrl) {
  const res = await fetch(imgUrl);
  if (!res.ok) {
    throw new Error(`HTTP Error ${res.status}`);
  }
  const data = await res.json();
  return data.sprites.other.dream_world.front_default;
}

Object property coming up as undefined after assigning with an async fetch

I'm having some trouble with adding a key to an object as seen here:
// Question code (React). Fetches a BGG collection as XML, converts it with
// xml2js, then fires one statistics fetch per game. Retries the whole fetch
// on HTTP 202 (API still preparing data).
// NOTE(review): the inner per-game fetch is asynchronous — game.statistics
// is assigned only inside its .then callback, so the code that runs
// synchronously after it (the console.logs and the stats normalisation)
// executes before that assignment lands. This is the bug the answers explain.
const recursiveFetchAndWait = useCallback(
(url) => {
setLoading(true);
fetch(url)
.then(async response => {
if (response.status === 200) { // Checking for response code 200
const xml = await response.text();
setLoading(false);
return XML2JS.parseString(xml, (err, result) => { // xml2js: converts XML to JSON
if (result.items.$.totalitems !== '0') { // Only processing further if there are returned results
result.items.item.forEach(game => {
/* Fetching the statistics from a separate API, because the base API doesn't include these */
const gameId = game.$.objectid;
fetch('https://cors-anywhere.herokuapp.com/https://www.boardgamegeek.com/xmlapi2/thing?id=' + gameId + '&stats=1')
.then(async response => {
const xml = await response.text();
return XML2JS.parseString(xml, (err, result) => {
console.log('result', result); // This returns data.
game.statistics = result.items.item[0].statistics[0].ratings[0];
// setStatistics(...{statistics}, ...{gameId: result.items.item[0].statistics[0].ratings[0]})
})
})
// NOTE(review): both logs below run before the fetch above has resolved;
// the first one appears populated only because the console evaluates the
// object lazily — game.statistics is still undefined at this point.
console.log('game', game); // This returns the object with the newly statistics key.
console.log('STATS!', game.statistics); // This doesn't recognize the statistics key?!
/* Going through the array and changing default values and converting string numbers to actual numbers */
if (game.stats[0].rating[0].ranks[0].rank[0].$.value === 'Not Ranked')
game.stats[0].rating[0].ranks[0].rank[0].$.value = 'N/A';
else {
game.stats[0].rating[0].ranks[0].rank[0].$.value = Number(game.stats[0].rating[0].ranks[0].rank[0].$.value);
}
game.stats[0].$.minplayers = Number(game.stats[0].$.minplayers);
if (isNaN(game.stats[0].$.minplayers))
game.stats[0].$.minplayers = '--';
game.stats[0].$.maxplayers = Number(game.stats[0].$.maxplayers);
if (isNaN(game.stats[0].$.maxplayers))
game.stats[0].$.maxplayers = '--';
game.stats[0].$.maxplaytime = Number(game.stats[0].$.maxplaytime);
if (isNaN(game.stats[0].$.maxplaytime))
game.stats[0].$.maxplaytime = '--';
if (game.yearpublished === undefined)
game.yearpublished = ['--'];
});
setGameList(result.items.item)
}
});
} else if (response.status === 202) { // If the status response was 202 (API still retrieving data), call the fetch again after a set timeout
setTimeoutAsCallback(() => recursiveFetchAndWait(url));
} else
console.log(response.status);
})
},
[],
);
Here are the results from the console.logs:
image
I fear the issue relates with the async call, but I'm confused as to why the first console.log() works fine then. If it is an async issue, how do I go about resolving this?
Your first console.log works because the "game" variable already exists and contains data before you even make the async fetch request. You could call it before the fetch and it would still be ok.
Your second console.log trying to output "game. statistics" is being run before the fetch has returned with any data. This is because async calls do not stop and wait for the code to complete the async task before moving on to the next lines of code. That is the intended purpose of an asynchronous code block. It will run the code inside the callback with the response once it's returned to perform anything that relies on the returned data. But without blocking the browser from continuing through the code to run the rest of the lines of code.
To achieve what you seem to be attempting to do, you could either place the task that you need to run after getting the data in a separate function and then calling it with the response.
// Kick off one statistics fetch per game and hand each response to
// processData once it arrives.
// NOTE(review): forEach does not await — all fetches run in parallel and
// nothing here signals when every processData call has finished.
games.forEach(game => {
fetch('https://www.boardgamegeek.com/xmlapi2/thing?id='+game.$.objectid+'&stats=1')
.then(response => {
processData(response, game);
})
});
// Parses one game's statistics XML and attaches it as game.statistics.
// Fixed: response.text() returns a Promise, so it must be awaited before
// parsing — the function is now async (callers may still ignore the result).
const processData = async (response, game) => {
  const xml = await response.text();
  // xml2js invokes this callback synchronously, so statistics is set before
  // the logs below run.
  XML2JS.parseString(xml, (err, result) => {
    game.statistics = result.items.item[0].statistics[0].ratings[0];
  });
  console.log('game', game);
  console.log('STATS!', game.statistics);
};
or you could explicitly tell it to wait for the completion of the async task to complete before moving on. This would require you to either use promises or wrap the entire games foreach loop in an async function. This is because only an async function knows how to handle processing an await on another async function called inside of itself.
Code for updated question
The editor wont let me format code properly anymore, but essentially the simplest solution is all your data handling logic should be executed within the XML callback. From your shared code I can't see any requirement for it to exist outside of the callback where the data is handled after it has been retrieved.
// Answer code: same flow as the question, but all logic that depends on
// game.statistics has been moved inside the inner XML2JS.parseString
// callback, so the value exists by the time it is read.
const recursiveFetchAndWait = useCallback(
(url) => {
setLoading(true);
fetch(url)
.then(async response => {
if (response.status === 200) { // Checking for response code 200
const xml = await response.text();
setLoading(false);
return XML2JS.parseString(xml, (err, result) => { // xml2js: converts XML to JSON
if (result.items.$.totalitems !== '0') { // Only processing further if there are returned results
result.items.item.forEach(game => {
/* Fetching the statistics from a separate API, because the base API doesn't include these */
const gameId = game.$.objectid;
fetch('https://cors-anywhere.herokuapp.com/https://www.boardgamegeek.com/xmlapi2/thing?id=' + gameId + '&stats=1')
.then(async response => {
const xml = await response.text();
return XML2JS.parseString(xml, (err, result) => {
// BEGINNING OF "XML2JS.parseString"
console.log('result', result); // This returns data.
game.statistics = result.items.item[0].statistics[0].ratings[0];
// setStatistics(...{statistics}, ...{gameId: result.items.item[0].statistics[0].ratings[0]})
console.log('game', game); // This returns the object with the newly statistics key.
console.log('STATS!', game.statistics); // This doesn't recognize the statistics key?!
/* Going through the array and changing default values and converting string numbers to actual numbers */
if (game.stats[0].rating[0].ranks[0].rank[0].$.value === 'Not Ranked')
game.stats[0].rating[0].ranks[0].rank[0].$.value = 'N/A';
else {
game.stats[0].rating[0].ranks[0].rank[0].$.value = Number(game.stats[0].rating[0].ranks[0].rank[0].$.value);
}
game.stats[0].$.minplayers = Number(game.stats[0].$.minplayers);
if (isNaN(game.stats[0].$.minplayers))
game.stats[0].$.minplayers = '--';
game.stats[0].$.maxplayers = Number(game.stats[0].$.maxplayers);
if (isNaN(game.stats[0].$.maxplayers))
game.stats[0].$.maxplayers = '--';
game.stats[0].$.maxplaytime = Number(game.stats[0].$.maxplaytime);
if (isNaN(game.stats[0].$.maxplaytime))
game.stats[0].$.maxplaytime = '--';
if (game.yearpublished === undefined)
game.yearpublished = ['--'];
});
// NOTE(review): setGameList is now called once per game with a single game,
// whereas the original passed the whole result.items.item array — confirm
// this is the intended state shape.
setGameList(game); // The forEach means that result.items.item == game
// END OF "XML2JS.parseString"
})
})
}
});
} else if (response.status === 202) { // If the status response was 202 (API still retrieving data), call the fetch again after a set timeout
setTimeoutAsCallback(() => recursiveFetchAndWait(url));
} else
console.log(response.status);
})
},
[],
);
Move console.log('STATS!', game.statistics); immediately below game.statistics =.
Or, do everything inside an async function:
// Sequentially fetch statistics for every game, then log the results.
(async () => {
  for (const game of games) {
    // Fixed: fetch() must be awaited — without `await`, `response` is a
    // pending Promise and response.text() is not a function.
    const response = await fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1');
    const xml = await response.text();
    // xml2js uses a callback API; wrap it in a Promise so it can be awaited.
    await new Promise(resolve => {
      XML2JS.parseString(xml, (err, result) => {
        game.statistics = result.items.item[0].statistics[0].ratings[0];
        resolve();
      });
    });
  }
  games.forEach(game => {
    console.log('game', game);
    console.log('STATS!', game.statistics);
  });
})();
If you want to create a sequential flow, you should follow the approach below:
// Fetch all statistics in parallel, waiting for every one before logging.
// Fixed: the snippet ended with `})();` but was missing its opening
// `(async () => {` wrapper, leaving it syntactically unbalanced — restored.
(async () => {
  await Promise.all(games.map(async game => {
    await new Promise((resolve) => {
      fetch('https://www.boardgamegeek.com/xmlapi2/thing?id=' + game.$.objectid + '&stats=1')
        .then(async response => {
          const xml = await response.text(); // XML2JS boilerplate
          xml2js.parseString(xml, (err, result) => { // XML2JS boilerplate
            console.log('result', result); // This returns data.
            game.statistics = result.items.item[0].statistics[0].ratings[0]; // Creating a new statistics key on the game object and assigning it the statistics from the API call
            resolve();
          });
        });
    });
  }));
  games.forEach(game => {
    console.log('game', game);
    console.log('STATS!', game.statistics);
  });
})();

How to use async, await and promises?

I am building a web scraper to get all of user's submissions on codeforces.
I don't know much about async, await, promises.
I have used axios (promise based) to request codeforces and cheerio to parse HTML .
// Express route: scrape the user's solutions, then zip the Problems folder
// and stream the archive back on the response.
app.post("/", (req, res) => {
const usernameorhandle = req.body.userName;
getstatus(usernameorhandle).then ( ()=> {
// On-disk zip target; 9 = maximum zlib compression.
var output = fs.createWriteStream(__dirname + '/Data/solutions.zip');
var archive = archiver('zip', {
zlib: { level: 9 } // Sets the compression level.
});
output.on('close', function() {
console.log(archive.pointer() + ' total bytes');
console.log('archiver has been finalized and the output file descriptor has closed.');
});
output.on('end', function() {
console.log('Data has been drained');
});
// NOTE(review): `output` is created and listened to but the archive is only
// piped to `res` below — solutions.zip on disk is never written.
res.attachment(__dirname + "/Data/Problems", 'Codeforces-Solutions');
archive.pipe(res);
archive.directory(__dirname + "/Data/Problems", 'Codeforces-Solutions');
archive.finalize();
}) })
I am using to accept post request.
I am putting all the solutions into a folder and creating zip folder and then send to res.
Below is my getstatus function.
// Question code. Resolves once every submission has been scraped.
// NOTE(review): wrapping awaited work in `new Promise` inside an async
// function is the explicit promise-construction antipattern, and failures of
// the axios call are never forwarded to reject().
async function getstatus(handle){
return new Promise(async (resolve, reject)=> {
console.log("HELLLLLLLOOOOOOOO");
await axios.get("https://codeforces.com/api/user.status?handle=" + handle + "&from=1")
.then(response => {
if(response.data.status === 'OK'){
let results = response.data.result;
console.log("AAAAAAAAAAAAAAAAAAAAAAAa");
scrape(results).then( () =>{
console.log("DONE");
resolve();
})
.catch(err => console.log(err));
// resolve();
}
// NOTE(review): `submissions` is not defined in this scope — this branch
// would throw; response.data.comment was probably intended.
else console.log(submissions.comment);
})
})
}
I use scrape function to obtain HTML data and put to folder named Problems.
// Question code. Downloads accepted submissions and writes each to disk.
// NOTE(review): results.forEach ignores the promises returned by its async
// callback, so resolve() at the bottom runs before any file is saved — this
// is why "hey"/"DONE" print before "Saved file". `await results.forEach(...)`
// awaits undefined. (As quoted, the snippet is also missing the function's
// final closing brace.)
async function scrape (results){
console.log("inside scrape");
// console.log("HELLO");
return new Promise( async (resolve, reject) => {
await results.forEach(async (result)=> {
if(result.verdict === 'OK'){
await axios.get("https://codeforces.com/contest/" + result.contestId + "/submission/" + result.id)
.then(solutionPage => {
const $ = cheerio.load(solutionPage.data);
const path = "/home/srujan/Desktop/crawlerapp/Data/Problems/" + result.problem.name + ".cpp";
fs.writeFile(path, $('#program-source-text').text(), function(err){
if(err){
console.log(err);
}
else{
console.log("Saved file");
}
})
})
.catch( error => {
console.log("HTML PARSE ERROR" + error);
})
}
})
console.log("hey");
resolve();
})
The problem is I am getting
HELLLLLLLOOOOOOOO
AAAAAAAAAAAAAAAAAAAAAAAa
inside scrape
hey
DONE
saved file
saved file
...
Browser downloads after DONE and then files are saved.
I am new to js and don't know why I am getting this.
PS : I know this is very long question. I tried reading a lot about this. Didn't understand properly how to do that. I copy pasted some code which I didn't understand like how to zip a folder.
forEach(callback) executes callback. If callback returns a promise (ie, it's an async function), the promise won't be resolved before calling the callback on the next element of the array.
So, basically, you can't use async functions inside forEach... But you can use for-loops or Promise.all instead!
Also, fs.writeFile works with sync + callback, but there exists a fs.promise.writeFile that uses promises instead.
Here's a scrape function that should work better:
// Downloads each accepted submission's source and writes it to disk, one
// result at a time; resolves only after every file has been handled.
async function scrape(results) {
  for (const entry of results) {
    if (entry.verdict !== 'OK') continue; // only accepted submissions
    const page = await axios.get("https://codeforces.com/contest/" + entry.contestId + "/submission/" + entry.id);
    const $ = cheerio.load(page.data);
    const target = "/home/srujan/Desktop/crawlerapp/Data/Problems/" + entry.problem.name + ".cpp";
    try {
      await fs.promises.writeFile(target, $('#program-source-text').text());
    } catch (err) {
      console.log(err);
    }
  }
}
The problem is to use result.forEach
Try to use a simple for(let i = 0; i < result.length; i++) without async.
If that doesn't work, try to return anything inside the then.
This is how I would construct getstatus function with await async
// Fetches a user's submission status from the Codeforces API and, when the
// API reports OK, scrapes every submission. Resolves after scraping finishes.
async function getstatus(handle) {
  const response = await axios.get("https://codeforces.com/api/user.status?handle=" + handle + "&from=1")
  if (response.data.status === 'OK') {
    let results = response.data.result;
    try {
      await scrape(results);
      console.log("DONE");
    }
    catch (error) {
      // Fixed: the catch block was empty, silently swallowing scrape failures.
      console.log(error);
    }
  }
}
and scrape function accordingly...
const fs = require('fs').promises;
// Downloads each accepted submission's source and writes it to disk.
// Fixed: the original used results.forEach with an async callback, which
// returns immediately without waiting for any write (and, as quoted, the
// forEach call was missing its closing parenthesis). A for...of loop awaits
// each step, so the returned promise settles only when all files are done.
async function scrape (results) {
  for (const result of results) {
    if (result.verdict === 'OK') {
      const solutionPage = await axios.get("https://codeforces.com/contest/" + result.contestId + "/submission/" + result.id)
      const $ = cheerio.load(solutionPage.data);
      const path = "/home/srujan/Desktop/crawlerapp/Data/Problems/" + result.problem.name + ".cpp";
      try {
        await fs.writeFile(path, $('#program-source-text').text())
        console.log("Saved file");
      }
      catch (error) {
        // Fixed: surface write failures instead of swallowing them silently.
        console.log(error);
      }
    }
  }
}

How do I make a long list of http calls in serial?

I'm trying to make only one HTTP call at a time, but when I log the responses from getUrl they are piling up and I start to get 409s (Too many requests).
// Question code. Fetches url.url through the scraper proxy, stores the body
// on url.data, then invokes cb(url).
// NOTE(review): the fetch promise is neither returned nor given a .catch —
// the `throw` inside .then becomes an unhandled rejection, and callers have
// no way to await completion. The answers below fix exactly this.
function getUrl(url, i, cb) {
const fetchUrl = `https://api.scraperapi.com?api_key=xxx&url=${url.url}`;
fetch(fetchUrl).then(async res => {
console.log(fetchUrl, 'fetched!');
if (!res.ok) {
const err = await res.text();
throw err.message || res.statusText;
}
url.data = await res.text();
cb(url);
});
}
// Question code.
// NOTE(review): new Promise(...) runs its executor immediately, so every
// getUrl call — and its fetch — starts here, at map time. The reduce below
// only serialises the order in which results are *collected*, not the
// requests themselves, which is why the server is still hammered.
let requests = urls.map((url, i) => {
return new Promise(resolve => {
getUrl(url, i, resolve);
});
});
const all = await requests.reduce((promiseChain, currentTask) => {
return promiseChain.then(chainResults =>
currentTask.then(currentResult => [...chainResults, currentResult]),
);
}, Promise.resolve([]));
Basically I don't want the next http to start until the previous one has finished. Otherwise I hammer their server.
BONUS POINTS: Make this work with 5 at a time in parallel.
Since you're using await, it would be a lot easier to use that everywhere instead of using confusing .thens with reduce. It'd also be good to avoid the explicit Promise construction antipattern. This should do what you want:
// Fetch each url strictly in sequence: the loop body suspends at each await,
// so the next request starts only after the previous response is consumed.
const results = [];
for (const url of urls) {
const response = await fetch(url);
if (!response.ok) {
throw new Error(response); // or whatever logic you need with errors
}
results.push(await response.text());
}
Then your results variable will contain an array of response texts (or an error will have been thrown, and the code won't reach the bottom).
The syntax for an async function is an async keyword before the argument list, just like you're doing in your original code:
// Same sequential fetch as above, wrapped in an async function so that
// `await` is usable outside module top-level.
const fn = async () => {
  const bodies = [];
  for (const target of urls) {
    const reply = await fetch(target);
    if (!reply.ok) {
      throw new Error(reply); // or whatever logic you need with errors
    }
    bodies.push(await reply.text());
  }
  // do something with results
};
To have a limited number of requests at a time, make a queue system - when a request completes, recursively call a function that makes another request, something like:
const results = [];
// Worker: repeatedly takes the next url off the shared queue until empty.
// NOTE: results are pushed in completion order, not input order.
const queueNext = async () => {
if (!urls.length) return;
const url = urls.shift();
const response = await fetch(url);
if (!response.ok) {
throw new Error(response); // or whatever logic you need with errors
}
results.push(await response.text());
await queueNext();
}
// Start five workers; each one keeps its lane busy via the recursion above,
// giving at most five requests in flight at any moment.
await Promise.all(Array.from({ length: 5 }, queueNext));
// do something with results
You cannot use Array methods to sequentially run async operations because array methods are all synchronous.
The easiest way to achieve sequential async tasks is through a loop. Otherwise, you will need to write a custom function to imitate a loop and run .then after a async task ends, which is quite troublesome and unnecessary.
Also, fetch is already returning a Promise, so you don't have to create a Promise yourself to contain that promise returned by fetch.
The code below is a working example, with small changes to your original code (see comments).
// Fake urls for example purpose
const urls = [{ url: 'abc' }, { url: 'def', }, { url: 'ghi' }];
// To imitate actual fetching
// Resolves after ~1s with a stub response whose text() resolves ~0.5s later
// with the requested url, so timing and ordering can be observed in logs.
const fetch = (url) => new Promise(resolve => {
setTimeout(() => {
resolve({
ok: true,
text: () => new Promise(res => setTimeout(() => res(url), 500))
});
}, 1000);
});
// Fetches through the scraper proxy, stores the body on url.data, and
// resolves with the same url object. Rejects with the error text or status
// on a non-OK response. (cb is unused; kept for signature compatibility.)
async function getUrl(url, i, cb) {
  const fetchUrl = `https://api.scraperapi.com?api_key=xxx&url=${url.url}`;
  const res = await fetch(fetchUrl);
  console.log(fetchUrl, 'fetched!');
  if (!res.ok) {
    const err = await res.text();
    throw err.message || res.statusText;
  }
  url.data = await res.text();
  return url;
}
// Runs getUrl over each entry strictly in sequence and collects the results
// in input order.
async function getAllUrls(urls) {
  const collected = [];
  for (let i = 0; i < urls.length; i += 1) {
    collected.push(await getUrl(urls[i]));
  }
  return collected;
}
getAllUrls(urls)
.then(console.log);
async/await is perfect for this.
Assuming you have an array of URLs as strings:
let urls = ["https://example.org/", "https://google.com/", "https://stackoverflow.com/"];
You simply need to do:
for (let u of urls) {
await fetch(u).then(res => {
// Handle response
}).catch(e => {
// Handle error
});
}
The loop will not iterate until the current fetch() has resolved, which will serialise things.
The reason array.map doesn't work is as follows:
// Illustration for the answer: doFetch always resolves with whatever the
// handlers produce (undefined here) and never rejects — errors are absorbed
// by the catch branch.
async function doFetch(url) {
  try {
    const res = await fetch(url);
    // Handle response
  } catch (e) {
    // Handle error
  }
}
let mapped = urls.map(doFetch);
is equivalent to:
// Equivalent of urls.map(doFetch). Fixed: `mapped` must be initialised to an
// array — pushing onto undefined throws — and the loop variable needs a
// declaration to avoid creating an implicit global.
let mapped = [];
for (const u of urls) {
mapped.push(doFetch(u));
}
This will populate mapped with a bunch of Promises immediately, which is not what you want. The following is what you want:
// The serialised version. Same fixes as above: initialise `mapped` to an
// array and declare the loop variable.
let mapped = [];
for (const u of urls) {
mapped.push(await doFetch(u));
}
But this is not what array.map() does. Therefore using an explicit for loop is necessary.
Many people provided answers using for loop. But in some situation await in for loop is not welcome, for example, if you are using Airbnb style guide.
Here is a solution using recursion.
// Fake urls for example purpose
const urls = [{ url: 'abc' }, { url: 'def', }, { url: 'ghi' }];
// Fetches every url strictly in sequence via doSerialRecursion.
// Fixed: `return result = await ...` assigned to an undeclared `result` — an
// implicit global in sloppy mode and a ReferenceError in strict/module code;
// the value should simply be returned.
async function serialFetch(urls) {
  return await doSerialRecursion(
    async (url) => {
      return await fetch(url)
        .then((response) => {
          // handle response
        })
        .catch((err) => {
          // handle error
        });
    },
    urls,
    0
  );
}
// Serially applies async fn to array[startIndex..], resolving with the
// results in order.
// Fixed: the recursive call passed (array, fn, ...) — arguments swapped —
// which made every recursion call the array as a function and crash.
// NOTE: recursion stops at the first falsy element as well as at the end of
// the array (preserved from the original termination test).
async function doSerialRecursion(fn, array, startIndex) {
  if (!array[startIndex]) return [];
  const currResult = await fn(array[startIndex]);
  return [currResult, ...(await doSerialRecursion(fn, array, startIndex + 1))];
}
const yourResult = await serialFetch(urls);
The doSerialRecursion function will serially execute the function you passed in, which is fetch(url) in this example.

AbortController not terminating fetch request

I'm attempting to create a helper function to automatically timeout fetch requests after 2000 ms. The following code does not abort the fetch request, and instead prints the request as normal after 4000 ms. The code appears to be working in browser but not in node.
require('isomorphic-fetch');
const AbortController = require('abort-controller');
// fetch with a 2000 ms timeout enforced via AbortController.
// Improvement: the abort timer is now cleared once the request settles, so a
// fast response no longer leaves a live timer that keeps the Node event loop
// alive and fires a pointless abort().
const fetchTimeout = async url => {
  const controller = new AbortController();
  const timer = setTimeout(() => {
    controller.abort();
  }, 2000);
  try {
    return await fetch(url, { signal: controller.signal });
  } catch (error) {
    // Preserve original behavior: rewrap as a plain Error with the same message.
    throw new Error(error.message);
  } finally {
    clearTimeout(timer);
  }
};
// Fetches the delayed httpbin endpoint through fetchTimeout and resolves
// with the parsed JSON body.
const getStock = async () => {
  const response = await fetchTimeout('https://httpbin.org/delay/4');
  return response.json();
};
// Entry point: logs the fetched stock data. NOTE(review): there is no
// .catch here, so a timeout/abort surfaces as an unhandled rejection.
( async () =>
console.log(await getStock())
)();
I was able to fix this issue by using the node-fetch library instead of isomorphic-fetch, with no other implementation issues. I've logged a ticket here; hope this can help someone else experiencing this frustrating issue.

Categories