Firebase function timeout when calling external API - javascript

I'm trying to call an external API from Firebase Functions, but I always get a timeout.
What could be causing this?
Here is my code:
exports.getCountryData = functions.https.onRequest((request, response) => {
  const https = require('https');
  const options = {
    hostname: "api-football-v1.p.rapidapi.com/v3",
    path: '/fixtures?next=5',
    headers: {
      "x-rapidapi-host": "api-football-v1.p.rapidapi.com/v3",
      "x-rapidapi-key": "my-api-key"
    }
  };
  var req = https.get(options, (resp) => {
    let data = '';
    resp.on('data', (chunk) => { data += chunk; });
    resp.on('end', () => {
      var result = JSON.parse(data);
      console.log("Api fetched successfully");
      console.log(result);
      response.send({ fulfillmentText: result });
    });
  }).on("error", (err) => { console.log("Error: " + err.message); });
});

An event-driven function may fail to complete successfully due to errors thrown in the function code itself. Some of the reasons this might happen are as follows:

- The function contains a bug and the runtime throws an exception.
- The function cannot reach a service endpoint, or times out while trying to reach the endpoint.
- The function intentionally throws an exception (for example, when a parameter fails validation).
- The function (written in Node.js) returns a rejected promise or passes a non-null value to a callback.

In any of the above cases, the function stops executing by default and the event is discarded. If you want to retry the function when an error occurs, you can change the default retry policy by setting the "retry on failure" property. This causes the event to be retried repeatedly, for up to multiple days, until the function completes successfully.
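For event-driven (background) functions, retries can also be requested from code. Below is a minimal sketch, assuming the firebase-functions SDK's runWith() runtime options and a hypothetical Pub/Sub trigger; note that retries apply to background triggers, not to HTTPS functions like the one in this question:

const functions = require('firebase-functions');

// Hypothetical Pub/Sub-triggered function; failurePolicy asks the platform
// to retry the event when the function throws or returns a rejected promise.
exports.processEvent = functions
  .runWith({ failurePolicy: true }) // assumption: failurePolicy runtime option
  .pubsub.topic('my-topic')         // 'my-topic' is a placeholder topic name
  .onPublish(async (message) => {
    // Throwing here (or rejecting) causes the event to be redelivered.
  });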
In this question, the service endpoint ‘api-football-v1.p.rapidapi.com/v3’ itself took too long to respond (it was effectively unreachable); that was the issue. Changing the API endpoint to v3.football.api-sports.io and then calling the external API from Firebase Functions solved the issue for user @tate_xy.

It turns out using their RapidAPI URL (api-football-v1.p.rapidapi.com/v3) was resulting in a timeout. Using the direct API URL (v3.football.api-sports.io), with their own domain name in it, did the trick for me.
Here is my working code.
exports.getCountryData = functions.https.onRequest((request, response) => {
  const https = require('https');
  const options = {
    hostname: "v3.football.api-sports.io",
    path: '/fixtures?next=5',
    headers: {
      "x-rapidapi-host": "v3.football.api-sports.io",
      "x-apisports-key": "my-api-key"
    }
  };
  var req = https.get(options, (resp) => {
    let data = '';
    resp.on('data', (chunk) => { data += chunk; });
    resp.on('end', () => {
      var result = JSON.parse(data);
      console.log("Api fetched successfully");
      console.log(result);
      response.send({ fulfillmentText: result });
    });
  }).on("error", (err) => { console.log("Error: " + err.message); });
});
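As a side note: if the upstream API is reachable but just slow, the function's own deadline can also be raised. A minimal sketch, assuming the firebase-functions runWith() runtime options (300 seconds and 512MB are arbitrary example values):

exports.getCountryData = functions
  .runWith({ timeoutSeconds: 300, memory: '512MB' }) // example values
  .https.onRequest((request, response) => {
    // ... same handler body as the working code above ...
  });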

Related

How to handle HTTPS JSON format errors from APIs in Node.js?

INTRODUCTION
I am implementing a function for making any kind of HTTPS request to any endpoint (using the https native module). When I make a request to a specific API, I get an error response in JSON format, like this:
{
  "error": {
    "code": 404,
    "message": "ID not found"
  }
}
How can I handle these kinds of errors? At first, I assumed they were handled in
request.on("error", (err) => {
  reject(err);
});
HTTPS request function code
I have added '<---------' comments in the relevant parts of the code.
const https = require("https");

exports.httpsRequest = function (options, body = null) {
  /*
  This function is useful for making requests over the HTTPS protocol
  */
  return new Promise((resolve, reject) => {
    const request = https.request(options, (response) => {
      // Get the response content type
      const contentType =
        response.headers["content-type"] &&
        response.headers["content-type"].split(";")[0];
      // Accumulate data
      let chunks = [];
      response.on("data", (chunk) => {
        chunks.push(chunk);
      });
      response.on("end", () => {
        // Concat all received chunks
        let response = Buffer.concat(chunks);
        // Some responses might be in JSON format...
        if (contentType === "application/json") {
          // Jsonify the response
          response = JSON.parse(response);
        }
        // (For the future) TODO - Check and parse more content types if needed.
        // Resolve the promise with the HTTPS response
        resolve(response); // <--------- The JSON format error responses are resolved too!!
      });
    });
    // Reject on request error
    request.on("error", (err) => {
      // <------------- At first, I assumed that all error responses were handled in this part of the code
      reject(err);
    });
    // Write the body
    if (body) {
      request.write(body);
    }
    // Close HTTPS connection.
    request.end();
  });
};
Question
Why is the error response not handled in request.on("error", ...)?
Thank you. I would appreciate any help or suggestions.
You need to create a separate code path for when the content type isn't what you were expecting, in which you call reject(), and you also need a try/catch around the JSON parsing so you can properly catch parse errors and reject on them too. You can solve both issues with this code:
const https = require("https");

exports.httpsRequest = function (options, body = null) {
  /*
  This function is useful for making requests over the HTTPS protocol
  */
  return new Promise((resolve, reject) => {
    const request = https.request(options, (response) => {
      // Get the response content type
      const contentType =
        response.headers["content-type"] &&
        response.headers["content-type"].split(";")[0];
      // Accumulate data
      let chunks = [];
      response.on("data", (chunk) => {
        chunks.push(chunk);
      });
      response.on("end", () => {
        // Concat all received chunks
        let response = Buffer.concat(chunks);
        // Some responses might be in JSON format...
        if (contentType === "application/json") {
          try {
            // Jsonify the response
            response = JSON.parse(response);
            resolve(response);
            return;
          } catch (e) {
            reject(e);
            return;
          }
        }
        reject(new Error("Not JSON content-type"));
      });
    });
    // Reject on request error
    request.on("error", (err) => {
      reject(err);
    });
    // Write the body
    if (body) {
      request.write(body);
    }
    // Close HTTPS connection.
    request.end();
  });
};
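One caveat with the code above: it still resolves for HTTP 4xx/5xx responses, since an error body like the one in the question arrives as valid JSON. A hedged usage sketch that surfaces such bodies as rejections at the call site (the module path and the error shape are assumptions based on the question):

const { httpsRequest } = require('./httpsRequest'); // hypothetical module path

async function getItem(id) {
  const body = await httpsRequest({
    hostname: 'api.example.com', // placeholder host
    path: `/items/${id}`,
    headers: { accept: 'application/json' }
  });
  // The API in the question wraps failures in an `error` object,
  // so turn those into rejections here.
  if (body && body.error) {
    throw new Error(`API error ${body.error.code}: ${body.error.message}`);
  }
  return body;
}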
FYI, libraries such as got() and others do all of this work for you automatically and have a lot of other useful features. You don't really need to build this yourself.
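For comparison, a minimal sketch of the same request using got (assuming got v11, whose promise API exposes a .json() helper and rejects on non-2xx responses by default):

const got = require('got');

async function getItem(id) {
  // got rejects on non-2xx status codes and on unparsable JSON,
  // so both failure modes land in a single catch at the call site.
  return got(`https://api.example.com/items/${id}`).json(); // placeholder URL
}

getItem('42').catch((err) => console.log('Request failed:', err.message));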

Axios create shorter duration for ETIMEDOUT

I have a server application (we'll call it ServerApp1) which is going to be running on an Azure VM instance. I have a separate server (which we'll call ServerApp2) on a different machine, which will be communicating with ServerApp1 and a separate client. The Azure VM is going to be spun up and/or down depending on need, so it's quite possible that the VM (and thus ServerApp1) isn't even alive to respond to requests from ServerApp2. My client is polling ServerApp2 to ask for the status of ServerApp1, but if the VM is currently down then that request hangs for about 20 seconds before failing with code ETIMEDOUT. What I'd like is for ServerApp2 to make the request to ServerApp1 to see if it's alive, but after about 1 or 2 seconds of not getting a response, to simply stop and tell the client that it's not currently running. I thought I could get away with adding a {timeout: 2000} parameter to my axios call, but this doesn't seem to change the behavior in any noticeable way.
Here's the function that gets called when the client asks ServerApp2 what the status is of ServerApp1:
router.get('/getCurrentConsoleStatus', function(req, res) {
  async function getStatus() {
    try {
      const result = await consoleDataService.getConsoleStatus();
      if (result.message === 'Begin listen for job.') {
        console.log('The app is ready!');
      } else {
        console.log('The console app is not ready');
      }
    } catch (error) {
      console.log(`Error communicating with console app: ${error}`);
    }
  }
  getStatus();
});
I have a function which creates the root Axios object:
var axios = require('axios');

module.exports = axios.create({
  baseURL: 'baseURL',
  timeout: 1000,
  headers: {
    'Content-type': 'application/json'
  }
});
And then the function that gets called in consoleDataService.getConsoleStatus() looks like this:
exports.getConsoleStatus = async function() {
  const res = await axios({
    method: 'get',
    url: '/status'
  });
  return res.data;
};
Thanks to @JonEdwards for the suggestion to use Promise.race. I ended up solving the issue with this function, which takes whichever promise settles first.
// `http` is the axios instance created with axios.create() above.
exports.getConsoleStatus = () => {
  var sleep = function() {
    return new Promise(resolve => {
      setTimeout(function() {
        resolve({ status: false });
      }, 2000);
    });
  };
  var fetch = async function() {
    const res = await http({
      method: 'get',
      url: '/status'
    });
    return res.data;
  };
  async function getStatus() {
    const asyncFunctions = [sleep(), fetch()];
    // Whichever settles first wins: the 2-second sleep or the HTTP call.
    const result = await Promise.race(asyncFunctions);
    return result;
  }
  return getStatus();
};
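The same Promise.race idea can be factored into a reusable helper, so any promise-returning call can be given a deadline. A minimal sketch; note that rejecting on timeout is a design choice, whereas the code above resolves with { status: false } instead:

// Wrap any promise with a deadline; rejects if `ms` milliseconds elapse first.
function withTimeout(promise, ms) {
  let timer;
  const deadline = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error(`Timed out after ${ms} ms`)), ms);
  });
  // Clear the timer either way so it cannot keep the process alive.
  return Promise.race([promise, deadline]).finally(() => clearTimeout(timer));
}

// Usage, with `http` being the axios instance from above:
// const res = await withTimeout(http.get('/status'), 2000);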

IBM Cloud Function produces no output

I am having some trouble running this IBM Cloud Function:
/**
 *
 * main() will be run when you invoke this action
 *
 * @param Cloud Functions actions accept a single parameter, which must be a JSON object.
 *
 * @return The output of this action, which must be a JSON object.
 *
 */
function main(params) {
  const https = require('https');
  https.get('https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY', (resp) => {
    let data = '';
    // A chunk of data has been received.
    resp.on('data', (chunk) => {
      data += chunk;
    });
    // The whole response has been received. Print out the result.
    resp.on('end', () => {
      console.log(JSON.parse(data).explanation);
    });
  }).on("error", (err) => {
    console.log("Error: " + err.message);
  });
}
My problem is that the first invocations of this function (at least the first 3-4) produce no output. Subsequent calls run properly and the log is shown correctly. How can I fix this unpredictable behaviour? I'd like, of course, to retrieve my data on the first call of this function. Thanks.
Node.js uses a non-blocking, asynchronous programming model. This main function returns before the HTTP response is available.
Returning a Promise will allow you to wait on the HTTP response.
function main(params) {
  return new Promise((resolve, reject) => {
    const https = require('https');
    https.get('https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY', (resp) => {
      let data = '';
      // A chunk of data has been received.
      resp.on('data', (chunk) => {
        data += chunk;
      });
      // The whole response has been received. Resolve with the result.
      resp.on('end', () => {
        const explanation = JSON.parse(data).explanation;
        console.log(explanation);
        resolve({ explanation });
      });
    }).on("error", (err) => {
      console.log("Error: " + err.message);
      reject({ error: err.message });
    });
  });
}
Two additional things to check:

- Make sure to append .json to your endpoint.
  Example: https://<ibm-domain>/api/v1/web/<username>/default/<function>.json
- Make sure to select Enable as Web Action in the Endpoints sidebar menu.
Also, you should be able to return an async main function in lieu of the Promise object.
async function main(params) {
  try {
    // some `await` function
  } catch (e) {
    // catch `await` errors
  }
}

module.exports = main;
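Putting the two answers together, here is a hedged sketch of the NASA example rewritten as an async main, with https.get wrapped in a promise (same endpoint as the question; the returned error shape is an assumption):

const https = require('https');

// Promisified wrapper around https.get for a single JSON endpoint.
function fetchJson(url) {
  return new Promise((resolve, reject) => {
    https.get(url, (resp) => {
      let data = '';
      resp.on('data', (chunk) => { data += chunk; });
      resp.on('end', () => {
        try { resolve(JSON.parse(data)); } catch (e) { reject(e); }
      });
    }).on('error', reject);
  });
}

async function main(params) {
  try {
    const body = await fetchJson('https://api.nasa.gov/planetary/apod?api_key=DEMO_KEY');
    return { explanation: body.explanation };
  } catch (e) {
    return { error: e.message };
  }
}

module.exports = main;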

Increase timeout time in React - neo4j app

I am trying to increase the timeout of my React app. I am using axios, so initially I tried:
axios.post('/gene_info', postData, { timeout: timeoutVal });
It did not work; here is the respective thread that deals with it:
https://github.com/axios/axios/issues/647
So, I tried the following code:
let CancelToken = axios.CancelToken;
const source = CancelToken.source();
try {
  let response = null;
  setTimeout(() => {
    if (response === null) {
      source.cancel();
    }
  }, 60 * 1500 * 1000);
  response = await axios.post('/gene_info', postData, { cancelToken: source.token });
  console.log(response);
} catch (error) {
  console.log(error);
}
And it is not working either. The request times out and I see the empty response error, even though on the Node.js backend I can see that the result is returned correctly. On the backend I am making a very long-running request to a Neo4j database. I suspected that it might be timing out, so I added the following lines to the neo4j.config file:
unsupported.dbms.executiontime_limit.enabled=true
unsupported.dbms.executiontime_limit.time=99999999999999s
which I found here:
How to configure a query timeout in Neo4j 3.0.1
and restarted Neo4j, but it did not help either. In the terminal I see lines like POST /gene_info - - ms - -. I am not sure what this means, or whether the problem is still on the front end or the back end, but I suspect that Neo4j now times out while it is still calculating the result, which I can see using console.log() statements. Any suggestions would be greatly appreciated.
Update
I tried using React's fetch, but it is still not working. Here is the code:
fetchWithTimeout = (url, postData, timeout) => {
  let didTimeOut = false;
  // Return the promise so callers can chain .then() on it
  return new Promise(function(resolve, reject) {
    // Keep the timer id separate from the `timeout` duration parameter
    const timer = setTimeout(function() {
      didTimeOut = true;
      reject(new Error('Request timed out'));
    }, timeout);
    fetch(url, {
      method: 'POST',
      headers: {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
      },
      timeout: timeout, // note: the browser's fetch ignores a timeout option
      body: JSON.stringify(postData)
    })
      .then(function(response) {
        // Clear the timeout as cleanup
        clearTimeout(timer);
        if (!didTimeOut) {
          console.log('fetch good! ', response);
          resolve(response);
        }
      })
      .catch(function(err) {
        console.log('fetch failed! ', err);
        // Rejection already happened with setTimeout
        if (didTimeOut) return;
        // Reject with error
        reject(err);
      });
  })
    .then(function(response) {
      // Request success and no timeout
      console.log('good promise, no timeout! ');
      return response;
    })
    .catch(function(err) {
      // Error: response error, request timeout or runtime error
      console.log('promise error! ', err);
    });
}
Then I am calling this function like this:
let postData = {
  "jsonData": geneNameArr,
  "datasetName": this.props.datasetName
};

this.fetchWithTimeout('/gene_info', postData, timeout).then((response) => {
  console.log("fetchWithTimeout is done!");
  console.log(response);
});
Update
I tried using the axios.create() function, with no success:
const axiosInstance = axios.create({
  baseURL: '/gene_info',
  timeout: timeout
});

axiosInstance.post('', postData).then((response) => {
  console.log("axios request is done with create() method");
  console.log(response);
});
Since nothing seems to work on the front end, I am starting to think the timeout comes from the Neo4j driver, even though the results are somehow returned. Here is the code I am using with the driver:
router.post('/gene_info', function(req, res) {
  ...
  ...
  var driver = dbUtils.driver;
  const session = driver.session();
  session.run(
    full_query,
    {}
  ).then(result => {
    const exprData = chartService.prepareGeneInfoData(result, '');
    res.json({
      exprData
    });
    session.close();
  });
})
Or maybe the problem lies with express.Router(), which I am using to handle GET and POST requests on the backend with Node.js.
If you want to configure the timeout in axios, you can use:
const axiosInstance = axios.create({
  baseURL: "http://example.com/api/",
  timeout: 5000
});
Replace 5000 with the timeout value you need, in milliseconds.
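When an axios request does time out, the rejection is an error whose code is 'ECONNABORTED' (in the axios versions contemporary with this question), so a caller can tell a timeout apart from other failures. A minimal sketch against the instance above:

axiosInstance.get('/status')
  .then((response) => console.log(response.data))
  .catch((error) => {
    if (error.code === 'ECONNABORTED') {
      // The request hit the configured timeout before a response arrived.
      console.log('Request timed out');
    } else {
      console.log('Request failed:', error.message);
    }
  });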
Ultimately I found the solution that worked here:
Node Express specific timeout value per route
And I used the setConnectionTimeout() function in the following way:
router.post('/gene_info', setConnectionTimeout('12h'), function(req, res) {
  ...
})
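For reference, a hedged sketch of what a setConnectionTimeout()-style middleware can look like, assuming the ms package to parse strings like '12h' (the linked answer's actual implementation may differ):

const ms = require('ms'); // parses '12h' into milliseconds

function setConnectionTimeout(time) {
  const delay = typeof time === 'string' ? ms(time) : time;
  return (req, res, next) => {
    // Raise the socket timeout for this route only.
    res.setTimeout(delay);
    next();
  };
}

// Usage: router.post('/gene_info', setConnectionTimeout('12h'), handler);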

Node.js HTTP response streams

Using the native http.get() in Node.js, I'm trying to pipe an HTTP response into a stream that I can bind data and end events to.
I'm currently handling this for gzip data, using:
http.get(url, function(res) {
  if (res.headers['content-encoding'] == 'gzip') {
    res.pipe(gunzip);
    gunzip.on('data', dataCallback);
    gunzip.on('end', endCallback);
  }
});
Gunzip is a stream, and this just works. I've tried creating streams (write streams, then read streams) and piping the response, but haven't had much luck. Any suggestions for replicating this same approach for non-gzipped content?
The response object from an HTTP request is an instance of a readable stream. Therefore, you can collect the data with the data event, then use it when the end event fires.
var http = require('http');

var body = '';
http.get(url, function(res) {
  res.on('data', function(chunk) {
    body += chunk;
  });
  res.on('end', function() {
    // all data has been downloaded
  });
});
The readable.pipe(dest) would basically do the same thing, if body in the example above were a writable stream.
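To replicate the gunzip pattern for non-gzipped content, you can pipe the response through a PassThrough stream and bind the same events to it. A minimal sketch (url, dataCallback, and endCallback are the names from the question):

const http = require('http');
const { PassThrough } = require('stream');

http.get(url, function(res) {
  const passthrough = new PassThrough();
  res.pipe(passthrough);
  // Same event interface as the gunzip stream in the question.
  passthrough.on('data', dataCallback);
  passthrough.on('end', endCallback);
});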
Nowadays the recommended way of piping is to use the pipeline function, which is designed to protect you from memory leaks.
const { createReadStream } = require('fs');
const { pipeline } = require('stream');
const { createServer, get } = require('http');

const errorHandler = (err) => err && console.log(err.message);

const server = createServer((_, response) => {
  // Set the status code before pipeline starts writing the body
  response.writeHead(200);
  pipeline(createReadStream(__filename), response, errorHandler);
}).listen(8080);

get('http://localhost:8080', (response) => {
  pipeline(response, process.stdout, errorHandler);
  response.on('close', () => server.close());
});
Another way of doing it, with more control, is to use an async iterator:
async function handler(response) {
  let body = '';
  for await (const chunk of response) {
    let text = chunk.toString();
    console.log(text);
    body += text;
  }
  console.log(body.length);
  server.close();
}

get('http://localhost:8080', (response) => handler(response).catch(console.warn));
