I can't get my head around why this is not working. I have a module that sends an HTTP POST request containing some payload using the native Node.js http module. I am stubbing the request method with Sinon and passing PassThrough streams for the request and response.
DummyModule.js
const http = require('http');

module.exports = {
    getStuff: function(arg1, arg2, cb) {
        let request = http.request({}, function(response) {
            let data = '';

            response.on('data', function(chunk) {
                data += chunk;
            });

            response.on('end', function() {
                // spy should be called here
                cb(null, "success");
            });
        });

        request.on('error', function(err) {
            cb(err);
        });

        // payload
        request.write(JSON.stringify({some: "data"}));
        request.end();
    }
};
test_get_stuff.js
const sinon = require('sinon');
const http = require('http');
const PassThrough = require('stream').PassThrough;

describe('DummyModule', function() {
    let someModule,
        stub;

    beforeEach(function() {
        someModule = require('./DummyModule');
    });

    describe('success', function() {
        beforeEach(function() {
            stub = sinon.stub(http, 'request');
        });

        afterEach(function() {
            http.request.restore();
        });

        it('should return success as string', function() {
            let request = new PassThrough(),
                response = new PassThrough(),
                callback = sinon.spy();

            response.write('success');
            response.end();

            stub.callsArgWith(1, response).returns(request);

            someModule.getStuff('arg1', 'arg2', callback);

            sinon.assert.calledWith(callback, null, 'success');
        });
    });
});
The spy does not get called and the test fails with AssertError: expected spy to be called with arguments. So the response.on('end', ...) handler is never invoked, hence the failure. Does the end event on the response stream need to be triggered somehow?
It's working now. First, the events need to be emitted using the emit method.
Second, the events need to be emitted right after the someModule.getStuff(...) method call:
...
someModule.getStuff('arg1', 'arg2', callback);
response.emit('data', 'success');
response.emit('end');
sinon.assert.calledWith(callback, null, 'success');
...
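Putting it together, the test body from above becomes the following (same stubbing setup; the explicit write()/end() calls are replaced by emits after the call under test):

it('should return success as string', function() {
    let request = new PassThrough(),
        response = new PassThrough(),
        callback = sinon.spy();

    stub.callsArgWith(1, response).returns(request);

    // Call first so the module attaches its 'data'/'end' listeners...
    someModule.getStuff('arg1', 'arg2', callback);

    // ...then drive the fake response.
    response.emit('data', 'success');
    response.emit('end');

    sinon.assert.calledWith(callback, null, 'success');
});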
I'm trying to call an external API from Firebase Functions but I always get a timeout.
What could be causing this issue?
Here is my code:
exports.getCountryData = functions.https.onRequest((request, response) => {
    const https = require('https');
    const options = {
        hostname: "api-football-v1.p.rapidapi.com/v3",
        path: '/fixtures?next=5',
        headers: {
            "x-rapidapi-host": "api-football-v1.p.rapidapi.com/v3",
            "x-rapidapi-key": "my-api-key"
        }
    };

    var req = https.get(options, (resp) => {
        let data = '';
        resp.on('data', (chunk) => { data += chunk; });
        resp.on('end', () => {
            var result = JSON.parse(data);
            console.log("Api fetched successfully");
            console.log(result);
            response.send({ fulfillmentText: result });
        });
    }).on("error", (err) => { console.log("Error: " + err.message); });
});
An event-driven function may fail to complete successfully due to errors thrown in the function code itself. Some of the reasons this might happen are as follows:

- The function contains a bug and the runtime throws an exception.
- The function cannot reach a service endpoint, or times out while trying to reach the endpoint.
- The function intentionally throws an exception (for example, when a parameter fails validation).
- The function (written in Node.js) returns a rejected promise or passes a non-null value to a callback.
In any of the above cases, the function stops executing by default and the event is discarded. If you want the function to be retried when an error occurs, you can change the default retry policy by enabling the "retry on failure" property. The event is then retried repeatedly, potentially for several days, until the function completes successfully.
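If you do enable retries for an event-driven function, this can also be expressed in code. Below is a minimal, hypothetical sketch using the firebase-functions SDK, assuming a version whose runWith() accepts a failurePolicy option; note that retries apply only to background functions, not to HTTPS functions like the one above:

const functions = require('firebase-functions');

// Hypothetical example: a background function that opts in to retries.
// `failurePolicy` is assumed to be supported by the installed
// firebase-functions version; check the SDK docs before relying on it.
exports.onUserCreated = functions
    .runWith({ failurePolicy: true }) // retry this function on failure
    .auth.user()
    .onCreate(async (user) => {
        // A thrown error or rejected promise here causes the event
        // to be redelivered and the function retried.
        await doSomethingThatMayFail(user); // hypothetical helper
    });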
In this question, the service endpoint 'api-football-v1.p.rapidapi.com/v3' itself took too long to respond (it was not reachable); that was the issue. Changing the API endpoint to v3.football.api-sports.io and then calling the external API from Firebase Functions solved the issue for our user #tate_xy
It turns out using their RapidAPI URL (api-football-v1.p.rapidapi.com/v3) was resulting in a timeout. Using a direct API URL (v3.football.api-sports.io) with their domain name in it did the trick for me.
Here is my working code.
exports.getCountryData = functions.https.onRequest((request, response) => {
    const https = require('https');
    const options = {
        hostname: "v3.football.api-sports.io",
        path: '/fixtures?next=5',
        headers: {
            "x-rapidapi-host": "v3.football.api-sports.io",
            "x-apisports-key": "my-api-key"
        }
    };

    var req = https.get(options, (resp) => {
        let data = '';
        resp.on('data', (chunk) => { data += chunk; });
        resp.on('end', () => {
            var result = JSON.parse(data);
            console.log("Api fetched successfully");
            console.log(result);
            response.send({ fulfillmentText: result });
        });
    }).on("error", (err) => { console.log("Error: " + err.message); });
});
I have this function in my code:
let request = require("request");

let getDrillDownData = function (userId, query, callback) {
    query.id = userId;
    let urlQuery = buildUrlFromQuery(query);
    request.get({
        url: urlQuery,
        json: true
    }, function (error, response, data) {
        if (!error && response.statusCode === 200) {
            return callback(null, calculateExtraData(data));
        } else if (error) {
            return callback(error, null);
        }
    });
};
and I wish to write unit tests which verify that when the function is called with the correct parameters it runs OK,
and that if there is an error, the error is returned.
I wrote this unit test code:
describe.only('Server Service Unit Test', function() {
    var sinon = require('sinon'),
        rewire = require('rewire');

    var reportService;
    var reportData = require('./reportData.json');

    beforeEach(function() {
        reportService = rewire('../../services/reports.server.service');
    });

    describe('report methods', function() {
        var reportData;
        var query = { id: "test" };
        var userId = 'testuser';
        var getDrillDownData;
        var request;

        beforeEach(function() {
            getDrillDownData = reportService.__get__('getDrillDownData');
        });

        it('should get drill down data by userId and query', function(done) {
            var getCallback = sinon.stub();
            request = {
                get: sinon.stub().withArgs({
                    url: query,
                    json: true
                }, getCallback.withArgs("error", {statusCode: 200}, reportData))
            };
            reportService.__set__('request', request);

            getDrillDownData(userId, query, function(err, returnData) {
                (err === null).should.eql(true);
                //(getCallback.withArgs(undefined, {statusCode: 200}, reportData).calledOnce).equal(true);
                done();
            });
        });
    });
});
But I keep getting this error:
Error: timeout of 2000ms exceeded. Ensure the done() callback is being called in this test.
Can someone help?
Thanks
I would stub request.get() directly:
describe('report methods', function() {
    // Make `request.get()` a Sinon stub.
    beforeEach(function() {
        sinon.stub(request, 'get');
    });

    // Restore the original function.
    afterEach(function() {
        request.get.restore();
    });

    it('should get drill down data by userId and query', function(done) {
        // See text.
        request.get.yields(null, { statusCode : 200 }, { foo : 'bar' });

        // Call your function.
        getDrillDownData('userId', {}, function(err, data) {
            ...your test cases here...
            done();
        });
    });
});
Using request.get.yields() (which calls the first function argument that Sinon can find in the argument list; in this case, it's the (error, response, data) callback that gets passed to request.get() in your function) you can tell Sinon which arguments to use to call the callback.
That way, you can check if the callback to request.get() handles all arguments properly.
You can use .withArgs() too (request.get.withArgs(...).yields(...)), although you have to be sure that you're using it correctly; otherwise, if the exact arguments don't match, Sinon will call the original request.get() instead of using the stubbed version.
Instead, I prefer using stub.calledWith() to check for the correct arguments after the call has been made. That integrates much better with Mocha as well, as you can use explicit assertions.
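For illustration, a small sketch of that approach, reusing the stubbed request.get from above (the asserted arguments are placeholders):

it('should request the built URL as JSON', function(done) {
    request.get.yields(null, { statusCode: 200 }, { foo: 'bar' });

    getDrillDownData('userId', {}, function(err, data) {
        // Assert on the arguments after the call instead of
        // pre-matching them with .withArgs().
        sinon.assert.calledWith(request.get, sinon.match({ json: true }));
        done();
    });
});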
I am trying to create a helper function that makes an async call as part of my data setup for the Protractor test. Is there a way I can wait for the response of the function and then proceed with the tests? Here is what I am trying to do.
So basically the test should wait until the async call to loaddata() is finished. I have read about using promises but couldn't implement them successfully.
"use strict";
describe('sample passing test spec', function() {
describe('sample passing test suite', function() {
loaddata();
it('sample passing test', function () {
datall();
expect("1").toEqual("2");
});
});
});
loaddata() basically makes a socket connection:

function loaddata() {
    var net = require('net');
    var client = new net.Socket();

    client.connect(20000, '127.0.0.1', function() {
        console.log('Connected');
        client.write('Hello, server! Love, Client\n');
    });

    client.on('data', function(data) {
        console.log('Received: ' + data);
        client.destroy(); // kill client after server's response
    });

    client.on('close', function() {
        console.log('Connection closed');
    });

    return "function execution over";
}
You would need to tweak loaddata to return a promise, which Protractor would put on the Control Flow (a queue of pending promises to resolve):
function loaddata() {
    var deferred = protractor.promise.defer();

    var net = require('net');
    var client = new net.Socket();

    client.connect(20000, '127.0.0.1', function() {
        console.log('Connected');
        client.write('Hello, server! Love, Client\n');
    });

    client.on('data', function(data) {
        console.log('Received: ' + data);
        client.destroy(); // kill client after server's response
        deferred.fulfill(true);
    });

    client.on('close', function() {
        console.log('Connection closed');
    });

    return deferred.promise;
}
If this is something you need to do globally before your test run, put it into onPrepare() and return the promise. If loaddata() returns a promise, Protractor will first resolve it and only then run the tests:
onPrepare: function () {
    return loaddata();
},
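As a side note, if you would rather not depend on protractor.promise (the WebDriver Control Flow was later deprecated), a native Promise achieves the same thing; a minimal sketch under that assumption:

function loaddata() {
    var net = require('net');

    return new Promise(function (resolve, reject) {
        var client = new net.Socket();

        client.connect(20000, '127.0.0.1', function () {
            client.write('Hello, server! Love, Client\n');
        });

        client.on('data', function (data) {
            client.destroy(); // kill client after server's response
            resolve(data.toString());
        });

        client.on('error', reject);
    });
}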
Using the async module I have something like this, and it works perfectly.
But when I try to restructure the code to make it reusable, it finishes executing before the HTTP requests have completed. Node.js does a lot of things asynchronously, so finding a solution to this is a bit hard for me.
What I have so far:
var async = require('async'),
    http = require('http');

exports.unitedStates = function(req, res) {
    var texas = {
        //GET method data here / ex: host, path, headers....
    };
    var washington = {
        //GET method data here / ex: host, path, headers....
    };

    async.parallel({
        getSource: function(callback) {
            http.request(texas, function(respond) {
                //Http request
            }).end();
        },
        getScreen: function(callback) {
            http.request(washington, function(respond) {
                //Http request
            }).end();
        }
    },
    function(err, results) {
        //Return the results
        /* REPLY TO THE REQUEST */
        res.send( /* data here */ );
    });
};
Is there a proper way to make this piece of code reusable?
Example:
exports.unitedStates = function(req, res) {
    var tokyo = japan();

    //send the result to front end
    res.send(tokyo);
};

function japan() {
    //async calls come here and return the value...
    return result;
}
Instead of returning a value from the function, pass a callback.
exports.unitedStates = function (req, res) {
    // pass callback here
    japan(function (value) {
        res.send(value);
    });
};

function japan(cb) {
    //async call here
    cb(result);
}
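Applied to the async.parallel example above, that might look like the sketch below. Note that each task must invoke its callback (something the original placeholders never did), or async.parallel will never call the final function; the texas/washington objects are the placeholders from the question:

function unitedStatesData(cb) {
    async.parallel({
        getSource: function (callback) {
            http.request(texas, function (respond) {
                var data = '';
                respond.on('data', function (chunk) { data += chunk; });
                respond.on('end', function () { callback(null, data); });
            }).end();
        },
        getScreen: function (callback) {
            http.request(washington, function (respond) {
                var data = '';
                respond.on('data', function (chunk) { data += chunk; });
                respond.on('end', function () { callback(null, data); });
            }).end();
        }
    }, cb); // async.parallel passes (err, results) straight through
}

exports.unitedStates = function (req, res) {
    unitedStatesData(function (err, results) {
        if (err) return res.send(err);
        res.send(results);
    });
};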
Using the native http.get() in Node.js, I'm trying to pipe an HTTP response to a stream that I can bind data and end events to.
I'm currently handling this for gzipped data, using:
http.get(url, function(res) {
    if (res.headers['content-encoding'] == 'gzip') {
        res.pipe(gunzip);
        gunzip.on('data', dataCallback);
        gunzip.on('end', endCallback);
    }
});
Gunzip is a stream and this just works. I've tried to create streams (write streams, then read streams) and pipe the response, but haven't had much luck. Any suggestions on replicating this same setup for non-gzipped content?
The response object from an HTTP request is an instance of a readable stream. Therefore, you can collect the data with the data event, then use it when the end event fires.
var http = require('http');

var body = '';
http.get(url, function(res) {
    res.on('data', function(chunk) {
        body += chunk;
    });
    res.on('end', function() {
        // all data has been downloaded
    });
});
The readable.pipe(dest) would basically do the same thing, if body in the example above were a writable stream.
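To mirror the gunzip pattern from the question for non-gzipped content, one option is to pipe the response through a PassThrough stream, which is both readable and writable but leaves the data untouched; a minimal sketch (url, dataCallback and endCallback are the question's own placeholders):

var http = require('http');
var PassThrough = require('stream').PassThrough;

http.get(url, function(res) {
    // PassThrough simply forwards what is written to it, giving you
    // the same data/end interface the gunzip stream provided.
    var out = new PassThrough();
    res.pipe(out);
    out.on('data', dataCallback);
    out.on('end', endCallback);
});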
Nowadays the recommended way of piping is to use the pipeline function. It is supposed to protect you from memory leaks.
const { createReadStream } = require('fs');
const { pipeline } = require('stream');
const { createServer, get } = require('http');

const errorHandler = (err) => err && console.log(err.message);

const server = createServer((_, response) => {
    // Write the head before piping starts, not after.
    response.writeHead(200);
    pipeline(createReadStream(__filename), response, errorHandler);
}).listen(8080);

get('http://localhost:8080', (response) => {
    pipeline(response, process.stdout, errorHandler);
    response.on('close', () => server.close());
});
Another way of doing it, one that gives you more control, is to use an async iterator:
async function handler(response) {
    let body = '';
    for await (const chunk of response) {
        let text = chunk.toString();
        console.log(text);
        body += text;
    }
    console.log(body.length);
    server.close();
}

get('http://localhost:8080', (response) => handler(response).catch(console.warn));