How to mock a server error in hapi - javascript

I have set up a hapi server as follows:
'use strict'
// node modules
const Hapi = require('hapi')
const Inert = require('inert')
const Path = require('path')
// server config
// NOTE: hapi v16-style API (server.connection / callback-based register);
// v17+ changed both of these.
const server = new Hapi.Server()
const port = 4000
server.connection({
port: port
})
// Hapi plugins
// Inert provides the `reply.file(...)` and `directory` handlers used below.
const plugins = [
Inert
]
server.register(plugins, (err) => {
// Registration failure is fatal: without Inert the routes below cannot work.
if (err) {
throw err
}
// Routes are only added once plugin registration succeeds.
server.route([
{
method: 'GET',
path: '/',
handler: (request, reply) => {
// Serve the SPA entry point.
reply.file(Path.join(__dirname, '../front/production/index.html'))
}
},
{
method: 'GET',
path: '/{param*}',
// Static file serving for everything else
// (presumably resolved relative to the process cwd — TODO confirm).
handler: {
directory: {
path: 'front/production'
}
}
}
])
})
module.exports = server
I have written tests for it, but have a problem being able to cover this line:
if (err) {
throw err <----
}
how can I mock a server error in server.register, so far I have tried:
server.inject({method: 'GET', url: '/notanendpoint'}...
server.inject({method: 'NOTMETHOD', url: '/'}...
server.inject({method: 'GET', url: '/', simulate: {error: true}}...
I've been looking through the hapi API docs and can't work out how to cover this line; any help would be greatly appreciated.
Let me know if I can add more description to my question or include what I have tried in my tests (let me know if you want to see more of my test file)

Related

I'm getting a "githubClientID" is not defined when calling the "config" package using the GitHub API

I'm creating a feature that retrieves GitHub user names via the GitHub API.
When sending the GET request from Postman, I got a server error that reads:
"Configuration property "githubClientId" is not defined"
Despite the fact that I called the config package with githubClientID defined in config/default.json, like this:
{
"mongoURI": "mongodb+srv://massas:oir#socialapp-2dg3r.mongodb.net/test?retryWrites=true&w=majority",
"jwtToken" : "oecret",
"githubClientId": "ID",
"githubSecret": "SECRET"
}
// change the values for public viewing
Here's the code that's calling the API:
// Fetch the 5 most recent public repos for a GitHub user.
const express = require('express');
const request = require('request'); // NOTE(review): the `request` module is deprecated
const config = require('config');
// #route GET api/profile/github/:username
// #desc Get user repos from username
// #access Private
router.get('/github/:username', (req, res) => {
try {
// config.get() THROWS when the key is missing from config/default.json —
// this is the source of the reported "githubClientId is not defined" error.
const options = {
uri: `https://api.github.com/users/${
req.params.username
}/repos?per_page=5&sort=created:asc&client_id=${config.get(
'githubClientId'
)}&client_secret=${config.get('githubSecret')}`,
method: 'GET',
headers: {
// GitHub's API rejects requests without a User-Agent header.
'user-agent': 'node.js'
}
};
request(options, (error, response, body) => {
// NOTE(review): execution continues after logging `error`; in that case
// `response` may be undefined and the statusCode check below would throw.
if (error) console.error(error);
if (response.statusCode !== 200) {
return res.status(404).json({
msg: 'No Github profile found'
});
}
res.json(JSON.parse(body));
});
} catch (err) {
// This catch only covers synchronous errors (e.g. config.get throwing);
// errors raised inside the request callback are NOT caught here.
console.error(err.message);
res.status(500).send('Server Error');
}
});
How can I resolve this error?
1- You need to change the options object from:
const options = {
uri: `https://api.github.com/users/${
req.params.username
}/repos?per_page=5&sort=created:asc&client_id=${config.get(
'githubClientId'
)}&client_secret=${config.get('githubSecret')}`,
method: 'GET',
headers: {
'user-agent': 'node.js'
}
};
to:
const uri = encodeURI(
`https://api.github.com/users/${req.params.username}/repos?per_page=5&sort=created:asc`
);
const headers = {
'user-agent': 'node.js',
Authorization: `token ${config.get('githubToken')}`
};
const gitHubResponse = await axios.get(uri, { headers });
2- Make sure to add a default.json file in config folder with your GitHub secret access token
{ "githubToken": "<yoursecrectaccesstoken>" }
3- Install axios in the root of the project, if you haven't installed it already
npm i axios

Next.js:_next/webpack-hmr request 404

This issue's demo repo is https://github.com/hh54188/happy-server/tree/issue-demo
I try to integrate Next.js with Hapi.js as a plugin. Here is my next.js plugin project folder main structure:
--plugins
|--app
|--pages
|--app
|--a.js
|--handlers
|--public
|--dist
|--index.js
|--next.config.js
And here is index.js main content, most for route register
// Next.js render service rooted at this plugin's directory.
// NOTE(review): `next`, `path`, `assetPrefix`, `nextHandlerWrapper`,
// `defaultHandler`, `pagesPath` and `distPath` are defined elsewhere in the file.
const nextRenderService = next({
dir: path.join(__dirname)
});
module.exports = {
name: "AppService",
version: "0.0.1",
register: async function(server, options) {
// Wait for Next.js to finish preparing before exposing any routes.
await nextRenderService.prepare();
// Hot-module-reload endpoint.
server.route({
method: "GET",
path: `/app/${assetPrefix}/_next/webpack-hmr`,
handler: nextHandlerWrapper(nextRenderService)
});
// Page requests.
server.route({
method: "GET",
path: "/app/{param*}",
handler: defaultHandler(nextRenderService)
});
server.route({
method: "GET",
path: `/app/${assetPrefix}/_next/on-demand-entries-ping`,
handler: nextHandlerWrapper(nextRenderService)
});
// Static serving of page bundles.
server.route({
method: "GET",
path: `/app/${assetPrefix}/_next/-/page/{param*}`,
handler: {
directory: {
path: path.join(__dirname, pagesPath),
listing: true
}
}
});
// Static serving of the build output.
server.route({
method: "GET",
path: `/app/${assetPrefix}/_next/{param*}`,
handler: {
directory: {
path: path.join(__dirname, distPath),
listing: true
}
}
});
}
};
However, when I run the server and visit http://127.0.0.1:4000/app/a, the page renders successfully and most script files load successfully. But the _next/webpack-hmr and _next/on-demand-entries-ping requests return status 404. And I notice the 404 status comes from Next.js, not Hapi.js.
So what's wrong with my code ? How can I solve this problem ?
The issue has arisen right after upgrading nextjs 11 > 12.
This helped me:
npm install webpack-dev-server -g
source: https://stackoverflow.com/a/31627310/
The assetPrefix configuration is for using a CDN only and is global to NextJs (documentation). You don't want to set that for something else, like modifying the NextJs router paths. If you don't plan on using a CDN, just ignore this setting.
// in constants/index.js
// assetPrefix is meant for CDN hosting only: set it to the CDN origin in
// production and leave it empty otherwise (see the NextJs assetPrefix docs).
const assetPrefix = process.env.NODE_ENV === "production"
? "https://cdn.mydomain.com"
: "";
You also do not want to list all NextJs internal routes — use the NextJs request handler to handle those calls instead:
// index.js
const next = require("next");
const path = require("path");
const nextRenderService = next({
dir: path.join(__dirname),
dev: process.env.NODE_ENV !== "production"
});
const { defaultHandler, nextHandlerWrapper } = require("./hanlders");
module.exports = {
name: "AppService",
version: "0.0.1",
register: async function(server, options) {
// https://github.com/zeit/next.js/blob/master/examples/custom-server-hapi/server.js
await nextRenderService.prepare();
// handle NextJs application requests
const handler = nextRenderService.getRequestHandler();
server.route({
method: "GET",
path: "/app/{p*}",
handler: async ({ raw, url }, h) => {
await handler(raw.req, raw.res, url);
return h.close;
}
});
// handle NextJs private routes
server.route({
method: "GET",
path: "/_next/{p*}" /* next specific routes */,
handler: nextHandlerWrapper(nextRenderService)
});
// Other routes
server.route({
method: "GET",
path: "/{p*}" /* catch all route */,
handler: defaultHandler(nextRenderService)
});
}
};

Seneca-web timeout configuration

First of all I would like to say that I am new in senecajs.
I am testing this configuration.
I have configured a Senecajs microservice running on port 9007, which is running and handling requests correctly. When I request this service directly, I receive a response after approximately 10 s (it is a request for Oracle DB data).
But when I request for same data but through the Hapi + Seneca-web I receive this error: "statusCode":504,"error":"Gateway Time-out"
["client","invalid_origin",{"port":9007,"pin":"mc:bankgtw","pg":"mc:bankgtw","type":"web","id":"pg:mc:bankgtw,pin:mc:bankgtw,port:9007","role":"transport","hook":"client","plugin$":{"name":"client$"},"fatal$":true,"meta$":{"mi":"wbn8u45tb7uh","tx":"o3f8eyia3f4n","id":"wbn8u45tb7uh/o3f8eyia3f4n","pattern":"hook:client,role:transport,type:web","action":"(q1yytemztu3k)","plugin_name":"transport","plugin_tag":"-","prior":{"chain":[],"entry":true,"depth":0},"start":1487199713842,"sync":true},"tx$":"o3f8eyia3f4n","host":"0.0.0.0","path":"/act","protocol":"http","timeout":5555,"max_listen_attempts":11,"attempt_delay":222,"serverOptions":{}},{"kind":"res","res":null,"error":{"isBoom":true,"isServer":true,"output":{"statusCode":504,"payload":{**"statusCode":504,"error":"Gateway Time-out**","message":"Client request timeout"},"headers":{}}},"sync":true,"time":{"client_recv":1487199799177}}]
The error arrives a few seconds before the microservice returns its data.
And this is my configuration:
const Hapi = require('hapi');
const Seneca = require('seneca');
const SenecaWeb = require('seneca-web');
// seneca-web configuration: the hapi server is built inside `context`
// so seneca-web can attach its routes to it.
const config = {
adapter: require('seneca-web-adapter-hapi'),
context: (() => {
const server = new Hapi.Server();
server.connection({
port: 3001,
routes: {
cors: true,
// 60s server / 90s socket timeouts — these cover hapi only. The error
// log above shows the seneca transport client using timeout:5555,
// which is what produces the 504 "Client request timeout".
payload:{timeout:60000},
timeout:{server: 60000, socket:90000}
}
});
// Introspection endpoint: lists the routes registered on this server.
server.route({
path: '/routes',
method: 'get',
handler: (request, reply) => {
const routes = server.table()[0].table.map(route => {
return {
path: route.path,
method: route.method.toUpperCase(),
description: route.settings.description,
tags: route.settings.tags,
vhost: route.settings.vhost,
cors: route.settings.cors,
jsonp: route.settings.jsonp,
server: server.info
}
})
reply(routes)
}
});
return server;
})()
};
// NOTE(review): this timeout applies to seneca actions, not to the
// underlying seneca-transport HTTP client — confirm.
const seneca = Seneca({timeout: 99999})
.use(SenecaWeb, config)
.use(require('./hapi_api.js'))
.client({ port:9007, pin:'mc:bankgtw' })
.ready(() => {
const server = seneca.export('web/context')();
server.start(() => {
server.log('server started on: ' + server.info.uri);
});
});
What I am doing wrong or what timeout is causing this?
I've had the same issue and fixed it, but it's VERY BAD PRACTICE.
Go to 'transport.js' at seneca-transport folder.
You will see 'timeout: 5555'
Go ahead and change that to whatever you need.
I'm not sure why this is not getting USER defaults.
To the best of my knowledge, this is referring to client timeout. make sure you still use server timeout.

node.js hapi.js socket memory leak on server AWS

I have a stack of servers on AWS, and it seems to be leaking memory. A heapdump is showing that I have ever increasing Sockets in an Array, with the peer of those Sockets to be the ELBs that sit in front of the server instances.
The ELBs ping with a health check every 6 seconds.
How do I close these sockets if I am using hapi.js? How do I fix this memory leak?
const config = require('./config');
const Hapi = require('hapi');
const Path = require('path');
const Fs = require('fs');
// NOTE(review): `server` has no const/var/let — this creates an implicit
// global (and would throw in strict mode).
server = new Hapi.Server({
debug: { request: ['error'] },
connections: {
routes: {
files: {
relativeTo: Path.join(__dirname, 'public')
}
}
}
})
var connectionDict = {
port: config.port,
host: config.host}
server.connection(connectionDict);
module.exports = server;
server.start(function () {
setImmediate(function(){
// NOTE(review): CONTROLLER_NAME is not defined anywhere in this snippet.
server.log([CONTROLLER_NAME, "server-start"], 'Server started at: ' + server.info.uri);
});
});
// NOTE(review): this route is registered after server.start() is called.
server.route({
method: 'GET',
path: '/',
handler: function (request, reply) {
return reply('Welcome to your API!\n').header("Connection","Close");
}
});
// Force a GC pass every minute. Requires node started with --expose-gc;
// otherwise global.gc is undefined and this interval throws.
setInterval(function (){
global.gc();
}, 60000);
One interesting tidbit is that this server has virtually no load - the only thing hitting the servers are the ELBs.
The sockets seen above are essentially repeats of the same peer connection. I'm not sure why the server is not reusing the existing socket instead of creating new ones.
The .header on the reply also seems to be doing nothing. Sockets will leak whether or not "Connection:close" is on the return header.
Unfortunately, the global.gc() also doesn't clean up the sockets.
EDIT
Not that it matters, but I am using a t2.micro instance.
Simpler code, still leaking:
const Hapi = require('hapi');
const Fs = require('fs');
// NOTE(review): implicit global `server` (no const/var/let).
server = new Hapi.Server();
var connectionDict = {
port: 8443,
host: '0.0.0.0',
// TLS terminated directly by the node process.
tls: {
key: Fs.readFileSync('./cert/https/projectchange.pem'),
cert: Fs.readFileSync('./cert/https/projectchange.cert'),
passphrase: 'somepassword'
}
}
server.connection(connectionDict);
server.route({
method: 'GET',
path: '/',
handler: function (request, reply) {
return reply('Welcome to Change API!\n').header("Connection","Close");
}
});
server.start(function (err) {
// NOTE(review): `err` is ignored here.
console.log('Server started');
});
require('heapdump');
// Delete heapdump files left over from previous runs.
Fs.readdirSync('.').map(function (filename) {
if (filename.match(/^heapdump-/)) {
console.log(filename);
Fs.unlinkSync(filename);
}
});
// heapdump writes a snapshot on SIGUSR2; trigger one every minute.
setInterval(function heapDumper() {
process.kill(process.pid, 'SIGUSR2');
}, 60000);

Hapi.js Proxy - change the response before reply

I'm trying to test Hapi.js for a new project I'm working on
My goal is to trigger a request on my Hapi server and this request will trigger another request on a remote server using Hapi proxy, and the response will be only specific properties from the remote response
To test this a request to /api/type3/name must be made which will invoke an API call to GitHub's API to get the user object
Code:
// Proxy /api/type3/{name} to GitHub's user API via the hapi proxy handler.
server.route({
method: 'GET',
path: '/api/type3/{name}',
handler: {
proxy: {
mapUri: function(request,callback){
callback(null,'https://api.github.com/users/'+request.params.name);
},
// `res` is the raw http.IncomingMessage from GitHub — replying with it
// streams the upstream response through unchanged.
onResponse: function(err, res, request, reply, settings, ttl){
console.log(res);
reply(res);
},
passThrough: true,
xforward: true
}
}
});
The response from the above code is the response object from GitHub
Now I need to save this response so I can manipulate it and return only what I need
But when I debug the response object it contains a lot of data and I can't find the response payload inside of it
So how to extract only the response from GitHub from the response object ?
Thanks a lot
The res object is an http.IncomingMessage. If you want to work with the body of the response you need to read all the data off it first.
You have 2 choices for doing this.
1) Vanilla Node
// Buffer the entire upstream response body before replying.
onResponse: function(err, res, request, reply, settings, ttl){
var body = '';
res.on('data', function (chunk) {
// chunk is a Buffer; += coerces it to a string
// (assumes the payload is UTF-8 text — TODO confirm).
body += chunk;
});
res.on('end', function () {
console.log(body);
reply(body);
});
}
2) Using Wreck (a module for working with HTTP in Node made by the Hapi folks)
var Wreck = require('wreck');
// Wreck.read drains the stream and hands back the whole payload as a Buffer.
onResponse: function(err, res, request, reply, settings, ttl){
Wreck.read(res, null, function (err, payload) {
console.log(payload);
reply(payload);
});
}
In the wreck case above, you could do
payload = payload.toString();
To convert the buffer to actual data.
I want to load xml data from remote server and convert the response to json.
This thread helped me a lot to find a solution that worked for me. But in my case the code above didn't work either, because the response from the remote server was gzipped. In my console there was only unreadable data.
I tried to enable automatic payload parsing for the proxy, but it was not successful. Finally I had to unzip the response myself with 'zlib'.
This code is working for me:
'use strict'
const url = require('url')
const h2o2 = require('h2o2')
const Hapi = require('hapi')
const parseString = require('xml2js').parseString
var zlib = require('zlib')
const _ = require('lodash') // NOTE(review): lodash is never used in this snippet
var remoteServerUrl = 'http://some-url.com:2434'
var originUrl = 'http://localhost:3000'
// Create a server with a host and port
const server = new Hapi.Server()
server.connection({
host: 'localhost',
port: 3001,
routes: {
cors: {
origin: [originUrl],
},
},
})
// Register the h2o2 proxy plugin, then start the server.
server.register({
register: h2o2,
}, function (err) {
server.start(function (err) {
console.log('Proxy Server started at: ' + server.info.uri)
})
})
server.route({
method: '*',
path: '/proxy/{path*}',
config: {
payload: {
// parse: true, // didn't work for me
// parse: 'gzip', // didn't work for me
output: 'stream',
},
},
handler: {
proxy: {
passThrough: true,
timeout: 15000,
acceptEncoding: false,
// Rewrite /proxy/{path} onto the remote server, preserving the query.
mapUri (request, callback) {
callback(null, url.format({
protocol: url.parse(remoteServerUrl).protocol,
host: url.parse(remoteServerUrl).host,
port: url.parse(remoteServerUrl).port,
pathname: request.params.path,
query: request.query,
}))
},
onResponse (err, res, request, reply, settings, ttl) {
if (err) {
console.error('Hapi server error: ', err)
}
// let's unzip the response
// NOTE(review): assumes the upstream response is ALWAYS gzipped;
// zlib.createGunzip() will emit an error on a plain response — confirm.
var gunzip = zlib.createGunzip()
var xmlStr = ''
gunzip.on('data', function (data) {
xmlStr += data.toString()
})
gunzip.on('end', function () {
// do something with the string
// in this case convert xml to json string
parseString(xmlStr, {}, function (err, result) {
// send result back
reply(result)
})
})
res.pipe(gunzip)
},
},
},
})
Maybe this helps someone else ;)

Categories