Seneca-web timeout configuration

First of all, I would like to say that I am new to SenecaJS.
I am testing this configuration.
I have a SenecaJS microservice running on port 9007, which is running and handling requests correctly. When I request this service directly, I receive a response after roughly 10 seconds (it is a request for Oracle DB data).
But when I request the same data through Hapi + Seneca-web, I receive this error: "statusCode":504,"error":"Gateway Time-out"
["client","invalid_origin",{"port":9007,"pin":"mc:bankgtw","pg":"mc:bankgtw","type":"web","id":"pg:mc:bankgtw,pin:mc:bankgtw,port:9007","role":"transport","hook":"client","plugin$":{"name":"client$"},"fatal$":true,"meta$":{"mi":"wbn8u45tb7uh","tx":"o3f8eyia3f4n","id":"wbn8u45tb7uh/o3f8eyia3f4n","pattern":"hook:client,role:transport,type:web","action":"(q1yytemztu3k)","plugin_name":"transport","plugin_tag":"-","prior":{"chain":[],"entry":true,"depth":0},"start":1487199713842,"sync":true},"tx$":"o3f8eyia3f4n","host":"0.0.0.0","path":"/act","protocol":"http","timeout":5555,"max_listen_attempts":11,"attempt_delay":222,"serverOptions":{}},{"kind":"res","res":null,"error":{"isBoom":true,"isServer":true,"output":{"statusCode":504,"payload":{"statusCode":504,"error":"Gateway Time-out","message":"Client request timeout"},"headers":{}}},"sync":true,"time":{"client_recv":1487199799177}}]
A few seconds later, the microservice returns its data. (Note the "timeout":5555 in the log above - the transport's client timeout is shorter than the roughly 10 seconds the Oracle query needs.)
And this is my configuration:
const Hapi = require('hapi');
const Seneca = require('seneca');
const SenecaWeb = require('seneca-web');

const config = {
  adapter: require('seneca-web-adapter-hapi'),
  context: (() => {
    const server = new Hapi.Server();
    server.connection({
      port: 3001,
      routes: {
        cors: true,
        payload: { timeout: 60000 },
        timeout: { server: 60000, socket: 90000 }
      }
    });

    server.route({
      path: '/routes',
      method: 'get',
      handler: (request, reply) => {
        const routes = server.table()[0].table.map(route => {
          return {
            path: route.path,
            method: route.method.toUpperCase(),
            description: route.settings.description,
            tags: route.settings.tags,
            vhost: route.settings.vhost,
            cors: route.settings.cors,
            jsonp: route.settings.jsonp,
            server: server.info
          }
        })
        reply(routes)
      }
    });

    return server;
  })()
};

const seneca = Seneca({ timeout: 99999 })
  .use(SenecaWeb, config)
  .use(require('./hapi_api.js'))
  .client({ port: 9007, pin: 'mc:bankgtw' })
  .ready(() => {
    const server = seneca.export('web/context')();
    server.start(() => {
      server.log('server started on: ' + server.info.uri);
    });
  });
What am I doing wrong, or which timeout is causing this?

I've had the same issue and fixed it, but it's VERY bad practice.
Go to 'transport.js' in the seneca-transport folder.
You will see 'timeout: 5555'.
Go ahead and change that to whatever you need.
I'm not sure why this does not pick up user-supplied defaults.
To the best of my knowledge, this refers to the client timeout; make sure you still set the server timeout as well.
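A less invasive alternative (a sketch, assuming your version of seneca-transport honors a per-client timeout option, which is worth verifying before relying on it) would be to pass the timeout in the client configuration instead of patching the module:

// Hypothetical: pass the transport timeout with the client config
// rather than editing seneca-transport's source.
const seneca = Seneca({ timeout: 99999 })
  .use(SenecaWeb, config)
  .use(require('./hapi_api.js'))
  .client({ port: 9007, pin: 'mc:bankgtw', timeout: 60000 });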

Related

Why is the fetch function saying, that I have to use absolute urls, even if I have set a proxy?

At the moment I am coding a Shopify application. I want to fetch all the products from my store in server.js, but every time it outputs a message saying that only absolute URLs are supported. A registered webhook should get all the products inside my shop.
Error: only absolute urls are supported
Here is my JavaScript (server.js):
// (imports below the proxy requires are assumed; the original snippet omitted them.
// app, port, SHOPIFY_API_SECRET_KEY and allProducts are defined elsewhere in the file.)
const Koa = require('koa');
const Router = require('koa-router');
const session = require('koa-session');
const createShopifyAuth = require('@shopify/koa-shopify-auth').default;
const { receiveWebhook } = require('@shopify/koa-shopify-webhooks');
const fetch = require('node-fetch');
const { default: proxy, ApiVersion } = require('@shopify/koa-shopify-graphql-proxy');

app.prepare().then(() => {
  const server = new Koa();
  const router = new Router();
  server.use(session(server));
  server.keys = [/** Shopify Keys */];
  server.use(
    createShopifyAuth({
      /**
       * Webhook
       */
    }),
  );
  const webhook = receiveWebhook({ secret: SHOPIFY_API_SECRET_KEY });
  router.post('/webhooks/products/create', webhook, async (ctx) => {
    await fetch('/graphql', {
      credentials: 'include',
      body: allProducts
    })
      .then((data) => {
        console.log(data)
      })
      .catch((err) => {
        console.log(err)
      })
    console.log('received Webhook: ', ctx.state.webhook);
  })
  server.use(router.allowedMethods());
  server.use(router.routes());
  console.log(proxy({ version: ApiVersion.Unstable }))
  server.use(proxy({ version: ApiVersion.Unstable }))
  server.listen(port, () => {
    console.log(`> Ready on localhost:${port}`)
  })
})
I was using the example from the npm package shopify koa proxy, linked here.
How can I send an HTTP request through the proxy I am using?
The issue is exactly what the error says: fetch requires absolute URLs.
Whether you have a proxy or not is irrelevant to the fetch API; it doesn't know about that.
Just give it an absolute URL.
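For example (a sketch, assuming the proxy route is mounted on your own server and that port is in scope; both are assumptions here):

// Prefix the relative '/graphql' path with the server's own origin
// so node-fetch receives an absolute URL.
await fetch(`http://localhost:${port}/graphql`, {
  method: 'POST',
  credentials: 'include',
  body: allProducts
});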

Socket IO namespace not working with Express

I have tried setting up a namespace on the backend:
const server = require("http").createServer(app);
const connectedUsers = {};
const io = require("socket.io")(server, {
  path: "/socket",
  serveClient: false,
  // below are engine.IO options
  pingInterval: 10000,
  pingTimeout: 5000,
  cookie: false
});

const singularConnection = io.of("/singular-socket");
singularConnection.on("connection", socket => {
  console.log("unique user connected with socket ID " + socket);
});
And on my client, I try connecting with:
const socket = io(GATEWAY, {
  path: "/socket/singular-socket",
  transports: ["websocket"],
  jsonp: false
});
socket.connect();
socket.on("connect", () => {
  console.log("connected to socket server");
});
I've tried different variations of the URL, getting rid of /socket and moving other things around, but I can't seem to get it working. What am I doing wrong here?
I don't have any experience using socket.io, but from the docs...
To connect to a namespace, the client code would look like:
const socket = io('http://localhost/admin', {
  path: '/mypath'
});
Here, the socket connects to the admin namespace, with the custom path mypath.
The request URLs will look like: localhost/mypath/?EIO=3&transport=polling&sid= (the namespace is sent as part of the payload).
Following the above lines, your code should look like:
const socket = io("http://localhost/singular-socket", {
  path: "/socket",
  transports: ["websocket"],
  jsonp: false
});
Where /singular-socket is the namespace and /socket is the path.
Try this repl

Hapi.js - adding mechanism to check every route

I am trying to implement a mechanism that will run before any route is hit. In that mechanism, I want to take a value from the header and check for authentication.
I have come up with this:
server.js:
// Create a server with a host and port
'use strict';

var Hapi = require('hapi');
var mongojs = require('mongojs');

var plugins = [
  require('./routes/entities')
];

var server = new Hapi.Server();
server.connection({
  port: 3000
});

// Connect to db
server.app.db = mongojs('hapi-rest-mongo', ['entities']);

server.app.checkHeader = function (request) {
  var header = request.headers['x-authorization'];
  if (header === "letmein") {
    return true
  }
  return false
};

// Load plugins and start server
server.register(plugins, function (err) {
  if (err) {
    throw err;
  }
  // Start the server
  server.start(function (err) {
    console.log('Server running at:', server.info.uri);
  });
});
and in routes/entities:
'use strict';

var Boom = require('boom');
var uuid = require('node-uuid');
var Joi = require('joi');

exports.register = function (server, options, next) {
  var db = server.app.db;

  server.route({
    method: 'GET',
    path: '/entities',
    handler: function handler(request, reply) {
      if (!server.app.checkHeader(request)) {
        return reply(Boom.unauthorized());
      }

      //request.server.myFunc();
      db.entities.find(function (err, docs) {
        if (err) {
          return reply(Boom.wrap(err, 'Internal MongoDB error'));
        }
        reply(docs);
      });
    }
  });

  // (closing of the plugin register function, omitted in the original excerpt)
  next();
};
So, in short: while starting the server I registered my function server.app.checkHeader, and in the routes I call it, passing it the request object, which contains information about the headers.
While this works, I have a feeling I am not following best practices with Hapi.
How could I do it more elegantly?
There are a few options.
You can, of course, tap into the request lifecycle - note the events that occur in the pipeline prior to the route handler.
However, I'd urge you to consider implementing an auth strategy that can be set as the default for all routes or selectively on appropriate routes.
The best way to require authentication for all or selected routes is to use hapi's integrated functionality.
You should set a default authentication strategy that is applied to each route handler. The sample below uses basic auth. You'd want to create a custom authentication strategy for hapi to check your x-authorization header.
const Hapi = require('hapi')
const BasicAuth = require('hapi-auth-basic')

const server = new Hapi.Server()

server.register(BasicAuth, function (err) {
  if (err) {
    console.log('error', 'failed to install plugins')
    throw err
  }

  // TODO: add authentication strategy & set as default
  server.auth.strategy('simple', 'basic', true, { validateFunc: basicValidationFn })

  // or set strategy separately as default auth strategy
  server.auth.strategy('simple', 'basic', { validateFunc: basicValidationFn })
  server.auth.default('simple')

  // TODO: add routes

  server.start(function (err) {
  })
})
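For completeness, a minimal validateFunc sketch (the credentials are hypothetical; hapi-auth-basic passes the decoded username and password to this function):

// Hypothetical validation function for hapi-auth-basic: approve a single
// hard-coded credential pair and hand back a credentials object.
function basicValidationFn (request, username, password, callback) {
  const isValid = username === 'admin' && password === 'letmein'
  callback(null, isValid, { user: username })
}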
You can also hook into hapi's request lifecycle and extend it at given points. Extending the request lifecycle should be done using plugins:
register: function (server, options, next) {
  // do some processing before 'onPreAuth'
  // or pick another extension point
  server.ext('onPreAuth', (request, reply) => {
    // your functionality
  })
}
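And since the question checks an x-authorization header, a custom scheme fits naturally as the default strategy. A sketch, assuming hapi's callback-style scheme API as used in the code above (and reusing Boom from the routes file):

// Hypothetical custom scheme: accept requests carrying the expected
// x-authorization header value and reject everything else.
server.auth.scheme('header-token', function (server, options) {
  return {
    authenticate: function (request, reply) {
      if (request.headers['x-authorization'] === 'letmein') {
        return reply.continue({ credentials: {} })
      }
      return reply(Boom.unauthorized(null, 'header-token'))
    }
  }
})
server.auth.strategy('xauth', 'header-token')
server.auth.default('xauth')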
Hope that helps!

node.js hapi.js socket memory leak on server AWS

I have a stack of servers on AWS, and they seem to be leaking memory. A heapdump shows an ever-increasing number of Sockets in an Array, with the peer of those Sockets being the ELBs that sit in front of the server instances.
The ELBs ping with a health check every 6 seconds.
How do I close these sockets if I am using hapi.js? How do I fix this memory leak?
const config = require('./config');
const Hapi = require('hapi');
const Path = require('path');
const Fs = require('fs');

server = new Hapi.Server({
  debug: { request: ['error'] },
  connections: {
    routes: {
      files: {
        relativeTo: Path.join(__dirname, 'public')
      }
    }
  }
})

var connectionDict = {
  port: config.port,
  host: config.host
}
server.connection(connectionDict);

module.exports = server;

server.start(function () {
  setImmediate(function () {
    server.log([CONTROLLER_NAME, "server-start"], 'Server started at: ' + server.info.uri);
  });
});

server.route({
  method: 'GET',
  path: '/',
  handler: function (request, reply) {
    return reply('Welcome to your API!\n').header("Connection", "Close");
  }
});
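// note: global.gc() is only available when node is started with --expose-gc;
// otherwise this call throws.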
setInterval(function () {
  global.gc();
}, 60000);
One interesting tidbit is that this server has virtually no load - the only thing hitting it is the ELBs.
The sockets seen above are essentially repeats of the same peer connection. I'm not sure why the server is creating new sockets instead of reusing the existing ones.
The .header on the reply also seems to do nothing: sockets leak whether or not "Connection: close" is on the response header.
Unfortunately, the global.gc() also doesn't clean up the sockets.
EDIT
Not that it matters, but I am using a t2.micro instance.
Simpler code, still leaking:
const Hapi = require('hapi');
const Fs = require('fs');

server = new Hapi.Server();

var connectionDict = {
  port: 8443,
  host: '0.0.0.0',
  tls: {
    key: Fs.readFileSync('./cert/https/projectchange.pem'),
    cert: Fs.readFileSync('./cert/https/projectchange.cert'),
    passphrase: 'somepassword'
  }
}
server.connection(connectionDict);

server.route({
  method: 'GET',
  path: '/',
  handler: function (request, reply) {
    return reply('Welcome to Change API!\n').header("Connection", "Close");
  }
});

server.start(function (err) {
  console.log('Server started');
});

require('heapdump');
Fs.readdirSync('.').map(function (filename) {
  if (filename.match(/^heapdump-/)) {
    console.log(filename);
    Fs.unlinkSync(filename);
  }
});

setInterval(function heapDumper() {
  process.kill(process.pid, 'SIGUSR2');
}, 60000);

Hapi.js Proxy - change the response before reply

I'm trying out Hapi.js for a new project I'm working on.
My goal is to trigger a request on my Hapi server that will in turn trigger a request on a remote server using Hapi's proxy, with the response containing only specific properties from the remote response.
To test this, a request to /api/type3/{name} must be made, which will invoke an API call to GitHub's API to get the user object.
Code:
server.route({
  method: 'GET',
  path: '/api/type3/{name}',
  handler: {
    proxy: {
      mapUri: function (request, callback) {
        callback(null, 'https://api.github.com/users/' + request.params.name);
      },
      onResponse: function (err, res, request, reply, settings, ttl) {
        console.log(res);
        reply(res);
      },
      passThrough: true,
      xforward: true
    }
  }
});
The response from the above code is the raw response object from GitHub.
Now I need to capture this response so I can manipulate it and return only what I need.
But when I debug the response object, it contains a lot of data and I can't find the response payload inside of it.
So how do I extract only the GitHub response body from the response object?
Thanks a lot
The res object is an http.IncomingMessage. If you want to work with the body of the response, you need to read all the data off it first.
You have two choices for doing this.
1) Vanilla Node:
onResponse: function (err, res, request, reply, settings, ttl) {
  var body = '';
  res.on('data', function (chunk) {
    body += chunk;
  });
  res.on('end', function () {
    console.log(body);
    reply(body);
  });
}
2) Using Wreck (a module for working with HTTP in Node, made by the hapi folks):
var Wreck = require('wreck');

onResponse: function (err, res, request, reply, settings, ttl) {
  Wreck.read(res, null, function (err, payload) {
    console.log(payload);
    reply(payload);
  });
}
In the Wreck case above, you could do
payload = payload.toString();
to convert the buffer to actual data.
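To return only specific properties, as the question asks, you could have Wreck parse the payload and pick the fields you want. A sketch; the field names are just examples from GitHub's user object:

// Hypothetical: read the proxied response as JSON and reply with a subset.
onResponse: function (err, res, request, reply, settings, ttl) {
  Wreck.read(res, { json: true }, function (err, payload) {
    reply({
      login: payload.login,
      name: payload.name,
      public_repos: payload.public_repos
    });
  });
}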
I wanted to load XML data from a remote server and convert the response to JSON.
This thread helped me a lot in finding a solution that worked for me. But in my case the code above didn't work either, because the response from the remote server was gzipped; my console showed only unreadable data.
I tried to enable automatic payload parsing for the proxy, but that was not successful. Finally I had to unzip the response myself with 'zlib'.
This code is working for me:
'use strict'

const url = require('url')
const h2o2 = require('h2o2')
const Hapi = require('hapi')
const parseString = require('xml2js').parseString
var zlib = require('zlib')
const _ = require('lodash')

var remoteServerUrl = 'http://some-url.com:2434'
var originUrl = 'http://localhost:3000'

// Create a server with a host and port
const server = new Hapi.Server()
server.connection({
  host: 'localhost',
  port: 3001,
  routes: {
    cors: {
      origin: [originUrl],
    },
  },
})

server.register({
  register: h2o2,
}, function (err) {
  server.start(function (err) {
    console.log('Proxy Server started at: ' + server.info.uri)
  })
})

server.route({
  method: '*',
  path: '/proxy/{path*}',
  config: {
    payload: {
      // parse: true, // didn't work for me
      // parse: 'gzip', // didn't work for me
      output: 'stream',
    },
  },
  handler: {
    proxy: {
      passThrough: true,
      timeout: 15000,
      acceptEncoding: false,
      mapUri (request, callback) {
        callback(null, url.format({
          protocol: url.parse(remoteServerUrl).protocol,
          host: url.parse(remoteServerUrl).host,
          port: url.parse(remoteServerUrl).port,
          pathname: request.params.path,
          query: request.query,
        }))
      },
      onResponse (err, res, request, reply, settings, ttl) {
        if (err) {
          console.error('Hapi server error: ', err)
        }

        // let's unzip the response
        var gunzip = zlib.createGunzip()
        var xmlStr = ''

        gunzip.on('data', function (data) {
          xmlStr += data.toString()
        })

        gunzip.on('end', function () {
          // do something with the string
          // in this case convert xml to json string
          parseString(xmlStr, {}, function (err, result) {
            // send result back
            reply(result)
          })
        })

        res.pipe(gunzip)
      },
    },
  },
})
Maybe this helps someone else ;)
