Unable to receive upstream GCM messages with Node.js + XMPP - javascript

I'm new to Node.js and XMPP but not to JavaScript or GCM. I'm unable to receive upstream messages using node-xmpp, and none of the callbacks are called, not even the error callback. I've looked through the other SO threads but none of the solutions have worked. Here is my entire route:
var express = require('express');
var router = express.Router();
var xmpp = require('node-xmpp');

router.get('/', function(req, res, next) {
    var options = {
        type: 'client',
        jid: 'project-12345@gcm.googleapis.com',
        password: 'apiKey12345',
        port: 5235,
        host: 'gcm.googleapis.com',
        legacySSL: true,
        preferredSaslMechanism: 'PLAIN'
    };
    // this prints correctly
    console.log('Creating xmpp app');
    var cl = new xmpp.Client(options);
    cl.connection.socket.setTimeout(0);
    cl.connection.socket.setKeepAlive(true, 10000);
    // None of these callbacks are called
    cl.on('online', function() {
        console.log('online');
    });
    cl.on('connection', function() {
        console.log('online');
    });
    cl.on('authenticate', function(opts, cb) {
        console.log('authenticated');
    });
    cl.on('error', function(e) {
        console.error(e);
    });
    cl.on('stanza', function(stanza) {
        console.log(stanza);
    });
    res.render('index', { title: 'GCM upstream test' });
});

module.exports = router;
Thanks

OP here: The issue was that the route was terminating the XMPP connection as soon as it reached res.render. After moving the XMPP code out of the route, I get an XMPP authentication failure, which is most likely due to an incorrect jid/password. The project requirements have changed and I no longer need upstream messaging, so I will not attempt to fix the auth failure.
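For anyone hitting the same thing, a minimal sketch of what "moving the XMPP code out of the route" can look like, reusing the options from the question (the jid and password are placeholders):

var express = require('express');
var xmpp = require('node-xmpp');
var router = express.Router();

// Create the client once at module load so it is not tied to a single request.
var cl = new xmpp.Client({
    type: 'client',
    jid: 'project-12345@gcm.googleapis.com',   // placeholder sender ID
    password: 'apiKey12345',                    // placeholder API key
    port: 5235,
    host: 'gcm.googleapis.com',
    legacySSL: true,
    preferredSaslMechanism: 'PLAIN'
});

cl.on('online', function() { console.log('online'); });
cl.on('stanza', function(stanza) { console.log(stanza.toString()); });
cl.on('error', function(e) { console.error(e); });

// The route only renders; it no longer owns the XMPP connection.
router.get('/', function(req, res) {
    res.render('index', { title: 'GCM upstream test' });
});

module.exports = router;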
Thanks for the responses

Related

I am trying to connect my HTML frontend to my Node.js backend using sockets

I am trying to connect my HTML frontend so it can handle the socket.on event. My Node.js server is running on port 5858, and I am trying to run Socket.IO on the same port, but it's not connecting. My Node.js code is below:
const express = require('express');
const cors = require('cors');               // needed for app.use(cors()) below
const bodyParser = require('body-parser');  // needed for bodyParser.json() below
const connectdb = require('./db/connect.js');
// invoicesModel (used in the otpnumberevent handler) also needs to be required here
const app = express();
const port = process.env.PORT || 5858;
const DATABASE_URL = "mongodb://localhost:27017";
const server = require('http').createServer(app);
const io = require('socket.io')(server, {
    cors: {
        origin: false,
        withCredentials: false
    }
}).listen(server);

// connection to database
connectdb(DATABASE_URL);

// apply to all requests
app.use(cors());
app.use(function (req, res, next) {
    res.header("Access-Control-Allow-Origin", "*");
    res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
    next();
});
// app.use(express.limit('50M'));
app.use(bodyParser.json()); // body-parser middleware
app.use(express.json());

io.on('connection', (socket) => {
    console.log("new connection");
    socket.on('otpnumberevent', (data) => {
        console.log(data);
        let invid = data.id;
        invoicesModel.findOneAndUpdate(
            { _id: invid },   // findOneAndUpdate expects a filter object, not a bare id
            data,
            {
                "fields": { "otpverified": 1 },
                "new": true
            }
        ).exec((err, doc) => {
            console.log(doc);
            if (err) {
                let response = {
                    message: "Failed",
                    doc: err
                };
                io.emit('messageSent', response);
            } else {
                let response = {
                    message: "Success",
                    doc: doc
                };
                io.emit('getotpresponse', response);
            }
        });
    });
});

app.get('/', (req, res) => {
    res.send("Welcome to Dma backend");
});

server.listen(port, () => {
    console.log('Server listening at port %d', port);
});
On the front end I am using an HTML page with a .php extension, so I load Socket.IO from a CDN to connect. My frontend code is here:
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.5.4/socket.io.js" integrity="sha512-YeeA/Qxn5hYdkukScTCNNOhTrv1C2RubAGButJ1rmgQwZf/HdRaCGl+JAVkqsqaNRaYNHdheiuKKuPf9mDcqKg==" crossorigin="anonymous" referrerpolicy="no-referrer"></script>
<script>
    // socket connection code; my Node.js server is running on port 5858
    var socket = io('http://localhost:5858', { transports: ['websocket', 'polling', 'flashsocket'] });

    socket.on('getotpresponse', (data) => {
        console.log(data);
        if (data.doc.otpverified == "Yes") {
            swal({
                title: "Success!",
                text: "OTP verification successful",
                icon: "success",
                button: "Okay!",
            }).then(function() {
                location.reload(true);
            });
        } else {
            swal({
                title: "Failed!",
                text: "OTP verification failed!",
                icon: "error",
                button: "Okay!",
            });
        }
    });
    // (the code that emits 'otpnumberevent' to the server is not shown here)
</script>
Whenever I try to connect from the front end I get this error in my console screen.
I have tried everything, including adding the CORS options to the socket connection as shown in the code above, but I am still getting the same issue.
Change cors -> origin to true:
const io = require('socket.io')(server, {
    cors: {
        origin: true,
        withCredentials: false
    }
}).listen(server);
If that doesn't fix it, work through the following:
Check that the socket is connecting to the same port the Node.js server is listening on.
If the ports match, check that CORS is enabled on the socket connection.
If CORS is enabled, check that the headers are set correctly (Origin, X-Requested-With, Content-Type, and Accept).
If the headers are set correctly, check that the socket event is correctly configured on the front end.
If the front-end event is correctly configured, check that invoicesModel.findOneAndUpdate is correctly configured on the backend.
If findOneAndUpdate is correctly configured, check that the io.emit calls are configured on the backend.
If the io.emit calls are correctly configured, check that the swal function is configured on the front end.
Finally, check that location.reload is correctly configured on the front end.
If all of the above are correctly configured, the issue should be resolved.
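If you would rather not allow every origin, a sketch of an explicit Socket.IO CORS config (the front-end origin below is an assumption; use whatever host and port actually serve the .php page):

const io = require('socket.io')(server, {
    cors: {
        origin: 'http://localhost',   // the exact origin that serves the front end
        methods: ['GET', 'POST'],
        credentials: false
    }
});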

Getting cannot POST / error in Express

I have a RESTful API and I am using Postman to make a call to my /websites route. Whenever I make the call, Postman says "Cannot POST /websites". I am trying to implement a job queue and I'm using Express, Kue (Redis) and MongoDB.
Here is my routes file:
'use strict';

module.exports = function(app) {
    // Create a new website
    const websites = require('./controllers/website.controller.js');

    app.post('/websites', function(req, res) {
        const content = req.body;
        websites.create(content, (err) => {
            if (err) {
                return res.json({
                    error: err,
                    success: false,
                    message: 'Could not create content',
                });
            } else {
                return res.json({
                    error: null,
                    success: true,
                    message: 'Created a website!', content
                });
            }
        });
    });
};
Here is the server file:
const express = require('express');
const bodyParser = require('body-parser');
const kue = require('kue');
const websites = require('./app/routes/website.routes.js');

kue.app.listen(3000);

var app = express();

const redis = require('redis');
const client = redis.createClient();
client.on('connect', () => {
    console.log('Redis connection established');
});

app.use('/websites', websites);
I've never used Express and I have no idea what is going on here. Any amount of help would be great!!
Thank you!
The problem is how you are combining app.use and app.post. You have:
app.use('/websites', websites);
And inside websites you have:
app.post('/websites', function....
So to reach that code you would need to make a POST to localhost:3000/websites/websites. What you need to do is simply remove the /websites path from your routes file:
// to reach here, POST to localhost:3000/websites
app.post('/', function(req, res) {
});
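For completeness, a sketch of the same fix written with express.Router, keeping the app.use('/websites', ...) mount from server.js (assumptions: the same controller module, and that the Express app itself parses JSON bodies and calls listen, neither of which appears in the posted server file, which only starts the Kue UI via kue.app.listen(3000)):

// app/routes/website.routes.js
'use strict';
const express = require('express');
const router = express.Router();
const websites = require('./controllers/website.controller.js');

// Mounted at /websites in server.js, so this handles POST /websites
router.post('/', function(req, res) {
    websites.create(req.body, (err) => {
        if (err) {
            return res.json({ error: err, success: false, message: 'Could not create content' });
        }
        return res.json({ error: null, success: true, message: 'Created a website!' });
    });
});

module.exports = router;

// server.js
// app.use(bodyParser.json());                  // needed so req.body is populated
// app.use('/websites', require('./app/routes/website.routes.js'));
// app.listen(8080);                            // the Express app needs its own listen call,
//                                              // on a different port than the Kue UI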

Socket.io only successfully connects if window opens before server starts

I've been trying to resolve a really strange Socket.io bug.
If I open the page on the client while the server is running, it will fail to connect with the message:
universalModuleDefinition:3 WebSocket connection to
'ws://localhost:4000/socket.io/?EIO=3&transport=websocket&sid=f6LwPIDZubiPKE-TAAAA'
failed: Connection closed before receiving a handshake response
If I then restart the server, while leaving the page open, it connects without issue.
app.js
const express = require('express');
const app = express();
const server = require('http').Server(app);
require('./socket')(server);
// More code here
server.listen(app.get('port'));
socket.js
const io = require('socket.io');
const jackrabbit = require('jackrabbit');

const rabbit = jackrabbit(process.env.RABBIT_URI);
const exchange = rabbit.default();

function Socket(app) {
    this.io = io(app);
    this.io.on('connection', socket => {
        socket.emit('sync');
        socket.on('room', room => {
            socket.join(room);
        });
    });
    this.queue = exchange.queue({ name: 'worker.socket' });
    this.queue.consume(this.onMessage.bind(this), { noAck: true });
}

Socket.prototype.onMessage = function (message) {
    this.io.to(message.report).emit('photo', message.photo);
};

module.exports = function (app) {
    return new Socket(app);
};
client
var socket = io.connect();

socket.on('connect', function () {
    // This gets triggered every time (after the error above)
    console.log('Connected');
    // This is never logged by the server
    socket.emit('room', value); // value set by template engine
});

socket.on('sync', function () {
    // will not execute first time I connect, but if I restart
    // the server, it runs no problem
    alert('Synced with server');
});

socket.on('photo', function (data) {
    // also will not be run the first time, but works if the
    // server is restarted when the page is open
});
Edit:
I've tried rewriting it to:
- Initialise socket.io within app.js, then pass it to the socket controller
- Run server.listen before requiring socket.js
- Initialise the client after a timeout
- Set the transport method on the client strictly to websocket
None of these methods have worked.
Found the solution to my problem (it was actually not an issue with any of the code I posted). I was using the compression middleware for Express, which appears to break socket.io. The solution was to add the following:
app.use((req, res, next) => {
    // Disable compression for socket.io
    if (req.originalUrl.indexOf('socket.io') > -1) {
        return next();
    }
    compression()(req, res, next);
});
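An equivalent sketch using compression's built-in filter option instead of wrapping the middleware by hand, assuming the same "skip anything under socket.io" rule:

const compression = require('compression');

app.use(compression({
    filter: (req, res) => {
        // Never compress socket.io traffic; defer to the default filter otherwise.
        if (req.originalUrl.indexOf('socket.io') > -1) {
            return false;
        }
        return compression.filter(req, res);
    }
}));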

Hapi.js - adding mechanism to check every route

I am trying to implement a mechanism that will be run before any route is hit. In that mechanism I want to take a value from the header and check for authentication.
I have come up with this:
server.js:
// Create a server with a host and port
'use strict';

var Hapi = require('hapi');
var mongojs = require('mongojs');

var plugins = [
    require('./routes/entities')
];

var server = new Hapi.Server();
server.connection({
    port: 3000
});

// Connect to db
server.app.db = mongojs('hapi-rest-mongo', ['entities']);

server.app.checkHeader = function (request) {
    var header = request.headers['x-authorization'];
    if (header === "letmein") {
        return true;
    }
    return false;
};

// Load plugins and start server
server.register(plugins, function (err) {
    if (err) {
        throw err;
    }
    // Start the server
    server.start(function (err) {
        console.log('Server running at:', server.info.uri);
    });
});
and in routes/entities:
'use strict';

var Boom = require('boom');
var uuid = require('node-uuid');
var Joi = require('joi');

exports.register = function (server, options, next) {
    var db = server.app.db;

    server.route({
        method: 'GET',
        path: '/entities',
        handler: function handler(request, reply) {
            if (!server.app.checkHeader(request)) {
                return reply(Boom.unauthorized());
            }
            //request.server.myFunc();
            db.entities.find(function (err, docs) {
                if (err) {
                    return reply(Boom.wrap(err, 'Internal MongoDB error'));
                }
                reply(docs);
            });
        }
    });

    // ...remaining routes, then finish the plugin registration
    next();
};
So in short, while starting the server I have registered my function server.app.checkHeader.
In the routes I am calling it and passing the request object to it; the request object contains information about the headers.
While this works, I have a feeling I am not following best practices with hapi.
How could I do it more elegantly?
There are a few options.
You can, of course, tap into the request lifecycle - note the events that occur in the pipeline prior to the route handler.
That said, I'd urge you to consider implementing an auth strategy that can be set as the default for all routes or applied selectively on appropriate routes.
The best way to require authentication for all or selected routes is to use hapi's integrated functionality.
You should set a default authentication strategy that is applied to each route handler. The sample below uses basic auth; you'd want to create a custom authentication strategy for hapi to check your x-authorization header.
const Hapi = require('hapi');
const BasicAuth = require('hapi-auth-basic');

const server = new Hapi.Server();

server.register(BasicAuth, function (err) {
    if (err) {
        console.log('error', 'failed to install plugins');
        throw err;
    }

    // add authentication strategy & set it as default (third argument `true`)
    server.auth.strategy('simple', 'basic', true, { validateFunc: basicValidationFn });

    // or register the strategy first and set it as the default separately:
    // server.auth.strategy('simple', 'basic', { validateFunc: basicValidationFn });
    // server.auth.default('simple');

    // TODO: add routes

    server.start(function (err) {
    });
});
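To actually check the x-authorization header from the question, here is a sketch of a custom scheme using the same callback-style hapi API as above (the scheme and strategy names and the hard-coded token are placeholders):

const Boom = require('boom');

server.auth.scheme('header-token', function (server, schemeOptions) {
    return {
        authenticate: function (request, reply) {
            const header = request.headers['x-authorization'];
            if (header === 'letmein') {
                // Pass whatever credentials make sense for the app.
                return reply.continue({ credentials: { user: 'api-client' } });
            }
            return reply(Boom.unauthorized('Invalid x-authorization header'));
        }
    };
});

server.auth.strategy('header', 'header-token');
server.auth.default('header');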
You can also hook into hapi's request lifecycle and extend it at given points. Extending the request lifecycle should be done by using plugins:
register: function (server, options, next) {
    // do some processing before 'onPreAuth'
    // or pick another extension point
    server.ext('onPreAuth', (request, reply) => {
        // your functionality
        reply.continue();
    });
    next();
}
Hope that helps!

Centralized error handling for Restify

I'm having trouble getting centralized error handling set up in my restify app. I'd like to trap certain Mongo errors, such as "E11000 duplicate key error" and then map them to a restify ConflictError.
If I just let the error bubble up from my Mongo call in a route, the client gets a 500 error.
I figured I should trap InternalServerError, but the below handler never gets called:
app.on('InternalServerError', function (req, res, err, cb) {
console.log('++++++++++++++++', err);
return cb(err);
});
I thought I could just use the express approach:
app.use(function (err, req, res, next){...
But restify handlers don't seem to take an error argument. I'm stumped after searching all the usual places. It seems my first approach should have just worked.
This might work for you. Set up a bunyan logger in your app.js file…
var restify = require('restify');
var bunyan = require('bunyan');

var log = new bunyan({
    name: 'my_api',
    streams: [
        {
            path: './error.log',
            level: 'warn'
        }
    ],
    serializers: { req: restify.bunyan.serializers.req },
    src: false
});

var server = restify.createServer({
    log: log
});
Then in your controller do something like this:
var restify = require('restify');

Model.findAll().then(function (vals) {
    res.send(vals);
    next();
}).catch(function (e) {
    // A try/catch will not see rejections from the promise, so handle them here.
    req.log.error({ req_id: req.id() }, 'Error attempting find.');
    res.send(409, new restify.ConflictError("Problem executing search."));
    next();
});
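On the original "why does my InternalServerError listener never fire" question: restify's error events are emitted for errors that are handed to next(err) inside a handler, so a sketch of the more centralized setup might look like the following (assumptions: restify 5+, where the catch-all restifyError event exists, and the same restify.ConflictError constructor used above; the route path is a placeholder):

// Route handler: pass errors to next() so the server-level listeners see them.
server.get('/things', function (req, res, next) {
    Model.findAll().then(function (vals) {
        res.send(vals);
        return next();
    }).catch(function (err) {
        // Map the Mongo duplicate-key error to a 409 at the boundary...
        if (err && err.code === 11000) {
            return next(new restify.ConflictError('Resource already exists.'));
        }
        // ...and let everything else fall through as an internal error.
        return next(err);
    });
});

// One central place to observe and log every error that reached next(err).
server.on('restifyError', function (req, res, err, callback) {
    req.log.error({ req_id: req.id() }, err.message);
    return callback();
});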
