How to make a connection with an MQTT Mosquitto broker in React? - javascript

I am trying to make a connection between my React web app and a Mosquitto broker that runs in Docker. To do this I am using the MQTT.js library.
Here is my code so far:
const mqtt = require('mqtt')
const client = mqtt.connect('tcp://172.19.0.4:1883')
console.log('mounted');
client.on('connect', function () {
  console.log('connected')
  client.subscribe('/powertest', function (err) {
    if (!err) {
      // client.publish('presence', 'Hello mqtt')
      console.log('error')
    }
  })
})
client.on('message', function (topic, message) {
  // message is Buffer
  console.log(message.toString())
  // client.end()
})
This code is in the componentDidMount lifecycle method. When I run it, it shows no errors, but it never enters the 'connect' handler. All I see in the console is the "mounted" log message.
How do I make it connect to the broker?

Let's put the subscriber and the publisher in the same file:
var mqtt = require('mqtt')
var client = mqtt.connect('mqtt://test.mosquitto.org')
client.on('connect', function () {
  client.subscribe('presence', function (err) {
    if (!err) {
      client.publish('presence', 'Hello mqtt')
    }
  })
})
client.on('message', function (topic, message) {
  // message is Buffer
  console.log(message.toString())
  client.end()
})
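Note that if this code runs in the browser (as it does inside a React component), MQTT.js cannot open a raw TCP connection; the broker has to expose a WebSocket listener and the client has to use a ws:// URL. A minimal sketch, assuming Mosquitto has a websockets listener on port 9001 (the host and port here are assumptions, not from the original question):
// Browser-side sketch, assuming Mosquitto exposes a WebSocket listener,
// e.g. "listener 9001" + "protocol websockets" in mosquitto.conf.
// Host and port are placeholders.
const mqtt = require('mqtt')
const client = mqtt.connect('ws://172.19.0.4:9001')

client.on('connect', function () {
  console.log('connected over websockets')
  client.subscribe('/powertest')
})

client.on('message', function (topic, message) {
  console.log(topic, message.toString())
})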

Related

Node `net` module IPC server intermittent

Following the official Node documentation for the net and child_process modules, I achieved this: a server that spawns a child and connects to it through the net module. But the connection is intermittent. The code is self-explanatory, but I've added details in the code comments:
// server.js
const childProcess = require('child_process').fork('child.js');
const server = require('net').createServer((socket) => {
  console.log('got socket connection'); // this callback is intermittent
  socket.on('data', (stream) => {
    console.log(stream.toString());
  })
});
server.on('connection', () => {
  console.log('someone connected to server'); // this runs only if the callback above runs (but it's intermittent)
});
server.on('listening', () => {
  console.log('server is listening'); // this is the first log to execute
  childProcess.send('server', server); // send the server handle to the forked child
});
server.listen(null, function () {
  console.log('server listen callback'); // this is the second log to execute
});

// child.js
console.log('forked'); // this is the third log to execute
const net = require('net');
process.on('message', (m, server) => {
  if (m === 'server') {
    const socket = net.connect(server.address());
    socket.on('ready', () => {
      console.log('child is ready'); // this is the fourth log to execute
      socket.write('child first message'); // this always runs
    })
  }
});
The expected log when executing node server is:
server is listening
server listen callback
forked
child is ready
got socket connection
someone connected to server
child first message
but as the socket callback (in createServer) is intermittent, we get this 50% of the time:
server is listening
server listen callback
forked
child is ready
IDK what to do anymore, already tried everything I could... What am I doing wrong?
Just found the problem... When I read the documentation I misunderstood it: sending the net server to the child process is meant to share connections so the processing can be divided across more than one process, while what I was trying to achieve was just two-way communication with the forked child. I'll leave this answer here in case someone runs into the same problem. This is the final code:
// server.js
const childProcess = require('child_process').fork('child.js');
const server = require('net').createServer((socket) => {
  console.log('got socket connection');
  socket.on('data', (stream) => {
    console.log(stream.toString());
  })
});
server.on('connection', () => {
  console.log('someone connected to server');
});
server.listen(null, function () {
  childProcess.send(server.address());
});

// child.js
console.log('forked');
const net = require('net');
process.on('message', (message) => {
  if (message.port) {
    const socket = net.connect(message);
    socket.on('ready', () => {
      console.log('child is ready');
      socket.write('child first message');
    })
  }
});
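If all you need is two-way communication with the forked child, the IPC channel that fork() already creates can carry the messages directly, without opening a TCP socket at all. A minimal sketch of that approach (not part of the original answer, just an assumption about what a simpler setup could look like):
// server.js -- messages flow over the fork() IPC channel
const childProcess = require('child_process').fork('child.js');
childProcess.on('message', (msg) => {
  console.log('from child:', msg);
});
childProcess.send('hello child');

// child.js
process.on('message', (msg) => {
  console.log('from parent:', msg);
  process.send('child first message');
});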

Writing and reading data between two Node.js servers using node-serialport

I would like to send "Hello world" from one Node.js server to another using node-serialport. I have verified that the radios connecting the two are connected and sending info, because they keep displaying buffer information after running my current code.
Here is what I have so far.
server1
// Import dependencies
const SerialPort = require("serialport");
const Readline = require("@serialport/parser-readline");
var sf = require('sf');

// List the available serial ports (the old callback-style SerialPort.list was replaced by the promise API)
SerialPort.list().then(ports => {
  ports.forEach(function (port) {
    console.log(port.path);
    console.log(port.pnpId);
    console.log(port.manufacturer);
  });
});

// Defining the serial port
const port = new SerialPort('COM3', { baudRate: 9600 }, function (err) {
  if (err) {
    return console.log('Port Error: ', err.message)
  }
})

port.write('main screen turn on', function (err) {
  if (err) {
    return console.log('Error on write: ', err.message)
  }
  console.log('message written')
})

// Read data that is available but keep the stream in "paused mode"
port.on('readable', function () {
  console.log('Data:', port.read())
})

// Switches the port into "flowing mode"
port.on('data', function (data) {
  console.log('Data:', data)
})

// Pipe the data into another stream (like a parser or standard out)
const lineStream = port.pipe(new Readline())
lineStream.on('data', console.log)
server 2
// Import dependencies
// in Ubuntu need to run command: sudo chmod 666 /dev/ttyS0 to open port for use
const SerialPort = require("serialport");
const Readline = require("@serialport/parser-readline");
var stoploop = true;

// Defining the serial port
const port = new SerialPort('/dev/ttyUSB0', function (err) {
  if (err) {
    return console.log('Error: ', err.message)
  }
})

port.write('chicken butt', function (err) {
  if (err) {
    return console.log('Error on write: ', err.message)
  }
  console.log('message written')
})
// port.write("hello?");

// Read data that is available but keep the stream in "paused mode"
port.on('readable', function () {
  console.log('Data:', port.read())
})

// Switches the port into "flowing mode"
port.on('data', function (data) {
  console.log('Data:', data)
})

// Pipe the data into another stream (like a parser or standard out)
const lineStream = port.pipe(new Readline())
Any help, or even an example of how to send "Hello world" between the two, would be greatly appreciated! Please let me know if any more info is needed.
Edit: I recently tried doing something like
port.on('data', (data) => {
  try {
    console.log(data.toString());
  } catch (err) {
    console.log('Oops');
  }
});
This takes data that used to appear as <Buffer # # # # #> and turns it into an odd string like "(
)))) ) ) )))
!)☺)!))) ) )
)(☺!�"
I found the answer myself!
I was using the wrong baudRate, and I also needed to stringify the data being sent as a JSON string.
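A rough sketch of what that fix could look like on both sides (the port names and baud rate are assumptions; the key points are that both ports use the same baudRate and that the payload is line-delimited JSON):
// server1.js (e.g. on COM3) -- baud rate must match the other side
const SerialPort = require("serialport");
const sender = new SerialPort('COM3', { baudRate: 9600 });
sender.write(JSON.stringify({ message: 'Hello world' }) + '\n');

// server2.js (e.g. on /dev/ttyUSB0) -- same baudRate, parse line-delimited JSON
const SerialPort = require("serialport");
const Readline = require("@serialport/parser-readline");
const receiver = new SerialPort('/dev/ttyUSB0', { baudRate: 9600 });
const parser = receiver.pipe(new Readline({ delimiter: '\n' }));
parser.on('data', (line) => {
  try {
    console.log(JSON.parse(line).message); // "Hello world"
  } catch (err) {
    console.log('could not parse:', line);
  }
});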

Calling a function defined inside a callback

I am having difficulty understanding JavaScript-style function definitions.
I have a WebSocket server:
var WebSocketServer = require('ws').Server,
    wss = new WebSocketServer();
wss.on('connection', function (ws) {
  ws.on('message', function (message) {
    console.log('received: %s from client', message);
  })
  ws.send('hello client');
})
And I want to send a message to the connected clients when another function importantEvent() is called.
function importantEvent(message) {
  // send message to the connected client
  ws.send('hello client');
}
What is a good way of calling ws.send() inside importantEvent()?
I use an EventEmitter for things like this, to avoid tight coupling between modules.
// eventManager.js
const EventEmitter = require('events');
class EventManager extends EventEmitter {}
const eventManager = new EventManager();
module.exports = eventManager;

// websocket.js
var eventManager = require('./eventManager');
var WebSocketServer = require('ws').Server,
    wss = new WebSocketServer();
wss.on('connection', function (ws) {
  ...
})
function broadcast(data, filter) {
  wss.clients.forEach( ... ) // depends on the web-socket library you are using
}
eventManager.on('my-event', function (arg1, arg2) {
  broadcast({ arg1, arg2 })
});

// in any other file
var eventManager = require('./eventManager');
eventManager.emit('my-event', 'hello', 'world');
It's a matter of which clients you want to reach. In the wss.on('connection', ...) event, the ws received by the callback is the client that is connecting to the server in that event.
Since you want to send a message to all connected clients, you need to broadcast:
var WebSocket = require('ws'),
    WebSocketServer = WebSocket.Server,
    wss = new WebSocketServer();

// Broadcast to all connected clients.
wss.broadcast = function broadcast(data) {
  wss.clients.forEach(function each(client) {
    if (client.readyState === WebSocket.OPEN) {
      client.send(data);
    }
  });
};

function importantEvent(message) {
  wss.broadcast(message);
}
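With this in place, any code that can see importantEvent can push a message to every open client, for example:
importantEvent('hello all clients');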

Unsubscribe from RethinkDB outside a function in Node.js

I'm using Socket.io and RethinkDB to push realtime data in Node.js.
Subscribing to the stream works, but when the user disconnects I can't figure out how to unsubscribe from RethinkDB.
Here's my code:
Part of app.js:
// Adding socket.io
app.io = require('socket.io')();
var feed;
// On connection to the socket, just invoking the function.
app.io.on('connection', function (socket) {
  console.log('Client connected...');
  feed = require('./feed')(socket);
  socket.on('disconnect', function () {
    console.log('Got disconnect!');
    // Here I'd like to unsubscribe
  });
});
feed.js:
var r = require('rethinkdb');
var dbConfig = require('./config/database');
module.exports = function (socket) {
  var connection = r.connect(dbConfig)
    .then(function (connection) {
      r.db('Minicall').table('Message').changes().run(connection, function (err, cursor) {
        if (err) {
          console.log(err);
        }
        cursor.each(function (err, row) {
          console.log(JSON.stringify(row));
          if (Object.keys(row).length > 0) {
            console.log("send");
            socket.emit("msgFeed", { "timestamp": row.new_val.timestamp, "message": row.new_val.message, "ric": row.new_val.ric });
          }
        });
      });
    });
};
So, how can I stop the subscription (connection.stop()) when socket.on('disconnect') gets called? It's probably an easy solution, since I'm totally new to Node and JS.
You can have more than one listener for an event, so inside the run callback you can add a disconnect listener that calls cursor.close():
r.db('Minicall')
  .table('Message')
  .changes()
  .run(connection, function (err, cursor) {
    if (err) {
      console.log(err);
    }
    cursor.each(function (err, row) {
      console.log(JSON.stringify(row));
      if (Object.keys(row).length > 0) {
        console.log("send");
        socket.emit("msgFeed", { "timestamp": row.new_val.timestamp, "message": row.new_val.message, "ric": row.new_val.ric });
      }
    });
    socket.on('disconnect', function () {
      cursor.close();
    });
  });
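If you also want to free the database connection itself when the client goes away, the RethinkDB driver exposes connection.close() as well. A small sketch of how the disconnect handler could do both (an assumption about the surrounding feed.js, not code from the original answer):
socket.on('disconnect', function () {
  cursor.close();       // stop receiving changefeed results
  connection.close();   // optionally release the driver connection too
});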

Issue with socket.io updating component to show new message in chatroom

I am building a chat app with React, Node/Express and socket.io. I have my sockets successfully attached to my Express server via http.createServer. I have a listener on client and server listening for new messages coming into the chat room. Ideally, I want each instance of the chat to be updated when there is an additional message, like any chat room that ever existed :)
Now I have a successful listen between client and server. I know because of a console.log server-side. However, I am not re-rendering the chat component when I submit a new message from a different instance.
So my code in my client-side (again, React) component is as follows. I am using the Socket.IO CDN with script tags in my index.html (script tags not shown):
Socket CDN here
var socket = io('')
So that is the socket you see client-side:
componentDidMount() {
  return axios.get(`api/messages`)
    .then((result) => {
      if (result.data.length) {
        this.setState({
          messages: [...this.state.messages, ...result.data]
        }, () => {
          console.log("The state after messages are mounted : ", this.state)
        })
      }
    })
    .catch((err) => { throw err })
  socket.on('new message', msg => {
    this.newMessage(msg);
  })
};
newMessage(msg) {
  this.setState({
    messages: [...this.state.messages, msg]
  }, () => {
    this.setState({ message: '' })
    return this.scrollToBottom()
  });
};

onSubmitMessage(event) {
  event.preventDefault();
  const content = this.state.message;
  const msg = {
    content,
    createdAt: new Date(),
    userId: "one",
    chatRoomId: "two"
  }
  axios.post(`api/messages/`, msg)
    .then(() => {
      this.newMessage(msg);
      socket.emit('new message', msg); // HERE'S THE SOCKETS IN ACTION
    })
};
Here is the server-side code (Node/Express):
//in server.js
const io = new socketIo(server)
require('./socketEvents')(io);
const connections = [];
Then a separate file for my socket events
// in socketEvents.js
module.exports = (io) => {
  io.on('connection', (socket) => {
    console.log("Beautiful sockets are connected")
    socket.once('disconnect', () => {
      console.log("socket is disconnected");
    });
    // DOESN'T DO ANYTHING YET
    socket.on('join global', (username) => {
      socket.join(username);
      console.log("New user in the global chat : ", username)
    });
    socket.on('new message', (msg) => {
      console.log("The new message from sockets : ", msg);
      socket.emit('new message', msg.content);
    });
  });
}
My server-side sockets are linked up with the client. I'm just not seeing new messages in other instances. Is it because I'm not re-rendering after the server receives the message?
Thanks in advance, please let me know if you need me to clarify anything.
Cheers!
I figured it out... I'm going to leave this post up with a walkthrough in an attempt to help others who are having trouble with sockets. I may post a blog about it. Will update if I do.
So on the client side, the message is sent from inside my onSubmitMessage function.
onSubmitMessage(event) {
  event.preventDefault(); // prevents the HTML <form> from doing its own post
  const content = this.state.message;
  // Create message object
  const msg = {
    content,
    createdAt: new Date(),
    userId: "one",
    chatRoomId: "two"
  }
  // HERE'S THE IMPORTANT PART!!!
  axios.post(`api/messages/`, msg)
    .then(() => {
      // wrapped in a promise, send a handler to the server called
      // ('new message') with the message object
      this.newMessage(msg);
      socket.emit('new message', msg);
    })
    .then(() => {
      // Another promise .then waits for the handler to come back from the server
      // *****IMPORTANT*************
      // Then invoke newMessage to get the post on all sockets
      socket.on('message', (msg) => {
        this.newMessage(msg);
      })
    })
};
Now on the server side this is what's happening:
// This is where the listener is for the client-side handler
socket.on('new message', (msg) => {
  // broadcast.emit will send the msg object back to the client side and
  // post to every instance except the creator of the message
  socket.broadcast.emit('message', msg);
});
So the data path is, with (C) for client and (S) for server:
receive message object from user and -------->
(C)socket.emit('new message') -----> (S) socket.on('new message') -------> (S) socket.broadcast.emit('message') --------> (C)socket.on('message')
Back on the client side, I can invoke my newMessage function, which sets the message into state so I can display it.
I hope someone finds this useful! Surprisingly, this seems to go relatively unanswered on Stack. If anyone has any questions feel free to ask!
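One caveat (my own observation, not part of the original walkthrough): socket.on registers an additional listener every time it is called, so attaching 'message' inside the post promise will stack up handlers as more messages are sent. A common alternative is to register the listener once, for example in componentDidMount; a sketch under that assumption:
componentDidMount() {
  // Register once; every broadcast from the server lands here.
  socket.on('message', (msg) => {
    this.newMessage(msg);
  });
}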
