Socket.io Multiple messages emitted from one event - javascript

I've been looking for quite a while for a solution but haven't found anything yet.
I'm trying to emit a message from a server every time the server sees that a file has changed in a specified directory. However, instead of only emitting one message, it insists on emitting the same message three times. I am using chokidar to watch the directory, and inside of the 'change' event I emit the message.
Server side code:
var express = require('express')
  , app = express()
  , http = require('http')
  , server = http.Server(app)
  , io = require('socket.io')(server)
  , fs = require('fs')
  , chokidar = require('chokidar');

server.listen(1234);

app.use('/public', express.static(__dirname + '/public'));

app.get('/', function(request, response){
    var ipAddress = request.socket.remoteAddress;
    console.log("New express connection from: " + ipAddress);
    response.sendfile(__dirname + '/public/index.html'); // Serve client
});
var watcher = chokidar.watch("temp", {ignored: /[\/\\]\./, persistent: true});

watcher.on('change', function(path){
    console.log(path + " has changed.");
    fs.readFile(path, 'utf8', function(err, data){
        if (err) {
            return console.log(err);
        }
        else
        {
            var json = JSON.parse(data), recPsec, type;
            recPsec = json.data[0].values[0];
            type = json.data[0].values[16];
            var compiled = {
                "recPsec": recPsec,
                "type": type
            };
            var jsonMessage = JSON.stringify(compiled);
            io.sockets.emit('message', jsonMessage); // already a JSON string, so no second stringify
            console.log("Sent message");
        }
    });
});
watcher.on('unlink', function(path){
    console.log('File: ', path, ' has been removed');
});
watcher.on('add', function(path){
    console.log("hi");
    fs.readFile(path, 'utf8', function(err, data){
        if (err) {
            return console.log(err);
        }
        else
        {
            var json = JSON.parse(data), recPsec, type;
            recPsec = json.data[0].values[0];
            type = json.data[0].values[16];
            var compiled = {
                "recPsec": recPsec,
                "type": type
            };
            var jsonMessage = compiled;
            io.sockets.emit('message', JSON.stringify(jsonMessage));
            console.log("message sent");
        }
        //fs.unlinkSync(path);
    });
});
Client Side:
var socket = io.connect('http://localhost');
socket.on('message', function(data){
    console.log(data);
    var parsed = JSON.parse(data);
    recPsecNew = parsed.recPsec;
    typeNew = parsed.type;
    analyze(recPsecNew, typeNew);
});
I am using socket.io in conjunction with express 4.
Chokidar is found here: https://github.com/paulmillr/chokidar
Logs from the console if I change the name of a file twice are shown here: http://s000.tinyupload.com/?file_id=95726281991906625675

Have you tried lodash's debounce function?
You can probably use lodash.debounce to collapse the repeated change events into one.
According to its docs:
_.debounce(func, [wait=0], [options])
Creates a debounced function that delays invoking func until after wait milliseconds have elapsed since the last time the debounced function was invoked. The debounced function comes with a cancel method to cancel delayed invocations. Provide an options object to indicate that func should be invoked on the leading and/or trailing edge of the wait timeout. Subsequent calls to the debounced function return the result of the last func invocation.
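As a minimal sketch (assuming lodash is installed via npm install lodash; the 500 ms wait is an arbitrary choice), the question's 'change' handler could be wrapped like this:
var _ = require('lodash');

// chokidar often fires several 'change' events for a single save;
// debouncing collapses the burst into one invocation
watcher.on('change', _.debounce(function (path) {
    console.log(path + " has changed.");
    // read the file and emit the socket.io message here,
    // exactly as in the original 'change' handler
}, 500));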

Related

Running node-rdkafka code in server

I'm running the below node-rdkafka code in Eclipse as Node.js application. This is the sample code from https://blizzard.github.io/node-rdkafka/current/tutorial-producer_.html
I want to run this in a test server and call from iOS Mobile application.
I knew about running node.js app in AWS.
Question I: Are there any other options to run it in a free test server environment, like Tomcat?
Question II: Even if I am able to run this Node.js app on a server, how do I call it from a mobile application? Do I need to call producer.on('ready', function(arg)), or what function do I need to call from the mobile app?
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);

var producer = new Kafka.Producer({
    'metadata.broker.list': 'localhost:9092',
    'dr_cb': true
});

var topicName = 'MyTest';

//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
    console.log(log);
});

//logging all errors
producer.on('event.error', function(err) {
    console.error('Error from producer');
    console.error(err);
});

//counter to stop this sample after maxMessages are sent
var counter = 0;
var maxMessages = 10;

producer.on('delivery-report', function(err, report) {
    console.log('delivery-report: ' + JSON.stringify(report));
    counter++;
});

//Wait for the ready event before producing
producer.on('ready', function(arg) {
    console.log('producer ready.' + JSON.stringify(arg));
    for (var i = 0; i < maxMessages; i++) {
        var value = new Buffer('MyProducerTest - value-' + i);
        var key = "key-" + i;
        // if partition is set to -1, librdkafka will use the default partitioner
        var partition = -1;
        producer.produce(topicName, partition, value, key);
    }
    //need to keep polling for a while to ensure the delivery reports are received
    var pollLoop = setInterval(function() {
        producer.poll();
        if (counter === maxMessages) {
            clearInterval(pollLoop);
            producer.disconnect();
        }
    }, 1000);
});

/*
producer.on('disconnected', function(arg) {
    console.log('producer disconnected. ' + JSON.stringify(arg));
});*/

//starting the producer
producer.connect();
First of all, you need an HTTP server. ExpressJS can be used. Then, just tack on the Express code basically at the end, but move the producer loop into the request route.
So, start with what you had:
var Kafka = require('node-rdkafka');
//console.log(Kafka.features);
//console.log(Kafka.librdkafkaVersion);

var producer = new Kafka.Producer({
    'metadata.broker.list': 'localhost:9092',
    'dr_cb': true
});

var topicName = 'MyTest';

//logging debug messages, if debug is enabled
producer.on('event.log', function(log) {
    console.log(log);
});

//logging all errors
producer.on('event.error', function(err) {
    console.error('Error from producer');
    console.error(err);
});

producer.on('delivery-report', function(err, report) {
    console.log('delivery-report: ' + JSON.stringify(report));
});

//Wait for the ready event before producing
producer.on('ready', function(arg) {
    console.log('producer ready.' + JSON.stringify(arg));
});

producer.on('disconnected', function(arg) {
    console.log('producer disconnected. ' + JSON.stringify(arg));
});

//starting the producer
producer.connect();
Then, you can add this in the same file.
var express = require('express');
var app = express();

app.get('/', (req, res) => res.send('Ready to send messages!'));

app.post('/:maxMessages', function (req, res) {
    if (req.params.maxMessages) {
        var maxMessages = parseInt(req.params.maxMessages);
        for (var i = 0; i < maxMessages; i++) {
            var value = new Buffer('MyProducerTest - value-' + i);
            var key = "key-" + i;
            // if partition is set to -1, librdkafka will use the default partitioner
            var partition = -1;
            producer.produce(topicName, partition, value, key);
        } // end for
    } // end if
    res.send('Queued ' + req.params.maxMessages + ' messages'); // respond so the client request doesn't hang
}); // end app.post()

app.listen(3000, () => console.log('Example app listening on port 3000!'));
I don't think the poll loop is necessary since you don't care about the counter anymore.
Now, connect your mobile app to http://<your server IP>:3000/ and send test messages with a POST request to http://<your server IP>:3000/10, for example, adjusting the path parameter to change the number of messages to send.
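As a rough sketch of what such a call looks like (assuming the server from the example is reachable on localhost:3000), a small Node test client could be:
var http = require('http');

// POST to /10 to queue ten messages via the route defined above
var req = http.request(
    { host: 'localhost', port: 3000, path: '/10', method: 'POST' },
    function (res) { console.log('status:', res.statusCode); }
);
req.on('error', function (err) { console.error(err); });
req.end();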
I might be late on this, but this is how I did it using promises, and I found it better than having a timeout, etc.
const postMessageToPublisher = (req, res) => {
  return new Promise((resolve, reject) => {
    producer.connect();
    producer.setPollInterval(globalConfigs.producerPollingTime);
    const requestBody = req.body;
    const topicName = requestBody.topicName;
    const key = requestBody.key || uuid();
    const partition = requestBody.partition || undefined;
    const message = Buffer.from(JSON.stringify(requestBody.data));
    /**
     * Actual messages are sent here when the producer is ready
     */
    producer.on(kafkaEvents.READY, () => {
      try {
        producer.produce(
          topicName,
          partition,
          message,
          key // key provided by the user, or a generated UUID
        );
      } catch (error) {
        reject(error);
      }
    });
    // Register listener for debug information; only invoked if debug option set in driver_options
    producer.on(kafkaEvents.LOG, log => {
      logger.info('Producer event log notification for debugging:', log);
    });
    // Register error listener
    producer.on(kafkaEvents.ERROR, err => {
      logger.error('Error from producer: ' + JSON.stringify(err));
      reject(err);
    });
    // Register delivery report listener
    producer.on(kafkaEvents.PUBLISH_ACKNOWLEDGMENT, (err, ackMessage) => {
      if (err) {
        logger.error(
          'Delivery report: failed sending message ' + ackMessage.value
        );
        logger.error('and the error is:', err);
        reject({ value: ackMessage.value, error: err });
      } else {
        resolve({
          teamName: globalConfigs.TeamNameService,
          topicName: ackMessage.topic,
          key: ackMessage.key.toString()
        });
      }
    });
  });
};
Please note that kafkaEvents contains my constants for the events we listen to; it is just a reference, e.g. kafkaEvents.LOG is the same as 'event.log'.
The calling function expects this to return a promise, so we use .then(data => 'send your response to the user from here') and .catch(error => 'send the error response to the user').
This is how I achieved it using promises.
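As a hedged sketch (the route path, the app object, and the status codes are my assumptions, not part of the original answer), the calling route could look like:
// An Express route that consumes the promise returned by postMessageToPublisher
// and maps resolve/reject onto HTTP responses
app.post('/publish', (req, res) => {
    postMessageToPublisher(req, res)
        .then(data => res.status(200).json(data))          // success details back to the user
        .catch(error => res.status(500).json({ error }));  // error response back to the user
});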

How to mock http.ServerResponse and http.IncomingMessage for express.static

I've had no trouble testing my own route handlers but in this case I want to test express's static handler. I can't for the life of me figure out why it's hanging. Clearly there's some callback I'm missing or some event I need to emit.
I tried to make the smallest example I could.
var events = require('events');
var express = require('express');
var stream = require('stream');
var util = require('util');

function MockResponse(callback) {
    stream.Writable.call(this);
    this.headers = {};
    this.statusCode = -1;
    this.body = undefined;
    this.setHeader = function(key, value) {
        this.headers[key] = value;
    }.bind(this);
    this.on('finish', function() {
        console.log("finished response");
        callback();
    });
};
util.inherits(MockResponse, stream.Writable);

MockResponse.prototype._write = function(chunk, encoding, done) {
    if (this.body === undefined) {
        this.body = "";
    }
    this.body += chunk.toString(encoding !== 'buffer' ? encoding : undefined);
    done();
};

function createRequest(req) {
    var emitter = new events.EventEmitter();
    req.on = emitter.on.bind(emitter);
    req.once = emitter.once.bind(emitter);
    req.addListener = emitter.addListener.bind(emitter);
    req.emit = emitter.emit.bind(emitter);
    return req;
};

describe('test', function() {
    var app;
    before(function() {
        app = express();
        app.use(express.static(__dirname));
    });
    it('gets test.js', function(done) {
        var req = createRequest({
            url: "http://foo.com/test.js",
            method: 'GET',
            headers: {
            },
        });
        var res = new MockResponse(responseDone);
        app(req, res);
        function responseDone() {
            console.log("done");
            done();
        }
    });
});
Setup:
mkdir foo
cd foo
mkdir test
cat > test/test.js # copy and paste code above
^D
npm install express
npm install mocha
node node_modules/mocha/bin/mocha --recursive
it just times out.
What am I missing?
I also tried making the request a Readable stream. No change:
var events = require('events');
var express = require('express');
var stream = require('stream');
var util = require('util');

function MockResponse(callback) {
    stream.Writable.call(this);
    this.headers = {};
    this.statusCode = -1;
    this.body = undefined;
    this.setHeader = function(key, value) {
        this.headers[key] = value;
    }.bind(this);
    this.on('finish', function() {
        console.log("finished response");
        callback();
    });
};
util.inherits(MockResponse, stream.Writable);

MockResponse.prototype._write = function(chunk, encoding, done) {
    if (this.body === undefined) {
        this.body = "";
    }
    this.body += chunk.toString(encoding !== 'buffer' ? encoding : undefined);
    done();
};

function MockMessage(req) {
    stream.Readable.call(this);
    var self = this;
    Object.keys(req).forEach(function(key) {
        self[key] = req[key];
    });
}
util.inherits(MockMessage, stream.Readable);

MockMessage.prototype._read = function() {
    this.push(null);
};

describe('test', function() {
    var app;
    before(function() {
        app = express();
        app.use(express.static(__dirname));
    });
    it('gets test.js', function(done) {
        var req = new MockMessage({
            url: "http://foo.com/test.js",
            method: 'GET',
            headers: {
            },
        });
        var res = new MockResponse(responseDone);
        app(req, res);
        function responseDone() {
            console.log("done");
            done();
        }
    });
});
I've still been digging. Looking inside serve-static, I see it creates a Readable stream by calling fs.createReadStream and pipes it to the response. It does, effectively:
var s = fs.createReadStream(filename);
s.pipe(res);
So trying that myself works just fine:
it('test stream', function(done) {
    var s = fs.createReadStream(__dirname + "/test.js");
    var res = new MockResponse(responseDone);
    s.pipe(res);
    function responseDone() {
        console.log("done");
        done();
    }
});
I thought maybe it's something about express waiting for the input stream to finish, but that doesn't seem to be it either. If I consume the mock input stream with the response, it works just fine:
it('test msg->res', function(done) {
    var req = new MockMessage({});
    var res = new MockResponse(responseDone);
    req.pipe(res);
    function responseDone() {
        console.log("done");
        done();
    }
});
Any insight into what I might be missing would be helpful.
Note: while suggestions for 3rd party mocking libraries are appreciated, I'm still really looking to understand what I'm missing to do it myself. Even if I eventually switch to some library, I still want to know why this isn't working.
I found two issues that prevent the finish callback from being executed.
First, serve-static uses the send module, which creates a file readstream from the path and pipes it to the res object. But that module uses the on-finished module, which checks whether the finished attribute is set to false on the response object; otherwise it destroys the file readstream, so the filestream never gets a chance to emit a data event.
Second, Express initialization overwrites the response object's prototype, so default stream methods such as end() are replaced by the http response prototype:
exports.init = function(app){
    return function expressInit(req, res, next){
        ...
        res.__proto__ = app.response;
        ...
    };
};
To prevent this, I added another middleware right before the static middleware to reset it back to the MockResponse prototype:
app.use(function(req, res, next){
    res.__proto__ = MockResponse.prototype; // change it back to MockResponse prototype
    next();
});
Here are the changes made to make it work with MockResponse:
...
function MockResponse(callback) {
    ...
    this.finished = false; // so `on-finished` module doesn't emit finish event prematurely
    // required because of 'send' module
    this.getHeader = function(key) {
        return this.headers[key];
    }.bind(this);
    ...
};
...
describe('test', function() {
    var app;
    before(function() {
        app = express();
        // another middleware to reset the res object
        app.use(function(req, res, next){
            res.__proto__ = MockResponse.prototype;
            next();
        });
        app.use(express.static(__dirname));
    });
    ...
});
EDIT:
As @gman pointed out, it is possible to use direct properties instead of prototype methods. In that case the extra middleware to overwrite the prototype isn't necessary:
function MockResponse(callback) {
    ...
    this.finished = false; // so `on-finished` module doesn't emit finish event prematurely
    // required because of 'send' module
    this.getHeader = function(key) {
        return this.headers[key];
    }.bind(this);
    ...
    // using direct properties for _write, write, end - since all these are changed when the prototype is changed
    this._write = function(chunk, encoding, done) {
        if (this.body === undefined) {
            this.body = "";
        }
        this.body += chunk.toString(encoding !== 'buffer' ? encoding : undefined);
        done();
    };
    this.write = stream.Writable.prototype.write;
    this.end = stream.Writable.prototype.end;
};
It appears my answer is not complete. For some reason the app works only if the file is not found. The first thing to do when debugging is the following in your shell (or cmd):
export DEBUG=express:router,send
Then run the test; you'll get more info.
Meanwhile, I am still looking into this; for now, ignore my answer below.
----------- ignore this till I verify that it does work -----------
It seems like express.static does not favor the absolute path you give it (__dirname).
Try:
app.use(express.static('.'));
and it will work. Note that your current dir for the mocha runner is 'test/'.
I have to admit this is quite a mystery. I tried 'fooling' it by doing:
app.use(express.static(__dirname + '/../test'))
but still it didn't work. Even specifying a full path did not solve this. Strange.

calling a socket inside another socket using node.js and socket.io

I am trying to use socket.io and node.js like this:
The browser sends an event to socket.io; in the data event I call another server to get some data, and I would like to send a message back to the browser using socket.emit.
It looks like this:
socket.on('ask:refresh', function (socket) {
    const net = require("net");
    var response = new net.Socket();
    response.setTimeout(1000);
    response.get_response = function (command, port, host) {
        this.connect(port, host, function () {
            console.log("Client: Connected to server");
        });
        this.write(JSON.stringify({ "command": command }));
        console.log("Data to server: %s", command);
    };
    response.on("data", function (data) {
        var ret = data.toString();
        var tmp_data = JSON.parse(ret.substring(0, ret.length - 1).toString());
        var data = new Object();
        var date = new Date(tmp_data.STATUS[0].When * 1000);
        data.date = date.toString();
        socket.emit('send:refresh', JSON.stringify(data));
    });
    response.get_response("version", port, host);
});
The thing is that I cannot access socket.emit inside response.on.
Could you please explain how I can get a handle on this?
Thanks a lot.
You appear to be overwriting the actual socket with one of the callback parameters:
socket.on('ask:refresh', function(socket) {
    // this inner `socket` shadows the outer one
});
Change the name of your callback variable, and you won't have this problem.
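For illustration, a minimal sketch of that rename (the parameter name payload is arbitrary):
socket.on('ask:refresh', function (payload) { // renamed from `socket`
    const net = require("net");
    var response = new net.Socket();
    response.on("data", function (data) {
        // `socket` now refers to the outer socket.io socket, so emit works
        socket.emit('send:refresh', JSON.stringify({ ok: true }));
    });
    // ... connect and write to the remote server as in the question
});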

Node.JS run Sandbox in REST service

I am using the Node Restify module to create a REST service that accepts POST. Within the service I am trying to create a sandboxed process using the Node Sandbox module, because we will be running dynamically inserted JavaScript, and if something goes wrong I don't want it to affect the main Node instance.
When I try to create the Sandbox, something goes wrong and causes the REST service to come back empty.
Here is my code:
var restify = require('restify');
var Sandbox = require("sandbox");
var logic;

function createSandbox(body) {
    var s = new Sandbox();
    s.run("1 + 1", function(output) {
        logic = body.names + " has " + output.result;
    });
}

function respond(req, res, next) {
    createSandbox(req.body);
    res.send(logic);
}

var server = restify.createServer();
server.use(restify.bodyParser({
    mapParams: false
}));

server.post('/hello/:name', respond);
server.head('/hello/:name', respond);

server.listen(8080, function() {
    console.log('%s listening at %s', server.name, server.url);
});
In my HTTP request I have {"names":"rob"} in the body.
I am expecting the following response:
rob has 2
------------UPDATE-------------------
This works:
var restify = require('restify');
var Sandbox = require("sandbox");
var logic;

function respond(req, res, next) {
    var s = new Sandbox();
    s.run("1 + 1", function(output) {
        logic = req.body.names + " has " + output.result;
        res.send(logic);
    });
}

var server = restify.createServer();
server.use(restify.bodyParser({
    mapParams: false
}));

server.post('/run/:id', respond);
server.head('/run/:id', respond);

server.listen(8080, function() {
    console.log('%s listening at %s', server.name, server.url);
});
Sandbox.run() is asynchronous. It just sets the sandbox up to run at a later time and returns immediately, before the code it sandboxes is actually run, so you're reading logic before it's set.
A quick demo:
var Sandbox = require("sandbox");

function createSandbox() {
    var s = new Sandbox();
    s.run("1 + 1", function(output) {
        console.log("inside");
    });
}

createSandbox();
console.log("outside");
> outside
> inside

Dynamic Namespaces Socket.IO

How can I use dynamic namespaces in socket.io?
I'm looking in the (poor) documentation, and it says that namespaces must be used like this:
io.of('/news')
io.of('/akfda')
To use a namespace you do io.of("/namespace").
Do I need to register every single namespace in the server? Maybe I want a namespace for dynamic content.
How can I do something like:
io.of('/:something/:id')
Socket.IO supports 'rooms' (https://github.com/LearnBoost/socket.io/wiki/Rooms), which you can use instead of namespaces. Also, when you need dynamic routes (and you're using Express in your app), the best way is to use Express's own routing engine:
Best way to do dynamic routing with Express.js (node.js)
Using routes in Express-js
http://expressjs.com/api.html#app.routes
http://shtylman.com/post/expressjs-re-routing/
http://jordanhoff.com/post/22602013678/dynamic-express-routing
However, if you still think you need dynamic namespaces in socket.io, here is a small example of how it can be implemented:
User-side:
var connect = function (ns) {
    return io.connect(ns, {
        query: 'ns=' + ns,
        resource: "socket.io"
    });
}

var socket = connect('/user/12');
Server-side:
var url = require('url')
  , events = require('events')
  , ev = new events.EventEmitter();

// <ns name>: <ns regexp>
var routes = {
    // /user/:id
    'user': '^\\/user\\/(\\d+)$',
    // /:something/:id
    'default': '^\\/(\\w+)\\/(\\d+)$'
};

// global entry point for new connections
io.sockets.on('connection', function (socket) {
    // extract namespace from connected url query param 'ns'
    var ns = url.parse(socket.handshake.url, true).query.ns;
    console.log('connected ns: ' + ns);

    for (var k in routes) {
        var routeName = k;
        var routeRegexp = new RegExp(routes[k]);
        // if connected ns matched with route regexp
        if (ns.match(routeRegexp)) {
            console.log('matched: ' + routeName);
            // create new namespace (or use previously created)
            io.of(ns).on('connection', function (socket) {
                // fire event when socket connecting
                ev.emit('socket.connection route.' + routeName, socket);
                // #todo: add more if needed
                // on('message') -> ev.emit(...)
            });
            break;
        }
    }
    // when nothing matched
    // ...
});

// event when socket connected in 'user' namespace
ev.on('socket.connection route.user', function () {
    console.log('route[user] connecting..');
});

// event when socket connected in 'default' namespace
ev.on('socket.connection route.default', function () {
    console.log('route[default] connecting..');
});
I hope this will help you!
I would use "rooms" to support your dynamic content.
Server Side
var server = require('http').createServer(),
    io = require('socket.io')(server);

io.on('connection', function(socket){
    var room = socket.handshake['query']['r_var'];
    socket.join(room);
    console.log('user joined room #' + room);

    socket.on('disconnect', function() {
        socket.leave(room);
        console.log('user disconnected');
    });

    socket.on('chat message', function(msg){
        io.to(room).emit('chat message', msg);
    });
});

server.listen(3000);
Client Side
var socket_connect = function (room) {
    return io('localhost:3000', {
        query: 'r_var=' + room
    });
}

var random_room = Math.floor((Math.random() * 2) + 1);
var socket = socket_connect(random_room);
socket.emit('chat message', 'hello room #' + random_room);
....
As of version 2.1.1 I was able to make it work with this:
wss.of((nsp, query, next) => {
    const { token } = query;
    // Do your authentication or whatever here...
    // If success
    next(null, true);
}).on('connect', (socket) => {
    // socket connected to your namespace
});
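On the client side (the namespace name and token here are purely illustrative), connecting might look roughly like:
// connect to a dynamically matched namespace, passing the token
// the middleware above expects as a query parameter
const socket = io('/orders', { query: { token: 'my-auth-token' } });

socket.on('connect', () => {
    console.log('connected to dynamic namespace /orders');
});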
Server
var MAX_CLIENTS = 5;
var namespace_queue = [];

function searchObjectOnArray(nameKey, myArray) {
    for (var i = 0; i < myArray.length; i++) {
        if (myArray[i].id === nameKey) {
            return myArray[i];
        }
    }
}

function createNamespace(data){
    var ns = {
        //id: require('node-uuid')(),
        id: data.name,
        clients: 0,
    };
    namespace_queue.push(ns);
    return ns;
}

createNamespace({name: 'primer'});

io.of('').on('connection', function(socket){
    console.log('-' + socket.id);

    /// Welcome the new client
    socket.emit('Welcome', {SocketId: socket.id});

    socket.on('JoinToApp', function (data, callback) {
        var namespaceToConnect = searchObjectOnArray(data.namespace, namespace_queue);
        if (namespaceToConnect.clients <= MAX_CLIENTS) {
            var dynamicNamespace = io.of('/' + namespaceToConnect.id);
            dynamicNamespace.on('connection', function(ns_socket){
                console.log('user connected to ' + namespaceToConnect.id);
                dynamicNamespace.emit('hi', 'everyone!');
            });
            namespaceToConnect.clients++;
        }
        callback({namespaces: namespace_queue});
    });

    socket.on('createNamespace', function(data, join_cb){
        createNamespace(data);
        join_cb({message: 'Namespace created'});
    });
});
Client
<input id="namespaceInput" type="text" placeholder="New namespace name">
<input id="namespaceToConnect" type="text" placeholder="namespace to connect">
<button onclick="javascript: createNamespace()">Create Namespace</button>
<button onclick="javascript: joinToNamespace()">Connect to Namespace</button>
<script src="https://cdn.socket.io/socket.io-1.4.5.js"></script>
<script>
var socket = null;
(function(){
socket = io.connect('http://localhost:3000/');
})()
function createNamespace(){
var namespaceName = document.getElementById("namespaceInput").value;
socket.emit('createNamespace', {name : namespaceName}, function(data){
alert(data.message);
})
}
function joinToNamespace(){
var name = document.getElementById("namespaceToConnect").value;
socket.emit('JoinToApp', {namespace: name}, function(data){
console.log('Namespaces created:');
console.log(data)
var ns_socket = io.connect('http://localhost:3000/' + name);
ns_socket.on('connect',function(){
console.log('joined namespace ' + name);
});
ns_socket.on('hi', function(data){
console.log('hi ' + data)
})
});
}
</script>
More details on: https://ingcamilorodriguez.wordpress.com/2016/06/21/como-hacer-namespaces-dinamicos-en-socket-io/
Here is one way. Here is a socket.io subclass I created to solve the problem:
https://github.com/PencilCode/dynamic.io
That subclass adds dynamic namespaces as well as virtual hostname support (each host can go into its own namespace tree if you like). That repo has some examples.
Here is a universal socket.io listener that listens to every namespace requested, and logs a message for every socket that connects. You could listen to a different regexp to listen to any subset of namespaces.
It works with the standard socket.io client library without any modification.
var DynamicServer = require('dynamic.io');

io = DynamicServer({
    host: true,        // Enable virtual host handling
    publicStatus: true // Enable /socket.io/status page.
});

// Any number of namespace patterns can be set up.
// This is an example of a single catch-all pattern.
io.setupNamespace(/.*/, function(nsp) {
    nsp.on('connect', function(socket) {
        console.log('a socket connected on', nsp.fullname());
    });
    nsp.expire(function() {
        console.log(nsp.fullname(), 'is expiring');
    });
});

io.listen(8888);
On newer versions you can use something like io.of(/^\/\w+$/).on('connection', (socket) => ...), where /^\/\w+$/ is a regular expression that allows the connection if the namespace matches.
Here is a full example of how to use this to set up many namespaces; here I assume the only concern is preventing emits from reaching other namespaces.
const workspaces = {};

io.of(/^\/\w+$/).on('connection', (socket) => {
    const workspace = socket.nsp;
    const namespace = workspace.name;
    console.log("New Connection NameSpace", namespace);
    // you can test here if "namespace" is allowed to be used

    // if event handlers are already set, no need to go further
    if (workspaces[namespace]) return;

    // save workspace to prevent setting event handlers on each connection
    workspaces[namespace] = workspace;

    workspace.on("connection", (socket) => {
        console.log(`${namespace} > connection from ${socket.id}`);
        // set the event handlers same as a normal socket
        socket.on('event-1', (msg) => {
            console.log("event-1", msg);
        });
    });
});
