I'm using Socket.io and RethinkDB to push realtime data with Node.js.
Subscribing to the changefeed works, but when the user disconnects I can't figure out how to unsubscribe from RethinkDB.
Here's my code:
Part of app.js:
// Adding socket.io
app.io = require('socket.io')();

var feed;

// On connection to the socket, just invoke the function.
app.io.on('connection', function(socket) {
  console.log('Client connected...');
  feed = require('./feed')(socket);

  socket.on('disconnect', function() {
    console.log('Got disconnect!');
    // Here I'd like to unsubscribe
  });
});
feed.js:
var r = require('rethinkdb');
var dbConfig = require('./config/database');

module.exports = function(socket) {
  var connection = r.connect(dbConfig)
    .then(function (connection) {
      r.db('Minicall').table('Message').changes().run(connection, function(err, cursor) {
        if (err) {
          console.log(err);
        }
        cursor.each(function(err, row) {
          console.log(JSON.stringify(row));
          if (Object.keys(row).length > 0) {
            console.log("send");
            socket.emit("msgFeed", {
              "timestamp": row.new_val.timestamp,
              "message": row.new_val.message,
              "ric": row.new_val.ric
            });
          }
        });
      });
    });
};
So, how can I stop the subscription (something like connection.stop()) when socket.on('disconnect') gets called? It's probably an easy solution, since I'm totally new to Node and JS.
You can attach more than one listener to an event, so inside the run callback you can add a disconnect listener that calls cursor.close():
r.db('Minicall')
  .table('Message')
  .changes()
  .run(connection, function(err, cursor) {
    if (err) {
      console.log(err);
      return;
    }
    cursor.each(function(err, row) {
      console.log(JSON.stringify(row));
      if (Object.keys(row).length > 0) {
        console.log("send");
        socket.emit("msgFeed", {
          "timestamp": row.new_val.timestamp,
          "message": row.new_val.message,
          "ric": row.new_val.ric
        });
      }
    });

    // Close the changefeed cursor when this client disconnects
    socket.on('disconnect', function() {
      cursor.close();
    });
  });
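If you also want to tear down the RethinkDB connection itself when the client goes away (not just the cursor), one variation is to do the cleanup inside feed.js. This is only a sketch reusing the names from the question, not the only way to structure it:

// feed.js - sketch that also closes the RethinkDB connection on disconnect.
var r = require('rethinkdb');
var dbConfig = require('./config/database');

module.exports = function(socket) {
  r.connect(dbConfig).then(function (connection) {
    r.db('Minicall').table('Message').changes().run(connection, function(err, cursor) {
      if (err) {
        return console.log(err);
      }
      cursor.each(function(err, row) {
        if (err) {
          return console.log(err);
        }
        socket.emit("msgFeed", {
          "timestamp": row.new_val.timestamp,
          "message": row.new_val.message,
          "ric": row.new_val.ric
        });
      });

      socket.on('disconnect', function() {
        cursor.close();       // stop the changefeed
        connection.close();   // and release the database connection
      });
    });
  });
};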
I am trying to create a chat lobby where users can create a private room, and others can join it through a UUID. This is built with Node.js and Socket.IO, and it seems as if io.to(room).emit('event', 'data') and io.sockets.to(room).emit('event', 'data') do not work, while io.emit('event', 'data') works. My code can be found below:
io.sockets.on('connection', (socket) => {
  socket.on('createRoom', function(data) {
    let room = new Room(data).create();
    let id = room.data.uuid;
    socket.join(id);

    io.to(id).emit('roomcreated', {data: data, msg: 'Room Created'}); // Does not work!
    io.emit('roomcreated', {data: data, msg: 'Room Created'});        // This works
  });
});
A common problem with .join() is that it is actually asynchronous; it does not complete immediately. This is probably because it's designed to work with the multi-process, redis-based adapter that supports clustering, where the join has to be asynchronous because it's communicating with other processes.
You can fix your issue by passing a callback to .join() and only emitting to the room AFTER the join has finished.
io.sockets.on('connection', (socket) => {
  socket.on('createRoom', function(data) {
    let room = new Room(data).create();
    let id = room.data.uuid;

    socket.join(id, (err) => {
      if (err) {
        // Do something here if the join fails
        console.log(err);
        return;
      }
      // Call this only after the join has completed
      io.to(id).emit('roomcreated', {data: data, msg: 'Room Created'});
    });
  });
});
I'm not entirely sure what's not working with io.in(id).emit('roomCreated', {d: data}), but I found a workaround.
I created an object of all sockets called SOCKET_LIST, and an array for each room that holds the ids of its sockets:
var SOCKET_LIST = {};

io.sockets.on('connection', (socket) => {
  socket.id = Math.random();
  SOCKET_LIST[socket.id] = socket;

  socket.on('createRoom', function(data) {
    let room = new Room(data).create();
    room.data.users.push(socket.id);
    let id = room.data.uuid;

    socket.join(id, (err) => {
      if (err) {
        return console.log(err);
      }
      room.data.users.forEach(i => {
        SOCKET_LIST[i].emit('roomcreated', {data: data, msg: 'Room Created'});
      });
    });
  });
});
I run a SELECT statement on sqlite3 in Node.js. I'd like to have the result in the variable "data", which is defined outside the sqlite callback, but it stays empty. Within the sqlite callback the data variable has the correct value. Does anybody know what I'm doing wrong?
Thank you.
/* Client connects via socket.io */
io.on('connection', function(client) {
  console.log('Client connected');

  /* Client needs data... */
  client.on('needData', function(fields) {
    var data = [];
    var sql_stmt = "SELECT ...";

    if (fs.existsSync(db_file)) {
      try {
        var db = new sqlite3.Database(db_file);
        db.all(sql_stmt, function(err, all) {
          data = all;
          console.log(data); // --> data has valid values
        });
        db.close();
        console.log(data); // --> data is empty
      }
      catch(e) {
        console.log("Error with database. Error: ", e);
      }
    }
    else {
      console.log("Database file not found.");
    }

    client.emit('data', data);
  });
});
This happens because of the asynchronous nature of Node.js; you can handle it with promises or callbacks.
I recommend using the waterfall method of the async module:
var async = require('async');

/* Client connects via socket.io */
io.on('connection', function(client) {
  console.log('Client connected');

  /* Client needs data... */
  client.on('needData', function(fields) {
    async.waterfall([function(next) {
      var sql_stmt = "SELECT ...";
      if (fs.existsSync(db_file)) {
        try {
          var db = new sqlite3.Database(db_file);
          db.all(sql_stmt, function(err, all) {
            next(null, all);
          });
          db.close();
        } catch (e) {
          console.log("Error with database. Error: ", e);
          next();
        }
      } else {
        console.log("Database file not found.");
        next();
      }
    }], function(err, data) {
      if (!err) {
        client.emit('data', data);
      }
    });
  });
});
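Since the answer also mentions promises, here is an alternative sketch that wraps the sqlite3 call in a Promise instead of using async.waterfall. It reuses the client, fs, sqlite3, and db_file names from the question and is only one way to structure it:

/* Alternative sketch: wrap the sqlite3 call in a Promise and emit afterwards. */
client.on('needData', function(fields) {
  var sql_stmt = "SELECT ...";

  new Promise(function(resolve, reject) {
    if (!fs.existsSync(db_file)) {
      return reject(new Error("Database file not found."));
    }
    var db = new sqlite3.Database(db_file);
    db.all(sql_stmt, function(err, all) {
      db.close();
      if (err) return reject(err);
      resolve(all);
    });
  })
  .then(function(data) {
    client.emit('data', data); // emit only after the query has finished
  })
  .catch(function(e) {
    console.log("Error with database. Error: ", e);
  });
});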
I am struggling to send a stream of data being consumed via pusher-client-node to the client using Socket.IO.
I am receiving my data in Node.js like this:
var API_KEY = 'cb65d0a7a72cd94adf1f';
var pusher = new Pusher(API_KEY, {
  encrypted: true
});

var channel = pusher.subscribe("ticker.160");
channel.bind("message", function (data) {
  //console.log(data);
});
My data, which comes in continuously, looks like this:
{ channel: 'ticker.160',
  trade: {
    timestamp: 1420031543,
    datetime: '2014-12-31 08:12:23 EST',
    marketid: '160',
    topsell: { price: '0.00007650', quantity: '106.26697381' }
  }
}
My Socket.IO code looks like this:
/**
 * Socket.io
 */
var io = require("socket.io").listen(server, {log: true});

var users = [];

var stream = channel.bind("message", function (data) {
  console.log(data);
});
io.on("connection", function (socket) {
// The user it's added to the array if it doesn't exist
if(users.indexOf(socket.id) === -1) {
users.push(socket.id);
}
// Log
logConnectedUsers();
socket.emit('someevent', { attr: 'value' } )
stream.on("newdata", function(data) {
// only broadcast when users are online
if(users.length > 0) {
// This emits the signal to the user that started
// the stream
socket.emit('someevent', { attr: 'value' } )
}
else {
// If there are no users connected we destroy the stream.
// Why would we keep it running for nobody?
stream.destroy();
stream = null;
}
});
// This handles when a user is disconnected
socket.on("disconnect", function(o) {
// find the user in the array
var index = users.indexOf(socket.id);
if(index != -1) {
// Eliminates the user from the array
users.splice(index, 1);
}
logConnectedUsers();
});
});
// A log function for debugging purposes
function logConnectedUsers() {
console.log("============= CONNECTED USERS ==============");
console.log("== :: " + users.length);
console.log("============================================");
}
I am quite new to Node.js and Socket.IO and struggle to use my Pusher stream. Therefore my question: how do I connect my Socket.IO code with my Pusher code?
You need to use Socket.IO rooms:
server:
var channel = pusher.subscribe("ticker.160"); // subscribe to pusher

// Pass messages from pusher on to the matching room in socket.io
channel.bind("message", function (data) {
  io.to('ticker.160').emit('room-message', { room: 'ticker.160', data: data });
});

...

io.on("connection", function (socket) {
  ...

  socket.on('join', function(room) {
    socket.join(room);
  });

  socket.on('leave', function(room) {
    socket.leave(room);
  });
});
client:
io.emit('join', 'ticker.160');

io.on('room-message', function(message) {
  switch (message.room) {
    case 'ticker.160': return doSomething(message.data);
    ...
  }
});
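For clarity, `io` on the client above is the connected socket object. A slightly fuller client-side sketch, assuming the standard socket.io-client script is loaded and doSomething is your own handler, might look like this:

// Client side: connect, join the room, and handle room messages.
var socket = io();                  // connect to the server that serves this page

socket.emit('join', 'ticker.160');  // ask the server to put this socket in the room

socket.on('room-message', function(message) {
  switch (message.room) {
    case 'ticker.160':
      doSomething(message.data);    // your own handler for ticker updates
      break;
  }
});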
I am attempting to use Node.js with the Tedious (http://pekim.github.io/tedious/) SQL Server module to make multiple database calls. My intent is to:
1. Open a connection
2. Start a transaction
3. Make multiple database (stored procedure) calls, which will not return any data.
4. Commit transaction (or roll back on error).
5. Close connection
Here is an example .js file (without using a transaction) for Node.js where I am attempting to make multiple database calls, and it fails with the error "Requests can only be made in the LoggedIn state, not the SentClientRequest state." Nothing I try resolves this issue.
Does anyone know how to resolve this?
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;

var config = {
  userName: 'login',
  password: 'password',
  server: '127.0.0.1',
  options: { rowCollectionOnDone: true }
};

var max = 1;
for (var i = 0; i < max; i++) {
  var connection = new Connection(config);

  function executeStatement() {
    request = new Request("select 42, 'hello world'", function (err, rowCount) {
      if (err) {
        console.log(err);
      } else {
        console.log(rowCount + ' rows');
      }
    });

    request.on('row', function (columns) {
      columns.forEach(function (column) {
        console.log(column.value);
      });
    });

    request.on('doneInProc', function (rowCount, more, rows) {
    });

    request.on('doneProc', function (rowCount, more, rows) {
      console.log('statement completed!')
      connection.execSql(request);
    });

    request.on('returnStatus', function (status) {
      console.log('statement completed!')
    });

    connection.execSql(request);
  }

  connection.on('connect', function (err) {
    // If no error, then good to go...
    executeStatement();
  });
}

console.log('Done!');
You're trying to execute a statement on a connection that is not established. You're missing an error handler before you call executeStatement.
connection.on('connect', function (err) {
  if (err) {
    console.log(err); // replace with your code
    return;
  }

  // If no error, then good to go...
  executeStatement();
});
Edit:
How to execute multiple statements in a transaction in serial:
var statements = ["select 1", "select 2", "select 3"];

var transaction = new sql.Transaction(connection);
transaction.begin(function(err) {
  // ... error checks

  async.mapSeries(statements, function(statement, next) {
    var request = new sql.Request(transaction);
    request.query(statement, next);
  }, function(err, results) {
    // ... error checks

    transaction.commit(function(err, recordset) {
      // ... error checks
      console.log("Transaction committed.");
    });
  });
});
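Note that this snippet relies on variables not shown above. Assuming it uses the mssql wrapper around Tedious and the async module, the missing requires would look roughly like this:

// Assumed setup for the snippet above (not shown in the original answer):
// the mssql wrapper around Tedious and the async module.
var sql = require('mssql');    // provides sql.Transaction and sql.Request
var async = require('async');  // provides async.mapSeries

// `connection` is assumed to be an mssql connection that has already been
// created and connected elsewhere with your config object.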
You should use a Tedious connection pool to create a pool of multiple connections.
For Node.js, an npm module is available at: https://www.npmjs.com/package/tedious-connection-pool
For every new value in the for loop you can acquire a new connection, and use connection.reset on the doneInProc event; a minimal sketch of this pattern follows below.
What your code is doing now is performing the first iteration of the for loop correctly (LoggedIn state), but because you proceed without closing or releasing the connection, you keep using the same connection object (SentClientRequest state).
Hence that object is still in the busy state when the code reaches the second iteration of the for loop.
Hope this resolves your issue.
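A minimal sketch of that per-iteration acquire pattern with tedious-connection-pool might look like the following. The config values are placeholders, and it releases each connection back to the pool when its request completes rather than resetting it:

var ConnectionPool = require('tedious-connection-pool');
var Request = require('tedious').Request;

// Placeholder pool and connection config; substitute your own values.
var pool = new ConnectionPool({ min: 2, max: 10 }, {
  userName: 'login',
  password: 'password',
  server: '127.0.0.1'
});

var max = 5;
for (var i = 0; i < max; i++) {
  // Acquire a pooled connection for each iteration instead of reusing one.
  pool.acquire(function (err, connection) {
    if (err) {
      return console.log(err);
    }
    var request = new Request("select 42, 'hello world'", function (err, rowCount) {
      if (err) {
        console.log(err);
      }
      // Hand the connection back to the pool once this request is done.
      connection.release();
    });
    connection.execSql(request);
  });
}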
You can use Tedious connection pools: https://github.com/pekim/tedious-connection-pool
As #zevsuld and #mannutech said, tedious-connection-pool will enable multiple connections, and prevent erring out when simultaneous requests come into your server.
Below is a generic example that allows you to write multiple queries within one connection pool, and expose them for use in your api. I'm just adding this in case others come along who are trying to accomplish this type of implementation.
const ConnectionPool = require('tedious-connection-pool');
const path = require('path');
require('dotenv').config({
  path: path.join(__dirname, '../../.env')
});

let Request = require('tedious').Request;

let poolConfig = {
  min: 10,
  max: 50,
  log: true
};

let connectionConfig = {
  userName: process.env.user,
  password: process.env.password,
  server: process.env.server
};

// Create the pool.
let pool = new ConnectionPool(poolConfig, connectionConfig);

pool.on('error', function(err) {
  console.error(err);
});

// At this point in the code, we have established a connection pool. If you run node,
// you'll see it log out all the connections to your database.

// Let's add some methods which your server might use in fulfilling requests to various endpoints.
let query1 = (cb, res, query) => {
  // Acquire a connection:
  pool.acquire(function(err, connection) {
    if (err) {
      console.error(err);
      return;
    } else {
      // Form your query.
      let sql_query = `SELECT column1, column2 from TABLE WHERE column1 LIKE '${query.param}%%' ORDER BY column1 ASC`;

      // Use the connection as usual:
      let request = new Request(sql_query, (err, rowCount) => {
        if (err) {
          console.log(err);
          return;
        } else {
          // console.log('rowCount:', rowCount);
        }
        // Release the connection back to the pool when finished.
        connection.release();
      });

      let records = [];
      request.on("row", function(columns) {
        let rowArray = [];
        columns.forEach(function(column) {
          rowArray.push(column.value);
        });
        records.push(rowArray);
      });

      request.on("doneInProc", function() {
        cb(records, res);
      });

      // Lastly, execute the request on the open connection.
      connection.execSql(request);
    }
  });
};
let query2 = (cb, res, query) => {
  // Acquire a connection:
  pool.acquire(function(err, connection) {
    if (err) {
      console.error(err);
      return;
    } else {
      // Form your query.
      let sql_query = `SELECT column3, column4 from TABLE2 WHERE column3 LIKE '${query.param}%%' ORDER BY column3 ASC`;

      // Use the connection as usual:
      let request = new Request(sql_query, (err, rowCount) => {
        if (err) {
          console.log(err);
          return;
        } else {
          // console.log('rowCount:', rowCount);
        }
        // Release the connection back to the pool when finished.
        connection.release();
      });

      let records = [];
      request.on("row", function(columns) {
        let rowArray = [];
        columns.forEach(function(column) {
          rowArray.push(column.value);
        });
        records.push(rowArray);
      });

      request.on("doneInProc", function() {
        cb(records, res);
      });

      // Lastly, execute the request on the open connection.
      connection.execSql(request);
    }
  });
};

// Let's expose these two functions to the rest of your API:
module.exports = {
  query1,
  query2
};
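As a usage example (Express, the './queries' module path, the /search route, and the sendRecords helper are all assumptions here, not part of the answer above), an endpoint could call query1 like this:

// Hypothetical usage of query1 from an Express route.
const express = require('express');
const { query1 } = require('./queries'); // the module exported above (path is an assumption)

const app = express();

// Hypothetical callback: write the collected records to the HTTP response.
function sendRecords(records, res) {
  res.json(records);
}

app.get('/search', (req, res) => {
  // query1(cb, res, query) expects a `query` object with a `param` property.
  query1(sendRecords, res, { param: req.query.term });
});

app.listen(3000);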
I'm using connect-domain and connect-redis. The code below checks for a cached value in the Redis database.
function redis_get(key, req, res) {
  var redisClient = redis.createClient();
  redisClient.get(redisKey, function (err, data) {
    if (err) {
      console.log("Error in RedisDB");
    }
    else if (data == null) {
      // Calling external function
    }
    else {
      // Calling external function
    }
    redisClient.quit(); // Not working
  });
}
When the cache is not available I'm calling an external function. I want the Redis connection to be closed once the cache check has been done.
redisClient.quit() // Not working
Any help on this will be really helpful.
Thanks
The code below works fine without any problem. Check the status reply in the quit callback; if you get a status of 'OK', the method is working fine.
var redis = require('redis');

var redisClient = redis.createClient();
redisClient.get('name', function (err, data) {
  if (err) {
    console.log("Error in RedisDB");
  }
  else if (data == null) {
    console.log('null');
  }
  else {
    console.log(data);
  }
  redisClient.quit(redis.print);
});
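If you want to inspect the reply from quit() yourself instead of using redis.print, a small sketch (same node_redis client as above) could pass an explicit callback:

// Minimal sketch: check the reply from quit() explicitly.
var redis = require('redis');
var redisClient = redis.createClient();

redisClient.get('name', function (err, data) {
  if (err) {
    console.log("Error in RedisDB");
  } else {
    console.log(data);
  }

  redisClient.quit(function (err, reply) {
    // node_redis replies with 'OK' once the connection is closing cleanly.
    console.log('quit reply:', reply); // expect 'OK'
  });
});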