How to stop Node.js from printing data to console? - javascript

I have this Code in node
// Serve ./public on port 3000 and share the port with socket.io.
var io = require('socket.io'), connect = require('connect');
var pg = require('pg');
var connectionString = "Some connection string";
var app = connect().use(connect.static('public')).listen(3000);
var chat_room = io.listen(app);
// Latest notification rows, refreshed by the poller below.
var DataArrayRows=[];
// Poll the database every 10 s and cache the result.
setInterval(function(){
pg.connect(connectionString, function(err, client, done) {
// NOTE(review): err is never checked; a failed connect/query would throw
// on client.query / result.rows — confirm error handling in the real app.
client.query('select * from jctsl_get_all_notifications_v1()', function(err, result) {
DataArrayRows=result.rows;
done(); // release the pooled client back to pg
});
});
},10000);
// Broadcast the cached rows to all clients on connect and every 10 s.
// (The console output the asker sees comes from socket.io's default
// 'log level' of 3 — debug — not from any console.log in this file.)
chat_room.sockets.on('connection',function(socket){
chat_room.sockets.emit('entrance',{message:DataArrayRows});
setInterval(function(){
chat_room.sockets.emit('entrance',{message:DataArrayRows});
},10000);
});
In the code, I haven't written any console.log() statement, but still, on every request to the database, the returned rows are displayed in the console window.
How can I stop node from doing that?

Set the log level lower
var chat_room = io.listen(app);
// 0 = errors only; stops socket.io 0.9's debug dump of every packet.
chat_room.set('log level', 0);
The log level defaults to 3
The amount of detail that the server should output to the logger.
0 - error
1 - warn
2 - info
3 - debug

Related

NodeJS - how to write to a file all the console.log?

On my CentOS 7.x server I am running Node (v6.7.0 and v0.10.36).
forever start /home/www/html/server/mynode.js
which runs following:
/usr/bin/node /home/www/html/server/mynode.js
CODE of mynode.js:
var http = require('http');
var net = require('net');
var url = require('url');
// Stand-alone socket.io server on port 3004.
var io = require('socket.io').listen(3004);
io.set('log level', 1); // errors and warnings only
io.sockets.on('connection', function (socket) {
socket.on('disconnect', function () {
try{
// NOTE(review): `db` is not defined anywhere in this snippet, so this
// stringify throws a ReferenceError, which the catch below logs.
console.log(JSON.stringify(db));
} catch(dis) {
console.log(dis);
}
});
});
How do I tell Node.js or Linux to keep a log, so that I can watch what's going on with tail -f /var/log/mynode.log?
You can overwrite your console.log
var fs = require('fs');
var util = require('util');
var trueLog = console.log;
// Redirect console.log output to a file.
// util.format reproduces console.log's own formatting — it handles
// multiple arguments, %s/%d/%j placeholders, and object inspection,
// which the original single-`msg` version silently dropped — and the
// trailing '\n' keeps one entry per line so `tail -f` stays readable.
console.log = function() {
var msg = util.format.apply(util, arguments);
fs.appendFile("/tmp/log.log", msg + "\n", function(err) {
if(err) {
// On write failure, fall back to the real console so the error is seen.
return trueLog(err);
}
});
//trueLog(msg); //uncomment if you want logs
}
Just put this snippet on top of your nodejs code.
There's an option for capturing logs
forever -o path/to/logfile start /home/www/html/server/mynode.js
From the docs -o OUTFILE Logs stdout from child script to OUTFILE

get count collection with socket.io-mongodb

I'm trying to make a simple task.
First, on the client side, I'm sending data to the server and then inserting it into my MongoDB database.
Then I try to get the count of clients from my database.
var express = require('express');
var MONGO_URL = "mongodb://localhost:27017/mydatabase";
var app = express(),
server = require('http').createServer(app),
io = require('socket.io').listen(server),
mongo = require('mongodb').MongoClient,
fs = require('fs');
var countUserSuscribed =0;
// here I insert data
/* Connection events */
io.on('connection', function (client) {
console.log("User connected");
client.on('InsertNewUser', function (newUser) {
console.log("we ar in InsertNewUser event");
//////////////////////////////////////////////////////////
mongo.connect(MONGO_URL, function (err, db) {
console.log("we are connected to mongodb");
var Users = db.collection('User');
// (French: "we create the collection and do the insert")
console.log("on crée la collection et on fait l'ajout");
// NOTE(review): `myP` is not defined in this snippet — presumably it
// should be the `newUser` payload; verify against the real code.
Users.insert({ player: myP }, function (err, o) {
if (err) { console.warn(err.message); }
else { console.log("user inserted into db: user"); }
});
});
})
});
//GET COUNT USER
console.log("here we get count user");
mongo.connect(MONGO_URL, function (err, db) {
// BUG (the subject of this question): `Users` is only in scope inside
// the other connect callback above, and count() is asynchronous — it
// delivers the number to a callback rather than returning it, so this
// assignment cannot work. See the answer below.
countUserSuscribed = Users.count();
console.log("we got " + countUserSuscribed + " user in mongoDB");
});
With this code I can create collections and insert documents, but the count function doesn't work, and I didn't find much explanation in the npm documentation.
Is it possible to use others mongodb functions than insert and collection with socket.io-mongodb ?
If it is, can someone give an example or explain me how to use it?
The count function works but is async function and takes a callback.
here's the fix:
// count() is asynchronous: the number arrives in the callback, not as the
// return value, so assigning count()'s result to countUserSuscribed (as
// the original line did) stores undefined. Capture it in the callback:
Users.count(function (err, c) {
if (err) { return console.warn(err.message); }
countUserSuscribed = c;
console.log(c);
});
https://www.npmjs.com/package/mongodb-autoincrement consider using that. It keeps track of all inserted documents. Plus it has a handy feature to get the next count. For example, say you inserted two records; if you call next count it will show 3. Therefore, to get the total number of documents inserted, call get next count - 1. Make sense?
Sorry here is the correct one. https://www.npmjs.com/package/mongoose-auto-increment

Node js database connection in a separate module

I made node.js app that includes some REST services. Those services connect to a database (for example Oracle or DB2) to execute some query.
Since I'm a beginner in node.js programming, I have a question about my case:
What's the right way to access to a database? Is it better to have one connection reference while the app is running and use the same connection instance when REST services are called?
I found some examples that includes database connection in a separate module and use that module in app, something like that:
db2.js:
var db2 = require('ibm_db');
var db2ConnSettings = "DRIVER={DB2};DATABASE=mydb;HOSTNAME=localhost;UID=db2test;PWD=db2test;PORT=50000;PROTOCOL=TCPIP";
// BUG (the subject of this question): db2.open() is asynchronous — the
// usable connection is the `conn` argument of the callback, NOT open()'s
// return value. So db2Conn (and therefore module.exports) never holds a
// connection, which is why server.js sees undefined.
var db2Conn = db2.open(db2ConnSettings, function(err, conn) {
if (err)
return console.log(err);
});
module.exports = db2Conn;
server.js:
var express = require('express');
var app = express();
// Imports whatever db2.js exported at require() time — see db2.js for
// why this is not a live connection.
var db2Connection = require('./db2.js');
app.get('/data', function(req, res) {
console.log(db2Connection ); // logs undefined: the export captured open()'s return value, not the connection
// make some query
});
When this service is called, db2Connection is undefined. How come? How should I retrieve a DB2 connection from the db2.js file?
As said by #Sirko:
db2.js
var db2 = require('ibm_db');
var db2ConnSettings = "DRIVER={DB2};DATABASE=mydb;HOSTNAME=localhost;UID=db2test;PWD=db2test;PORT=50000;PROTOCOL=TCPIP";
var err, conn;
var callbacks = [];
module.exports = function(callback) {
// db2 module is called
if (err || conn) {
// connection has already been established
// (results of db2.open have been stored)
// callback immediately
callback(err, conn);
}
else {
// connection has not been established
// store the callback for when db connects
callbacks.push(callback);
}
};
db2.open(db2ConnSettings, function(_err, _conn){
// db has connected
err = _err; conn = _conn; // store results
var next_callback;
// array.pop() removed the last item from the array
// and returns it. if no items are left, returns null.
// so this loops through all stored callbacks.
while(next_callback = callbacks.pop()) {
// the removed item is stored in next_callback
next_callback(err, conn); // send connection results to callback
}
// no more items in callbacks to trigger
});
server.js
var express = require('express');
var app = express();
// NOTE(review): the exported function returns undefined, so db2Connection
// itself stays undefined — the usable handle is `conn` inside the callback.
var db2Connection = require('./db2.js')(function(err, conn) {
// triggered if the connection has already been established
// or as soon as it HAS been established
app.get('/data', function(req, res) {
console.log(conn);
// ...
});
});
For Oracle with node-oracledb it's simple to create and use a connection pool. Your app would just get a free connection from the pool whenever it handles an HTTP REST request. Look at webapp.js and webapppromises.js in the examples. Node-oracledb has a 'connection pool queue' (see doc) which handles connection load spikes. It also has a 'connection pool cache' (also see the doc) which makes it easy to access a pool created in a different module.

File transfer hangs (but without error) between node.js server and socket.io client

I want to transfer a file with node.js using socket.io (socket.io-client) and delivery.
I tried to do this on server-side:
//// server.js
var socket = require('socket.io').listen(5000);
var dl = require('delivery');
var fs = require('fs');
socket.on('connect', function() {
log( "Sockets connected" );
delivery = dl.listen(socket);
delivery.connect();
delivery.on('delivery.connect',function(delivery){
delivery.send({
name: 'file1.zip',
path : './file1.zip'
});
delivery.on('send.success',function(file){
console.log('File sent successfully!');
});
});
});
And this on client-side:
//// client.js
var io = require('socket.io-client');
var dl = require('delivery');
var fs = require('fs');
var socket = io.connect('http://localhost:5000');
socket.on('connection', function(socket){
var delivery = dl.listen(socket);
delivery.on('receive.success',function(file){
fs.writeFile(file.name, file.buffer, function(err) {
if(err) {
console.log('File could not be saved: ' + err);
} else {
console.log('File ' + file.name + " saved");
};
});
});
});
On execution, there is no error, but it hangs.
Server-side:
$ node server.js
info - socket.io started
debug - client authorized
info - handshake authorized Bbzo928wAyTjDX8v06Ic
debug - setting request GET /socket.io/1/websocket/Bbzo928wAyTjDX8v06Ic
debug - set heartbeat interval for client Bbzo928wAyTjDX8v06Ic
debug - client authorized for
debug - websocket writing 1::
debug - emitting heartbeat for client Bbzo928wAyTjDX8v06Ic
debug - websocket writing 2::
debug - set heartbeat timeout for client Bbzo928wAyTjDX8v06Ic
debug - got heartbeat packet
debug - cleared heartbeat timeout for client Bbzo928wAyTjDX8v06Ic
debug - set heartbeat interval for client Bbzo928wAyTjDX8v06Ic
And on client-side there is no output:
$ node client.js
Does anyone know what goes wrong?
First of all, the version of delivery in the NPM repository is out of date, and contains some bugs. You should install the latest version from GitHub:
$ npm install git+https://github.com/liamks/Delivery.js.git
Next, you're mixing up some client and server parts, specifically the events that each has to handle. This (somewhat, see below) works for me:
//// server.js
// Listen on port 5000 and push file1.zip to every client that connects,
// using the delivery plugin on top of socket.io.
var io = require('socket.io').listen(5000);
var dl = require('delivery');
var fs = require('fs');
io.sockets.on('connection', function(clientSocket) {
console.log('server: a new client connected');
var serverDelivery = dl.listen(clientSocket);
serverDelivery.on('delivery.connect', function(d) {
// Handshake complete — hand the file over.
var payload = {
name: 'file1.zip',
path : './file1.zip'
};
d.send(payload);
d.on('send.success', function(sentFile) {
console.log('File sent successfully!');
});
});
});
//// client.js
// Connect to the file server and receive incoming files via delivery.
var io = require('socket.io-client');
var dl = require('delivery');
var fs = require('fs');
var socket = io.connect('http://localhost:5000');
socket.on('connect', function() {
console.log('client: connected to server');
var clientDelivery = dl.listen(socket);
clientDelivery.connect();
clientDelivery.on('receive.success', function(receivedFile) {
// TODO: fs.writeFile(...);
});
});
However, it seems that the receive.success event is delivered to the client twice, at least for me. I'm not sure why (I'm not overly familiar with delivery).

node: custom emitter backing up when socket.io connection is established

I have a spawn function that is continuously outputting new information. I'm monitoring the output of that spawn via the 'data' event and emitted a custom emitter that I call 'updated' that emits whenever new data is received from the spawn. After each 'data' event occurs the data stored in partialData is emitted with the 'updated' emitter and then cleared.
This seems to work fine, but when socket.io is implemented, the 'data' event seems to run but the results in partialData don't get processed — they pile up on each other, and then several occurrences of the 'data' event get emitted at once. Why is this occurring and how do I resolve this issue? In the real application I'm using this in, JSON strings are being handled and the pileup is causing Node to crash.
This example is a simplified version of a larger application, but the symptoms are the same. The following example consists of a bash script that emits a timestamp every 20 milliseconds (usleep 20000), which should be run from the same directory as the Node code. After you launch the node command, the terminal will output the timestamp and length of partialData. You will notice a change in the length of partialData whenever you browse to 127.0.0.1:3000. This is the issue.
emitter.sh:
#!/bin/bash
# Print the current UNIX timestamp forever, pausing 20 ms between lines.
while true; do
  date +%s
  usleep 20000
done
emitter.js:
// Demo: spawn a shell script, re-emit each stdout chunk as an 'updated'
// event, and serve a page that opens a socket.io connection (which is
// what perturbs the stdout buffering the question asks about).
var express = require('/usr/local/lib/node_modules/express');
var http = require('http');
var spawn = require('child_process').spawn;
var events = require('events');
var util = require('util');
var app = express();
var server = http.createServer(app);
var io = require('/usr/local/lib/node_modules/socket.io').listen(server);
// EventEmitter "subclass" (via util.inherits below) that spawns `arg1`
// and re-emits its stdout chunks. Note: assigned without var/let — an
// implicit global.
runCommand = function (arg1) {
var self = this;
var partialData = '';
var cmd = spawn(arg1);
cmd.stdout.setEncoding('utf8');
// NOTE(review): a 'data' chunk is NOT guaranteed to be exactly one
// line — when other I/O (like socket.io) runs, several lines can be
// buffered into a single chunk. That is the "pileup" described in
// the question; see the answer below.
cmd.stdout.on('data', function(data) {
partialData += data.substr(0,data.length-1); // drop trailing newline
console.log('data: '+partialData.trim());
console.log('length: '+partialData.length);
partialData = '';
self.emit('updated', data);
});
}
util.inherits(runCommand, events.EventEmitter);
var result = new runCommand('./emitter.sh');
// Serve a page whose only job is to open a socket.io connection.
app.get('/', function(req, res){
res.send(
"<script src='/socket.io/socket.io.js'></script>\n"+
"<script>\n"+
"var socket=io.connect('http://127.0.0.1:3000');\n"+
"</script>\n"
);
});
server.listen(3000);
io.sockets.on('connection', function(webSocket) {
console.log('socket established');
});
There's no guarantee that the data event will be triggered every time your shell script has outputted a line; when other I/O takes place (like your socket.io example), the output is allowed to be buffered.
If you want line-support, you can use the readline module:
var cmd = spawn(arg1);
cmd.stdout.setEncoding('utf8');
// readline re-chunks the stream into whole lines, regardless of how the
// underlying 'data' events were buffered.
// NOTE(review): createInterface's second positional argument is the
// OUTPUT stream; passing cmd.stdin is harmless for read-only use but
// unusual — consider createInterface({ input: cmd.stdout }) instead.
var linereader = require('readline').createInterface(cmd.stdout, cmd.stdin);
linereader.on('line', function(line) {
...
});

Categories