I am currently working on an auction script using Node.js and Socket.IO. The site itself was developed with PHP and MySQL; I'm using Node.js and Socket.IO only for the live bidding process. The site will have 500-1000 logged-in users viewing a single page during the auction. Only one item is on auction at a time, and one item is sold per day.
I will be broadcasting (emitting) a countdown timer from the server to all clients. On the server side I use a recursive setTimeout() of one second (rather than setInterval()) to count down to the auction end time. Apart from this, the only other message sent across is the current bid, passed from a single client to the server and then broadcast to all. Is this a reliable way to do it, and will it handle the load on the server? When I tested with 500 users, the timer started hanging in the browsers.
Server.js
var cluster = require('cluster');
var app = require('express')();
//var http = require('http');
var https = require('https');
var socket = require('socket.io');
var redis = require('redis');
var redisAdapter = require('socket.io-redis');
var request = require('request');
var fs = require('fs');
var options = {
key: fs.readFileSync('keys/e1317_0f2c9_71565598d419e37e376ccef5c2827113.key'),
cert: fs.readFileSync('certs/e1317_0f2c9_1468152279_2dc46c1f2cc135a.crt'),
ca: fs.readFileSync('cabundles/90490a5c829d2aca24f22b5820864c6e_1935558000.cabundle')
};
//var server = http.createServer( app );
var server = https.createServer(options, app);
var io = socket.listen(server);
var port = process.env.PORT || 8080;
var workers = process.env.WORKERS || require('os').cpus().length;
var redisUrl = process.env.REDISTOGO_URL || 'redis://127.0.0.1:6379';
var redisOptions = require('parse-redis-url')(redis).parse(redisUrl);
var pub = redis.createClient(redisOptions.port, redisOptions.host, {
detect_buffers: true,
return_buffers: true,
auth_pass: redisOptions.password
});
var sub = redis.createClient(redisOptions.port, redisOptions.host, {
detect_buffers: true,
return_buffers: true,
auth_pass: redisOptions.password
});
io.adapter(redisAdapter({
pubClient: pub,
subClient: sub
}));
console.log('Redis adapter started with url: ' + redisUrl);
io.sockets.on('connection', function(client) {
//console.log('first');
client.on('nauction', function(data) {
io.sockets.emit('nauction', data);
});
});
io.on('connection', function(socket) {
//console.log('in');
console.log('connected client count:' + io.sockets.sockets.length);
var timer;
var recursive = function() {
    // NOTE: this loop is started once per connection, so every connected
    // socket triggers its own HTTP request every second
    if (io.sockets.sockets.length > 0) {
        request('https://www.example.com/file.php', function(error, response, body) {
            if (!error && response.statusCode == 200) {
                var data = JSON.parse(body);
                socket.volatile.emit('auction_data', {
                    'auction_data': data
                });
            } else {
                console.log(error);
            }
        });
    }
    timer = setTimeout(recursive, 1000);
};
recursive();
socket.on("disconnect", function() {
console.log('clear interval')
//clearInterval(interval);
clearTimeout(recursive);
});
});
if (cluster.isMaster) {
console.log('start cluster with %s workers', workers - 1);
workers--;
for (var i = 0; i < workers; ++i) {
var worker = cluster.fork();
console.log('worker %s started.', worker.process.pid);
}
cluster.on('exit', function(worker) { // 'death' was renamed to 'exit' in Node 0.8
    console.log('worker %s died. restarting...', worker.process.pid);
    cluster.fork();
});
} else {
start();
}
function start() {
server.listen(port, function() {
console.log('listening on *:' + port);
});
}
Client.js
socket.on('auction_data', function(auction_details) {
//console.log(auction_details);
$.each(auction_details, function(key, value) {
    // countdown formatting: zero-pad each unit below 10
    var pad = function(n) { return n < 10 ? '0' + n : n; };
    var days = value.auction_data.time.days;
    var hrs = pad(value.auction_data.time.hours);
    var mins = pad(value.auction_data.time.mins);
    var secs = pad(value.auction_data.time.secs);
    var tm = (days == 0 ? '' : days + ':') + hrs + ':' + mins + ':' + secs;
    $('#auction_' + value.auction_data.product_id + " .countdown").html(tm);
});
});
How can I fix this browser-hanging problem?
First question: Is this approach reliable?
Sending the time every second to every client is not necessary. Simply send each client the time on their first visit and use a local timer on their page to decrement it every second.
You should also check the server time on every bid (more secure).
If that is not secure enough for you, send the time along with the changing bid. You only have to send the current bid when it changes (using a broadcast) or when a user joins the site (send it just to them).
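In code, the local countdown could look roughly like this (a sketch; the 'auction_start' event name and the payload fields are assumptions, not part of the code above):
socket.on('auction_start', function(data) {
    // receive the remaining time once, then tick locally in the browser
    var secondsLeft = data.seconds_remaining; // assumed payload field
    var timer = setInterval(function() {
        secondsLeft--;
        if (secondsLeft <= 0) {
            clearInterval(timer);
            secondsLeft = 0;
        }
        var hrs = Math.floor(secondsLeft / 3600);
        var mins = Math.floor((secondsLeft % 3600) / 60);
        var secs = secondsLeft % 60;
        $('#auction_' + data.product_id + ' .countdown')
            .text(hrs + ':' + mins + ':' + secs);
    }, 1000);
});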
Second question: Will the server be able to handle the load?
Yes and no.
If your server is good enough (any $5 server with unmetered traffic would do), you should not get into trouble, unless your script is very bad and riddled with memory leaks.
Now a few tips:
Never trust user input - parse and validate it before you use it! (A sketch follows this list.)
Recalculate everything you receive from the client on the server.
Send the client only what it needs; it does not need information about things it does not use.
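Applied to the auction above, server-side validation of an incoming bid could look roughly like this (a sketch reusing the 'nauction' event from the question; currentBid and the payload shape are assumptions):
var currentBid = 0; // in reality this would be loaded from MySQL
io.on('connection', function(socket) {
    socket.on('nauction', function(data) {
        // never trust the client: parse and recheck every bid on the server
        var amount = parseFloat(data && data.bid);
        if (!isFinite(amount) || amount <= currentBid) {
            return; // ignore malformed or non-increasing bids
        }
        currentBid = amount;
        io.sockets.emit('nauction', { bid: currentBid });
    });
});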
Related
I'm having a serious issue with an app I'm building with Node.js and Express.
The app converts videos to MP3. When the video is small (up to 5 minutes long), everything works as expected: the HTTP server responds with a download button for the client.
But when the video is too big, the server prematurely closes the connection, and because I'm using the HTTP protocol, the client retries the request and this time receives ERR_EMPTY_RESPONSE after waiting for a while.
// assumed requires for the snippet below
var fs = require('fs');
var path = require('path');
var fse = require('fs-extra');

app.post('/', function(req, res) {
var obj_dlConvert = apart_dl_cv.dlConvert(req.body.yt_url,140,apart_dl_cv.generateDir()); //the function that download from youtube and convert
var lien = obj_dlConvert.link;
var dossier = obj_dlConvert.dossier;
var video_stream = obj_dlConvert.streame;
obj_dlConvert.processus.on('end', () =>{
fs.rename(path.join(__dirname,'uploads',dossier,dossier+'.mp3'), path.join(__dirname,'uploads',dossier,'video.mp3'), function(err) {
if (err) {
res.render('dlpage.hbs',{
renameError: true
});
}else res.render('dlpage.hbs',{
dossier: dossier,
fullLink: lien
});
});
});
req.on("close", function() {
obj_dlConvert.processus.kill();
obj_dlConvert.processus.on('error', () => {
if (fs.existsSync(path.join(__dirname,'uploads',dossier))){
fse.removeSync(path.join(__dirname,'uploads',dossier));
}
});
});
});
Serving video is not a one-time deal. There is a handshake between the browser and the server: the server needs to be able to provide the 'next' chunk when the browser asks for it. The following may be used as inspiration:
var fs = require("fs"),
http = require("http"),
url = require("url");
exports.serveVideo = function(req, res, file) {
    var range = req.headers.range;
    if (!range) { // no Range header: nothing to build a partial response from
        res.writeHead(416, { "Accept-Ranges": "bytes" });
        return res.end();
    }
    var positions = range.replace(/bytes=/, "").split("-");
    var start = parseInt(positions[0], 10);
fs.stat(file, function(err, stats) {
    if (err) { // file missing or unreadable
        res.writeHead(404);
        return res.end();
    }
    var total = stats.size;
var end = positions[1] ? parseInt(positions[1], 10) : total - 1;
var chunksize = (end - start) + 1;
res.writeHead(206, {
"Content-Range": "bytes " + start + "-" + end + "/" + total,
"Accept-Ranges": "bytes",
"Content-Length": chunksize,
"Content-Type": "video/mp4"
});
var stream = fs.createReadStream(file, { start: start, end: end })
.on("open", function() {
stream.pipe(res);
}).on("error", function(err) {
res.end(err.message); // res.end expects a string or Buffer, not an Error
});
});
}
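For completeness, here is one way the helper above might be wired up (an untested sketch; the module path and file name are assumptions):
var http = require("http");
var video = require("./serveVideo"); // the module shown above
http.createServer(function(req, res) {
    video.serveVideo(req, res, "uploads/video.mp4");
}).listen(3000);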
I was able to capture a raw request in Node.js using the source below.
var http = require('http');
var express = require('express');
var remote = express();
remote.use(function(req, res, next) {
req.socket.once('data', function(data) {
console.log(data.toString());
});
next();
});
remote.use(function(req, res, next) {
res.end('end');
});
http.createServer(remote).listen(8080);
But this source could only capture raw requests from the second request onward, because the first request was consumed before the event handler was bound. If a client does not use keep-alive, I cannot capture any request at all.
How can I capture every raw request, including the first one?
I found a way using 'connection' event on http.Server.
var http = require('http');
var express = require('express');
var remote = express();
remote.use(function(req, res, next) {
res.end('end');
});
var server = http.createServer(remote);
server.on('connection', function(socket) {
socket.on('data', function(chunk) {
console.log(chunk.toString());
});
});
server.listen(8080);
Hey, not sure if you're still having the problem. I recently had to solve this; after playing around with a lot of things, this is what I ended up with:
https://github.com/vincenzorm117/http-capture/blob/master/index.js
Here is the code just in case the link has any issues:
var PORT = process.env.PORT || 3000
const net = require('net')
const fs = require('fs')
if( !fs.existsSync('./payloads') ) {
fs.mkdirSync('./payloads');
}
var server = net.createServer((sock) => {
console.log(`Connected: ${sock.remoteAddress}`)
let filename = FileName(); // FileName takes no parameters
let wstream = fs.createWriteStream(`./payloads/${filename}`);
sock.on('data', wstream.write.bind(wstream));
sock.on('end', () => {
    wstream.end();
    console.log(`Disconnected: ${sock.remoteAddress}`)
});
setTimeout(() => {
if( !sock.destroyed ) {
sock.write('HTTP/1.1 200 OK\r\n\r\n'); // the blank line terminates the header block
sock.end();
}
}, 3000);
});
server.listen(PORT, 'localhost');
function FileName() {
    var d = new Date(),
        year = d.getFullYear(),
        month = d.getMonth() + 1, // getMonth() is zero-based
        date = d.getDate(),
        hour = d.getHours(),
        minutes = d.getMinutes(),
        seconds = d.getSeconds();
if( month < 10 ) month = '0' + month;
if( date < 10 ) date = '0' + date;
if( hour < 10 ) hour = '0' + hour;
if( minutes < 10 ) minutes = '0' + minutes;
if( seconds < 10 ) seconds = '0' + seconds;
return `request__${year}-${month}-${date}__${hour}-${minutes}-${seconds}.http`;
}
I was a bit lazy and set the server to kill the connection after 3 seconds instead of parsing the HTTP request. You can lower it to 1 second and it should still be fine.
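If you'd rather not wait at all, an alternative (a sketch, not part of the repo above; it assumes a Node version where Buffer#indexOf exists) is to reply as soon as the end of the request headers has been seen, which is enough for requests without a body:
// inside the net.createServer callback, instead of the 3-second timeout:
var buffered = new Buffer(0);
sock.on('data', function(chunk) {
    buffered = Buffer.concat([buffered, chunk]);
    // a blank line ("\r\n\r\n") terminates the header block of an HTTP request
    if (buffered.indexOf('\r\n\r\n') !== -1 && !sock.destroyed) {
        sock.write('HTTP/1.1 200 OK\r\n\r\n');
        sock.end();
    }
});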
I am trying to learn Node.js and am currently attempting to extend this article:
http://www.gianlucaguarini.com/blog/push-notification-server-streaming-on-a-mysql-database/
I am having a major issue: in the transactions loop I get multiple socket updates for a single change, but I only want to send one.
I have been struggling with this for over a month and can't figure it out on my own (or with Google searches).
Can someone please tell me how to make this work so there is only one socket update per client?
What I would like to happen: when there is a change in one of the transactions, the two parties (buyer and seller) should be the only ones that get the socket update (see the sketch after the console log at the end of this question).
How can I make this work? It is so close to what I want it to do, but I can't get past this last challenge.
Please help.
Thank you in advance.
<html>
<head>
<title>GAT UPDATER</title>
<script src="/socket.io/socket.io.js"></script>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<link rel="stylesheet" href="//maxcdn.bootstrapcdn.com/bootstrap/3.3.2/css/bootstrap.min.css">
<script src = "http://maxcdn.bootstrapcdn.com/bootstrap/3.3.2/js/bootstrap.min.js"></script>
<script type="text/javascript" src="http://board.gameassettrading.com/js/jquery.cookie.js"></script>
</head>
<body>
<script>
var nodeuserid;
function getUserId() {
var url = window.location.href;
var user_id = url.replace('http://heartbeat.gameassettrading.com:4000/id/', '');
return user_id;
}
user_id = getUserId();
$.cookie('useridcookie', user_id, { expires: 1 });
var useridcookie = $.cookie("useridcookie");
// Get the base URL for switching between the local and dev environments
function getBaseURL() {
var url = location.href; // entire url including querystring - also: window.location.href;
var baseURL = url.substring(0, url.indexOf('/', 14));
if (baseURL.indexOf('http://localhost') != -1) {
// Base Url for localhost
var url = location.href; // window.location.href;
var pathname = location.pathname; // window.location.pathname;
var index1 = url.indexOf(pathname);
var index2 = url.indexOf("/", index1 + 1);
var baseLocalUrl = url.substr(0, index2);
return baseLocalUrl + "/";
}
else {
// Root Url for domain name
return baseURL + "/";
}
}
// set the base_url variable
base_url = getBaseURL();
document.domain = "transactionsserver.com"
// create a new websocket
var socket = io.connect('http://heartbeat.transactionsserver.com:4000');
socket.on('connect',function() {
var data = {
url: window.location.href,
};
socket.emit('client-data', data);
});
// this handler is always active; you have to filter out the data you want
socket.on('notification', function (data) {
if(data.hasOwnProperty("data")) {
if(data.data[0]['seller_id'] != ''){
$('#StatusUpdate', window.parent.document).text( data.data[0]['seller_id']+ ':' + data.data[0]['seller_status'] +':'+ data.data[0]['buyer_id']+':'+ data.data[0]['buyer_status']).click();
}
}
window.parent.checkData(data,user_id);
if(data.hasOwnProperty("changed_user_id")) {
$('#StatusUpdate', window.parent.document).text( data.changed_user_id+ ':' + data.changed_user_status +':'+ data.changed_user_id).click();
}
});
</script>
</body>
</html>
Server.js
var app = require("express")();
var path = require('path');
var mysql = require("mysql");
var http = require('http').Server(app);
var io = require("socket.io")(http);
var sockets = {};
var mysql = require('mysql'),
connectionsArray = [],
connection = mysql.createConnection({
multipleStatements: true,
host: 'localhost',
user: '*****',
password: '******',
database: 'transactionsdb',
port: 3306
}),
POLLING_INTERVAL = 1000,
pollingTimer;
// Add Redis for comparing SQL results against the cache
var redis = require('redis');
var client = redis.createClient();
var express = require('express');
/* Creating POOL MySQL connection.*/
var pool = mysql.createPool({
connectionLimit: 100,
host: 'localhost',
user: '*****',
password: '*****',
database: 'transactionsdb',
debug: false
});
var count = 0;
var clients = [];
var changed_users = [], changed_transactions = []; // used in the polling loop below
function processAllTransactions(sellsreply) {
pool.query('SELECT t.id,t.status,t.original_status, t.active, t.buyer_id, t.seller_id, t.seller_acked, t.seller_complete, t.buyer_complete, b.user_status as buyer_status,t.chat_ended, s.user_status as seller_status FROM transaction t LEFT JOIN sf_guard_user_profile s ON s.user_id = t.seller_id LEFT JOIN sf_guard_user_profile b ON b.user_id = t.buyer_id WHERE active = 1 LIMIT 1', [sellsreply], function (err, sells) {
if (sells != '') {
// only push updates for the active transaction; the comparison must happen
// inside the redis callback, otherwise the value is read before the GET returns
client.get('active transaction id:' + sellsreply, function (err, active_transaction_id) {
    // is there a transaction with a defined status whose id differs from the
    // active transaction id? (loose != because redis returns strings)
    if (sells[0].status !== undefined && sells[0].id != active_transaction_id) {
        client.get('active transaction:' + sellsreply, function (err, data1) {
            if (JSON.stringify(sells) != data1) {
                client.set('active transaction id:' + sellsreply, sells[0]["id"]);
                client.set('active transaction:' + sellsreply, JSON.stringify(sells));
                console.log(JSON.stringify(sells));
                updateSockets({
                    data: sells // pass the database result
                });
            }
        });
    }
});
}
});
}
// Method
function getUserInfo(user_id, callback) {
var query = connection.query('SELECT user_status from sf_guard_user_profile WHERE user_id = ' + connection.escape(user_id));
query.on('result', function (row) {
callback(null, row.user_status);
});
}
var updateSockets = function (data) {
// adding the time of the last update
data.time = new Date();
console.log('Pushing new data to the clients connected ( connections amount = %s ) - %s', connectionsArray.length , data.time);
// sending new data to all the sockets connected
connectionsArray.forEach(function (tmpSocket) {
console.log(tmpSocket);
tmpSocket.volatile.emit('notification', data);
});
};
var pollingLoop = function () {
var socket;
for (var id in sockets) {
socket = sockets[id];
client.get("uuid:" + socket.id, function (err, useridreply) {
processAllTransactions(useridreply);
});
}
connection.query('SELECT * FROM sf_guard_user_profile; select * FROM transaction', function (err, result) {
// error check
if (err) {
console.log(err);
updateSockets(err);
throw err;
} else {
// loop through the queries
// compare the cache results against the database query for users
result[0].forEach(function (element, index, array) {
client.get('logged_in:' + element.user_id, function (err, reply) {
if (reply === null) {
// console.log( element.user_id + ' is disconnected');
}
else {
// console.log(reply);
}
});
client.get("user:" + element.user_id, function (err, userreply) {
if (element.user_status != userreply) {
client.set('user:' + element.user_id, +element.user_status);
changed_users.push(element);
console.log(element.user_id + " is now set to: " + element.user_status);
updateSockets({
changed_user_id: element.user_id,
changed_user_status: element.user_status
});
}
});
});
}
// loop on itself only if there are sockets still connected
if (connectionsArray.length) {
pollingTimer = setTimeout(pollingLoop, POLLING_INTERVAL);
// reset changed users and changed transactions arrays
changed_users = [];
changed_transactions = [];
} else {
console.log('The server timer was stopped because there are no more socket connections on the app');
}
});
};
// async membership check helper, used by the /id/:id route below
Array.prototype.contains = function (k, callback) {
var self = this;
return (function check(i) {
if (i >= self.length) {
return callback(false);
}
if (self[i] === k) {
return callback(true);
}
return process.nextTick(check.bind(null, i + 1));
}(0));
};
io.sockets.on('connection', function (socket) {
// runs for every connection
sockets[socket.id] = socket;
socket.on('client-data', function (data) {
// get the user id from the url that is passed onload
var user_id = data.url.replace('http://servernameremoved.com:4000/id/', '');
console.log('user id ' + user_id + ' is connected with session id ' + socket.id);
client.set('uuid:' + socket.id, +user_id);
});
console.log('Number of connections:' + (connectionsArray.length));
// starting the loop only if at least there is one user connected
if (!connectionsArray.length) {
pollingLoop();
}
socket.on('disconnect', function (socketIndex) {
delete sockets[socket.id];
client.get("uuid:" + socket.id, function (err, userreply) {
console.log('user id ' + userreply + ' got redis disconnected');
});
socketIndex = connectionsArray.indexOf(socket);
console.log('socketID = %s got disconnected', socketIndex);
if (~socketIndex) {
connectionsArray.splice(socketIndex, 1);
}
});
connectionsArray.push(socket);
});
// express js route
app.get('/id/:id', function (req, res) {
clients.contains(req.params.id, function (found) {
if (found) {
console.log("Found");
} else {
client.set('logged_in:' + req.params.id, req.params.id + ' is logged in');
}
});
res.sendFile(__dirname + '/client.html');
});
// build the server
http.listen(4000, function () {
console.log("Server Started");
});
Here are the console log results:
Pushing new data to the clients connected ( connections amount = 2 ) - Sat May 30 2015 21:16:23 GMT-0700 (PDT)
user id 3 is connected with session id CRTtkRIl7ihQ2yaEAAAA
user id 2 is connected with session id wNG7XDcEDjhYKBEIAAAB
***********************************************
[{"id":1,"status":20,"original_status":15,"active":1,"buyer_id":2,"seller_id":1,"seller_acked":1,"seller_complete":0,"buyer_complete":1,"buyer_status":4,"chat_ended":"2015-05-31T03:58:40.000Z","seller_status":4}]
***********************************************
Pushing new data to the clients connected ( connections amount = 2 ) - Sat May 30 2015 21:16:23 GMT-0700 (PDT)
***********************************************
[{"id":1,"status":20,"original_status":15,"active":1,"buyer_id":2,"seller_id":1,"seller_acked":1,"seller_complete":0,"buyer_complete":1,"buyer_status":4,"chat_ended":"2015-05-31T03:58:40.000Z","seller_status":4}]
***********************************************
Pushing new data to the clients connected ( connections amount = 2 ) - Sat May 30 2015 21:16:23 GMT-0700 (PDT)
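As referenced in the question above, one way to deliver an update to just the buyer and seller is to keep a map from user id to socket and emit only to those two sockets instead of looping over connectionsArray. A rough sketch built on the handlers above (socketsByUserId and notifyParties are hypothetical names):
// register each socket under its user id when the client announces itself
var socketsByUserId = {};
io.sockets.on('connection', function (socket) {
    socket.on('client-data', function (data) {
        var user_id = data.url.replace(/^.*\/id\//, '');
        socketsByUserId[user_id] = socket;
    });
    socket.on('disconnect', function () {
        // drop any map entries that point at this socket
        for (var uid in socketsByUserId) {
            if (socketsByUserId[uid] === socket) delete socketsByUserId[uid];
        }
    });
});

// then, when a transaction row changes, notify only its two parties
function notifyParties(sell) {
    [sell.buyer_id, sell.seller_id].forEach(function (uid) {
        var s = socketsByUserId[uid];
        if (s) s.emit('notification', { data: [sell] });
    });
}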
I've tried to search Stack Overflow for a similar question, but most people are asking about the client side of the NTLMv2 protocol.
I'm implementing a proxy that performs the server side of the protocol to authenticate users connecting to the proxy.
I've coded a lot of the protocol, but I'm now stuck because the documentation that should take me further is difficult to understand.
This is the best documentation I've found so far: http://www.innovation.ch/personal/ronald/ntlm.html, but how to deal with the LM and NT responses is still unclear to me.
The proxy is located on an application server. The domain server is a different machine.
Example code for the node proxy:
var http = require('http')
, request = require('request')
, ProxyAuth = require('./proxyAuth');
function handlerProxy(req, res) {
ProxyAuth.authorize(req, res);
var options = {
url: req.url,
method: req.method,
headers: req.headers
}
req.pipe(request(options)).pipe(res)
}
var server = http.createServer(handlerProxy);
server.listen(3000, function(){
console.log('Express server listening on port ' + 3000);
});
ProxyAuth.js code:
ProxyAuth = {
parseType3Msg: function(buf) {
var lmlen = buf.readUInt16LE(12);
var lmoff = buf.readUInt16LE(16);
var ntlen = buf.readUInt16LE(20);
var ntoff = buf.readUInt16LE(24);
var dlen = buf.readUInt16LE(28);
var doff = buf.readUInt16LE(32);
var ulen = buf.readUInt16LE(36);
var uoff = buf.readUInt16LE(40);
var hlen = buf.readUInt16LE(44);
var hoff = buf.readUInt16LE(48);
var domain = buf.slice(doff, doff+dlen).toString('utf8');
var user = buf.slice(uoff, uoff+ulen).toString('utf8');
var host = buf.slice(hoff, hoff+hlen).toString('utf8');
var lmresp = buf.slice(lmoff, lmoff + lmlen); // keep binary responses as Buffers;
var ntresp = buf.slice(ntoff, ntoff + ntlen); // utf8 decoding would corrupt them
console.log(user, lmresp, ntresp);
/* NOW WHAT DO I DO? */
},
authorize: function(req, res) {
var auth = req.headers['authorization'];
if (!auth) {
res.writeHead(401, {
'WWW-Authenticate': 'NTLM',
});
res.end('<html><body>Proxy Authentication Required</body></html>');
}
else if(auth) {
var header = auth.split(' ');
var buf = new Buffer(header[1], 'base64');
var msg = buf.toString('utf8');
console.log("Decoded", msg);
if (header[0] == "NTLM") {
if (msg.substring(0,8) != "NTLMSSP\x00") {
res.writeHead(401, {
'WWW-Authenticate': 'NTLM',
});
res.end('<html><body>Header not recognized</body></html>');
}
// Type 1 message: check the type byte on the raw buffer
if (buf[8] === 1) {
console.log(buf.toString('hex'));
var challenge = require('crypto').randomBytes(8);
var type2msg = "NTLMSSP\x00"+
"\x02\x00\x00\x00"+ // 8 message type
"\x00\x00\x00\x00"+ // 12 target name len/alloc
"\x00\x00\x00\x00"+ // 16 target name offset
"\x01\x82\x00\x00"+ // 20 flags
challenge.toString('utf8')+ // 24 challenge
"\x00\x00\x00\x00\x00\x00\x00\x00"+ // 32 context
"\x00\x00\x00\x00\x00\x00\x00\x00"; // 40 target info len/alloc/offset
type2msg = new Buffer(type2msg).toString('base64');
res.writeHead(401, {
'WWW-Authenticate': 'NTLM '+type2msg.trim(),
});
res.end();
}
else if (buf[8] === 3) { // Type 3 message
console.log(buf.toString('hex'));
ProxyAuth.parseType3Msg(buf);
/* NOW WHAT DO I DO? */
}
}
else if (header[0] == "Basic") {
}
}
}
};
module.exports = ProxyAuth;
The /* NOW WHAT DO I DO? */ comments mark where I am stuck.
I hope I put enough information there, but let me know if anything else is needed.
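For what it's worth, if the proxy knows the account's password (or NT hash), the NTLMv2 response can be checked locally; against a real domain controller you would instead forward the exchange to the DC. A heavily hedged sketch of the MS-NLMP computation - note that you must also remember the challenge you sent in the type 2 message (e.g. keyed by connection), and that parseType3Msg must keep lmresp/ntresp as Buffers:
// sketch: verify an NTLMv2 response given the cleartext password;
// requires a Node/OpenSSL build that still provides MD4
var crypto = require('crypto');

function ntowfv2(password, user, domain) {
    // NT hash = MD4(UTF-16LE(password))
    var ntHash = crypto.createHash('md4')
        .update(new Buffer(password, 'ucs2')).digest();
    // NTOWFv2 = HMAC-MD5(ntHash, UTF-16LE(UPPERCASE(user) + domain))
    return crypto.createHmac('md5', ntHash)
        .update(new Buffer(user.toUpperCase() + domain, 'ucs2')).digest();
}

function verifyNtlmV2(ntresp, challenge, password, user, domain) {
    var ntProofStr = ntresp.slice(0, 16); // first 16 bytes of the NT response
    var blob = ntresp.slice(16);          // the client's timestamp/nonce blob
    var expected = crypto.createHmac('md5', ntowfv2(password, user, domain))
        .update(Buffer.concat([challenge, blob])).digest();
    return expected.toString('hex') === ntProofStr.toString('hex');
}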
Here is a simple TCP server stress test. As long as we send only one message per client, everything works as expected. But when two messages per client are sent, the server suddenly stops without an exception or error.
So is this a bug or a feature?
var net = require("net");
var async = require("async");
var cluster = require("cluster");
// `ulimit -n` tells us that we can open max. 1024 files per process.
// Creating a socket means opening a file so we are limited.
var CLIENTS = 1000;
// Here is the weird part:
// - sending one message per client works fine
// - sending multiple message per client sucks
var MESSAGES = 2;
var TOTAL = CLIENTS * MESSAGES;
var PORT = 1234;
var HOST = "127.0.0.1";
if (cluster.isMaster) {
var count = 0;
var start = new Date;
var server = net.createServer(function(socket) {
socket.on("data", function(data) {
var t;
count++;
console.log("server received " + count + " messages");
socket.write(data, function(err) { if (err) console.error(err); });
if (count === TOTAL) {
t = (new Date) - start;
console.log("server received and sent " + count + " messages within " + t + "ms");
}
});
});
server.listen(PORT, HOST, function() { cluster.fork(); });
} else {
var run = function(i) {
var c = net.connect({ port: PORT, host: HOST }, function() {
var tasks = (function() {
var results = [];
for (var x = 1; x <= MESSAGES; ++x) {
results.push((function(x) {
return function(next) { c.write("Hello server!", next); };
})(x));
}
return results;
})();
async.series(tasks, function(err) {
if (err) { console.error(err); }
});
});
};
for (var i = 1; i <= CLIENTS; ++i) { run(i); }
}
Tested on Linux 3.11, Node.js 0.10.21
You're assuming that calling .write twice from a client triggers the data event twice on the server, without taking into account any buffering that might be going on, which will coalesce multiple writes into one.
When the callback to .write is called, it doesn't mean the message was actually sent; it means the message was put into a kernel buffer (which might contain more than one message by the time it is sent to the server).
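If the test needs one data event per logical message, the usual fix is to add framing - for example a delimiter - and split on it server-side. A small sketch assuming newline-terminated messages (the clients would then write "Hello server!\n"):
// delimiter-based framing: one TCP data event may carry 0..n messages
var net = require("net");
net.createServer(function(socket) {
    var buffered = "";
    socket.on("data", function(chunk) {
        buffered += chunk.toString();
        var frames = buffered.split("\n");
        buffered = frames.pop(); // keep any trailing partial message
        frames.forEach(function(message) {
            console.log("got one framed message:", message);
        });
    });
}).listen(1234, "127.0.0.1");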