Delay of 5 seconds between each request in Node.js

I have more than 500,000 records in my database and I want to loop over them and request some information from another server. I've successfully written all the functions except the delay between each request. If I send all the requests at once, the remote server goes down or cannot answer each request, so I need to queue the loop. But my code is not working; it still sends all the requests very fast.
Here is my code:
var http = require('http'),
    mysql = require('mysql');
var querystring = require('querystring');
var fs = require('fs');
var url = require('url');

var client = mysql.createClient({
    user: 'root',
    password: ''
});

client.useDatabase('database');

client.query("SELECT * from datatable", function(err, results, fields) {
    if (err) throw err;
    for (var index in results) {
        username = results[index].username;
        setInterval(function() {
            requestinfo(username);
        }, 5000);
    }
});

client.end();

Your problem lies in the for loop: you schedule every request to go off after the same 5 seconds, so once 5 seconds have passed all the requests fire at roughly the same time. And since it is done with setInterval, that burst then repeats every 5 seconds.
You can solve it in two ways.
The first choice is to use a single interval that fires a new request every 5 seconds, driving the loop with a numeric index instead of a for loop:
var index = 0;
var timer = setInterval(function() {
    requestinfo(results[index].username);
    index++;
    // stop once every record has been processed
    if (index >= results.length) {
        clearInterval(timer);
    }
}, 5000);
The second choice is to schedule all the requests up front with increasing timeouts, so you modify your current script like so:
var timeout = 0;
for (var index in results) {
    // wrap in a closure so each timeout sees its own username,
    // not the last value assigned in the loop
    (function(username) {
        setTimeout(function() {
            requestinfo(username);
        }, timeout);
    })(results[index].username);
    timeout += 5000;
}
This sets timeouts of 0, 5, 10, 15, ... seconds, so a new request fires every 5 seconds.
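For completeness, on a Node version with async/await support you could also process the records strictly one at a time with an awaited delay. This is only a sketch: it assumes the same results array and requestinfo function as above, and simply fires each request and then waits 5 seconds before the next.
// Sketch: sequential loop with a 5-second pause between requests,
// assuming `results` and `requestinfo` from the snippets above.
function delay(ms) {
    return new Promise(function (resolve) {
        setTimeout(resolve, ms);
    });
}

async function processResults(results) {
    for (var i = 0; i < results.length; i++) {
        requestinfo(results[i].username); // fire the request
        await delay(5000);                // wait 5 seconds before the next one
    }
}

processResults(results);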

Related

How to prevent one request from blocking other requests?

I've been trying to create a scenario where Request A would arrive first, but it would take a long time to process, meanwhile Request B would arrive second, but would take only a few seconds to process, and I would like for Request B to finish before Request A. However, Request A is currently blocking not just itself, but also Request B.
var http = require('http');

function block() { while (true) { var x = 2; } }

var server = http.createServer(function(req, res) {
    if (req.url == '/wait') { block(); }
    res.writeHead(200);
    res.end('Hello Http');
    console.log(req.url);
});

server.listen(3000);
If your original goal is to hold a request back based on some condition, there's a better way than a while loop, although it makes your code asynchronous and requires you to handle the request a little differently:
var server = http.createServer(function(req, res) {
    if (req.url == '/wait')
        setTimeout(serveRequest, 60000);
    else
        serveRequest();

    function serveRequest() {
        res.writeHead(200);
        res.end('Hello Http');
        console.log(req.url);
    }
});
If request A depends on request B, there must be a rule that guarantees A1 and B1 are a pair, A2 and B2 are a pair, and so on. In other words, you need an algorithm for checking whether two requests belong to the same pair.
var events = require('events');

var em = new events.EventEmitter();
const EVENT_B_FINISH = 'bFinishedEvent';
// key-value map: the key is the rid of request A, the value is the rid of request B
var requestPairQueue = {};

//------ request A arrives -----
var rid = // get the unique request id
// ... process A ...
if (requestPairQueue[rid]) {
    // the paired B request has already been processed, send a response directly
}
em.on(EVENT_B_FINISH, function() {
    if (requestPairQueue[rid]) {
        // the paired B request finished after a while, send a response
    }
});

//----- request B arrives ----
var ridA = // the rid of the related A request
var ridB = // the rid of the current B request
// ... after processing ...
requestPairQueue[ridA] = ridB;
em.emit(EVENT_B_FINISH);
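For reference, here is a minimal, self-contained sketch of that idea: an HTTP server that holds the response to request A until its paired request B arrives. The pairing key here is a hypothetical id query parameter; in a real application the rule would be whatever identifies two requests as a pair.
var http = require('http');
var url = require('url');
var events = require('events');

var em = new events.EventEmitter();
var finishedB = {}; // id -> true once the paired B request has been handled

http.createServer(function (req, res) {
    var parsed = url.parse(req.url, true);
    var id = parsed.query.id; // hypothetical pairing key

    if (parsed.pathname === '/a') {
        if (finishedB[id]) {
            // the paired B request already arrived
            res.end('A ' + id + ': B already done');
        } else {
            // wait until the paired B request signals completion
            em.once('bFinished:' + id, function () {
                res.end('A ' + id + ': B just finished');
            });
        }
    } else if (parsed.pathname === '/b') {
        finishedB[id] = true;
        em.emit('bFinished:' + id);
        res.end('B ' + id + ': done');
    } else {
        res.end();
    }
}).listen(3000);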

Increase of parallel requests from node.js to external system takes more time to respond

I have a simple case where I'm requesting a different upstream proxy server from my node.js server. With increased load I see the requests take a lot longer to execute (though the time the upstream proxy server takes to respond is constant across requests). To demonstrate the issue I've written a sample program below. When I execute it, the first request takes 118ms and the last one takes 10970ms, depending on the website you hit (I've changed the URL to google; try it with your favourite website). If you observe, I'm using async to parallelize my requests.
The question is: why does node.js take this much time to execute a request when run in parallel? To give some more context on the infra settings (CentOS 6.5), I have opened up my port range from 1024 to 65535, changed fin_timeout to 15 seconds, and enabled tw_reuse = 1 for sockets in sysctl.conf.
var http = require('http');
var uuid = require('node-uuid');
var async = require('async');

function callExternalUrl() {
    var uniqueId = uuid.v4();
    console.time(uniqueId);
    var options = {
        host: 'google.com',
        port: '80',
        path: '/',
        method: 'GET'
    };
    var req = http.request(options, function(res) {
        var msg = '';
        res.setEncoding('utf8');
        res.on('data', function(chunk) {
            msg += chunk;
            console.timeEnd(uniqueId);
        });
        res.on('end', function() {
        });
    });
    req.end();
}

function iterateAsync(callback) {
    var iter = [];
    for (var i = 0; i < 1000; i++) {
        iter[i] = i;
    }
    async.each(iter,
        function(item, callback) {
            callExternalUrl();
        },
        function(err) {
            callback(err);
        }
    );
}

iterateAsync(function() { console.log('done'); });
To give more context, below is code in Ruby that does the same. I understand I can't compare the two languages apples to apples, but the idea is to show the time it takes to execute the same requests in sequence using Ruby. I don't see any increase in the response time for each request going out in sequence. So I suspect the parallel requests using node are what make each request take longer (and the issue is not the server's response time, but sending the request out from the machine itself).
require 'rest_client'

1000.times do |number|
  beginning = Time.now
  response = RestClient.get 'http://google.com'
  puts "Time elapsed #{Time.now - beginning} seconds"
end
For one, you're not calling the async iterator callback function:
function callExternalUrl(asyncCallback) {
    ...
    res.on('end', function() {
        asyncCallback();
    });
    ...
}
function iterateAsync(callback) {
    var iter = [];
    for (var i = 0; i < 1000; i++) {
        iter[i] = i;
    }
    async.each(iter,
        function(item, asyncCallback) { // <-- HERE
            callExternalUrl(asyncCallback);
        },
        function(err) {
            callback(err);
        }
    );
}
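Once the iterator callback is wired up, you can also cap how many requests are in flight at once with async.eachLimit, which tends to give more stable per-request latency than firing all 1000 at the same time. A sketch, reusing callExternalUrl from above with an arbitrary limit of 20:
async.eachLimit(iter, 20,
    function (item, asyncCallback) {
        callExternalUrl(asyncCallback);
    },
    function (err) {
        callback(err);
    }
);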
Also, depending on the Node version you're using, the http module may limit the number of parallel requests being made to a particular hostname:
$ node -pe 'require("http").globalAgent.maxSockets'
On Node 0.10, the default is 5; on Node 0.12, the default is Infinity ("unlimited"). So if you're not on Node 0.12, you should increase that value in your code:
var http = require('http');
http.globalAgent.maxSockets = Infinity;
...
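Alternatively, instead of changing the global agent, you can pass a dedicated agent per request so the socket limit only applies to this code path. A sketch (the maxSockets value of 100 is just an example):
var http = require('http');

// A dedicated agent whose socket pool only applies to requests
// that explicitly use it.
var agent = new http.Agent({ maxSockets: 100 });

var options = {
    host: 'google.com',
    port: '80',
    path: '/',
    method: 'GET',
    agent: agent
};

http.request(options, function (res) {
    res.resume();
}).end();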
I've tried to run your scenario using JXcore (a fork of Node.js, now an open-source project on GitHub), which offers multitasking (among many other new features).
var task = function (item) {
    var http = require('http');
    var uuid = require('node-uuid');
    var uniqueId = uuid.v4() + "-" + process.threadId;
    console.time(uniqueId);
    var options = {
        host: 'google.com',
        port: '80',
        path: '/',
        method: 'GET'
    };
    var req = http.request(options, function (res) {
        var msg = '';
        res.setEncoding('utf8');
        res.on('data', function (chunk) {
            msg += chunk;
            console.timeEnd(uniqueId);
        });
        res.on('end', function () {
            process.release();
        });
    });
    req.end();
    process.keepAlive();
};
jxcore.tasks.setThreadCount(4);
console.time("total");
process.on('exit', function () {
    console.timeEnd("total");
});
for (var i = 0; i < 1000; i++)
    jxcore.tasks.addTask(task, i);
The sample is not really optimized, but the total of 1000 requests still ran a little faster with JXcore for me (I was able to measure up to a 20% gain on my platform). That may vary depending on the machine, since multitasking uses different threads/instances within one single process (no need for clustering any more). My machine has just 4 threads, which is why I used jxcore.tasks.setThreadCount(4); you can try with your 32 :)
The way each single request is handled is not significantly different, so I'm not saying each request takes less time; the key might be hidden in the different queuing mechanism as opposed to the "async" module. And, of course, thanks to multitasking.

How to detect client disconnection from node.js server

I am new to node.js. How can I detect that a client has disconnected from the node.js server?
Here is my code:
var net = require('net');
var http = require('http');
var host = '192.168.1.77';
var port = 12345;

var server = net.createServer(function (stream) {
    stream.setEncoding('utf8');
    stream.on('data', function (data) {
        var comm = JSON.parse(data);
        // join request coming from the client
        if (comm.action == "Join_Request" && comm.gameId == "game1") {
            var reply0 = new Object();
            reply0.message = "WaitRoom";
            stream.write(JSON.stringify(reply0) + "\0");
        }
    });
    stream.on('disconnect', function() {
    });
    stream.on('close', function () {
        console.log("Close");
    });
    stream.on('error', function () {
        console.log("Error");
    });
});

server.listen(port, host);
How can I detect when the client's internet connection drops?
The best way to detect "dead sockets" is to send periodic application-level ping/keepalive messages. What that message looks like depends on the protocol you're using for communicating over the socket. Then it's just a matter of using a timer or other means of checking if you've received a "ping response" within a certain period of time after you sent the ping/keepalive message to the client.
On a semi-related note, it looks like you're using JSON messages for communication, but you're assuming a complete JSON string on every data event which is a bad assumption. Try using a delimiter (a newline is pretty common for something like this, and it makes debugging the communication more human-readable) instead.
Here is a simple example of how to achieve this:
var PING_TIMEOUT = 5000, // how long to wait for client to respond
    WAIT_TIMEOUT = 5000; // duration of "silence" from client until a ping is sent

var server = net.createServer(function(stream) {
    stream.setEncoding('utf8');

    var buffer = '',
        pingTimeout,
        waitTimeout;

    function send(obj) {
        stream.write(JSON.stringify(obj) + '\n');
    }

    stream.on('data', function(data) {
        // stop our timers if we've gotten any kind of data
        // from the client, whether it's a ping response or
        // not, we know their connection is still good.
        clearTimeout(waitTimeout);
        clearTimeout(pingTimeout);

        buffer += data;
        var idx;

        // because `data` can be a chunk of any size, we could
        // have multiple messages in our buffer, so we check
        // for that here ...
        while (~(idx = buffer.indexOf('\n'))) {
            try {
                var comm = JSON.parse(buffer.substring(0, idx));
                // join request getting from client
                if (comm.action === "Join_Request" && comm.gameId === "game1") {
                    send({ message: 'WaitRoom' });
                }
            } catch (ex) {
                // some error occurred, probably from trying to parse invalid JSON
            }
            // update our buffer
            buffer = buffer.substring(idx + 1);
        }

        // we wait for more data, if we don't see anything in
        // WAIT_TIMEOUT milliseconds, we send a ping message
        waitTimeout = setTimeout(function() {
            send({ message: 'Ping' });
            // we sent a ping, now we wait for a ping response
            pingTimeout = setTimeout(function() {
                // if we've gotten here, we are assuming the
                // connection is dead because the client did not
                // at least respond to our ping message
                stream.destroy(); // or stream.end();
            }, PING_TIMEOUT);
        }, WAIT_TIMEOUT);
    });

    // other event handlers and logic ...
});
You could also use a single interval instead of two timers: check a "last data received" timestamp against the current time, and if the gap exceeds some limit after a ping has been sent, assume the socket/connection is dead. You could also send more than one ping message, and if no response is received after n pings, close the connection at that point (this is basically what OpenSSH does).
There are many ways to go about it. However you may also think about doing the same on the client side, so that you know the server didn't lose its connection either.
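A sketch of that single-interval variant, with assumed thresholds (the exact values and the message parsing are the same idea as in the example above):
var net = require('net');

var CHECK_INTERVAL = 1000, // how often to check the connection
    IDLE_LIMIT = 5000,     // silence from the client before we send a ping
    PING_TIMEOUT = 5000;   // how long to wait for a ping response

var server = net.createServer(function (stream) {
    stream.setEncoding('utf8');
    var lastData = Date.now(),
        pingSentAt = null;

    stream.on('data', function (data) {
        lastData = Date.now();
        pingSentAt = null; // any data counts as proof of life
        // ... buffer and parse messages as in the example above ...
    });

    var checker = setInterval(function () {
        var now = Date.now();
        if (pingSentAt !== null) {
            // a ping is outstanding; has the client answered in time?
            if (now - pingSentAt > PING_TIMEOUT) {
                clearInterval(checker);
                stream.destroy(); // assume the connection is dead
            }
        } else if (now - lastData > IDLE_LIMIT) {
            stream.write(JSON.stringify({ message: 'Ping' }) + '\n');
            pingSentAt = now;
        }
    }, CHECK_INTERVAL);

    stream.on('close', function () {
        clearInterval(checker);
    });
});

server.listen(12345);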

Updating client image after interval when server sends new image

In my ASP.NET MVC application, the server broadcasts an image URL to all clients every 5 seconds.
I am using SignalR to send the image URL from the server, which invokes a JavaScript function.
In the JavaScript function I am using the following code to auto-refresh the <img> element's src attribute, but although it updates the src, I cannot see the new image in the browser.
// Defining a connection to the server hub.
var myHub = $.connection.myHub;
// Setting logging to true so that we can see what's happening in the browser console log. [OPTIONAL]
$.connection.hub.logging = true;
// Start the hub
$.connection.hub.start();
// This is the client method which is being called from the MyHub constructor every 5 seconds
myHub.client.SendServerImageUrl = function (serverImageUrl) {
    var N = 5;
    // Function that refreshes image
    function refresh(imageId, imageSrc) {
        var image = document.getElementById(imageId);
        var timestamp = new Date().getTime();
        image.src = imageSrc + '?' + timestamp;
    };
    // Refresh image every N seconds
    setTimeout(function () {
        refresh('SampleImage', serverImageUrl);
    }, N * 1000);
};
My HTML file contains only following code:
<img id="SampleImage">
Backend C# code
public class MyHub : Hub
{
    public MyHub()
    {
        // Create a long-running task with an infinite loop that keeps sending
        // a new image URL to the clients every 5 seconds.
        var taskTimer = Task.Factory.StartNew(async () =>
        {
            while (true)
            {
                Random rnd = new Random();
                int number = rnd.Next(1, 10);
                string str = "~/Images/Sample";
                Clients.All.SendServerImageUrl(str + Convert.ToString(number) + ".jpg");
                // Delaying by 5 seconds.
                await Task.Delay(5000);
            }
        }, TaskCreationOptions.LongRunning);
    }
}
Try changing the casing in your callbacks:
myHub.client.sendServerImageUrl = ...;
Clients.All.sendServerImageUrl(...);
There should be no need for polling so get rid of setTimeout() on the client.
Keep an eye on the Console and make sure HTTP requests are working.
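Putting that together, a sketch of the corrected client side (same hub and element id as above, using the camelCase name the answer suggests; the handler is attached before hub.start() so the hub is actually subscribed when the connection is negotiated):
// Defining a connection to the server hub.
var myHub = $.connection.myHub;
$.connection.hub.logging = true;

// Attach the client callback BEFORE starting the connection.
myHub.client.sendServerImageUrl = function (serverImageUrl) {
    var image = document.getElementById('SampleImage');
    // Cache-busting query string so the browser reloads the image.
    image.src = serverImageUrl + '?' + new Date().getTime();
};

// Start the hub once the handler is registered.
$.connection.hub.start();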

Learning Node - Book - Pages 16-18 - Two Examples

There are two examples in between these pages 16 and 18.
Example 1.3 is a server app.
Example 1.4 is a client app doing GET requests to the server.
When I run the two examples (at the same time) I notice some quite weird behavior
in the client. All requests are executed (i.e. the for loop in the client completes)
but the callbacks of only 5 of them get called. The client doesn't exit and also
doesn't error out. And just no more callbacks are called.
Any ideas what might be happening or how I can troubleshoot this further?
Note: I am running Node.js v0.10.20 on Windows 7.
Server:
var http = require('http');
var fs = require('fs');

// write out numbers
function writeNumbers(res) {
    var counter = 0;
    // increment, write to client
    for (var i = 0; i < 100; i++) {
        counter++;
        res.write(counter.toString() + '\n');
    }
}

// create http server
http.createServer(function (req, res) {
    var query = require('url').parse(req.url).query;
    var app = require('querystring').parse(query).file;

    // content header
    res.writeHead(200, {'Content-Type': 'text/plain'});

    if (!app) {
        res.end();
        console.log('No file argument found in query string.');
        return;
    } else {
        app = app + ".txt";
    }

    // write out numbers
    writeNumbers(res);

    // timer/timeout to open file and read contents
    setTimeout(function() {
        console.log('Opening file: ' + app + '.');
        // open and read in file contents
        fs.readFile(app, 'utf8', function(err, data) {
            res.write('\r\n');
            if (err)
                res.write('Could not find or open file ' + app + ' for reading.\r\n');
            else {
                res.write(data);
            }
            // response is done
            res.end();
        });
    }, 2000);
}).listen(8124);

console.log('Server running at 8124');
Client:
var http = require('http');

var N = 200;

// The URL we want, plus the path and options we need
var options = {
    host: 'localhost',
    port: 8124,
    path: '/?file=automatic',
    method: 'GET'
};

var callback_function = function(response) {
    // finished? ok, write the data to a file
    console.log('got response back');
};

for (var i = 1; i <= N; i++) {
    // make the request, and then end it, to close the connection
    http.request(options, callback_function).end();
    console.log('done with call # ' + i);
}
--- Experiment Done ---
If I lower N to 10, declare a global "var i = 1", and then use this:
function schedule() {
    http.request(options, callback_function).end();
    console.log('done with call ' + i);
    i++;
    if (i <= N) {
        setTimeout(function() {
            schedule();
        }, 1000);
    }
}
schedule();
instead of the loop in the client, I get similar behavior.
I guess that's what Milimetric meant by "sleep", i.e. just making sure I don't hit the server too quickly with too many simultaneous requests.
But the behavior is not fully identical: it takes several minutes to print 'got response back' for the second set of 5 requests, and then maybe another 5-6 minutes for the client to exit.
Still, all of that looks weird to me.
C:\PERSONAL\NODE_TEST>node test004.js
done with call 1
got response back
done with call 2
got response back
done with call 3
got response back
done with call 4
got response back
done with call 5
got response back
done with call 6
done with call 7
done with call 8
done with call 9
done with call 10
got response back
got response back
got response back
got response back
got response back
C:\PERSONAL\NODE_TEST>
The problem is that the client doesn't consume the response body sent by the server, so the connection remains (half) open and the http agent only allows 5 concurrent requests per client by default, causing it to hang after 5 requests. The connection will eventually time out, allowing the next 5 requests to be processed.
node.js http.get hangs after 5 requests to remote site
Change your callback function to consume any data sent down the response.
var callback_function = function(response) {
    // finished? ok, write the data to a file
    console.log('got response back');
    response.on('data', function () {});
};
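If you do not care about the body at all, response.resume() does the same job: it switches the stream into flowing mode and discards the data, which frees the socket for the next request.
var callback_function = function(response) {
    console.log('got response back');
    response.resume(); // drain and discard the body so the socket is released
};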
