JS - Fire an event a maximum of once every x seconds

I have this event, which is fired once every 2 seconds by an external process (it's a serial port receiving data):
sp.on("data", function (rawData) {
try {
data = JSON.parse(rawData);
var collection = db.get('sensorsCollection');
collection.insert({
...
});
} catch (error) {
debug(error);
}
});
But I want to store data in the database only once every, let's say, 500 seconds to avoid overloading my database. How can I achieve that?
(Note: I tried to use underscore.js's throttle function, but couldn't figure out how to pass an argument to the function wrapped by throttle, so I couldn't pass my data variable containing the most recent reading.)
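(For what it's worth, _.throttle forwards the wrapper's arguments to the wrapped function, so a sketch along these lines, untested, would see the latest rawData:)
// Untested sketch: arguments passed to the throttled wrapper are
// forwarded to the wrapped function, so rawData is available inside it.
var saveReading = _.throttle(function (rawData) {
    try {
        var data = JSON.parse(rawData);
        var collection = db.get('sensorsCollection');
        collection.insert(data);
    } catch (error) {
        debug(error);
    }
}, 500 * 1000);
sp.on("data", saveReading);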

Totally untested, but would something like this do what you want?:
(function() {
    var collection = db.get('sensorsCollection');
    var data = [];
    sp.on("data", function (rawData) {
        try {
            data.push(JSON.parse(rawData));
        } catch (error) {
            debug(error);
        }
    });
    setInterval(function() { // try-catch here too if necessary
        collection.insert(data); // additional formatting?
        data = [];
    }, 500 * 1000);
}());
Edited to use setInterval rather than throttle, which didn't make sense the way it was being used.

Store your data, and insert it every 500 seconds:
var my_datas = [];
sp.on("data", function (rawData) {
    try {
        // store the data
        my_datas.push(rawData);
    } catch (error) {
        debug(error);
    }
});
setInterval(function () {
    for (var i = 0, len = my_datas.length; i < len; i++) {
        var data = JSON.parse(my_datas[i]);
        var collection = db.get('sensorsCollection');
        collection.insert({
            ...
        });
    }
    my_datas = []; // clear the buffer so entries aren't inserted twice
}, 500 * 1000);

Related

JavaScript Promise().then to prevent re-calling the function before the first call has finished executing

In my node.js app, reading data from MSSQL using tedious, I'm calling the code below every 1 second:
1. Fetch the data from the server (the fetchStock function) and save it in a temporary array.
2. Send the data saved in the temporary array to the client using the Server-Sent Events (SSE) API.
It looks like 1 second is not enough for the previous fetchStock call to finish executing before it is called again, so I get execution errors from time to time.
I increased it to 5 seconds, but I still get the same issue every once in a while.
How can I use Promise().then to be sure the fetchStock function is not re-called before the previous call has completely executed?
var Request = require('tedious').Request;
var Connection = require('tedious').Connection;
var config = {
    userName: 'sa',
    password: 'pswd',
    server: 'xx.xxx.xx.xxx',
    options: {
        database: 'DB',
        rowCollectionOnRequestCompletion: 'true',
        rowCollectionOnDone: 'true'
    },
};
var sql = new Connection(config);
var addElem = (obj, elem) => [].push.call(obj, elem);
var result = {}, tmpCol = {}, tmpRow = {};
module.exports = {
    displayStock: function (es) {
        var dloop = setInterval(function() {
            if (result.error !== null)
                if (es) es.send(JSON.stringify(result), {event: 'rmSoH', id: (new Date()).toLocaleTimeString()});
            if (result.error === null)
                if (es) es.send('connection is closed');
        }, 1000);
    },
    fetchStock: function () {
        request = new Request("SELECT ItemCode, WhsCode, OnHand FROM OITW where OnHand > 0 and (WhsCode ='RM' or WhsCode ='FG');", function(err, rowCount, rows) {
            if (err) {
                result = {'error': err};
                console.log((new Date()).toLocaleTimeString() + ' err : ' + err);
            }
            if (rows)
                rows.forEach(function(row) {
                    row.forEach(function(column) {
                        var colName = column.metadata.colName;
                        var value = column.value;
                        addElem(tmpCol, {colName: value});
                    });
                    addElem(tmpRow, {'item': tmpCol[0].colName, 'Whs': tmpCol[1].colName, 'Qty': tmpCol[2].colName});
                    tmpCol = {};
                });
            result = tmpRow;
            tmpRow = {};
        });
        sql.execSql(request);
    }
}
I think what you need is a simple variable to check whether there's already a running request, not a Promise.
var latch = false;
// It will be called only if the previous call has completed
var doFetchStock = () => sql.execSql(new Request("SQL", (err, rowCount, rows) => {
    // Your logic dealing with the result
    // Release the latch
    latch = false;
}));
module.exports = {
    fetchStock: function () {
        // Check whether the previous request has completed
        if (!latch) {
            // Set the latch
            latch = true;
            // Fetch stock
            doFetchStock();
        }
    }
};
Actually, I've used this kind of pattern a lot to allow some behavior to run only once.
https://github.com/cettia/cettia-javascript-client/blob/1.0.0-Beta1/cettia.js#L397-L413
https://github.com/cettia/cettia-javascript-client/blob/1.0.0-Beta1/cettia.js#L775-L797
Since JavaScript is single-threaded, simple code like this should be enough on the client side:
function () {
    if (currentPromise == null) { // defined in a closure outside; only call when nothing is pending
        currentPromise = [..] // call to the server, which returns a promise
        currentPromise.then(function() {
            currentPromise = null;
        });
    }
}
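For completeness, a minimal sketch of the .then-based gating the question asks about (untested; it assumes fetchStock is rewritten as a hypothetical fetchStockAsPromise that resolves when the tedious request's callback fires):
var pending = null;
function fetchStockGuarded() {
    // Only start a new request once the previous one has settled
    if (!pending) {
        pending = fetchStockAsPromise()
            .catch(function (err) { console.log(err); })
            .then(function () { pending = null; });
    }
}
setInterval(fetchStockGuarded, 1000);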

How can I throttle a stack of API requests?

I have an array of ids, and I want to make an API request for each id, but I want to control how many requests are made per second, or better still, have only 5 open connections at any time, and when a connection completes, fetch the next one.
Currently I have this, which just fires off all the requests at the same time:
_.each([1,2,3,4,5,6,7,8,9,10], function(issueId) {
    github.fetchIssue(repo.namespace, repo.id, issueId, filters)
        .then(function(response) {
            console.log('Writing: ' + issueId);
            writeIssueToDisk(fetchIssueCallback(response));
        });
});
Personally, I'd use Bluebird's .map() with the concurrency option since I'm already using promises and Bluebird for anything async. But, if you want to see what a hand-coded counter scheme that restricts how many concurrent requests can run at once looks like, here's one:
function limitEach(collection, max, fn, done) {
    var cntr = 0, index = 0, errFlag = false;
    function runMore() {
        while (!errFlag && cntr < max && index < collection.length) {
            ++cntr;
            fn(collection[index++], function(err, data) {
                --cntr;
                if (errFlag) return;
                if (err) {
                    errFlag = true;
                    done(err);
                } else {
                    runMore();
                }
            });
        }
        if (!errFlag && cntr === 0 && index === collection.length) {
            done();
        }
    }
    runMore();
}
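Usage against the original snippet could look something like this (untested; fetchIssueNode is a hypothetical node-style wrapper around the promise-returning github.fetchIssue):
// Hypothetical node-style wrapper around the promise-based API
function fetchIssueNode(issueId, callback) {
    github.fetchIssue(repo.namespace, repo.id, issueId, filters).then(function (response) {
        console.log('Writing: ' + issueId);
        writeIssueToDisk(fetchIssueCallback(response));
        callback(null, response);
    }, function (err) {
        callback(err);
    });
}
// Run at most 5 fetches at a time
limitEach([1,2,3,4,5,6,7,8,9,10], 5, fetchIssueNode, function (err) {
    if (err) {
        console.error(err);
    } else {
        console.log('All issues written');
    }
});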
With Bluebird:
function fetch(id) {
    console.log("Fetching " + id);
    return Promise.delay(2000, id)
        .then(function(id) {
            console.log(" Fetched " + id);
        });
}
var ids = [1,2,3,4,5,6,7,8,9,10];
Promise.map(ids, fetch, { concurrency: 3 });
<script src="https://cdnjs.cloudflare.com/ajax/libs/bluebird/3.3.1/bluebird.min.js"></script>
Divide your data into as many sub-arrays as you want concurrent connections. Schedule each with setTimeout, and have the completion callback handle the rest of the sub-array.
Wrap the setTimeout in a function of its own so that the variable values are frozen to their values at the time of the delayed_fetch() invocation.
function delayed_fetch(delay, namespace, id, issueIds, filters) {
    setTimeout(
        function() {
            var issueId = issueIds.shift();
            github.fetchIssue(namespace, id, issueId, filters).then(function(response) {
                console.log('Writing: ' + issueId);
                writeIssueToDisk(fetchIssueCallback(response));
                delayed_fetch(0, namespace, id, issueIds, filters);
            });
        }, delay);
}
var i = 0;
_.each([ [1,2], [3,4], [5,6], [7,8], [9,10] ], function(issueIds) {
    var delay = ++i * 200; // milliseconds
    delayed_fetch(delay, repo.namespace, repo.id, issueIds, filters);
});
I'd recommend using throat for exactly this: https://github.com/ForbesLindesay/throat
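A rough sketch of what that could look like (untested, and assuming throat's throat(concurrency, fn) form):
var throat = require('throat');
// Wrap the fetch so that at most 5 run concurrently
var fetchLimited = throat(5, function (issueId) {
    return github.fetchIssue(repo.namespace, repo.id, issueId, filters)
        .then(function (response) {
            console.log('Writing: ' + issueId);
            writeIssueToDisk(fetchIssueCallback(response));
        });
});
Promise.all([1,2,3,4,5,6,7,8,9,10].map(fetchLimited));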
Using Bluebird
function getUsersFunc(user) {
    // Get a collection of users
}
function getImagesFunc(id) {
    // Get a collection of profile images based on the id of the user
}
function search(response) {
    return getUsersFunc(response).then(response => {
        const ids = response.map(items => items.id);
        const images = id => {
            return getImagesFunc(id).then(items => items.image);
        };
        return Promise.map(ids, images, { concurrency: 5 });
    });
}
Previously I used the ES6 Promise.all() function, but it didn't work the way I expected, so I went with the third-party library bluebird.js and it worked like a charm.

javascript "don't make function in a loop"

How can I refactor my code to get rid of this error from JSLinter?
I tried moving the entire function out to a var, but the code wasn't able to run after that.
for (i = 0; i < timeDifference; i++) {
    timestamp++;
    console.log(timestamp);
    energyDatum.find({timestamp: timestamp}).toArray(function(err, result) {
        var data = {};
        result.forEach(function(element) {
            data[element.deviceId] = element;
        });
        var roomRawData = [];
        mappings.forEach(function(room) {
            var hash = {};
            hash.floor = room.floor;
            hash.name = room.name;
            hash.room_type = room.room_type;
            hash.energy_ac = sumApplianceEnergy('energy_ac', room, data);
            hash.energy_light = sumApplianceEnergy('energy_light', room, data);
            hash.energy_socket_1 = sumApplianceEnergy('energy_socket_1', room, data);
            hash.energy_socket_2 = sumApplianceEnergy('energy_socket_2', room, data);
            hash.energy_socket_3 = sumApplianceEnergy('energy_socket_3', room, data);
            hash.energy_total = hash.energy_ac + hash.energy_light + hash.energy_socket_1 + hash.energy_socket_2 + hash.energy_socket_3;
            hash.timestamp = timestamp;
            roomRawData.push(hash);
        });
        roomRaw.insert(roomRawData, {w:1}, function(err, result) { console.log('done'); });
    });
    lastTimestamp.update({_id: timestampId}, {timestamp: timestamp});
}
JSLinter shows this message because your code has a potential error.
Take a look at this line:
energyDatum.find({timestamp: timestamp}).toArray(...);
This method is async, right? That means the callback passed to toArray is called after the for loop has finished all of its iterations, so the timestamp variable (when you use it inside this callback) doesn't have the value from the current iteration; instead it has the value after being incremented timeDifference times.
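A minimal illustration of that capture behaviour (untested):
for (var i = 0; i < 3; i++) {
    setTimeout(function () {
        console.log(i); // logs 3, 3, 3 -- every callback sees the final value of i
    }, 0);
}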
To solve this problem you could move this callback to another function:
var getIterationFunc = function(timestamp) {
    return function(err, result) {
        var data = {};
        // rest of function ...
    }
}
and then use it:
energyDatum.find({timestamp: timestamp}).toArray(getIterationFunc(timestamp));
I believe this error should be fixed now. Hope this helps.
P.S. sorry for my English

How to make the callback invocation order the same as the target function invocation order

I have a problem in my project.
To describe the issue, I have written a simplified code snippet:
function waitFor(fnReady, fnCallback) {
    var check = function() {
        if (fnReady()) {
            fnCallback();
        } else {
            setTimeout(check, 100); // wait another 100ms, and try again
        }
    };
    check();
}
var result = 0;
var flag = true;
function ajaxRequest() {
    setTimeout(
        function() {
            flag = false;
            console.log('ping');
        }, 3000
    );
}
function ajaxRequestHandler() {
    setTimeout(
        function() {
            flag = true;
            console.log('pong');
        }, 200
    );
}
for (var i = 0; i < 10; i++) {
    waitFor(function() { return flag; }, ajaxRequest);
    waitFor(function() { return !flag; }, ajaxRequestHandler);
}
It returns:
ping - 10 times
pong - 10 times
Desired result:
ping
3 second timeout
pong
---------------------
ping
3 second timeout
pong
--------------------
.....
Can you help correct my code?
UPDATE
The actual problem:
I have a Google map.
There are a lot of places where I need to redraw it.
For the application logic it is very important that if I send
request1
request2
request3
request4
I handle the responses in this order:
handle response of request1
handle response of request2
handle response of request3
handle response of request4
The problem is that I don't know the order of the requests.
In different places in the file I see the following lines of code:
google.maps.event.addListener(searchBox, 'bounds_changed', renderTerminalsOnMapAndFitBounds);
...
$.getJSON('getAllTerminals.json', renderTerminalsOnMapAndFitBounds);
.....
$.getJSON('getAllTerminalsInsideRectangle.json', renderTerminalsOnMapAndFitBounds);
...
$.getJSON('getAllTerminalsInsideCircle.json', renderTerminalsOnMapAndFitBounds);
...
$.getJSON('getBigTerminals.json', renderTerminalsOnMapAndFitBounds);
........
The renderTerminalsOnMapAndFitBounds method sends a request to the server and, on success, renders the result on the map. But this event happens very often.
Try this pattern
var map = "abcdefghi".split("");
var responses = []; // collect responses
$.ajaxSetup({
beforeSend : function(jqxhr, settings) {
jqxhr.id = Number(settings.data.split(/id=/)[1]); // add `id` to `request`
console.log(settings.data.split(/id=/)[1]);
}
});
var request = function(id, data) {
// append `id` to `id` data
return $.post("/echo/json/", {json:JSON.stringify([data]), id:id})
};
$.each(map, function(k, v) {
setTimeout(function() {
request(k + 1, v)
.done(function(data) {
// do stuff at each response
console.log(data); // note return values
})
.always(function(data, textStatus, jqxhr) {
// do stuff at each response
responses.push([jqxhr.id, data[0]]);
// do stuff when all requests completed , results items in `responses`
if (responses.length === map.length) {
responses.sort(); // sort `responses` based on `id`
// do stuff with `responses`
console.log(responses);
}
});
},1 + Math.random() * 1000) // async
});
jsfiddle http://jsfiddle.net/guest271314/g254bbjg/
my variant:
var index = 0;
// callback function
function tryMe(param1) {
    waitFor(function() { return param1 == index; },
        function() {
            console.log(param1);
            index++;
        }
    );
}
// callback executer
function callbackTester(callback, i) {
    setTimeout(function() { callback(i); }, 20000 - i * 1000);
}
// test function
for (var i = 0; i < 10; i++) {
    callbackTester(tryMe, i);
}
function waitFor(fnReady, fnCallback) {
    var check = function() {
        if (fnReady()) {
            fnCallback();
        } else {
            setTimeout(check, 100); // wait another 100ms, and try again
        }
    };
    check();
}
http://jsfiddle.net/x061dx75/17/
I personally would use promises for this, but you've said no promises (not sure why), so here's a generic sequencer algorithm in plain javascript (tested in the jsFiddle linked below):
function sequence(fn) {
    // initialize sequence data upon first use
    if (typeof sequence.low === "undefined") {
        sequence.low = sequence.high = 0;
        sequence.results = {};
    }
    // save id in a local variable so we can reference it in the closure from the function below
    var id = sequence.high;
    // advance to the next sequence number
    ++sequence.high;
    // initialize the result value for this sequence callback
    sequence.results[id] = {fn: fn, args: [], ready: false, context: null};
    return function(/* args */) {
        // save args and context and mark it ready
        var args = Array.prototype.slice.call(arguments, 0);
        // get the results object for this callback and save info in it
        var thisResult = sequence.results[id];
        thisResult.args = args;
        thisResult.context = this;
        thisResult.ready = true;
        // now process any requests that are ready, in order
        for (var i = sequence.low; i < sequence.high; i++) {
            var result = sequence.results[i];
            // if this one is ready, process it
            if (result.ready) {
                // increment the counter past this result
                ++sequence.low;
                // remove this stored result
                delete sequence.results[i];
                // process this result
                result.fn.apply(result.context, result.args);
            } else {
                // if this one is not ready, there is nothing to do yet
                break;
            }
        }
    };
}
// your usage:
google.maps.event.addListener(searchBox, 'bounds_changed', sequence(renderTerminalsOnMapAndFitBounds));
...
$.getJSON('getAllTerminals.json', sequence(renderTerminalsOnMapAndFitBounds));
.....
$.getJSON('getAllTerminalsInsideRectangle.json', sequence(renderTerminalsOnMapAndFitBounds));
...
$.getJSON('getAllTerminalsInsideCircle.json', sequence(renderTerminalsOnMapAndFitBounds));
...
$.getJSON('getBigTerminals.json', sequence(renderTerminalsOnMapAndFitBounds));
........
Working demo: http://jsfiddle.net/jfriend00/aqugm1fs/
Conceptually, what this does is as follows:
1. Pass a substitute completion handler in place of the normal completion callback.
2. This substitute function marks each response with a sequence id and saves the original completion handler.
3. If a response comes back while another response with a lower sequence id is still pending, the result is just stored and saved for later.
4. As each response comes in, it processes as many responses, in sequence, as are ready.
Note: while all the examples you show use the same callback function, this will work with any callback function, so it would work with a mix of different types of operations.
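For comparison, the promise-based approach mentioned above could look roughly like this (a sketch, untested; it assumes the call sites are changed to hand the jqXHR promise from $.getJSON to a helper instead of passing the callback directly):
var queue = Promise.resolve();
function handleInOrder(requestPromise, handler) {
    // Chain the handler onto the queue so responses are processed in request order
    queue = Promise.all([queue, requestPromise]).then(function (results) {
        handler(results[1]);
    });
}
handleInOrder($.getJSON('getAllTerminals.json'), renderTerminalsOnMapAndFitBounds);
handleInOrder($.getJSON('getAllTerminalsInsideRectangle.json'), renderTerminalsOnMapAndFitBounds);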

CouchDB _changes notifications - jquery.couch.js couch.app.db.changes() usage

I have replication working in CouchDB and want to update my UI when changes are pushed to the target database. I've read about the _changes database API and found the couch.app.db.changes() function in jquery.couch.js. However, I can't work out how to use the function. I assume I need to set up a listener, but my knowledge of JavaScript is not yet what it needs to be.
Unfortunately the docs at http://www.couch.io/page/library-jquery-couch-js-database don't even list the changes() function.
Can someone help me here, and also let me know what the options param is for?
Here is the code for the function in question:
changes: function(since, options) {
    options = options || {};
    // set up the promise object within a closure for this handler
    var timeout = 100, db = this, active = true,
        listeners = [],
        promise = {
            onChange: function(fun) {
                listeners.push(fun);
            },
            stop: function() {
                active = false;
            }
        };
    // call each listener when there is a change
    function triggerListeners(resp) {
        $.each(listeners, function() {
            this(resp);
        });
    };
    // when there is a change, call any listeners, then check for another change
    options.success = function(resp) {
        timeout = 100;
        if (active) {
            since = resp.last_seq;
            triggerListeners(resp);
            getChangesSince();
        };
    };
    options.error = function() {
        if (active) {
            setTimeout(getChangesSince, timeout);
            timeout = timeout * 2;
        }
    };
    // actually make the changes request
    function getChangesSince() {
        var opts = $.extend({heartbeat: 10 * 1000}, options, {
            feed: "longpoll",
            since: since
        });
        ajax(
            {url: db.uri + "_changes" + encodeOptions(opts)},
            options,
            "Error connecting to " + db.uri + "/_changes."
        );
    }
    // start the first request
    if (since) {
        getChangesSince();
    } else {
        db.info({
            success: function(info) {
                since = info.update_seq;
                getChangesSince();
            }
        });
    }
    return promise;
},
Alternatively, you can use the longpoll changes feed. Here is one example:
function bind_db_changes(database, callback) {
    $.getJSON("/" + database, function(db) {
        $.getJSON("/" + database +
            "/_changes?since=" + db.update_seq + "&heartbeat=10000&feed=longpoll",
            function(changes) {
                if ($.isFunction(callback)) {
                    callback.call(this, changes);
                    bind_db_changes(database, callback);
                }
            });
    });
};
bind_db_changes("test", function(changes) {
    $('ul').append("<li>" + changes.last_seq + "</li>");
});
Note that $.couch.db.changes is now in the official documentation:
http://daleharvey.github.com/jquery.couch.js-docs/symbols/%24.couch.db.changes.html
Also a nice example of consuming _changes with the jquery.couch plugin here:
http://bradley-holt.com/2011/07/couchdb-jquery-plugin-reference
What about using the AJAX features of jQuery?
function get_changes() {
    $.getJSON("/path/to/_changes", function(changes) {
        $.each(changes, function() {
            $("<li>").html(this.text).prependTo(mychanges_div);
        });
        get_changes();
    });
}
setTimeout(get_changes, 1000);
I've been doing some work with JS promise code, which enabled me to understand the CouchDB code I posted above. Here is a sample:
var promise_changes = app.db.changes();
// Add our deferred callback function. We can add as many of these as we want.
promise_changes.onChange(db_changes);
// called whenever this db changes
function db_changes(resp) {
    console.log("db_changes: ", resp);
}
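The handle returned by changes() also exposes stop() (see the source above), so the longpoll loop can be shut down when updates are no longer needed, for example:
// Stop listening for changes (the button id here is just an example)
$('#stop-watching').click(function () {
    promise_changes.stop();
});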
Google Chrome goes into a Busy state with long polling, which I hope they will resolve one day.
