I'm really confused at this point with losing data and can't figure out why. I wrote a service that sends GPS info from a device to an endpoint.
I'm using pm2 to launch my processes, but the problem is that this service sometimes doesn't send the info to the endpoint even though the device is sending data. The workaround so far was restarting the instance in pm2, so I created a crontab to restart the GPS instance in pm2 every 45 minutes. But that is not always viable: when data is lost in a window shorter than 45 minutes, the scheduled restart doesn't help...
I can't figure this out. Why do I lose data, and why does restarting make everything okay again? I saw a post on Stack Overflow about lost data in a Node.js process when the child sends data to the parent, and I read about two possible causes (a minimal sketch of the second one follows this list):
The child is not reading the GPS data quickly enough to make a post and send the info.
The child needs to JSON.stringify the content it sends to the parent, and the parent needs to JSON.parse the info it receives.
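For the second cause, this is roughly what the explicit serialization could look like, assuming the fork-based parent/child setup from the code below (the payload fields here are made up for illustration):

    // child: serialize explicitly before sending over the IPC channel
    process.send(JSON.stringify({ deviceId: deviceId, lat: 38.21, lng: -85.63 }));

    // parent: parse whatever arrives from the child
    child.on('message', function (raw) {
        var data = JSON.parse(raw);
        console.log('GPS fix from', data.deviceId, data.lat, data.lng);
    });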
Here's my code:
var child_process = require("child_process");
var argv = require('minimist')(process.argv.slice(2));
// ex: nsGPSService.js -d 1111-11-11-111
var deviceId = argv.d;
var processDevices = [];

function runParent() {
    setTimeout(function() {
        return Database.Devices.getDevices().then(function(devices) {
            return new Promise(function(resolve, reject) {
                async.each(devices, function(device, callback) {
                    var result = _.filter(processDevices, { "id": device.id });
                    if (result.length == 0) {
                        // named "child" so it doesn't shadow the global process object
                        var child = child_process.fork(__dirname + '/nsGPSService.js', ["-d", device.id]);
                        processDevices.push({ "process": child, "id": device.id });
                        child.on('message', function(data) {
                            // receive messages from the child
                            if (data.reason == "deleted") {
                                // the child ended and alerts the parent to remove it from the list
                                var index = _.findIndex(processDevices, { "id": data.deviceId });
                                processDevices.splice(index, 1);
                            }
                        });
                        child.on('exit', function(code) {});
                        // note: a ChildProcess never emits 'uncaughtException' (only the
                        // global process object does), so this handler never fires
                        child.on("uncaughtException", function(error) {
                            process.exit(1);
                        });
                    }
                    callback();
                }, function(error) {
                    error ? reject(error) : resolve();
                });
            }).then(function() {
                runParent();
            }).catch(function(error) {
                runParent();
            });
        });
    }, 5000);
}
if (!deviceId) {
    return runParent();
}

function runChild(id) {
    setTimeout(function() {
        return Database.Devices.getDeviceById(id).then(function(device) {
            if (!device) {
                process.send({
                    "deviceId": id,
                    "reason": "deleted"
                });
                process.exit();
                return;
            }
            return Controllers.Gps.getRadioInfo('gps', 'info', {}, device).then(function(data) {
                return Controllers.Gps.sendDeviceInfo(data, device);
            }).then(function() {
                return runChild(id);
            }).catch(function(e) {
                return runChild(id);
            });
        });
    }, 5000);
}

// with -d passed, this instance runs as a child: start polling for that device
runChild(deviceId);
I really need to figure this out, because I never know when I need to restart the service: it looks like I'm not getting info when in reality the device is sending it...
Which solution is really viable in my scenario? Can anyone figure out this problem?
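One direction worth exploring instead of the blind 45-minute cron restart: a heartbeat between parent and child, so the parent can detect a stalled child and refork it. Here is a minimal sketch against the code above; the "heartbeat" reason, the 10-second interval, and the 30-second threshold are all assumptions:

    // child: report liveness periodically alongside the normal GPS loop
    setInterval(function () {
        process.send({ "reason": "heartbeat", "deviceId": deviceId });
    }, 10000);

    // parent: inside the fork loop, kill a child that has gone quiet
    var lastSeen = Date.now();
    child.on('message', function (data) {
        if (data.reason == "heartbeat") lastSeen = Date.now();
    });
    setInterval(function () {
        if (Date.now() - lastSeen > 30000) {
            child.kill();
            // drop it from processDevices so the next runParent() tick forks a fresh one
            _.remove(processDevices, { "id": device.id });
        }
    }, 10000);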
Right now I am replicating my entire device database over to my remote database.
Once that is complete, I grab all my data that is not older than 1 month from my remote database, using a filter, and bring it to my device.
FILTER
{
    _id: '_design/filters',
    "filters": {
        "device": function(doc, req) {
            if (doc.type == "document" || doc.type == "signature") {
                if (doc.created >= req.query.date) return true;
                else return false;
            }
            else return true;
        }
    }
}
REPLICATION
device_db.replicate.to(remote_db)
    .on('complete', function () {
        device_db.replicate.from(remote_db, {
            filter: "filters/device",
            query_params: { "date": (Math.floor(Date.now() / 1000) - 2419200) }
        })
        .on('complete', function () {
            console.log("localtoRemoteSync replicate.to success");
            callback(true);
        });
    });
My question:
I want to be able to periodically delete data from my device that is older than 3 months (data old enough that I already know it's been synced).
But just because I delete it from my device, I don't want that deletion to be applied to my remote_db when I replicate back to it.
How can I delete specific data on my device but not have that deletion translated when I replicate?
FILTERS
Here, we have 2 filters:
noDeleted : This filter doesn't push _deleted documents.
device : Filter to get the latest data only.
{
    _id: '_design/filters',
    "filters": {
        "device": function(doc, req) {
            if (doc.type == "document" || doc.type == "signature") {
                if (doc.created >= req.query.date) return true;
                else return false;
            }
            return true;
        },
        "noDeleted": function(doc, req) {
            // _deleted documents won't pass through this filter.
            // If we delete a document locally, the delete won't be replicated to the remote DB.
            return !doc._deleted;
        }
    }
}
REPLICATION
device_db.replicate.to(remote_db, {
    filter: "filters/noDeleted"
})
.on('complete', function() {
    device_db.replicate.from(remote_db, {
        filter: "filters/device",
        query_params: { "date": (Math.floor(Date.now() / 1000) - 2419200) }
    })
    .on('complete', function() {
        console.log("localtoRemoteSync replicate.to success");
        callback(true);
    });
});
Workflow
1. You push all your documents without pushing the deleted ones.
2. You get all the updates for the latest data.
3. You delete your old documents.
For step 3, you could either query the remote DB to get the ids of the documents that are too old and delete them locally; note that the documents will still be there as _deleted stubs, so to completely remove them a compaction will be required (a sketch follows). Or you could totally destroy your local database after step 1 and start from scratch.
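A minimal sketch of step 3 with PouchDB, assuming documents carry the same created timestamp used in the filters above and that the three-month cutoff uses the same epoch-seconds unit:

    // ~3 months, same unit as doc.created
    var cutoff = Math.floor(Date.now() / 1000) - 3 * 2419200;

    // flag every old local doc as deleted, then compact away the tombstones
    device_db.allDocs({ include_docs: true }).then(function (result) {
        var oldDocs = result.rows
            .map(function (row) { return row.doc; })
            .filter(function (doc) { return doc.created && doc.created < cutoff; })
            .map(function (doc) { return { _id: doc._id, _rev: doc._rev, _deleted: true }; });
        return device_db.bulkDocs(oldDocs);
    }).then(function () {
        return device_db.compact();
    });

Because pushes go through the noDeleted filter, these local deletions never reach the remote DB.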
Add a one-way filtered replication. However, anything you need back on the server will require a put request with the server's _rev.
For example:
Replicate from server to client, then add a filter mechanism, like transfer: true, on the docs you want to replicate:
db.replicate.from(remoteDB, {
    live: true,
    retry: true,
    selector: { transfer: true } // or any other type of selector
});
To delete a doc on the client, set transfer to false, then delete it on the client. It won't meet your filter criteria, so the deletion won't replicate.
Anything you want to put back to the server, use a put request instead of replication.
If you want the document back on the client, just set transfer to true in the doc.
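A rough sketch of that put-back step, assuming the document still exists both locally and on the server, and that the client's copy should win (docId is a placeholder):

    // reuse the server's current _rev so the put is accepted
    db.get(docId).then(function (localDoc) {
        return remoteDB.get(docId).then(function (serverDoc) {
            localDoc._rev = serverDoc._rev;
            localDoc.transfer = true; // make it match the pull selector again
            return remoteDB.put(localDoc);
        });
    });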
Hey,
I'm stuck for some reason. I'm playing around with an Arduino board and I want to read the data in the client.
My server code is this:
if (Meteor.isServer) {
    var five = Meteor.npmRequire("johnny-five");
    var board = new five.Board();
    Meteor.startup(function() {
        board.on("ready", Meteor.bindEnvironment(function() {
            var temperature = new five.Thermometer({
                controller: 'TMP36',
                pin: 'A0'
            });
            Meteor.setInterval(function() {
                console.log(temperature.celsius);
            }, 5000);
        }));
    });
}
I don't want to save the data to a collection, but to read it live. How do I pass the variable temperature.celsius from the server to the client? I cannot run this code on the client, since I'm using an npm require and it works only on the server.
Right after the Meteor.setInterval definition, add this:
Meteor.methods({
    temperature: function () {
        // return the reading itself; the Thermometer object won't serialize over DDP
        return temperature.celsius;
    }
});
Then add, at the bottom of your code:
if (Meteor.isClient) {
    // the helper name must match {{temperature}} in the template
    Template.tempReport.temperature = function () {
        return Session.get('temperature') || "";
    };
    Template.tempReport.events = {
        'click button': function () {
            Meteor.call('temperature', function (err, response) {
                Session.set('temperature', response);
            });
        }
    };
}
And finally in your HTML:
<template name="tempReport">
    <div>Temperature: {{temperature}} <button>Update</button></div>
</template>
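If you'd rather not click a button each time, a small variation (same method, same Session key; the 5-second cadence matching the server loop is an assumption) polls from the client instead:

    if (Meteor.isClient) {
        Meteor.setInterval(function () {
            Meteor.call('temperature', function (err, response) {
                if (!err) Session.set('temperature', response);
            });
        }, 5000);
    }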
I have reproduced my case in this JSBin: http://emberjs.jsbin.com/xeninaceze/edit?js,output
The GitHub API allows me to get the list of events by author:
API link - api.github.com/users/:user/events
I can access the commit messages by filtering the "PushEvent" events, and that works perfectly fine because I can stream my latest commit messages.
var gitactivitiesPromise = function() {
    return new Ember.RSVP.Promise(function (resolve, reject) {
        Ember.$.ajax(eventsAct, {
            success: function(events) {
                var result = [];
                events.filter(function(event) {
                    return event.type == 'PushEvent';
                }).forEach(function(item) {
                    item.payload.commits.forEach(function(commit) {
                        result.push(store.createRecord('commit', {
                            message: commit.message
                        }));
                    });
                });
                resolve(result);
            },
            error: function(reason) {
                // reject must be taken as the second executor argument above
                reject(reason);
            }
        });
    });
};
The problem is that I want to stream, besides the message, its own URL link (html_url).
I need to know how I can tackle this, since the commit URL links are not in the API link
api.github.com/users/:user/events
but they are in the following API:
api.github.com/repos/:user/:repo/commits/:branch
This makes it a bit more complicated to access the latest commits' URL links (html_url).
This is a good example of what I am trying to do:
http://zmoazeni.github.io/gitspective/#
It streams, in the push events, the latest commit messages with links.
It seems to me that all the relevant data is already there:
{
    "id": "3414229549",
    "type": "PushEvent",
    "actor": {
        ...
        "login": "paulirish"
    },
    "repo": {
        ...
        "name": "GoogleChrome/devtools-docs"
    },
    "payload": {
        ...
        "commits": [
            {
                ...
                "message": "fish shell. really liking it.",
                "sha": "1f9740c9dd07f166cb4b92ad053b17dbc014145b"
            },
            ...
You can access the author's login as actor.login and the repository name as repo.name. With these it's easy to construct the relevant links:
...
.forEach(function(item) {
    var repoUrl = 'https://github.com/' + item.repo.name;
    var authorUrl = 'https://github.com/' + item.actor.login;
    item.payload.commits.map(function(commit) {
        result.push(store.createRecord('commit', {
            authorUrl: authorUrl,
            repositoryUrl: repoUrl,
            commitUrl: repoUrl + '/commit/' + commit.sha,
            message: commit.message
        }));
    });
})
...
Updated JSBin: http://emberjs.jsbin.com/feyedujulu/1/edit?js,output
I have several client connections, and one should listen for data on the others. How can I make a bridge between them (not an array iterator)?
Sample pseudo-code:
socket.on('data', function(chunk) {
    decodeChunk(chunk.toString('hex'), function(response) {
        if (response.name === 'toDB') {
            // write smth. to DB
        }
        if (response.name === 'readDB') {
            // watch all incoming tcp sockets with 'toDB' and continuously
            // send to the client connected with this socket
        }
    });
});
I could use something like RabbitMQ, but not at the current stage, in order to keep development more agile.
Any suggestions appreciated.
It's a bit tricky to address this with pure pseudo-code, but I'll give it a shot anyway.
If I understand this correctly, then toDB and readDB are sample data and there are multiple toDB. Which means you'd also have a bunch of toXY with a corresponding readXY, or a bunch of toPQ with a corresponding readPQ. If so, then as soon as a toSOMETHING is connected, you can store that socket under a readSOMETHING key. If the key already exists, you'd simply append the toSOMETHING socket to it. In time it would look something like this:
var socketData = {
    readDB: {
        sockets: [
            <socket-object-toDB-1>,
            <socket-object-toDB-2>,
            <socket-object-toDB-3>
        ]
    },
    readXY: {
        sockets: [
            <socket-object-toXY-1>,
            <socket-object-toXY-2>,
            <socket-object-toXY-3>
        ]
    }
}
And then your pseudo code would probably look something like
socket.on('data', function(chunk) {
    decodeChunk(chunk.toString('hex'), function(response) {
        if (response.name === 'toDB') {
            // write smth. to DB
            if (socketData['readDB']) {
                // already exists, simply append the incoming socket to its sockets array
                socketData['readDB'].sockets.push(socket);
            } else {
                // does not exist, add it with a sockets array
                socketData['readDB'] = {
                    sockets: [socket]
                };
            }
        }
        if (response.name === 'readDB') {
            // Write to all sockets of readDB
            socketData['readDB'].sockets.forEach(function(sock) {
                sock.write('Whatever message\r\n');
            });
        }
    });
});
I ended up with this service:
var SocketMedium = {
    sessions: {},
    onEvent: function (event, cb) {
        if (!this.sessions[event]) {
            this.sessions[event] = [];
        }
        this.sessions[event].push(cb);
        console.log('onEvent: ', event);
    },
    rTrigger: function (event, data) {
        console.log('Started rTrigger: ', event);
        if (this.sessions[event]) {
            console.log('FOUND: ', event);
            this.sessions[event].forEach(function (cb) {
                cb(data);
            });
        }
        console.log('Completed rTrigger', this.sessions);
    },
    cleaner: function (sessionId) {
        var event = 'session' + sessionId;
        if (this.sessions[event]) {
            delete this.sessions[event];
            console.log('SocketMedium cleaned: ', event);
        } else {
            console.log('SocketMedium can not find: ', event);
        }
    }
};
module.exports = SocketMedium;
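For illustration, wiring it up could look roughly like this; the session id, the require path, and the readerSocket / socket variables are placeholders, not part of the service:

    var medium = require('./SocketMedium');

    // a 'readDB' connection registers interest in a session's data
    medium.onEvent('session42', function (data) {
        readerSocket.write(data);
    });

    // a 'toDB' connection pushes incoming chunks to every registered callback
    socket.on('data', function (chunk) {
        medium.rTrigger('session42', chunk);
    });

    // when the reader disconnects, drop its callbacks
    medium.cleaner(42);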
UPDATE
I got it to work, but I still think there's a problem. I only get the correct return data if I set the setTimeout timer to be really long, like 2000. If I leave it at 200, the callback executes with empty data because the API call hasn't returned yet.
I've updated the code below.
Setup:
I'm sending a GET value from the front end via AJAX (jQuery) and then using that value to call the Foursquare API to get a list of relevant venues back.
This is working "fine", except that the order of events gets screwy. When I insert the GET value into the parameters for the function to evaluate, I get back a return value I'm not asking for, which then causes the template to render on the front end before the other return value from my function, the one I want, is available.
And actually I don't think that value is being returned at all, just logged to the console.
Question:
How can I return the filtered list of JSON objects at the end of the initGetVenues function in places.js via AJAX to the front end?
Context:
I'm using this package to connect to Foursquare:
https://npmjs.org/package/foursquarevenues
AJAX call on front end
$("#search-items").submit(function() {
var placeQuery = $("#search-input").val();
$.ajax({
url: '/return/places/',
data: {"passPlaceQuery": placeQuery},
type: 'get',
dataType: 'html',
success: function(data) {
$("#search-results-list").html(data);
},
});
return false;
});
index.js [UPDATED]
returnPlaces: function(req, res) {
    if (req.headers['x-requested-with'] === 'XMLHttpRequest') {
        console.log("I've started routing");
        return places.findVenue({
            ll: "38.214986,-85.637054",
            radius: 32186,
            query: req.query.passPlaceQuery,
            intent: "browse",
            categoryId: "4bf58dd8d48988d1e0931735"
        }, function(err, data) {
            console.log("Venue callback");
            if (err) {
                // return here so we don't also try to render below
                return res.send(500);
            }
            console.log("Attempting render: " + data);
            return res.render("place-results", {
                layout: false,
                foundPlaces: data
            });
        });
    } else {
        return res.redirect("/");
    }
}
places.js [UPDATED]
(function() {
    var foursquare, initGetVenues;
    foursquare = (require('foursquarevenues'))('SECRET', 'SECRET');
    module.exports = {
        findVenue: initGetVenues = function(criteria, callback) {
            var jsonUniquePlaces;
            jsonUniquePlaces = [];
            foursquare.getVenues(criteria, function(error, venues) {
                var i, objUniquePlace, range, uniquePlaces, venueName;
                if (!error) {
                    range = Object.keys(venues.response.venues).length;
                    uniquePlaces = [];
                    i = 0;
                    while (i < range) {
                        venueName = venues.response.venues[i].name;
                        if (!(uniquePlaces.indexOf(venueName) > -1)) {
                            uniquePlaces.push(venueName);
                        }
                        i++;
                    }
                    i = 0;
                    while (i < uniquePlaces.length) {
                        objUniquePlace = {
                            place: uniquePlaces[i]
                        };
                        jsonUniquePlaces.push(objUniquePlace);
                        i++;
                    }
                    jsonUniquePlaces = JSON.stringify(jsonUniquePlaces);
                    // this return goes to the getVenues callback, not to findVenue's caller
                    return jsonUniquePlaces;
                }
            });
            // NOTE: this races against getVenues; the callback only sees data
            // if the API happens to respond within the timeout
            return setTimeout((function() {
                return callback(null, jsonUniquePlaces);
            }), 200);
        }
    };
}).call(this);
When setTimeout is 2000 I get:
| I've started routing
| [{"place":"Quills Coffee"},{"place":"Quills Coffe"},{"place":"Quill's Coffee"}]
| Venue callback
| Attempting render: [{"place":"Quills Coffee"},{"place":"Quills Coffe"},{"place":"Quill's Coffee"}]
| GET /return/places/?passPlaceQuery=quills 200 2009ms - 150
When setTimeout is 200 I get:
| I've started routing
| Venue callback
| Attempting render:
| GET /return/places/?passPlaceQuery=quills 200 210ms - 11
| [{"place":"Quills Coffee"},{"place":"Quills Coffe"},{"place":"Quill's Coffee"}]
You can't just return your value from inside findVenue. The call to foursquare.getVenues is asynchronous. So when the Node engine gets to the function call foursquare.getVenues(opt, callback), it simply starts the operation and continues executing any further statements; then index.js continues executing, then you render a response... Finally, some time later, the foursquare.getVenues code invokes its callback (presumably when it's done talking to the Foursquare API).
You need to rewrite places.findVenue to take a callback argument. When you call places.findVenue() you will pass it a function to execute as a callback. That is when you're supposed to send the response.
Here is a simplified example that you can hopefully extend:
function findVenue(opt, callback) {
    // you pass this callback function to setTimeout; it executes it in 200ms
    // by convention, pass a null error object if successful
    setTimeout(function() {
        console.log('Done with foursquare.getVenues, calling back to routing function');
        callback(null, opt);
    }, 200);
}
app.get('/return/places', function(req, res) {
    console.log('routing function start');
    findVenue({
        lat: 40,
        lng: 70,
        query: 'foo'
    }, function(err, data) {
        console.log('findVenue callback');
        if (err) { return res.send(500); }
        res.render('template', { foo: data });
    });
});
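Applied to the places.js above, the same idea means invoking the callback from inside the getVenues callback and dropping the setTimeout entirely. A minimal sketch, keeping the original names and assuming venues.response.venues is an array as the index loop implies:

    findVenue: function(criteria, callback) {
        foursquare.getVenues(criteria, function(error, venues) {
            if (error) {
                return callback(error);
            }
            var uniquePlaces = [];
            var jsonUniquePlaces = [];
            venues.response.venues.forEach(function(venue) {
                // deduplicate by venue name, as in the original while loops
                if (uniquePlaces.indexOf(venue.name) === -1) {
                    uniquePlaces.push(venue.name);
                    jsonUniquePlaces.push({ place: venue.name });
                }
            });
            // call back only once the API has responded; no timer, no race
            callback(null, JSON.stringify(jsonUniquePlaces));
        });
    }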