I have a Node.js app with a PostgreSQL database running on Heroku, and for some reason SELECT queries start failing after many successful attempts, for no apparent reason.
Example code:
pg.connect(database_url, function(err, client) {
  if (client) {
    var date = new Date();
    var query = client.query('SELECT * FROM accounts', function(err, result) {
      if (!err) {
        res.send(JSON.stringify(result.rows));
        accounts = JSON.stringify(result.rows);
      } else {
        res.send('failed');
      }
    });
  } else {
    res.send(JSON.stringify(err));
  }
});
The above code works perfectly about 10 times and then seems to break out of nowhere.
How can I remedy this issue?
If it works 10 times and fails on the 11th, it's probably because the pool of connections has run out.
You should call done() in your code after you have finished with the request, to send the database connection back to the pool.
pg.connect(database_url, function(err, client, done) {
  if (client) {
    var date = new Date();
    var query = client.query('SELECT * FROM accounts', function(err, result) {
      if (!err) {
        res.send(JSON.stringify(result.rows));
        accounts = JSON.stringify(result.rows);
      } else {
        res.send('failed');
      }
      done(); // call done to return the connection to the pool
    });
  } else {
    res.send(JSON.stringify(err));
  }
});
See the documentation for connect.
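If you genuinely need more concurrent connections rather than just returning them promptly, legacy versions of node-postgres also let you grow the pool through pg.defaults. A minimal sketch, assuming an older pg release that exposes this setting and the same database_url as above:

var pg = require('pg');

// Must run before the first pg.connect() call, which is what creates the pool.
pg.defaults.poolSize = 20; // the legacy default is 10

pg.connect(database_url, function(err, client, done) {
  // ...query as above, then call done()
});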
First of all, I'm a noob; this is my first time running a script on a cloud server.
I have some code that reads the number of views on my YouTube video and writes it into the video's title, and I want it running 24/7.
I got it working on a VM instance on Google Cloud, using node-cron to run the code every 5 minutes and "forever" to keep the Node process alive.
My question is: is there a better way to run it, like Cloud Functions or something? Or should I keep running it on the VM instance?
If so, what should I do? I don't know how to use external dependencies such as the Google APIs in a Cloud Function, since I don't have a console there to install the googleapis dependency. (Maybe it's easy, I just don't know it yet.)
var fs = require('fs'); // needed below for credentials and the token cache
var readline = require('readline');
var { google } = require('googleapis');
var OAuth2 = google.auth.OAuth2;
const cron = require("node-cron");

cron.schedule("*/5 * * * *", () => {
  // If modifying these scopes, delete your previously saved credentials
  // at ~/.credentials/youtube-nodejs-quickstart.json
  var SCOPES = ['https://www.googleapis.com/auth/youtube.force-ssl'];
  var TOKEN_DIR = (process.env.HOME || process.env.HOMEPATH ||
    process.env.USERPROFILE) + '/.credentials/';
  var TOKEN_PATH = TOKEN_DIR + 'update-youtube-title.json';

  const youtube = google.youtube('v3');
  const video_id = 'f0ARwVVxoBc';

  // Load client secrets from a local file.
  fs.readFile('credentials.json', function processClientSecrets(err, content) {
    if (err) {
      console.log('Error loading client secret file: ' + err);
      return;
    }
    // Authorize a client with the loaded credentials, then call the YouTube API.
    authorize(JSON.parse(content), makeAuthCall);
  });

  /**
   * Create an OAuth2 client with the given credentials, and then execute the
   * given callback function.
   *
   * @param {Object} credentials The authorization client credentials.
   * @param {function} callback The callback to call with the authorized client.
   */
  function authorize(credentials, callback) {
    var clientSecret = credentials.installed.client_secret;
    var clientId = credentials.installed.client_id;
    var redirectUrl = credentials.installed.redirect_uris[0];
    var oauth2Client = new OAuth2(clientId, clientSecret, redirectUrl);

    // Check if we have previously stored a token.
    fs.readFile(TOKEN_PATH, function (err, token) {
      if (err) {
        getNewToken(oauth2Client, callback);
      } else {
        oauth2Client.credentials = JSON.parse(token);
        callback(oauth2Client);
      }
    });
  }

  /**
   * Get and store a new token after prompting for user authorization, and then
   * execute the given callback with the authorized OAuth2 client.
   *
   * @param {google.auth.OAuth2} oauth2Client The OAuth2 client to get a token for.
   * @param {getEventsCallback} callback The callback to call with the authorized
   *     client.
   */
  function getNewToken(oauth2Client, callback) {
    var authUrl = oauth2Client.generateAuthUrl({
      access_type: 'offline',
      scope: SCOPES
    });
    console.log('Authorize this app by visiting this url: ', authUrl);
    var rl = readline.createInterface({
      input: process.stdin,
      output: process.stdout
    });
    rl.question('Enter the code from that page here: ', function (code) {
      rl.close();
      oauth2Client.getToken(code, function (err, token) {
        if (err) {
          console.log('Error while trying to retrieve access token', err);
          return;
        }
        oauth2Client.credentials = token;
        storeToken(token);
        callback(oauth2Client);
      });
    });
  }

  /**
   * Store the token to disk to be used in later program executions.
   *
   * @param {Object} token The token to store to disk.
   */
  function storeToken(token) {
    try {
      fs.mkdirSync(TOKEN_DIR);
    } catch (err) {
      if (err.code != 'EEXIST') {
        throw err;
      }
    }
    fs.writeFile(TOKEN_PATH, JSON.stringify(token), (err) => {
      if (err) throw err;
      console.log('Token stored to ' + TOKEN_PATH);
    });
  }

  /**
   * Lists the names and IDs of up to 10 files.
   *
   * @param {google.auth.OAuth2} auth An authorized OAuth2 client.
   */
  /*
  function getChannel(auth) {
    var service = google.youtube('v3');
    service.channels.list({
      auth: auth,
      part: 'snippet,contentDetails,statistics',
      forUsername: 'GoogleDevelopers'
    }, function (err, response) {
      if (err) {
        console.log('The API returned an error: ' + err);
        return;
      }
      var channels = response.data.items;
      if (channels.length == 0) {
        console.log('No channel found.');
      } else {
        console.log('This channel\'s ID is %s. Its title is \'%s\', and ' +
          'it has %s views.',
          channels[0].id,
          channels[0].snippet.title,
          channels[0].statistics.viewCount);
      }
    });
  }
  */

  // new code
  const makeAuthCall = (auth) => {
    // get views
    youtube.videos.list(
      {
        auth: auth,
        id: video_id,
        part: 'snippet,statistics'
      },
      (err, response) => {
        if (err) {
          console.log(`Fetching the view count failed: ${err}`);
          return;
        }
        if (response.data.items[0]) {
          // update found
          console.log('Found an update...');
          updateVideoTitle(response.data.items[0], auth);
        }
      }
    );
  };

  const updateVideoTitle = (video, auth) => {
    let viewsText = video.statistics.viewCount.toLocaleString();
    // The title text itself stays in Portuguese: "This video has X views"
    const newTitle = `Esse Vídeo tem ${viewsText} Visualizações`;
    const oldTitle = video.snippet.title;
    if (newTitle != oldTitle) {
      video.snippet.title = newTitle;
      console.log(`Updating the view count to ${viewsText}`);
      youtube.videos.update(
        {
          auth: auth,
          part: 'snippet,statistics',
          resource: video,
        },
        (err, response) => {
          console.log(response);
          if (err) {
            console.log(`Updating the title failed: ${err}`);
            return;
          }
          if (response.data.items) {
            console.log("Done");
          }
        }
      );
    } else {
      console.log("No update needed");
    }
  };
});
This is a very broad question with a lot to unpack. Keep in mind the differences below are explained very simplistically and there can be more steps.
Compute Engine/VM:
Step 1. Install a JavaScript runtime (Node.js) on the OS.
Step 2. Deploy your code & dependencies.
Cloud Functions:
Step 1. Deploy your code & dependencies.
Summary:
Cloud Functions removes the need to manage the operating system and runtime, which might make it easier for you. One way or another, you won't escape the need to set up your dependencies.
For Practice:
These "quick start" articles will give an understanding of the setup process. You learn by doing with a "hello world" example.
For Dependencies:
There is an article here that shows how to declare dependencies in Cloud Functions. Its sections explain how you can load dependencies in different ways. The first example says "The dependency is then imported in the function": the require('escape-html') in the code corresponds to the "escape-html": "^1.0.3" entry in the manifest. This means you can work backwards through the require() calls in your own code and add each one to your dependency manifest (package.json). You may have to learn a bit more about the syntax by doing a Google search about package.json/npm and how it works in general.
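Worked backwards, the code above would need a manifest along these lines (a sketch: the package name and the version numbers are illustrative, not prescribed):

{
  "name": "update-youtube-title",
  "version": "1.0.0",
  "dependencies": {
    "googleapis": "^39.0.0",
    "node-cron": "^2.0.0"
  }
}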
For Authentication:
Assuming your code is properly written, the require('googleapis') at the top of your code means you should only need to add the dependency as mentioned previously, and your code may be good to go for authentication.
For Cron jobs:
In your code you do this with const cron = require("node-cron"); at the top. Like the other dependencies I mentioned, it's software that triggers your code. I'm not sure it will behave like the rest inside Cloud Functions, because Google has its own cron offering (Cloud Scheduler) that works outside the Cloud Functions environment and triggers the function over the network. Keep in mind that moving to it could alter your code significantly, so if you can test with the node-cron dependency first, do that.
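To give an idea of the shape: if you did move to Cloud Scheduler, the node-cron wrapper would go away and the work would move into an exported HTTP handler. A rough sketch (the export name updateYoutubeTitle is made up, and the body is elided):

// index.js: a Cloud Functions HTTP entry point. Cloud Scheduler calls its URL
// on whatever schedule you configure, replacing cron.schedule() entirely.
exports.updateYoutubeTitle = (req, res) => {
  // ...the body that cron.schedule() wrapped in the code above...
  res.status(200).send('done');
};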
Disclaimer:
Please open more specific questions next time if you can. Stack Overflow is not for consultation or for broad questions with many follow-ups; it's for concrete answers to specific coding questions. Some may decide to close this post for that reason.
Hope this helps.
I have a MongoDB instance and two JavaScript services running on a Linux server. The first service, moscaService.js, listens to MQTT topics on the server and records what is sent in a MongoDB collection. The second service, integrationService.js, runs every second, reading data from the same MongoDB collection and, if there is a new record (or more), sending it to Ubidots.
The problem is that both services connect to the same IP/port, localhost:27017; and if both of them are ever active simultaneously (say, moscaService.js is recording something when integrationService.js tries to connect), there is a connection error and the service restarts.
Here are the connection parts of both services:
var MongoClient = require('mongodb').MongoClient;
var CronJob = require('cron').CronJob; // omitted from the original snippet
var url = 'mongodb://127.0.0.1:27017/myGateway';

// integrationService.js
var job1 = new CronJob('*/1 * * * * *', function() {
  MongoClient.connect(url, function(err, db) {
    if (err != null) {
      logger.error({message: 'Connection error: ' + err});
      process.exit(0);
    } else {
      executeService();
    }
    function executeService() {
      // execution block
    }
  });
}, null, true, timeZone);
// moscaService.js
server.on('published', function(packet, client) {
  // the packet is read here
  MongoClient.connect(url, function(err, db) {
    if (err != null) {
      logger.error({message: 'Connection error: ' + err});
      process.exit(0);
    } else {
      executeService();
    }
    function executeService() {
      // execution block
    }
  });
});
What I need is a way to properly handle the err instead of just exiting the service, because if new messages are published while the service is restarting, they will be lost. Something like testing whether the port is open before connecting, or opening a different port.
I tried creating another MongoDB instance on a different port, so that each service could listen on its own, but it seems Mongo locks out a second instance pointed at the same database (only one mongod can hold the data directory at a time).
The code snippets here are just a small part; if anyone needs more parts to answer, just say so and I'll add them.
I made an alteration and it solved the issue. I changed the code so that integrationService connects to MongoDB before starting the CronJob; that way it connects only once and keeps the connection alive.
Here's the connection part of the code:
var MongoClient = require('mongodb').MongoClient;
var url = 'mongodb://127.0.0.1:27017/myGateway';

// integrationService.js
MongoClient.connect(url, function(err, db) {
  var job1 = new CronJob('*/1 * * * * *', function() {
    if (err != null) {
      logger.error({message: 'Connection error: ' + err});
      process.exit(0);
    } else {
      executeService();
    }
    function executeService() {
      // execution block
    }
  }, null, true, timeZone); // end CronJob
}); // end MongoClient.connect
Since this solved the problem, I've left the err handling as it was (although a more elegant way to treat it is still desirable).
Solving the problem on integrationService solved it on moscaService as well, but I plan to make the same alteration on the second service too.
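For that more elegant treatment, one option is to retry the initial connect with a delay instead of exiting. A minimal sketch reusing url and logger from above (connectWithRetry and startJobs are made-up names):

function connectWithRetry(retriesLeft) {
  MongoClient.connect(url, function(err, db) {
    if (err) {
      logger.error({message: 'Connection error: ' + err});
      if (retriesLeft > 0) {
        // wait 5 seconds and try again instead of killing the service
        setTimeout(function() { connectWithRetry(retriesLeft - 1); }, 5000);
      } else {
        process.exit(1);
      }
      return;
    }
    startJobs(db); // create the CronJob only once a connection exists
  });
}
connectWithRetry(5);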
This is quite new to me, so forgive me if this is a stupid/obvious question.
I have a WebSocket on my server.
I am sending around 40 JPEGs per second (approx. 12 KB each) as byte arrays.
On the client I display each image on a canvas control.
Each JPEG has a timestamp watermarked on it.
Every now and then, the timestamp starts to lag behind the current system time, sometimes by several minutes.
I would prefer to detect on the client when such a lag occurs and then somehow flush the WebSocket.
Is this possible?
Thanks
ADDITIONAL
It has been pointed out to me that simply disconnecting and reconnecting should clear the socket of phantom messages.
The following code shows how my client reconnects if it loses the connection. I had to put this in in case I did not receive a disconnect from my server:
function Reconnect() {
  try {
    ws = new WebSocket(url);
    ws.binaryType = 'arraybuffer';
    ws.onerror = function (e) {
      try {
        console.log('onerror');
      }
      catch (err) {
        console.log(err);
      }
    };
    ws.onclose = function () {
      try {
        Reconnect();
      }
      catch (err) {
        console.log(err);
      }
    };
    ws.onopen = function () {
      try {
        StartPing();
        console.log('onopen');
      }
      catch (err) {
        console.log(err);
      }
    };
    ws.onmessage = function (e) {
      try {
        // display image on canvas
      }
      catch (err) {
        console.log(err);
      }
    };
  }
  catch (err) {
    Reconnect();
  }
}
function StartPing() {
  try {
    // clear any previous timer and reset the id; without the reset the check
    // below would never pass again and pinging would stop after the first cycle
    window.clearInterval(window.timerID);
    window.timerID = null;
    if (!window.timerID) { /* avoid firing a new setInterval, after one has been done */
      window.timerID = setInterval(Ping, 20000);
    }
  }
  catch (err) {
    console.log(err + ' Reconnect');
  }
}
function Ping() {
  try {
    var now = new Date();
    var d = now.format("yyyymmddhhMMss"); // relies on a date-format helper library
    var currentTimeStamp = d + '000';
    if (parseInt(currentTimeStamp) - parseInt(lastUpdated) < 60) {
      console.log('Reconnect');
      Reconnect();
    }
    else {
      StartPing();
    }
  }
  catch (err) {
    console.log(err + ' Ping');
  }
}
You should make sure you close your old WebSocket before connecting a new one; otherwise you get multiple simultaneously open connections, which is not what you want (and will throw you off when trying to understand what's happening).
In your Reconnect function, do:
function Reconnect() {
  try {
    // ws is probably some global variable, so:
    if (ws instanceof WebSocket) {
      // detach the old onclose first, or close() would trigger Reconnect() again
      ws.onclose = null;
      ws.close();
    }
    ws = new WebSocket(url);
    ws.binaryType = 'arraybuffer';
    // Rest of code...
I'm building an app with Node and MongoDB Native. I'm working on a db module which I can require and call in other modules, so that I end up using just one connection. The module db.js started out with this code:
var MongoClient = require('mongodb').MongoClient; // omitted from the snippet, as is the config object
var _db = null;

var getDb = module.exports.getDb = function(callback) {
  if (_db) {
    console.log('_db returned');
    return callback(null, _db);
  }
  MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
    if (err) return callback(err);
    console.log('_db created');
    _db = db;
    callback(err, _db);
  });
};
In my other modules that need a db connection I do this:
db.getDb(function (err, connection) {
  // Do something with connection
});
It works fine. But an unpleasant problem is that if my code calls getDb multiple times in a very short time span, I end up with several copies of a connection; for example, if I require db.js and call getDb at the very beginning of every module that needs a db connection.
I'm now thinking about controlling the calls to getDb by queuing them, so that only the very first call creates a connection and saves it in _db, and all later calls get the already-created _db in return. I believe async.queue will help me with this...
The problem is that I don't understand how to write this with async.queue. The documentation is a little vague, and I can't find better examples online. Maybe you can give me some hints. This is what I've got so far...
var dbCalls = async.queue(function (task, callback) {
  if (_db) {
    console.log('_db returned');
    return callback(null, _db);
  }
  MongoClient.connect('mongodb://localhost:' + config.db.port + '/' + config.db.name, {native_parser: true}, function (err, db) {
    if (err) return callback(err);
    console.log('Connected to mongodb://localhost:' + config.db.port + '/' + config.db.name);
    _db = db;
    callback(null, _db);
  });
}, 1);
// I guess this .push() must be the exposed (exported) API for other modules
// to get a connection, but how do I return it to them?
dbCalls.push(null, function (err) {
  console.log('finished processing foo');
});
dbCalls.push(null, function (err) {
  console.log('finished processing bar');
});
I don't understand the object passed as the first argument to .push(). What should I use it for? Right now it's null. And how do I pass the connection (and a possible error) all the way out to the module that made the call?
A quick and dirty solution without async.queue:
var _db = null;
var _err = null;
var _queue = [];
var _pending = false;

var getDb = module.exports.getDb = function(callback) {
  if (_err || _db) {
    console.log('_db returned');
    return callback(_err, _db);
  } else if (_pending) { // already a connect() request pending
    _queue.push(callback);
  } else {
    _pending = true;
    _queue.push(callback);
    MongoClient.connect(..., function (err, db) {
      _err = err;
      _db = db;
      _queue.forEach(function(queuedCallback) {
        queuedCallback(err, db);
      });
    });
  }
};
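Used from another module it looks the same as before, and every concurrent caller ends up with the same connection (a sketch; the 'accounts' collection is just an example):

var db = require('./db');

db.getDb(function (err, connection) {
  if (err) return console.error(err);
  // every concurrent getDb() caller receives this same connection
  connection.collection('accounts').find().toArray(function (err, docs) {
    console.log(docs);
  });
});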
I appear to have a memory leak in my Node.js application. I built it quickly, and my JavaScript isn't too strong, so this might be easy.
I've done some heap dumps on it, and it's the String object (I think?) leaking memory, at a rate of about 1 MB every 5 minutes. I expanded String, and it's actually String.Array?
Heap stack: [screenshot not included]
The code:
#!/usr/local/bin/node
var port = 8081;
var io = require('socket.io').listen(port),
    sys = require('sys'),
    daemon = require('daemon'),
    mysql = require('mysql-libmysqlclient');

var updateq = "SELECT 1=1";
var countq = "SELECT 2=2";

io.set('log level', 2);

process.on('uncaughtException', function(err) {
  console.log(err);
});

var connections = 0;
var conn = mysql.createConnectionSync();
dbconnect();

io.sockets.on('connection', function(client){
  connections++;
  client.on('disconnect', function(){ connections--; })
});

process.on('exit', function () {
  console.log('Exiting');
  dbdisconnect();
});

function dbdisconnect() {
  conn.closeSync();
}

function dbconnect() {
  conn.connectSync('leet.hacker.org','user','password');
}

function update() {
  if (connections == 0)
    return;
  conn.query(updateq, function (err, res) {
    if (err) {
      dbdisconnect();
      dbconnect();
      return;
    }
    res.fetchAll(function (err, rows) {
      if (err) {
        throw err;
      }
      io.sockets.json.send(rows);
    });
  });
}

function totals() {
  if (connections == 0)
    return;
  conn.query(countq, function (err, res) {
    if (err) {
      // Chances are that the server has just disconnected, let's try reconnecting
      dbdisconnect();
      dbconnect();
      throw err;
    }
    res.fetchAll(function (err, rows) {
      if (err) {
        throw err;
      }
      io.sockets.json.send(rows);
    });
  });
}

setInterval(update, 250);
setInterval(totals, 1000);
setInterval(function() {
  console.log("Number of connections: " + connections);
}, 1800000);

daemon.daemonize('/var/log/epiclog.log', '/var/run/mything.pid', function (err, pid) {
  // We are now in the daemon process
  if (err) return sys.puts('Error starting daemon: ' + err);
  sys.puts('Daemon started successfully with pid: ' + pid);
});
Current version
function totals() {
  if (connections > 0)
  {
    var q = "SELECT query FROM table";
    db.query(q, function (err, results, fields) {
      if (err) {
        console.error(err);
        return false;
      }
      for (var row in results)
      {
        io.sockets.send("{ ID: '" + results[row].ID + "', event: '" + results[row].event + "', free: '" + results[row].free + "', total: '" + results[row].total + "', state: '" + results[row]$
        row = null;
      }
      results = null;
      fields = null;
      err = null;
      q = null;
    });
  }
}
It still leaks memory, but seemingly only under these conditions:
From startup, with no clients -> Fine
1st client connection -> Fine
2nd client (even with the 1st client disconnecting and reconnecting) -> Leaking memory
Stop all connections -> Fine
1 new connection (connections = 1) -> Leaking memory
Do yourself a favour and use node-mysql; it's a pure JavaScript MySQL client and it's fast. Other than that, you should be using asynchronous code so IO isn't blocked while you're working. The async library will help you here; it has waterfall callback passing, among other things.
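For a feel of the switch, here is a rough sketch of your update() function on node-mysql instead of mysql-libmysqlclient, reusing the connection details from your snippet (an outline, not a drop-in replacement):

var mysql = require('mysql');

// One connection, reused; node-mysql queues queries issued against it.
var db = mysql.createConnection({
  host: 'leet.hacker.org',
  user: 'user',
  password: 'password'
});
db.connect();

function update() {
  if (connections === 0) return;
  // no fetchAll step: the rows arrive directly in the callback
  db.query(updateq, function (err, rows) {
    if (err) return console.error(err);
    io.sockets.json.send(rows);
  });
}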
As for your memory leak, it probably isn't socket.io. Although I haven't used it in a few months, I have had many thousands of concurrent connections without leaking memory, and my code wasn't the best either.
Two things, however. First, your code is fairly unreadable; I suggest looking into properly formatting it (I use two spaces per indentation level, but some people use four). Second, printing the number of connections every half an hour seems a little silly, when you could do something like:
setInterval(function() {
  process.stdout.write('Current connections: ' + connections + ' \r');
}, 1000);
The \r will cause the cursor to return to the start of the line and overwrite the characters there, so the line is replaced in place rather than creating a huge amount of scrollback. This will help with debugging if you choose to add debugging details to your logging.
You can also use process.memoryUsage() for quickly checking the memory usage (or how much node thinks you're using).
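For example (a small sketch; the interval and the unit conversion are arbitrary):

setInterval(function () {
  var mem = process.memoryUsage();
  // heapUsed is the part of the V8 heap actually in use
  console.log('heapUsed: ' + Math.round(mem.heapUsed / 1048576) + ' MB');
}, 60000);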
Could this be related to the connected clients array not clearing properly when a client disconnects? The array value gets set to NULL rather than being dropped from the array.
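If that array is one you manage yourself (rather than socket.io's internal bookkeeping), splicing the entry out instead of nulling it lets the client object be garbage collected and stops the array from growing. A sketch, assuming a plain clients array:

// on disconnect:
var i = clients.indexOf(client);
if (i !== -1) {
  clients.splice(i, 1); // drop the entry entirely instead of clients[i] = null
}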