node.js cron job in web app - javascript

I use Express.js and the 'cron' module for automatic DB updates. I didn't know where to put my cron init code, so I added it to my 'bin/www', but after the server starts the job fires roughly every second (I need it every 2 minutes), and if I change the 2 to a 5 it never seems to start at all.
Here is my cron update js:
var catalogUpdater = require('../utils/catalogUpdater');
var descriptionDownloader = require('../utils/descriptionDownloader');
var CronJob = require('cron').CronJob;
var job = new CronJob('* */2 * * * *', function() {
        console.log('started');
    },
    function() { console.log('stop'); },
    true);
module.exports = job;
Here is my 'bin/www' code:
var app = require('../app');
var debug = require('debug')('shopnagby:server');
var http = require('http');
var config = require('../config');
var job = require('../cron/updateCron'); // include the updateCron job at server startup
/**
* Get port from environment and store in Express.
*/
var port = normalizePort(config.get("port"));
app.set('port', port);
/**
* Create HTTP server.
*/
var server = http.createServer(app);
/**
* Listen on provided port, on all network interfaces.
*/
server.listen(port);
server.on('error', onError);
server.on('listening', onListening);
.....
Where should I put my updateCron script?

Your cron syntax is incorrect. Standard cron syntax has only 5 fields:
field          allowed values
-----          --------------
minute         0-59
hour           0-23
day of month   1-31
month          1-12 (or names)
day of week    0-7 (0 or 7 is Sun, or use names)
To run it every two minutes:
var job = new CronJob('*/2 * * * *', function() {
        console.log('started');
    },
    function() { console.log('stop'); },
    true);
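Note that the 'cron' npm module also accepts a six-field pattern whose first field is seconds, which is why '* */2 * * * *' fired on every second of every even minute. If you prefer to keep that format, pinning the seconds field to 0 should behave the same as the five-field version above; a minimal sketch based on the question's file:
var CronJob = require('cron').CronJob;

// Six-field pattern: second, minute, hour, day of month, month, day of week.
// '0 */2 * * * *' fires at second 0 of every second minute.
var job = new CronJob('0 */2 * * * *', function() {
        console.log('started');
    },
    function() { console.log('stop'); },
    true);

module.exports = job;
As for placement, requiring the job from 'bin/www', as the question already does, appears to work; the spamming came from the pattern, not from where the script was loaded.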

Related

Why is express-rate-limit not working as expected?

The problem is that the rate limit is not enforced for the amount of time I specify. Instead of lasting 35 minutes, it lasts only about 20 seconds. Also, if I keep making requests, the limit stays enforced, so repeated requests seem to refresh the time window, which I think is also unexpected.
Apart from these issues, it works as expected, limiting the number of requests I specify in "max", as long as I make them quickly enough. I have tested locally, and on a Heroku server.
Here is the relevant code:
app.js
var express = require('express');
var dbRouter = require('./routes/db');
var limiter = require('express-rate-limit');
var app = express();
app.set('trust proxy', 1);
// This is a global limiter, not the one I'm having issues with.
// I've tried removing it, but the issue remained.
app.use(limiter({
    windowMs: 10000,
    max: 9
}));
app.use('/db', dbRouter);
module.exports = app;
db.js
var express = require('express');
var router = express.Router();
var level_controller = require('../controllers/levelController');
var limiter = require('express-rate-limit');
var level_upload_limiter = limiter({
    windowMS: 35 * 60 * 1000,
    max: 1,
    message: 'Too many level uploads. Please try again in about 30 minutes.'
});
router.post('/level/create', level_upload_limiter, level_controller.level_create_post);
module.exports = router;
levelController.js
exports.level_create_post = [
    (req, res, next) => {
        // ...
    }
];
It's a typo in your settings: windowMS should be windowMs.
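With just that one character changed, the limiter in db.js becomes:
var level_upload_limiter = limiter({
    windowMs: 35 * 60 * 1000, // lowercase "s": the misspelled windowMS key was silently ignored, so the library fell back to its default window
    max: 1,
    message: 'Too many level uploads. Please try again in about 30 minutes.'
});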

Trying to run multiple kue jobs at different times, but they're starting at the same time

I'm trying to run two different jobs on different schedules (see code example below). For some reason, the job that is supposed to run once a day at 11pm runs every time the other job runs. How do I get them to run when I expect them to? Thank you for your help.
const kue = require('kue-scheduler');
const Queue = kue.createQueue();
const jobOneSchedule = '0 0 23 * * *';
const jobTwoSchedule = '0 0 0/1 * * *';
let job1 = Queue.createJob('doJobOne')
    .priority('low')
    .unique('unique_one')
    .removeOnComplete('true');
let job2 = Queue.createJob('doJobTwo')
    .priority('high')
    .unique('unique_two')
    .removeOnComplete('true');
Queue.every(jobOneSchedule, job1);
Queue.every(jobTwoSchedule, job2);
Queue.process('doJobOne', function(job, done) {
    console.log('Job one done');
    done();
});
Queue.process('doJobTwo', function(job, done) {
    console.log('Job two through');
    done();
});
Workaround
As a workaround, I had to remove all of the job2 stuff and run it like this:
// Do job2 stuff every hour (3.6e6 ms)
const jobTwoInterval = 3.6e6;
setInterval(function() {
    console.log('Job two through');
}, jobTwoInterval);
Working fine at my end
const jobOneSchedule = '39 11 * * *'; //Runs at 11:39 pm
const jobTwoSchedule = '*/20 * * * * *'; //Run every twenty seconds
Could you try flushing Redis and trying again?
setInterval has the disadvantage that if the server restarts, your job will never be executed, because the interval will be cleared.
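If you'd rather do the suggested Redis flush from Node instead of redis-cli, a minimal sketch (this assumes the 'redis' client package and a Redis instance used only by the queue, since FLUSHALL wipes every key):
var redis = require('redis');

var client = redis.createClient();
// One-off cleanup: remove the stale kue/kue-scheduler keys before re-registering the schedules.
client.flushall(function(err, reply) {
    if (err) throw err;
    console.log('Redis flushed:', reply);
    client.quit();
});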

Multiple files requiring my module are overwriting its variables

I'm trying to create a mongo connection pool factory that checks whether a connection to mongo already exists and, if so, returns a connection. If it doesn't exist, it creates the connection pool and returns a connection.
I want to be able to require this from multiple files that are going to query mongo. Each file should require mongo like this:
var fooMongoFactory = require('../../lib/mongoFactory').init('mongodb://localhost:27017/foo');
and then you use it in your file like this:
fooMongoFactory.getConnection().then(function(db) {
    // do stuff
});
The problem I'm having is that I want to be able to specify multiple different mongo instances in a file, but when I do, my second init overrides the first one. Example:
var fooMongoFactory = require('../../lib/mongoFactory').init('mongodb://localhost:27017/foo');
var barMongoFactory = require('../../lib/mongoFactory').init('mongodb://localhost:27017/bar');
fooMongoFactory.getConnection().then(function(db) {
    // querying in here is actually pointing at the mongo db "bar"
});
How can I tweak my factory so that I can connect to multiple different instances of mongo, as well as use this same factory across multiple files without having to instantiate it every time? I thought of using a constructor, but that would create a new connection pool in every single file that uses the mongoFactory.
/**
 * Creates and manages the Mongo connection pool
 *
 * @type {exports}
 */
var Q = require('q');
var MongoClient = require('mongodb').MongoClient;
var dbPromise = null;
var db = null;
module.exports = function() {
    return {
        init: function init(connectionString) {
            db = connectionString;
            return module.exports;
        },
        /**
         * Gets a connection to Mongo from the pool. If the pool has not been instantiated,
         * it instantiates it and returns a connection. Else it just returns a connection from the pool
         *
         * @returns {*} - A promise object that will resolve to a mongo db object
         */
        getConnection: function getConnection() {
            // get a connection to mongo using the db string and return dbPromise
        }
    };
}();
The mongodb node module already has built-in connection pooling that is used automatically when you call connect(). The default max connection pool size is 5, but you can change this value in the connection URL (e.g. 'mongodb://localhost:27017/foo?maxPoolSize=15').
You'll also want to change the number of actual connections created by setting poolSize in the server config options to some value less than or equal to maxPoolSize. You may also want to set auto_reconnect to true.
Now what you could do is keep an object keyed on host:port that contains the database object for that server. If someone passes in a connection string whose host:port is already a key in your object, return the cached pool. Otherwise create, cache, and return the new database object.
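A minimal sketch of that idea, folding in the poolSize/auto_reconnect suggestion from above (the callback-style helper, the use of the full connection string as the cache key, and the exact option nesting are assumptions, based on the 2.x driver era this code appears to target):
var MongoClient = require('mongodb').MongoClient;

// Cache of database objects, keyed on the connection string (host:port plus db name).
var pools = {};

module.exports.getConnection = function getConnection(connectionString, callback) {
    if (pools[connectionString]) {
        // A pool for this server/database already exists, so reuse it.
        return callback(null, pools[connectionString]);
    }
    MongoClient.connect(connectionString, {
        server: { poolSize: 15, auto_reconnect: true }
    }, function(err, database) {
        if (err) return callback(err);
        pools[connectionString] = database; // cache the pool for later callers
        callback(null, database);
    });
};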
I ended up making the module so that you have to pass in the connection string for the mongodb that you want to connect to. This module ultimately keeps track of all the connections that have been made to mongo and whether there is a current connection to the mongodb passed in or not.
/**
 * Creates and manages the Mongo connection pool
 *
 * @type {exports}
 */
var Q = require('q');
var MongoClient = require('mongodb').MongoClient;
var _ = require('underscore');
var connections = [];
var dbPromise = null;
module.exports = function() {
    return {
        /**
         * Gets a connection to Mongo from the pool. If the pool has not been instantiated,
         * it instantiates it and returns a connection. Else it just returns a connection from the pool
         *
         * @returns {*} - A promise object that will resolve to a mongo db object
         */
        getConnection: function getConnection(connectionString) {
            var def = Q.defer();
            // If connectionString is null or undefined, return an error
            if (_.isEmpty(connectionString)) {
                def.reject('getConnection must contain a first parameter');
                return dbPromise = def.promise;
            }
            // Check whether connections already contains an object with this connectionString
            var pool = _.findWhere(connections, {connectionString: connectionString});
            // If no connection pool has been instantiated, instantiate it, else return a connection from the pool
            if (_.isUndefined(pool)) {
                // Initialize connection once
                MongoClient.connect(connectionString, function(err, database) {
                    if (err) {
                        return def.reject(err);
                    }
                    // Add the connection to the array
                    connections.push({connectionString: connectionString, db: database});
                    def.resolve(database);
                });
            } else {
                // The pool already exists, so resolve with the cached db
                def.resolve(pool.db);
            }
            return dbPromise = def.promise;
        }
    };
}();
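With this version each caller passes the connection string straight into getConnection, so usage from any file looks like this (using the database names from the question):
var mongoFactory = require('../../lib/mongoFactory');

// Both calls can live in the same file; each connection string gets its own cached pool.
mongoFactory.getConnection('mongodb://localhost:27017/foo').then(function(fooDb) {
    // query the "foo" database here
});

mongoFactory.getConnection('mongodb://localhost:27017/bar').then(function(barDb) {
    // query the "bar" database here
});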
https://github.com/toymachiner62/mongo-factory

Set HTTP timeout in Node.js

I'm working on an app that uses Node.js as the backend. Currently, I have a web server setup like this:
var express = require('express');
var http = require('http');
var app = module.exports.app = express();
http.createServer(app).listen(appConfig.port, function () {
    var logger = app.get('logger');
    logger.info('**** STARTING SERVER ****');
});
My app is working just fine, except I have now added a request that takes ~5 minutes. From my understanding, Node defaults to a 2-minute timeout window. I can read the documentation here. However, I don't understand how to set the timeout to a greater value.
Can someone show me how to increase the timeout to 10 minutes?
This should set it. You need a reference to the server object, then set the timeout:
var express = require('express');
var http = require('http');
var app = module.exports.app = express();
var server = http.createServer(app);
server.setTimeout(10*60*1000); // 10 * 60 seconds * 1000 msecs
server.listen(appConfig.port, function () {
    var logger = app.get('logger');
    logger.info('**** STARTING SERVER ****');
});
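If only the one slow request needs the larger window, an alternative not covered by the answer above is to raise the timeout per request from an Express middleware (the route path here is just a placeholder) and leave the server-wide default alone:
// Give just the slow route a 10-minute timeout; everything else keeps the default.
app.use('/long-running-report', function (req, res, next) {
    req.setTimeout(10 * 60 * 1000);
    next();
});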

How to stop Node.js from printing data to console?

I have this code in Node:
var io = require('socket.io'), connect = require('connect');
var pg = require('pg');
var connectionString = "Some connection string";
var app = connect().use(connect.static('public')).listen(3000);
var chat_room = io.listen(app);
var DataArrayRows=[];
setInterval(function() {
    pg.connect(connectionString, function(err, client, done) {
        client.query('select * from jctsl_get_all_notifications_v1()', function(err, result) {
            DataArrayRows = result.rows;
            done();
        });
    });
}, 10000);
chat_room.sockets.on('connection', function(socket) {
    chat_room.sockets.emit('entrance', {message: DataArrayRows});
    setInterval(function() {
        chat_room.sockets.emit('entrance', {message: DataArrayRows});
    }, 10000);
});
I haven't written any console.log() statement in this code, yet on every request to the database the returned rows are displayed in the console window.
How can I stop Node from doing that?
Set the log level lower
var chat_room = io.listen(app);
chat_room.set('log level', 0);
The log level defaults to 3
The amount of detail that the server should output to the logger.
0 - error
1 - warn
2 - info
3 - debug
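Level 0 silences everything, including errors. If you'd rather keep errors and warnings and only drop the verbose output that appears to be echoing your emitted rows, level 1 should be enough:
var chat_room = io.listen(app);
chat_room.set('log level', 1); // keep errors and warnings, drop info/debug output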
