I create a job:
var kue = require('kue');
var queue = kue.createQueue();
// name of the queue is myQueue
var job = queue.create('myQueue', {
  from: 'process1',
  type: 'testMessage',
  data: {
    msg: 'Hello world!'
  }
}).save(function(err) {
  if (err) {
    console.log('Unable to save ' + err);
  } else {
    console.log('Job ' + job.id + ' saved to the queue.');
  }
});
Is there a way I can update the job status (e.g. active, failed, in progress) myself? For example:
The consumer picks up the job:
queue.process('myQueue', function(job, done) {
  console.log('IN HERE', job.state); // returns function
});
This is the function that is returned from the above:
function ( state, fn ) {
  if( 0 == arguments.length ) return this._state;
  var client = this.client
    , fn = fn || noop;
  var oldState = this._state;
  var multi = client.multi();
I want to hardcode a job state, e.g. job.state = 'failed', and be able to update the job status whenever I want.
Is this possible in Kue?
Quick answer: yes, you can use job.failed() or pass an error back to done.
queue.process('myQueue', function(job, done) {
  console.log('IN HERE', job.state); // returns function
  job.failed();
  done(new Error('bad'));
});
However, it sounds like you want to handle the processing yourself. You can set up your own function like this:
queue.on('job enqueue', function(id, type) {
  console.log('Job %s got queued of type %s', id, type);
  kue.Job.get(id, function(err, job) {
    if (err) return;
    // do your custom processing here
    if (somethingWasProcessed) { // replace with your own success check
      job.complete();
    } else {
      job.failed();
    }
  });
});
Here are a few more options you can also use:
job.inactive();
job.active();
job.complete();
job.delayed();
There are some examples on this page:
https://github.com/Automattic/kue
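If you want to force a state by hand outside of the processing callback, Kue's README also shows fetching a job and switching its state explicitly with job.state(...).save() (it uses this to re-queue failed jobs). A rough sketch, where someJobId is assumed to be an id you already have:
kue.Job.get(someJobId, function(err, job) {
  if (err) return console.log(err);
  // force the state and persist it; 'inactive', 'complete', etc. also work
  job.state('failed').save(function(err) {
    if (err) console.log('Unable to update job state: ' + err);
  });
});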
I am running into an issue when I try to load the initial data for my blacklist from a Redis DB in my middleware code. Since the DB request takes some time to complete, the middleware starts to fail.
Below is my code, which gets fired when the app starts via app.use(blacklist.blockRequests()).
When I try to make the function async, I get the error new TypeError('app.use() requires a middleware function').
One of the side effects is also that my array is empty when it's accessed again.
blockRequests: function() {
  this.read();
  this.logEvent('info', 'There are ' + this.blacklist.length + ' address(es) on the blacklist');
  var self = this;
  var interceptor = function(request, response, next) {
    var ip = request.headers['x-forwarded-for'] || request.connection.remoteAddress;
    if (self.isInBlacklist(ip)) {
      self.logEvent('warn', 'Rejecting request from ' + ip + ', path and query was ' + request.originalUrl);
      response.status(403).send();
    } else {
      next();
    }
  };
  return interceptor;
},
And here is my read() function code:
read: function() {
  try {
    // get all records with prefix block:: from redis
    redis.redis.keys('block::*', function (err, reply) {
      // reply is null when the key is missing
      if (err) {}
      else {
        this.blacklist = [];
        for (let i = 0; i < reply.length; i++) {
          let ipInt = reply[i].substring(7);
          let ipStr = ipToInt(ipInt).toIP();
          this.blacklist.push(ipStr);
        }
      }
    });
  } catch (error) {
    if (error) {
      this.blacklist = [];
    }
  }
}
If you're trying to make blockRequests() async, then it will start returning a promise and you can't use its return value directly in app.use(), because then you'd be doing app.use(somePromise) and Express will balk; it requires a function reference, not a promise.
Instead, you will have to use .then() or await to get the resolved value, which is the function you can then pass to app.use().
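For illustration only, a sketch of what that wiring would look like if blockRequests() really were async (the port number is just a placeholder):
blacklist.blockRequests().then(function(interceptor) {
  // register the middleware only once the promise has resolved
  app.use(interceptor);
  app.listen(3000);
});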
If you show the larger calling context here (like where you're calling blockRequests() from), then we could offer more ideas on a fuller solution.
Here's a conceptual idea for how you could do this:
blockRequests: function() {
  const self = this;
  const interceptor = function(request, response, next) {
    const ip = request.headers['x-forwarded-for'] || request.connection.remoteAddress;
    if (self.isInBlacklist(ip)) {
      self.logEvent('warn', 'Rejecting request from ' + ip + ', path and query was ' + request.originalUrl);
      response.status(403).send();
    } else {
      next();
    }
  };
  return interceptor;
},
read: function() {
  // get all records with prefix block:: from redis
  return new Promise((resolve, reject) => {
    redis.redis.keys('block::*', (err, reply) => {
      if (err) {
        this.blacklist = [];
        return reject(err);
      }
      this.blacklist = [];
      for (let i = 0; i < reply.length; i++) {
        let ipInt = reply[i].substring(7);
        let ipStr = ipToInt(ipInt).toIP();
        this.blacklist.push(ipStr);
      }
      this.logEvent('info', 'There are ' + this.blacklist.length + ' address(es) on the blacklist');
      resolve();
    });
  });
}
// register middleware for using blacklist
app.use(blacklist.blockRequests());
// now read the blacklist and when that is in place, then start the server
blacklist.read().then(() => {
  // now we know that blacklist.blacklist is up-to-date
  // start your server here
}).catch(err => {
  console.log("Unable to start server - error in reading blacklist");
  process.exit(1);
});
I'm new to learning Node.js, so I'm still getting used to asynchronous programming and callbacks. I'm trying to insert a record into a MS SQL Server database and return the new row's ID to my view.
The mssql query is working correctly when printed to console.log. My problem is not knowing how to properly return the data.
Here is my mssql query - in addJob.js:
var config = require('../../db/config');

async function addJob(title) {
  var sql = require('mssql');
  const pool = new sql.ConnectionPool(config);
  var conn = pool;
  let sqlResult = '';
  let jobID = '';
  conn.connect().then(function () {
    var req = new sql.Request(conn);
    req.query(`INSERT INTO Jobs (Title, ActiveJD) VALUES ('${title}', 0) ; SELECT @@IDENTITY AS JobID`).then(function (result) {
      jobID = result['recordset'][0]['JobID'];
      conn.close();
      // This prints the correct value
      console.log('jobID: ' + jobID);
    }).catch(function (err) {
      console.log('Unable to add job: ' + err);
      conn.close();
    });
  }).catch(function (err) {
    console.log('Unable to connect to SQL: ' + err);
  });
  // This prints a blank
  console.log('jobID second test: ' + jobID);
  return jobID;
}

module.exports = addJob;
This is my front end where a modal box is taking in a string and passing it to the above query. I want it to then receive the query's returned value and redirect to another page.
// ADD NEW JOB
$("#navButton_new").on(ace.click_event, function() {
  bootbox.prompt("New Job Title", function(result) {
    if (result != null) {
      var job = {};
      job.title = result;
      $.ajax({
        type: 'POST',
        data: JSON.stringify(job),
        contentType: 'application/json',
        url: 'jds/addJob',
        success: function(data) {
          // this just prints that data is an object. Is that because I'm returning a promise? How would I unpack that here?
          console.log('in success:' + data);
          // I want to use the returned value here for a page redirect
          //window.location.href = "jds/edit/?jobID=" + data;
          return false;
        },
        error: function(err) {
          console.log('Unable to add job: ' + err);
        }
      });
    } else {
    }
  });
});
And finally here is the express router code calling the function:
const express = require('express');
//....
const app = express();
//....

app.post('/jds/addJob', function(req, res) {
  let dataJSON = JSON.stringify(req.body);
  let parsedData = JSON.parse(dataJSON);
  const addJob = require("../models/jds/addJob");
  let statusResult = addJob(parsedData.title);
  statusResult.then(result => {
    res.send(req.body);
  });
});
I've been reading up on promises and trying to figure out what needs to change here, but I'm having no luck. Can anyone provide any tips?
You need to actually return a value from your function for things to work. Because you have nested promises, you need a couple of returns here. One of the core features of promises is that if you return a promise from a .then() callback, it participates in the calling promise chain.
So change the following lines
jobID = result['recordset'][0]['JobID'];
to
return result['recordset'][0]['JobID']
and
req.query(`INSERT INTO Jobs (Title, ActiveJD) VALUES ('${title}', 0) ; SELECT @@IDENTITY AS JobID`).then(function (result) {
to
return req.query(`INSERT INTO Jobs (Title, ActiveJD) VALUES ('${title}', 0) ; SELECT @@IDENTITY AS JobID`).then(function (result) {
and
conn.connect().then(function () {
to
return conn.connect().then(function () {
You may need to move around code that currently comes after the return. You would also be well served by moving conn.close() into a single .finally at the end of the connect chain.
I recommend writing a test that you can use to play around with things until you get it right.
const jobId = await addJob(...)
console.log(jobId)
Alternatively, rewrite the code to use await instead of .then() calls.
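For example, a rough async/await sketch of addJob (untested, still using the question's query as-is, and closing the connection in a finally block as suggested above):
async function addJob(title) {
  var sql = require('mssql');
  const conn = new sql.ConnectionPool(config);
  try {
    await conn.connect();
    const req = new sql.Request(conn);
    const result = await req.query(`INSERT INTO Jobs (Title, ActiveJD) VALUES ('${title}', 0) ; SELECT @@IDENTITY AS JobID`);
    return result['recordset'][0]['JobID'];
  } finally {
    // close the connection whether the insert succeeded or failed
    conn.close();
  }
}
The existing route handler can keep calling addJob(parsedData.title).then(...) and send the resolved id back, e.g. res.send({ jobID: result }), which the AJAX success callback can then read.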
I am trying to build a result_arr of location objects to send as a response, but I am not sure how to send the response only when the entire array has been built. The response contains an empty array, but result_arr array is filled after the response has already been sent.
function handle_getLocations(req, res, done) {
  var con_id = req.body["contractor_id"];
  console.log("Contractor ID :" + con_id.toString());
  var result_arr = new Array();
  employee.getActiveByContractor(con_id, function(err, employees) {
    if (err) {
      console.log("Logging error in json:\n");
      res.json({"code": 100, "status": "Error in connection database"});
      return;
    }
    if (employees.length === 0) done(null);
    for (var i = 0; i < employees.length; i++) {
      assignment.getLocationsByEmployeeID(employees[i].employee_id, function(err, locations) {
        if (err) {
          console.log("Logging error in json:\n");
          res.json({"code": 100, "status": "Error in connection database"});
          return;
        }
        console.log("Number of locations: " + locations.length.toString());
        for (var j = 0; j < locations.length; j++) {
          console.log("Assignment is: " + locations[j].assignment_id.toString());
          location.getAllByID(locations[j].location_id, function(err, loc) {
            if (err) {
              console.log("Logging error in json:\n");
              res.json({"code": 100, "status": "Error in connection database"});
              return;
            }
            var loc_obj = {};
            loc_obj.display_name = loc[0].display_name;
            loc_obj.location_id = loc[0].location_id;
            console.log("Location is: " + loc_obj.display_name);
            console.log("Location ID is: " + loc_obj.location_id.toString());
            result_arr.push(loc_obj);
            console.log(result_arr);
            done(result_arr);
          });
        }
      });
    }
  });
}
I know that in nodejs the idea is to not make blocking calls, but I am not sure how to make sure all of the information is sent in the response.
You are calling many asynchronous functions in the loop and don't have any logic to check when they have all completed so that you can send the response back to the client.
I modified your code a bit to add that logic in plain (vanilla) JavaScript; it's messy, but it's working code.
Anyway, I would suggest using promise-based/asynchronous modules such as async or bluebird to handle this more cleanly. They improve readability and maintainability and help you avoid callback hell and its other downsides.
async http://caolan.github.io/async/
bluebird https://github.com/petkaantonov/bluebird
You can read more about this at the link below:
https://strongloop.com/strongblog/node-js-callback-hell-promises-generators/
function handle_getLocations(req, res, done) {
  var con_id = req.body["contractor_id"];
  console.log("Contractor ID :" + con_id.toString());
  var result_arr = new Array();
  employee.getActiveByContractor(con_id, function(err, employees) {
    if (err) {
      console.log("Logging error in json:\n");
      res.json({"code": 100, "status": "Error in connection database"});
      return;
    }
    if (employees.length === 0) done(null);
    var employeesChecked = 0;
    var errors = [];
    function sendResponse() {
      if (employeesChecked === employees.length) {
        res.json(result_arr);
        //done(result_arr); // If required, uncomment this line and comment the above line
      }
    }
    for (var i = 0; i < employees.length; i++) {
      assignment.getLocationsByEmployeeID(employees[i].employee_id, function(err, locations) {
        var locationsChecked = 0;
        if (err) {
          console.log(err);
          errors.push(err);
          ++employeesChecked;
          sendResponse();
        } else {
          console.log("Number of locations: " + locations.length.toString());
          for (var j = 0; j < locations.length; j++) {
            console.log("Assignment is: " + locations[j].assignment_id.toString());
            location.getAllByID(locations[j].location_id, function(err, loc) {
              ++locationsChecked;
              if (err) {
                console.log(err);
                errors.push(err);
              } else {
                var loc_obj = {};
                loc_obj.display_name = loc[0].display_name;
                loc_obj.location_id = loc[0].location_id;
                console.log("Location is: " + loc_obj.display_name);
                console.log("Location ID is: " + loc_obj.location_id.toString());
                result_arr.push(loc_obj);
                console.log(result_arr);
              }
              if (locationsChecked === locations.length) {
                ++employeesChecked;
              }
              sendResponse();
            });
          }
        }
      });
    }
  });
}
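For reference, a rough sketch of the same flow using the async module mentioned above (untested; it assumes the same callback-style employee, assignment and location helpers from the question):
var async = require('async');

function handle_getLocations(req, res) {
  var con_id = req.body["contractor_id"];
  employee.getActiveByContractor(con_id, function(err, employees) {
    if (err) return res.json({"code": 100, "status": "Error in connection database"});
    // fetch every employee's locations; async.concat flattens them into one array
    async.concat(employees, function(emp, cb) {
      assignment.getLocationsByEmployeeID(emp.employee_id, cb);
    }, function(err, locations) {
      if (err) return res.json({"code": 100, "status": "Error in connection database"});
      // look up each location and keep only the fields the response needs
      async.map(locations, function(l, cb) {
        location.getAllByID(l.location_id, function(err, loc) {
          if (err) return cb(err);
          cb(null, { display_name: loc[0].display_name, location_id: loc[0].location_id });
        });
      }, function(err, result_arr) {
        if (err) return res.json({"code": 100, "status": "Error in connection database"});
        res.json(result_arr);
      });
    });
  });
}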
To avoid consuming too much time during the request/response lifetime, you would normally keep each piece of logic in its own endpoint, but sometimes, as in your case, you need to hit the database more than once to fetch data that depends on other data. Assuming that employee.getActiveByContractor returns a promise, it's an async method, so you need to chain it with .then like this:
employee.getActiveByContractor(con_id)
  .then(function(employees) {
    // ... work with employees here
  });
Also, you may need to read up on Promise.
As Basim says, this is a good time to use Promises.
getLocationsByEmployeeID and getAllByID are async so they won't be done by the time the loop is finished and you send your response.
Promises are built into the latest Node.js version.
Learn here: https://www.udacity.com/course/javascript-promises--ud898
Suggestion:
Create promise wrappers for getLocationsByEmployeeID and getAllByID
Use Promise.all to make sure every getLocationsByEmployeeID and getAllByID are complete
Return your HTTP response within Promise.all's "success" callback
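A minimal sketch of those three steps (assuming the same helper names as in the question, and sending the response directly instead of calling done):
function getLocationsByEmployeeIDAsync(id) {
  return new Promise(function(resolve, reject) {
    assignment.getLocationsByEmployeeID(id, function(err, locations) {
      err ? reject(err) : resolve(locations);
    });
  });
}

function getAllByIDAsync(id) {
  return new Promise(function(resolve, reject) {
    location.getAllByID(id, function(err, loc) {
      err ? reject(err) : resolve(loc);
    });
  });
}

employee.getActiveByContractor(con_id, function(err, employees) {
  if (err) return res.json({"code": 100, "status": "Error in connection database"});
  Promise.all(employees.map(e => getLocationsByEmployeeIDAsync(e.employee_id)))
    .then(perEmployee => {
      var locations = [].concat.apply([], perEmployee); // flatten
      return Promise.all(locations.map(l => getAllByIDAsync(l.location_id)));
    })
    .then(locs => {
      var result_arr = locs.map(loc => ({
        display_name: loc[0].display_name,
        location_id: loc[0].location_id
      }));
      res.json(result_arr);
    })
    .catch(() => res.json({"code": 100, "status": "Error in connection database"}));
});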
I'm trying to write a Node program that populates my MySQL database with data from files I have on disk. I may or may not be going about this the right way, but it's working. What I'm having trouble with is understanding how I should be handling allowing asynchronous functions to finish before the connection to the DB is ended. Ultimately, I'll be reading lots of data files, and insert them into the database like I did below. I could just use readFileSync instead of the asynchronous version, but I need to get a better handle on asynchronous functions.
When I insert the wine categories below, it works fine since it's not using an asynchronous function. However, when I use readFile to get data from a file, I get an error that the connection ended before any of the queries were executed:
connection.connect(function(err) {
  if (err) {
    console.log(err);
  }
});

// Take a table and the values, and insert a new row into a table
function insert_into(table, values) {
  if (values instanceof Array) {
    values = values.map(function(value) {
      return '"' + value + '"';
    }).join(', ');
  } else {
    values = '"' + values + '"';
  }
  var statement = 'INSERT INTO ' + table + ' VALUES (NULL, ' + values + ')';
  connection.query(statement, function(err, rows, fields) {
    if (err) throw err;
    console.log(values + " successfully added.");
  });
}

// Populate the wine_categories table
var wine_categories = [
  'red', 'white', 'rose', 'sparkling', 'fortified'
];

// Works fine when used alone
wine_categories.forEach(function(element) {
  insert_into('wine_categories', element);
});

// Populate the countries table
// connection.end() runs before this finishes its job
fs.readFile(countries, 'utf8', function(err, data) {
  if (err) {
    throw err;
  } else {
    var codes = Array.prototype.map.call(
      data.split('\n'), function(country) {
        return country.split('\t');
      });
    codes.forEach(function(country) {
      if (country[1].length > 25) {
        country[1] = country[1].substring(0, 25);
      }
      insert_into('countries', country);
    });
  }
});

connection.end();
Obviously, connection.end() needs to happen after all of the inserts have completed, but I'm not sure how to handle that. I don't want it to be a callback for the readFile call because I'll ultimately have many of similar calls in this file.
How should I structure my code so that all of the queries execute and connection.end() runs when they're all finished? The answer is probably obvious to an asynchronous wiz...
Using promises, it would look like this:
pool.getConnectionAsync().then(function(connection) {
  // Populate the wine_categories table
  var wine_categories = [
    'red', 'white', 'rose', 'sparkling', 'fortified'
  ];
  var wineQueries = wine_categories.map(function(wine) {
    return insert_into(connection, "wine_categories", wine);
  });

  var countryQueries = fs.readFileAsync(countries, "utf-8").then(function(data) {
    // wait for every per-country insert before this promise resolves
    return Promise.all(data.split("\n").map(function(country) {
      country = country.split("\t")[1];
      if (country.length > 25) {
        country = country.substring(0, 25);
      }
      return insert_into(connection, "countries", country);
    }));
  });

  Promise.all(wineQueries.concat(countryQueries))
    .then(function() {
      console.log("all done");
    })
    .catch(function(e) {
      console.log("error", e);
    })
    .finally(function() {
      connection.release();
    });
});
Pre-requisite code for the above
var Promise = require("bluebird");
var fs = Promise.promisifyAll(require("fs"));
Promise.promisifyAll(require("mysql/lib/Connection").prototype);
var pool = Promise.promisifyAll(require("mysql").createPool({
  "user": "...",
  "password": "...",
  "database": "...",
  "host": "localhost",
  "port": 3306,
  "debug": false
}));
function insert_into(connection, table, values) {
  if (values instanceof Array) {
    values = values.map(connection.escape, connection).join(', ');
  } else {
    values = connection.escape(values);
  }
  return connection
    .queryAsync('INSERT INTO ' + table + ' VALUES (NULL, ' + values + ')')
    .then(function() {
      console.log(values + " successfully added.");
    });
}
Assuming that insert_into is also asynchronous, you may want to use something like async.each to handle inserting your records. It has a convenient callback that will be called when all records are inserted, because only at that point do you want to close the connection:
async.each(codes, function(country, callback) {
  if (country[1].length > 25) {
    country[1] = country[1].substring(0, 25);
  }
  insert_into('countries', country, callback); // !! read below
}, function(err) {
  // TODO: handle any errors
  ...
  // Here, all countries are inserted.
  connection.end();
});
However, this means that insert_into should also be made to accept a callback (using the common Node convention function(err, result)) that will be called when the record has been inserted. In the code above, I'm using the callback provided by async directly, meaning that once your insert_into is done, it will call the async callback signaling that this iteration of each is done.
EDIT: you can rewrite insert_into so it looks like this:
function insert_into(table, values, callback) {
  ...
  connection.query(..., function(err) {
    callback(err);
  });
}
Since you don't need the actual result from connection.query, you only have to pass err (instead of throwing it).
Tip: assuming that you're using node-mysql, you may want to take a look at the docs on how it can help you with escaping.
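For example (a sketch based on node-mysql's placeholder support, where ?? escapes identifiers such as table names and ? escapes values, expanding arrays into a comma-separated list):
function insert_into(table, values, callback) {
  var params = values instanceof Array ? values : [values];
  // node-mysql escapes both the table name and the values here
  connection.query('INSERT INTO ?? VALUES (NULL, ?)', [table, params], function(err) {
    callback(err);
  });
}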
I have a function in my express app that makes multiple queries within a For Loop and I need to design a callback that responds with JSON when the loop is finished. But, I'm not sure how to do this in Node yet. Here is what I have so far, but it's not yet working...
exports.contacts_create = function(req, res) {
  var contacts = req.body;
  (function(res, contacts) {
    for (var property in contacts) { // for each contact, save to db
      if (!isNaN(property)) {
        contact = contacts[property];
        var newContact = new Contact(contact);
        newContact.user = req.user.id;
        newContact.save(function(err) {
          if (err) { console.log(err); }
        }); // .save
      } // if !isNaN
    } // for
    self.response();
  })(); // function
}; // contacts_create

exports.response = function(req, res, success) {
  res.json('finished');
};
There are a few problems with your code besides just the callback structure.
var contacts = req.body;
(function(res, contacts) {
...
})(); // function
^ you are redefining contacts and res in the parameter list, but not passing in any arguments, so inside your function res and contacts will be undefined.
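If you keep the IIFE, the outer values have to be passed in explicitly, e.g.:
(function(res, contacts) {
  // ...
})(res, contacts); // pass the outer res and contacts in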
Also, not sure where your self variable is coming from, but maybe you defined that elsewhere.
As to the callback structure, you're looking for something like this (assuming contacts is an Array):
exports.contacts_create = function(req, res) {
  var contacts = req.body;
  var iterator = function(i) {
    if (i >= contacts.length) {
      res.json('finished'); // or call self.response() or whatever
      return;
    }
    contact = contacts[i];
    var newContact = new Contact(contact);
    newContact.user = req.user.id;
    newContact.save(function(err) {
      if (err)
        console.log(err); // if this is really a failure, you should call response here and return
      iterator(i + 1); // re-call this function with the next index
    });
  };
  iterator(0); // start the async "for" loop
};
However, you may want to consider performing your database saves in parallel. Something like this:
var savesPending = contacts.length;
var saveCallback = function(i, err) {
  if (err)
    console.log('Saving contact ' + i + ' failed.');
  if (--savesPending === 0)
    res.json('finished');
};

for (var i in contacts) {
  ...
  newContact.save(saveCallback.bind(null, i));
}
This way you don't have to wait for each save to complete before starting the next round-trip to the database.
If you're unfamiliar with why I used saveCallback.bind(null, i), it's basically so the callback can know which contact failed in the event of an error. See Function.prototype.bind if you need a reference.
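As a side note, with ES6 block scoping you can skip bind entirely; a sketch of the same parallel approach:
let savesPending = contacts.length;
for (const i in contacts) {
  const newContact = new Contact(contacts[i]);
  newContact.user = req.user.id;
  // const gives each iteration its own i, so the closure sees the right contact
  newContact.save(err => {
    if (err)
      console.log('Saving contact ' + i + ' failed.');
    if (--savesPending === 0)
      res.json('finished');
  });
}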