I am writing this code as a project for a customer,
and when I go to a show route I get this 500 Internal Server Error:
// Express-style show route (question code).
// BUG (the asker's 500): Videos.find / Papers.find / Material.findById are all
// asynchronous and started concurrently; res.render below can run before vid
// and pap have been assigned, so the template receives undefined values.
// A reload appearing to "fix" it just means the earlier queries happened to
// finish first that time -- it is a race, not a transient server fault.
http.get('/files/:id', function(req, res) {
var vid; // filled in by the Videos.find callback -- may still be undefined at render time
var pap; // filled in by the Papers.find callback -- same hazard
Videos.find({}, function(err, videos) {
if (err) {
// NOTE(review): error is only logged; no response is sent on this path
console.log(err);
} else {
vid = videos;
}
});
Papers.find({}, function(err, file) {
if (err) {
console.log(err);
} else {
pap = file;
}
});
// The render fires as soon as *this* query returns, regardless of whether
// the two queries above have completed -- that is the race.
Material.findById(req.params.id, function(err, found) {
if (err) {
console.log(err);
} else {
res.render('files', {
file: pap, // may be undefined (race)
video: vid, // may be undefined (race)
current: found
});
}
});
});
This is my show route code.
Note: if I reload the page, the error is gone and the page opens.
The reason is you need to wait for all the database queries to finish before rendering. In your code, it is possible for the page to render before the other two queries have completed and returned their data. The good news is that Mongoose supports Promises for asynchronous functions.
// Fixed show route: run the three independent Mongoose queries in parallel
// and render only once all of them have resolved.
http.get('/files/:id', function(req, res) {
Promise.all([
Videos.find({}).exec(), // .exec() turns a Mongoose query into a real Promise
Papers.find({}).exec(),
Material.findById(req.params.id).exec()
]).then(([video, paper, material]) => {
res.render('files', {
file: paper,
video: video,
current: material
});
}).catch(error => {
// BUG FIX: the original only logged here, sending no response and leaving
// the client hanging until it timed out. Log and answer with a 500.
console.log(error);
res.status(500).send('Error in serving request.');
});
});
The functions you're using with Mongoose are asynchronous in nature; the variables vid and pap are not initialized when you run res.render. When you attempt to use those variables in your frontend (a template engine like Jade, Handlebars, or EJS — I don't know which you're using), they are undefined, and subsequently cause the 500 error. You'll need to run the functions such that the results of all Mongoose queries are available to res.render when it runs; either by using an async Node.js library, or by nesting the calls within one another and calling res.render at the end.
Solution 1: Using async Node module
var async = require('async');
// Run the three Mongoose queries in parallel; the final callback fires once
// every task has completed, or immediately after the first error.
async.parallel([
// Each function in this array executes in parallel; each must invoke cb
// exactly once with (err, result).
function (cb) {
Videos.find({}, function(err, videos) {
// Error-first passthrough replaces the original's redundant if/else:
// cb(err, videos) covers both the error and the success case.
return cb(err, videos);
});
},
function (cb) {
Papers.find({}, function(err, papers) {
return cb(err, papers);
});
},
function (cb) {
Material.findById(req.params.id, function(err, found) {
return cb(err, found);
});
}
], function (err, results) {
if (err) {
// BUG FIX: the original only logged the error here, never answering the
// request. Respond with a 500 so the client is not left hanging.
console.log(err);
res.status(500).send('Error in serving request.');
}
else {
// Even though the tasks complete asynchronously, results[] preserves the
// order in which the tasks were declared above.
var videos = results[0];
var files = results[1];
var found = results[2];
res.render('files', {
file: files,
video: videos,
current: found
});
}
});
Solution 2: Nested Callbacks
// Solution 2: nested callbacks -- each query starts only after the previous
// one has succeeded, so all data is in scope when res.render finally runs.
Videos.find({}, function(err, videos) {
if (err) {
// BUG FIX: the original assigned `var vid = videos;` BEFORE checking err
// (so vid could be undefined) and never answered the client on failure.
console.log(err);
res.status(500).send('Error in serving request.');
} else {
var vid = videos;
Papers.find({}, function(err, file) {
if (err) {
console.log(err);
res.status(500).send('Error in serving request.');
} else {
var pap = file;
Material.findById(req.params.id, function(err, found) {
if (err) {
console.log(err);
res.status(500).send('Error in serving request.');
} else {
res.render('files', {
file: pap,
video: vid,
current: found
});
}
});
}
});
}
});
Related
I have the following code which I'm using to learn how to transition from callbacks, through to async, then moving onto promises and finally await.
For the first time, I'm really struggling to understand why I get nothing at all returned to the console.
I have several logging events in place, but these never trigger inside the code, and none of the errors are thrown / exceptions raised.
I have put in additional logging outside the functions to demonstrate that the file is running when invoked (e.g. nodemon app.js from the terminal). However, the terminal hangs on 'starting'.
What am I doing wrong?
In addition to the code here, I have tried extensively wrapping different parts in try / catch blocks, but nothing is ever returned.
index.js:
const mysql = require('mysql');
const async = require('async');
const dbConfig = require('./db'); // NOTE(review): db.js exports a live *connection*, not a config object -- confirm which is intended
const employees = require('./employees');

// Run the two steps in order: establish a connection, then fetch and print
// one employee. The final callback reports any error from either step.
async.series(
[
function(callback) {
// BUG FIX: mysql.createConnection(config) returns the connection
// synchronously and does NOT accept a completion callback; connecting
// is done via connection.connect(cb). The original passed a callback
// that was never invoked, so async.series never advanced past step 1 --
// which is exactly the observed "hangs on 'starting'" behaviour.
const connection = mysql.createConnection(dbConfig);
connection.connect(function(err) {
callback(err);
});
},
function(callback) {
employees.getEmployee(101, function(err, emp) {
if (err) {
callback(err);
return;
}
console.log(emp);
// BUG FIX: the success path never signalled completion, so the
// series' final callback could never run.
callback(null, emp);
});
}
],
function(err) {
if (err) {
console.log(err);
}
}
);
employees.js:
const mysql = require('mysql');
const async = require('async');

// Fetches one employee record through a connect -> query waterfall.
// NOTE(review): several calls below do not match the `mysql` package API;
// since none of its callbacks would ever fire, this would explain why no
// logging is ever seen. See inline notes -- each needs confirming against
// the driver actually installed (mysql vs mysql2).
function getEmployee(empId, getEmployeeCallback) {
// NOTE(review): empId is never used -- the query selects every employee;
// presumably a WHERE clause is intended. TODO confirm.
async.waterfall(
[
function(callback) {
// NOTE(review): mysql.createConnection expects a config object and
// returns the connection synchronously; it does not take a node-style
// callback, so this callback never fires and the waterfall stalls.
mysql.createConnection(function(err, conn) {
if (err) {
console.log('Error getting connection', err);
} else {
console.log('Connected to database');
}
callback(err, conn);
});
},
function(conn, callback) {
// NOTE(review): the `mysql` package exposes .query(), not .execute()
// (.execute is mysql2) -- confirm which driver is intended.
conn.execute(
`select *
from employees`,
function(err, result) {
if (err) {
console.log('Error executing query', err);
} else {
console.log('Query executed');
}
callback(err, conn, result);
}
);
}
],
function(err, conn, result) {
// NOTE(review): mysql's query callback passes the rows array directly;
// `result.rows` would be undefined here -- confirm.
if (err) {
getEmployeeCallback(err);
} else {
getEmployeeCallback(null, result.rows[0]);
}
// If error getting conn, no need to close.
if (conn) {
// NOTE(review): mysql connections are closed with .end() or
// .destroy(); .close() is not part of the API -- confirm.
conn.close(function(err) {
if (err) {
console.log('Error closing connection', err);
} else {
console.log('Connection closed');
}
});
}
}
);
}
module.exports.getEmployee = getEmployee;
db.js:
var mysql = require('mysql');
// Creates and opens a single shared MySQL connection for the app.
// NOTE(review): this module exports the live *connection*, but index.js
// passes the required value to mysql.createConnection as if it were a config
// object -- one of the two usages needs to change; confirm intended design.
var connection = mysql.createConnection({
host:'localhost',
user:'developer',
password:'superseceretpassword', // NOTE(review): hard-coded credentials; consider environment variables
database:'testing'
});
connection.connect(function(err) {
// A connection failure here throws inside an async callback and crashes
// the process -- intentional fail-fast, presumably.
if (err) throw err;
});
module.exports = connection;
I'm trying to call a function, ereaseFiles(), before the upload.array() method runs, but I can't figure out how to make it work.
The main goal is with a put request to delete all files on disk related to that object before of uploading new ones.
I've tried to set the function in the diskStorage section as well as in the callback of the put route. I even tried handling it in the function itself before the upload.array() method was called. I've tried working with promises but that is way too hard for me.
//function to be called (this works)
// Deletes every previously uploaded file whose name starts with the record's
// numeroScheda (the prefix applied by the multer filename callback below).
function ereaseFiles(req) {
glob("uploads/" + req.body.numeroScheda + "*", function (err, files) {
// BUG FIX: the original ignored glob errors, so on failure `files` would
// be undefined and the for...of below would throw a TypeError.
if (err) {
console.error('glob failed:', err);
return;
}
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
console.log('successfully deleted files');
});
}
});
}
//My multer setup:
// Multer disk-storage configuration: files land in ./uploads/ and are named
// "<numeroScheda>-<original name>" so they can later be matched by prefix.
const storage = multer.diskStorage({
destination: (req, file, cb) => cb(null, './uploads/'),
filename: (req, file, cb) => cb(null, `${req.body.numeroScheda}-${file.originalname}`)
});

// Upload handler: same storage engine, 2 MB per-file size cap.
const upload = multer({
storage,
limits: { fileSize: 2 * 1024 * 1024 }
});
//MY EDIT PUT ROUTE
// EDIT (PUT) route: multer stores up to 30 images first, then the document is
// updated with the form fields and the new image paths.
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) {
const requestedId = req.params._id;
const proprietaImmagini = req.files;
// Keep only the on-disk paths of the freshly uploaded files.
const immagini = proprietaImmagini.map(function (immagine) {
return immagine.path;
});
// BUG FIX: findOneAndUpdate expects a filter *object*; the original passed
// the raw id string as the conditions argument.
Immobile.findOneAndUpdate({ _id: requestedId }, {
numeroScheda: req.body.numeroScheda,
categoria: req.body.categoria,
titolo: req.body.titolo,
sottotitolo: req.body.sottotitolo,
descrizione: req.body.descrizione,
localita: req.body.localita,
locali: req.body.locali,
superficie: req.body.superficie,
camere: req.body.camere,
bagni: req.body.bagni,
immagini: immagini,
}, function (err, updatedImmobile) {
if (err) {
// BUG FIX: the original only logged, leaving the request unanswered.
console.error(err);
return res.status(500).send('update failed');
}
res.redirect("/immobili/" + requestedId);
});
});
What should happen is that all files on disk associated with the object (numeroScheda) get deleted before the new ones are uploaded to keep the storage of files automated and clean.
EDIT 1:
I've created a delete route that works:
// DELETE route: remove the files belonging to this record, then the record
// itself, then send the client back to the listing.
app.delete("/immobili/:_id", (req, res) => {
const requestedId = req.params._id;
Immobile.findOne({ _id: requestedId }, function (err, immobile) {
if (err) return console.error(err);
ereaseFiles(immobile);
// BUG FIX: the original issued findOneAndRemove concurrently with the
// findOne above (so the document could be gone before it was read), and
// passed the raw id instead of a filter object. Removal now runs only
// after the lookup has succeeded.
Immobile.findOneAndRemove({ _id: requestedId }, err => {
if (err) console.error(err);
else res.redirect('/immobili');
});
});
});
the function ereaseFiles looks now like this:
// Deletes all files for the given record, then raises the `cancellato` flag.
// (ereaseFiles and cancellato stay implicit globals on purpose -- the route
// middleware polls `cancellato` from another scope.)
ereaseFiles = immobile => {
cancellato = false;
glob("uploads/" + immobile.numeroScheda + "*", function (err, files) {
if (err) throw err;
// BUG FIX: the original set `cancellato = true` synchronously, before a
// single fs.unlink had completed, so the flag never actually meant
// "files deleted". Raise it only after every asynchronous deletion has
// finished (unlink callbacks may complete in any order).
let pending = files.length;
if (pending === 0) {
cancellato = true;
return;
}
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
if (--pending === 0) cancellato = true;
});
}
});
}
I've tried to implement this in the edit route with the middleware as kindly suggested with this:
// Middleware attempt (question code): poll once a second until the deletion
// flag is observed, then let the PUT route run.
app.use("/immobili/:_id", function (req, res, next) {
const requestedId = req.params._id;
let timer = setInterval(() => {
// NOTE(review): this re-queries the document AND re-runs ereaseFiles on
// every tick until the flag is seen -- deletions can overlap the upload
// that follows next(), matching the flaky behaviour described below.
Immobile.findOne({ _id: requestedId }, (err, immobile) => {
if (err) return console.error(err);
ereaseFiles(immobile);
// NOTE(review): `this.cancellato` only resolves because `cancellato` is
// an implicit global reachable through the arrow's inherited `this` --
// fragile; confirm it is not undefined in strict mode.
console.log(this.cancellato);
if (this.cancellato) {
clearInterval(timer);
next();
}
});
}, 1000);
});
This works if the uploaded pictures are more or same than before but if less it outputs with strange behaviors (sometimes it uploads some pictures, sometimes none).
You can use a middleware for that. Just make sure that the middleware is positioned before your put request handler.
// Answer's middleware gate: start the deletion, then block the PUT handler
// until the module-level `erased` flag goes true.
// NOTE(review): relies on two globals -- eraseFiles must set `erased = true`
// when done, and `erased` must be reset to false before each request.
app.use("/immobili/:_id", function(req,res,next){
eraseFiles(req);
// Poll every 100 ms; clearInterval guarantees next() fires only once.
let timer = setInterval(() => {
if(erased){
clearInterval(timer);
next();
}
},100);
})
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) { ...
EDIT 1:
Please change your eraseFiles function to:
// Deletes every stored file whose name starts with the submitted numeroScheda,
// then raises the module-level `erased` flag polled by the middleware.
function ereaseFiles(req) {
glob("uploads/" + req.body.numeroScheda + "*", function (err, files) {
if (err) throw err;
// BUG FIX: the original looped with `i < files.length - 1`, which both
// skipped the last file and made the `i === files.length - 1` check
// unreachable -- so `erased` was never set and the polling middleware
// spun forever. Count completed unlinks instead, since their callbacks
// can finish in any order.
let pending = files.length;
if (pending === 0) {
erased = true;
return;
}
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
console.log('successfully deleted files');
if (--pending === 0)
erased = true;
});
}
});
}
Edit 2: Changed a lot of things. Now the approach is: your route will go through the middleware first, where your eraseFiles function will be called. While the erased variable is not true, your put route will not be hit. When the eraseFiles function completes, it will set erased to true. For this to work, you'll have to set erased = false in the file before all of this.
I won! The solution was to put a little timer on the next() function, as it was firing too soon and mixing the deletions with the uploads. Thanks for your help everyone!
I've also added an erased variable that is switched off and on as the function completes. Thanks to Mr. Web for that.
Here's the answer if someone runs across this, there's some Italian in the code, hopefully is readable enough anyways!
// Final version of the delete helper ("cancellato" = deleted).
// NOTE(review): `cancellato = true` still executes synchronously, before
// glob's callback and the fs.unlink calls have finished, so the flag does not
// truly mean "files gone". The 500 ms setTimeout in the middleware below is
// what papers over that race in practice.
cancellaFoto = immobile => {
cancellato = false;
glob("uploads/" + immobile.numeroScheda + "*", function (err, files) {
// NOTE(review): glob's err is ignored; on failure `files` is undefined
// and the loop would throw.
for (const file of files) {
fs.unlink(file, err => {
if (err) throw err;
});
}
});
cancellato = true;
}
// Pre-PUT middleware: wipe the record's old images, then let the edit route run.
app.use("/immobili/:_id", function (req, res, next) {
const requestedId = req.params._id;
Immobile.findOne({ _id: requestedId }, (err, immobile) => {
if (err) return console.error(err);
immobile.immagini = [];
cancellaFoto(immobile);
console.log(immobile.immagini);
// NOTE(review): `this.cancellato` resolves only because `cancellato` is an
// implicit global reachable via the arrow's inherited `this` -- fragile.
if (this.cancellato) {
console.log(this.cancellato);
// The 500 ms delay gives the asynchronous fs.unlink calls time to finish
// before the new upload starts -- a timing workaround, not a guarantee.
return setTimeout(next, 500);
} else {
// No response is sent on this path; the request would hang. TODO confirm.
return console.log("Aborted");
}
});
});
//EDIT PUT ROUTE
// EDIT (PUT) route, final version: store the new uploads via multer, then
// overwrite the document's fields and image paths.
app.put("/immobili/:_id", upload.array('immaginePrincipale', 30), function (req, res) {
const requestedId = req.params._id;
const proprietaImmagini = req.files;
// Collect the on-disk paths of the freshly uploaded files.
const immagini = proprietaImmagini.map(function (immagine) {
return immagine.path;
});
console.log(immagini);
// BUG FIX: findOneAndUpdate expects a filter *object*; the original passed
// the raw id string as the conditions argument.
Immobile.findOneAndUpdate({ _id: requestedId }, {
numeroScheda: req.body.numeroScheda,
categoria: req.body.categoria,
titolo: req.body.titolo,
sottotitolo: req.body.sottotitolo,
descrizione: req.body.descrizione,
localita: req.body.localita,
locali: req.body.locali,
superficie: req.body.superficie,
camere: req.body.camere,
bagni: req.body.bagni,
immagini: immagini,
}, function (err, updatedImmobile) {
if (err) {
// BUG FIX: the original only logged, leaving the request unanswered.
console.error(err);
return res.status(500).send('update failed');
}
res.redirect("/immobili/" + requestedId);
});
});
I have multiple res.send in one route, how can I append them all into one and send the accumulated list at the end?
I prefer to do it in the following form:
{
"writer": {success message},
"archive": {success message},
...
}
and another one like above for the list errors.
here is the code:
// Question code: one POST that touches several collections.
// NOTE(review): multiple branches below can each send a response, so a single
// request may attempt more than one res.send/res.json ("headers already sent").
router.post('/some/route', function (req, res) {
// NOTE(review): this 403s users who ARE logged in -- presumably the
// condition should be negated; confirm.
if (req.isLoggedIn()) {
return res.status(403).json({});
}
MyModel.findById(req.user._id,function (err, data) {
if(err || data.rights !== 'super'){
return res.status(403).json({});
}
if(req.body.writer){
Books.update(
{ writer : req.body.id},
{ $set : { writer : req.body.writer} },
function (err) {
// First possible response for this request...
if(err){
res.status(500).send(err);
}
else{
res.status(200).send('updated successfully.');
}
}
);
}else{
// Fire-and-forget: errors only logged, no response from this branch.
Books.remove({writer: req.body.id}, function(err){
if (err){ return console.log(err)}
});
}
// Fire-and-forget removal, racing the branches above; logs null on success.
MetaInfo.findOneAndRemove({_id: req.body.id}, function (err, data) {
console.log(err);
});
Archive.findOne({_id: req.body.id},function (err, data) {
smtpTransporter.sendMail({...}, function (error, response) {
if (error) {
console.log(error);
} else {
console.log("Mail sent");
}
smtpTransporter.close();
});
// NOTE(review): `data` is used before `err` is checked -- if findOne
// failed, data may be null and data.remove() throws.
data.remove();
if (err) {
console.log(err);
return res.status(200).json({
success: false,
message: 'server error',
err: err
});
}
// ...second possible response, racing the Books.update responses above.
res.status(200).json({
success: true
});
})
});
});
I assume your problem is the asynchronous calls to the database.
So it's best to take a library of your choice (for example async), run your async processes, and then finally send your result in the callback.
Your result could look like this:
// Skeleton: collect all the async work here and send ONE accumulated response
// from the final callback -- the only place res.send should appear.
async.parallel([
function(callback) { ... },
function(callback) { ... }
], function(err, results) {
// send your result here
// (results[i] corresponds to the position of each task above)
});
Note that if you are using .parallel, the final callback will be called immediately if one of the tasks fails — see the docs.
Sorry if the title is not quite descriptive.
I am using Node and trying to use module.exports to keep the code clean.
app.js
// ...
// Register all routes from router.js on this Express app, then export the
// app for the server entry point.
require('./router')(app);
module.exports = app;
router.js
cloudant = require("./helpers/cloudant") // NOTE(review): missing `const` -- creates an implicit global
// ...
module.exports = (app) => {
// ...
app.post("/statsPage", function(req, res) {
// ...
// BUG (the question): listUsers runs an async db.find and returns nothing,
// so `a` is always undefined here; the data only exists inside listUsers'
// own callback.
var a = cloudant.listUsers();
console.log("from post ", a) // --> it shows ("undefined")
if(a == false || a == undefined ) {
res.render("error");
} else {
res.render("statsPage", {
results: a
});
}
// NOTE(review): the snippet as posted is truncated -- the closing braces
// for app.post and module.exports are missing; presumably lost in the paste.
cloudant.js
exports = module.exports = {}

// Fetches all docs with _id > 0.
// BUG (the question): db.find is asynchronous; the `return` statements below
// return from db.find's *inner* callback, not from listUsers itself, so
// callers always receive undefined. The fix is to accept a callback (or
// return a Promise) instead.
exports.listUsers = function() {
db.find({selector: {_id:{ "$gt": 0}}}, function(err, body) {
if(err) {
console.log(err);
return false; // returned to db.find's internals, not to the caller
} else {
console.log(body.docs) // --> it shows results correctly
return body.docs; // same -- this value is discarded
}
});
}
I've exported other methods (like "insert") the same way, so I'm convinced that this issue is related neither to my db connection nor to my export config.
The db.find method is asynchronous, so the data you get from the database is only available in the callback function. If you look carefully at the function you're exporting in cloudant.js, you'll see that there is no return statement returning any data, only in the callback function, that doesn't help anything.
There are many ways to solve this (and many, many posts on SO dealing with it).
Simplest solution for you would be to pass your own callback to your listUsers function:
exports.listUsers = function (callback) {
db.find({ selector: { _id: { "$gt": 0 } } }, function (err, body) {
if (err) {
console.log(err);
callback(err);
} else {
callback(body.docs);
}
});
}
router.js
// Consume listUsers through the callback it now accepts.
app.post("/statsPage", function(req, res) {
cloudant.listUsers(function (a) {
// NOTE(review): `a` is either the docs array or, on failure, the error
// object -- the two are indistinguishable here; an error-first
// (err, docs) callback would be safer. TODO confirm with caller needs.
console.log("from post ", a);
});
});
To catch errors I have written if-else blocks in every function which looks bad. Please suggest a better way to handle errors in async node
// Question code: every step wraps its callee in a redundant if/else -- the
// answers below show that the callback can be forwarded directly.
async.waterfall([
function(callback){
fnOne.GetOne(req, res,function(err,result) {
if(err){
console.error("Controller : fnOne",err);
callback(err,null);
}
else{
var fnOne = result;  // NOTE(review): shadows the outer fnOne module
callback(null, fnOne);
}
})
},
function(fnOne, callback){
fnTwo.two(fnOne,function(err,result) {
if(err) {
console.error(err);
callback(err,null);
}
else{
callback(null, context); // NOTE(review): `context` is not defined here -- likely meant `result`; confirm
}
})
}
], function (err, result) {
if(err){
console.error("Controller waterfall Error" , err);
res.send("Error in serving request.");
}
// NOTE(review): no success branch -- nothing is sent when the waterfall succeeds.
});
You can pass the error to async and catch it in the callback
// Same flow, less boilerplate: hand async's own callback straight to each
// task, so errors short-circuit to the final handler automatically.
async.waterfall([
(callback) => {
// GetOne already invokes callback(err, result) -- no manual if/else needed.
fnOne.GetOne(req, res, callback);
},
(resultOfOne, callback) => {
// resultOfOne is the value GetOne produced in the previous step.
fnTwo.two(resultOfOne, callback);
}
], (err, result) => {
if (err) {
console.error("Error :", err);
res.send("Error in serving request.");
} else {
res.end("A-OK");
}
});
You are doing too much work.
Waterfall already has internal error management.
callback(err, [results]) - An optional callback to run once all the
functions have completed. This will be passed the results of the last
task's callback.
Try this
// Waterfall threads errors to the final callback on its own, so each task can
// simply forward `callback` to the function it calls.
async.waterfall([
function(callback){
fnOne.GetOne(req, res, callback)
},
function(fnOne, callback){
// BUG FIX: the original line read `fnTwo.two(fnOne,callback) {` -- a
// stray opening brace that made the whole snippet a syntax error.
fnTwo.two(fnOne, callback)
}
], function (err, result) {
if(err){
console.error("Controller waterfall Error" , err);
res.send("Error in serving request.");
}
});
// Upload each file to the GCS bucket, move it to its final name, and record
// its public URL on `body`; async.each's completion callback fires once every
// file has been processed (or on the first reported error).
async.each(files, (file, callback) => {
// Create a new blob in the bucket and upload the file data.
const blob = bucket.file(file.file.originalname);
const blobStream = blob.createWriteStream();
blobStream.on('error', (err) => {
callback(err);
});
blobStream.on('finish', () => {
// The public URL can be used to directly access the file via HTTP.
Storage.bucket(BUCKET_NAME)
.file(blob.name)
.move(body.email + '_' + file.dir + '.' + blob.name.split('.').pop())
.then((e) => {
body[file.dir] = format(`https://storage.googleapis.com/${BUCKET_NAME}/${e[0].name}`)
callback();
})
.catch(err => {
console.error('ERROR: ', err);
// BUG FIX: the original swallowed the error here without calling
// callback, so async.each never completed and the request hung.
callback(err);
});
});
blobStream.end(file.file.buffer);
}, (err) => {
if (err) {
console.error(err);
return res.status(422).send({error: true, data: {message: "An error occured. Please fill all fields and try again"}});
}
// save to db
});