I am trying to upload multiple images from a form to a webpage, but the problem is that a new entry is created at each step of the for loop. If I take the create call out of the loop, then req.body.ca.video1 becomes undefined. How do I create the entry only once, with all the images?
The POST route:
router.post("/n/v/n", upload.array('images'), middleware.isloggedin, async function(req, res) {
try {
req.body.ca.video1 = [];
for (var i = 0; i < req.files.length; i++) {
console.log(req.files[i].path);
cloudinary.v2.uploader.upload(req.files[i].path, { resource_type: "auto" }, function(error, result) {
req.body.ca.video1.push({
url: result.secure_url,
format: result.format
});
req.body.ca.author = {
id: req.user._id,
username: req.user.username
}
req.body.ca.created = new Date();
////if i get out of this then req.body.ca becomes undefined
const campground = await Campground.create(req.body.ca);
});
/////here the video1 goes undefined
}
req.flash("success", "it is uploading");
res.redirect("/c");
} catch (err) {
req.flash("error", err.message);
res.redirect("back");
}
});
That's because cloudinary's upload is asynchronous: the code after the loop executes before any of the upload requests has finished.
You need to wait for all the requests to finish before you can access the results.
Also, you shouldn't declare new variables on the request body unless you need them in another middleware in the pipeline.
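A minimal sketch of the route rewritten with Promise.all, assuming the Cloudinary v2 SDK returns a promise when no callback is passed (route paths and field names are taken from the question):

router.post("/n/v/n", upload.array('images'), middleware.isloggedin, async function(req, res) {
    try {
        // Start all uploads in parallel and wait until every one has finished.
        const results = await Promise.all(
            req.files.map(file =>
                cloudinary.v2.uploader.upload(file.path, { resource_type: "auto" })
            )
        );
        // Build the document once, with all uploaded media attached.
        req.body.ca.video1 = results.map(r => ({ url: r.secure_url, format: r.format }));
        req.body.ca.author = { id: req.user._id, username: req.user.username };
        req.body.ca.created = new Date();
        await Campground.create(req.body.ca);
        req.flash("success", "Upload complete");
        res.redirect("/c");
    } catch (err) {
        req.flash("error", err.message);
        res.redirect("back");
    }
});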
Related
I'm trying to retrieve a list of objects and send them back to my mobile app, but I'm having some difficulty actually sending them after the forEach loop is over.
I tried appending the variable "data" to an array and sending it outside of the loop, but the array is empty. Obviously data is being retrieved, but it doesn't get pushed into the array in time.
How can I make sure the loop is over before I call res.send()? I reduced the code as much as I could to make it as simple as possible.
var stripe = require("stripe")("stripe_key");

exports.fetchTransactions = function(req, res) {
    var account = req.body.account;
    stripe.transfers.list({ destination: account }, function(err, transactions) {
        transactions.data.forEach(function(item) {
            stripe.transfers.retrieve(item.id, function(err, transfer) {
                if (err) {
                    console.log(err);
                }
                var data = {
                    amount: item.amount,
                    created: item.created
                };
                // Can't call res.send(data) inside the loop because res.send() can only be called once.
                // But if I call res.send(array) outside of the loop, the array is still empty.
            });
        });
    });
};
Keep track of the pending responses from the API, and call res.send once all of them have been received.
var stripe = require("stripe")("stripe_key");

exports.fetchTransactions = function(req, res) {
    var account = req.body.account;
    stripe.transfers.list({
        destination: account
    }, function(err, transactions) {
        var pending = transactions.data.length;
        var results = [];
        transactions.data.forEach(function(item) {
            stripe.transfers.retrieve(item.id, function(err, transfer) {
                pending--;
                if (err) {
                    console.log(err);
                } else {
                    results.push({
                        amount: item.amount,
                        created: item.created
                    });
                }
                // Only send once the last retrieve call has come back.
                if (pending === 0) {
                    res.send(results);
                }
            });
        });
    });
};
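For reference, a sketch of the same flow with Promise.all, assuming a version of the Stripe library that returns promises when no callback is passed:

exports.fetchTransactions = function(req, res) {
    stripe.transfers.list({ destination: req.body.account })
        .then(function(transactions) {
            // Retrieve every transfer in parallel and wait for all of them.
            return Promise.all(transactions.data.map(function(item) {
                return stripe.transfers.retrieve(item.id).then(function(transfer) {
                    return { amount: item.amount, created: item.created };
                });
            }));
        })
        .then(function(results) {
            res.send(results);
        })
        .catch(function(err) {
            console.log(err);
            res.status(500).send(err.message);
        });
};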
Hello, I'm stuck in my first callback, selectArticleByTitle(title, callback): the terminal reports "Cannot read property 'id' of undefined". I don't know how to force the first callback to finish before launching the others.
router.get('/article/:title', function(req, res) {
    dataBase.selectArticleByTitle(req.params.title, function(db_titleERR, db_titleResults) {
        console.log(db_titleResults);
        dataBase.selectArticle(db_titleResults[0].id, function(db_resultsArticleERR, db_resultsArticle) {
            // Get id of the previous article
            dataBase.previousArticle(db_titleResults[0].id, function(db_previousIdERR, db_previousId) {
                // Get id of the next article
                dataBase.nextArticle(db_titleResults[0].id, function(db_nextIdERR, db_nextId) {
                    // Get last article
                    dataBase.lastArticle(function(db_lastArticleERR, db_lastArticle) {
                    });
                });
            });
        });
    });
});
exports.selectArticleByTitle = function(title, callback) {
    connection.query('select * from article where title=?', [title], function(err, row) {
        if (err)
            callback(err, null);
        else {
            if (row) {
                callback(null, row);
            }
        }
    });
}
Here is the log:
console.log(db_titleResults);
[ RowDataPacket {
id: 7,
genre: 'Sciences',
picture: 'xw',
source: 'xswx',
title: 'zzazzaz',
meta: 'azazadsq',
inputDate: 2017-04-15T10:00:00.000Z,
visitor: 0 } ]
[]
Thank you in advance
If you want to stick with the original code, then try the fix below.
The issue is that you are being returned a single row, but you are trying to access the result as if many rows were returned in an array.
The change below should at least get rid of your error. I would also recommend checking the length of the results: if db_titleResults.length is defined, then you know SQL returned an array.
Instead of db_titleResults[0].id, use db_titleResults.id.
router.get('/article/:title', function(req, res) {
    dataBase.selectArticleByTitle(req.params.title, function(db_titleERR, db_titleResults) {
        console.log(db_titleResults);
        dataBase.selectArticle(db_titleResults.id, function(db_resultsArticleERR, db_resultsArticle) {
            // Get id of the previous article
            dataBase.previousArticle(db_titleResults.id, function(db_previousIdERR, db_previousId) {
                // Get id of the next article
                dataBase.nextArticle(db_titleResults.id, function(db_nextIdERR, db_nextId) {
                    // Get last article
                    dataBase.lastArticle(function(db_lastArticleERR, db_lastArticle) {
                    });
                });
            });
        });
    });
});
I am not sure which library you are using to connect to SQL, but you can avoid nested callbacks with an approach like the one below:
const sql = require('mssql')

sql.connect(config, err => {
    // ... error checks
    const request = new sql.Request()
    request.stream = true // You can set streaming differently for each request
    request.input('title', sql.NVarChar, title) // parameters are bound by name in mssql
    request.query('select * from article where title = @title')
    request.on('row', row => {
        // Emitted for each row in a recordset
        dataBase.selectArticle(row.id, ...);
        dataBase.previousArticle(row.id, ...);
        dataBase.lastArticle(row.id, ...);
    });
    request.on('error', err => {
        // May be emitted multiple times
    });
});
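Alternatively, sticking with the callback-style helpers from the question, a sketch that flattens the pipeline using Node's util.promisify and async/await (the dataBase method names are taken from the question; the response shape is illustrative):

const util = require('util');

// Promisified wrappers; .bind keeps `this` pointing at dataBase.
const selectArticleByTitle = util.promisify(dataBase.selectArticleByTitle.bind(dataBase));
const selectArticle = util.promisify(dataBase.selectArticle.bind(dataBase));
const previousArticle = util.promisify(dataBase.previousArticle.bind(dataBase));
const nextArticle = util.promisify(dataBase.nextArticle.bind(dataBase));

router.get('/article/:title', async function(req, res) {
    try {
        const rows = await selectArticleByTitle(req.params.title);
        // Guard against the empty-array result shown in the second log line.
        if (!rows || rows.length === 0) {
            return res.status(404).send('Article not found');
        }
        const id = rows[0].id;
        // The three lookups are independent, so run them in parallel.
        const [article, previousId, nextId] = await Promise.all([
            selectArticle(id),
            previousArticle(id),
            nextArticle(id)
        ]);
        res.send({ article: article, previousId: previousId, nextId: nextId });
    } catch (err) {
        res.status(500).send(err.message);
    }
});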
I have an app that works fine locally, but once on Heroku it does not work. The app has a scrape function that stores the retrieved articles from the website into a database. This does not work on Heroku for some reason, and I get the error MongoError: Invalid Operation, No operations in bulk. I tried using new Promise and an ORM, but I still get the same error. The weird part is that this worked once on Heroku when I was using new Promise, but after I visited a route called /delete, the scrape stopped working. That route just deletes all the data from both my models. I am unsure now what the problem is. Could it be that when I do an insertMany into my Article model, there are empty properties that do not get assigned?
Update: I just put a console.log of the array scrapedStuff inside the scraper function, and the Heroku logs show the array is empty. Why is it empty? Shouldn't this callback function, which is passed the array, be called once the .each() is finished?
// MY MODEL
var ArticleSchema = new Schema({
    title: {
        type: String
    },
    link: {
        type: String
    },
    description: {
        type: String
    },
    saved: {
        type: Boolean,
        default: false
    },
    img: {
        type: String
    },
    comment: [{
        type: Schema.Types.ObjectId,
        ref: 'Comment'
    }]
});
// ROUTE FILE
var scrape = require('./scrape');

var scraper = function(response, scrapedStuff, displayScrape) {
    console.log("INSIDE SCRAPER CALLBACK FUNCTION (INSERTMANY)", scrapedStuff)
    console.log("TWOOOOOOOOOOOOOOOOOOOOOOOOOO")
    Article.collection.insertMany(scrapedStuff, { ordered: false }, function(err, docs) {
        if (err) {
            console.log(err);
        } else {
            console.log("INSERT MANY DOCS", docs);
            console.log("FINISHED SCRAPING ELSE")
            // resolve();
            displayScrape(response);
        }
    });
}

var displayScrape = function(response) {
    console.log("THREEEEEEEEEEEEEEEEEEEEEEE")
    Article.find({}, function(err, doc) {
        if (err) {
            console.log(err)
        } else {
            if (doc.length > 0) {
                console.log("CHECK TO SEE IF DOCS IN HOME GOT UPDATE", doc)
                var articleLength = [];
                for (var x = 0; x < doc.length; x++) {
                    if (doc[x].saved === false) {
                        articleLength.push(doc[x]);
                    }
                }
                // var finalLength = articleLength.length;
                response.render('home', { data: { articles: doc, length: articleLength.length, finishScrape: true } })
            } else {
                response.render('home');
            }
        }
    });
}

router.get('/scrape', function(req, response) {
    scrape(response, scraper, displayScrape);
});
// SCRAPE FILE
var request = require('request');
var cheerio = require('cheerio');

var scrape = function(response, scraper, displayScrape) {
    console.log("ONNNNNNNNNEEEEEEEEEEE")
    request('http://www.gamespot.com/', function(err, res, html) {
        if (err) {
            console.log(err)
            // reject(err);
        }
        // else
        var scrapedStuff = [];
        console.log("GOT RESPONSE FROM GAMESPOT");
        var $ = cheerio.load(html);
        // var articleCounter = 1;
        $('article a').each(function(i, element) {
            // var scrapedStuff = {};
            // scrapedStuff.articleID = articleCounter;
            var scrpObj = {
                title: $(this).attr('data-event-title'),
                link: $(this).attr('href'),
                img: $(this).children('figure').children('div.media-img').children('img').attr('src'),
                description: $(this).children('div.media-body').children('p.media-deck').text()
            }
            scrapedStuff.push(scrpObj);
        });
        console.log("SCRAPED RESULTS", scrapedStuff);
        scraper(response, scrapedStuff, displayScrape);
    });
}

module.exports = scrape;
I've created the following code, but I can't figure out how to update the image when findOneAndUpdate finds a result. Besides that, it seems like result.save is executed before put_from_url. How can I write this so that, if the user exists, it updates all properties and uploads the new image to S3, and if not, it creates a new object along with the S3 upload?
router.post('/:id/:name/:birth/:country/:image', function(req, res, next) {
    var params = req.params;
    var accessHeader = req.headers;
    process.env.AWS_ACCESS_KEY_ID = ''
    process.env.AWS_SECRET_ACCESS_KEY = ''
    AWS.config.region = 'eu-west-1';
    var s3 = new AWS.S3();
    User.findOneAndUpdate({ "_id": params.id }, { $set: { "name": params.name, "birthday": params.birth, "country": params.country } }, { new: true }, function(error, result) {
        if (!error) {
            // If the document doesn't exist
            if (!result) {
                // Create it
                put_from_url(params.image, new Date().toString(), function(err, res) {
                    result = new User({
                        _id: params.id,
                        name: params.name,
                        birthday: new Date(params.birth),
                        country: params.country,
                        image: "url" + params.id
                    });
                });
            }
            // Save the document
            result.save(function(error) {
                if (!error) {
                    res.json({ message: 'User created!' });
                } else {
                    res.send(err);
                }
            });
        }
    });
});
The upload function:
function put_from_url(url, key, callback) {
    request({
        url: url,
        encoding: null
    }, function(err, res, body) {
        if (err)
            return callback(err, res);
        uploader.putObject({
            Bucket: "",
            Key: "/" + key,
            ContentType: res.headers['content-type'],
            ContentLength: res.headers['content-length'],
            Body: body // buffer
        }, callback);
    })
}
Okay, it seems your question has two parts.
First: how do you store an image if there is no user, and update it if there is already a user?
In either case you have to store the image, so why not first upload the image and then update/insert the user, or do it the other way around? Personally, I would choose the former.
router.post('/:id/:name/:birth/:country/:image', function(req, res, next) {
    var params = req.params;
    var accessHeader = req.headers;
    process.env.AWS_ACCESS_KEY_ID = ''
    process.env.AWS_SECRET_ACCESS_KEY = ''
    AWS.config.region = 'eu-west-1';
    var s3 = new AWS.S3();
    // The upload callback's response parameter is named s3res so it doesn't
    // shadow the Express res object used below.
    put_from_url(params.image, params.id, function(err, s3res) {
        var result = new User({
            _id: params.id,
            name: params.name,
            birthday: new Date(params.birth),
            country: params.country,
            image: "url" + params.id
        });
        // Save the document only after the upload has finished.
        result.save(function(error) {
            if (!error) {
                res.json({ message: 'User created!' });
            } else {
                res.send(error);
            }
        });
    });
});
A few notes: for the key you were using a date, but I used the id, so the object can be updated with a new image later on. An S3 object is addressed and overwritten by its unique key. If you want to keep old copies of images, use a key like params.id + "/" + new Date().toString() so old images are not overwritten.
Secondly, the save function inherently updates the document if one with that _id is already there, based on _id; otherwise it creates a new one.
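(As a side note, a sketch of the other branch of the trade-off: Mongoose's findOneAndUpdate also takes an upsert option that inserts the document when no match exists, which would collapse the manual create-or-update logic from the question into one call.)

User.findOneAndUpdate(
    { _id: params.id },
    { $set: { name: params.name, birthday: new Date(params.birth), country: params.country } },
    { new: true, upsert: true }, // upsert: insert a new document if none matches
    function(error, result) {
        // result is the updated (or freshly inserted) user
    }
);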
"it seems like result.save is being executed before put_from_url"
It surely will. JavaScript is inherently asynchronous: if one operation takes a while, the next statement still executes. The call to S3 is an I/O call; the upload is handed off, and meanwhile your next line of code, in your case result.save, is executed.
There are two ways around this: (i) wrap the next call in the callback of the previous call, or (ii), far better, use promises.
A promise is a sort of future value that lets you say "upload to S3, then save into the DB".
Promises have a little learning curve, but they're great, clean, and really helpful; q is one such promise module.
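A minimal sketch with q, reusing the put_from_url helper from above (Q.nfcall adapts a Node-style callback function, Q.ninvoke a Node-style method):

var Q = require('q');

Q.nfcall(put_from_url, params.image, params.id)
    .then(function() {
        var user = new User({ _id: params.id, name: params.name });
        return Q.ninvoke(user, 'save'); // runs only after the upload succeeded
    })
    .then(function() {
        res.json({ message: 'User created!' });
    })
    .fail(function(error) {
        res.send(error.message);
    })
    .done();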
I'm trying to have a total message count for a user's inbox displayed within my layout. I was thinking I needed to use Express's dynamicHelpers to do this, but in Express <= 2.x these are not async calls, and I need to do some async processing within them: in this case, a database call with a callback.
I'm trying the following to place the count in the session, which itself is exposed through a dynamicHelper accessible to the views. However, due to the asynchronous nature of these callbacks, session.unreadMessages is always undefined.
messageCount: function(req, res) {
    var Messages = require('../controllers/messages'),
        messages = new Messages(app.set('client'));
    if (req.session.type === 'company') {
        messages.getCompanyUnreadCount(req.session.uid, function(err, result) {
            req.session.unreadMessages = result[0].unread;
        });
    } else if (req.session.type === 'coder') {
        messages.getCoderUnreadCount(req.session.uid, function(err, result) {
            req.session.unreadMessages = result[0].unread;
        });
    }
    return;
}
Is there another or better way to perform this task?
It should be noted that req.session.unreadMessages is defined within the callback, but undefined when the session is read through the helper.
Not sure if it would be the 'best way', but I'm used to using a filter (a so-called middleware) to load data before the request reaches its actual destination, as in:
filters.setReqView = function(req, res, next) {
    req.viewVars = {
        crumb: [],
        flash_notice: null // populate as needed
    };
    somethingAsync(function(err, data) {
        req.viewVars.someData = data;
        next(); // continue to the route only after the data has loaded
    });
};

app.all('*', filters.setReqView);

// then in my request handler:
res.render('auth/win', req.viewVars);
Refactoring your code, you would have:
app.all('*', function(req, res, next) {
    if (req.session && req.session.type) {
        var Messages = require('./controllers/messages'),
            messages = new Messages(app.set('client'));
        if (req.session.type === 'company') {
            messages.getCompanyUnreadCount(req.session.uid, function(err, result) {
                req.session.messageCount = result[0].unread;
                next();
            });
        } else if (req.session.type === 'coder') {
            messages.getCoderUnreadCount(req.session.uid, function(err, result) {
                req.session.messageCount = result[0].unread;
                next();
            });
        } else {
            next(); // unknown session type: don't leave the request hanging
        }
    } else {
        next()
    }
});
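With the count loaded by the middleware, a dynamicHelper can then expose it to every view; a short sketch, assuming Express 2.x (where app.dynamicHelpers takes per-request functions):

app.dynamicHelpers({
    unreadMessages: function(req, res) {
        // By the time a view renders, the middleware above has already run.
        return req.session.messageCount || 0;
    }
});

// In a view: <%= unreadMessages %>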