push is not working in map function in Node.js - javascript

I'm using MongoDB. After getting the result of an aggregate query, I map over that result and try to push each item into an array. When I console.log that array it returns an empty array, but if I log inside the map the data is there. Below is my code snippet:
let house = await House.aggregate([
{ $group: { _id: "$houseNumber", count: { $sum: 1 } } },
{ $match: { _id: { $ne: null }, count: { $gt: 1 } } },
{ $project: { houseNumber: "$_id", _id: 0 } },
]);
var arr = [];
house.map(async (hNo) => {
var house2 = await House.find(
{ houseNumber: hNo.houseNumber },
"houseNumber"
);
arr.push(house2);
});
console.log(arr);
I'm thinking async/await is causing the problem here. Please help me. Thank you so much.

Async callbacks are not awaited by .map; it just collects the Promises they return.
You can use Promise.all to await every async function's result.
let house = await House.aggregate([
{ $group: { _id: "$houseNumber", count: { $sum: 1 } } },
{ $match: { _id: { $ne: null }, count: { $gt: 1 } } },
{ $project: { houseNumber: "$_id", _id: 0 } },
]);
// Array of Promises
const promises = house.map(hNo =>
  House.find(
    { houseNumber: hNo.houseNumber },
    "houseNumber"
  )
);
// Array with each promise result
const arr = await Promise.all(promises);
// Now you are able to log it
console.log(arr);
"While consoling that array it returning an empty array"
.map does not wait for the async callbacks to finish, so at the moment arr is logged it is still an empty array and there are house.length pending Promises.
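An alternative to the Promise.all version, if the queries should run one after another, is a for...of loop that awaits each find before pushing. This is a minimal sketch, reusing the same house result and House model as above:
// Sequential alternative (sketch): for...of awaits each query before pushing,
// unlike .map, which fires all queries and returns pending Promises.
const arr = [];
for (const hNo of house) {
  const house2 = await House.find(
    { houseNumber: hNo.houseNumber },
    "houseNumber"
  );
  arr.push(house2);
}
console.log(arr); // populated, because every find() was awaited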

Related

Return variable from database find instead of array

I need to pull two values out of my MongoDB database, and right now the code is returning an array. How do I get:
totalGuests
attendedGuests
out of the query and stored in variables so I can display them on the client?
module.exports.showEvent = async(req, res,) => {
const event = await Event.findById(req.params.id).populate('artist');
if (!event) {
req.flash('error', 'Cannot find that Event');
return res.redirect('/events');
}
res.render('events/show', { event });
const { guest_id } = req.cookies;
const lookUp = Event.collection.find({ _id: req.params.id},
{
_id: 1,
name: 1,
guests: 1,
totalGuests: {
$size: "$guests"
},
attendedGuests: {
$size: {
"$filter": {
input: "$guests",
cond: {
$eq: [
"$$this.attended",
"Y"
]
}
}
}
}
});
console.log(lookUp);
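One possible direction (a sketch, not taken from the original thread) is to run the same projection through Mongoose's aggregate(), await it, and destructure the two computed fields, assuming the Event model shown above and that mongoose is required in this module:
// Sketch only: aggregate returns plain documents, so the computed fields
// can be destructured into variables and passed to the template.
const [stats] = await Event.aggregate([
  { $match: { _id: new mongoose.Types.ObjectId(req.params.id) } },
  {
    $project: {
      name: 1,
      totalGuests: { $size: "$guests" },
      attendedGuests: {
        $size: {
          $filter: {
            input: "$guests",
            cond: { $eq: ["$$this.attended", "Y"] }
          }
        }
      }
    }
  }
]);
const { totalGuests, attendedGuests } = stats;
res.render('events/show', { event, totalGuests, attendedGuests });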

How to calculate a field in mongodb

Hello, I am trying to make a calculated field in MongoDB, however I get this error:
MongoError: The dollar ($) prefixed field '$add' in '$add' is not valid for storage.
This is the code:
router.post('/char1btn', ensureAuthenticated, function(req, res) {
const {
MongoClient
} = require("mongodb");
//Replace the uri string with your MongoDB deployment's connection string.
const uri =
'mongodb+srv://test:test1234#databasetest.5f9jh.mongodb.net/Databasetest?retryWrites=true&w=majority';
const client = new MongoClient(uri);
async function run() {
try {
await client.connect();
const database = client.db("Databasetest");
const collection = database.collection("users");
//create a filter for charactername to update
const filter = {
characterimg: ""
};
// this option instructs the method to create a document if no documents match the filter
const options = {
upsert: false
};
const updateDoc = {
$set: {
health: 150,
attack: 3,
defence: 3,
endurance: 10,
characterimg: "https://i.ibb.co/MPg2SMp/Apocaliptic1.png",
},
$set: {
$add: ["$power", "$attack", "$defence", {
$devide: ["$endurance", 3]
}]
}
}
const result = await collection.updateOne(filter, updateDoc, options);
console.log(
`${result.matchedCount} document(s) matched the filter, updated ${result.modifiedCount} document(s)`,
);
} finally {
res.redirect('/main');
await client.close();
}
}
run().catch(console.dir);
})
Does anyone know how to fix this?
Try this one:
const updateDoc = [
{
$set: {
health: 150,
attack: 3,
defence: 3,
endurance: 10,
characterimg: "https://i.ibb.co/MPg2SMp/Apocaliptic1.png",
}
},
{
$set: {
result: { $sum: ["$power", "$attack", "$defence", { $divide: ["$endurance", 3] }] }
}
}
];
const result = await collection.updateOne(filter, updateDoc, options);
You need two $set stages. Otherwise $sum: [...] (or $add) will use old values, or fail if the fields did not exist before. Also be aware that updateDoc needs to be an array; see updateOne()
Update based on comment by #WernfriedDomscheit
If you are using MongoDB 4.2 or newer, you can use an aggregation pipeline inside the update, so the update query looks like below:
db.users.updateOne({},
[
{
$set: {
attack: 1,
defence: 2,
endurance: 3
}
},
{
$set: {
power: {
"$add": ["$attack", "$defence", { $divide: ["$endurance", 3] }]
}
}
}
],
{ upsert: true }
);
will have the output:
{
"_id" : ObjectId("603124a22391a75d9a2ddec0"),
"attack" : 1,
"defence" : 2,
"endurance" : 3,
"power" : 4
}
So in your case:
const updateDoc = [
{
$set: {
health: 150,
attack: 3,
defence: 3,
endurance: 10,
characterimg: "https://i.ibb.co/MPg2SMp/Apocaliptic1.png",
}
},
{
$set: {
power: {
$add: ["$attack", "$defence", { $divide: ["$endurance", 3] }]
}
}
}
];

How to find duplicates in mongoDB with two conditions

I want to find duplicates in MongoDB Atlas. I have seen examples for finding duplicates, but as I understand it they don't take two conditions. Due to some network issues we discovered duplicate reference numbers (refno) in the database,
so I want to find duplicate refno entries within these two conditions (currentDate and transactionTypeId).
const transactionRecord = await this.tranModel.aggregate(
[{ $group:
{ _id: "$refno", count: { $sum: 1 } } }, { $match: { count: { $gt: 1 } } }
])
With the above code I was able to get all the duplicates in the db, but I want to limit it to the condition
{ transactiontdate: currentDate, transactionTypeID: transactionTypesid });
I got it working
const Record = await this.tranModel.aggregate([
{
$match: { transactiontdate: currentDate, transactionTypeID: transactionTypeID }
},
{
$group: { _id: "$refno", count: { $sum: 1 } }
}, {
$match: { count: { $gt: 1 } }
}
])
if (Record != null) {
  for (let i = 0; i < Record.length; i++) {
    const element = Record[i]._id.toString()
    tranArray.push(element)
  }
}

MongoDB Node Driver Count of current aggregation

I am using MongoDB for Node and am trying to aggregate a collection of documents based on some set filters and then limit it to 10. I have it aggregating just fine and limiting just fine, but I need to get the total number of the aggregated documents before I limit them to 10.
Here is my code.
var qry = [];
if (filter.FocusArea && filter.FocusArea != "(None)") {
qry.push({
$match: { 'ProgramAreaId': filter.FocusArea }
});
}
if (filter.Status && filter.Status != "(None)") {
qry.push({
$match: { 'StatusId': filter.Status }
});
}
if (filter.ProgOfficer && filter.ProgOfficer != "(None)") {
qry.push({
$match: { 'ProgramOfficerId': filter.ProgOfficer }
});
}
if (filter.Fund && filter.Fund != "(None)") {
qry.push({
$match: { 'FundId': filter.Fund }
});
}
var skipNbr = (parseInt(filter.Page) * 10 - 10);
qry.push({ $project: { _id: '$_id', count: { $sum: '$$ROOT' }, content: '$$ROOT'} }) // I would like to do the count here.
qry.push({ $skip: skipNbr })
qry.push({ $limit: 10 })
var apps = mongo.collection('Applications').aggregate(qry, function(err, docs) {
callback(docs);
});
Can this be done in one aggregation query or does it need to be split into two?
It's possible to do so in a single query.
You can project the filtered array using $project into two different fields: one with the content and one with the count.
You can use $slice to limit the content array.
db.collection.aggregate([
{
$match: {} // Filter
},
{
$group: {
_id: 1,
array: {$push: '$$ROOT'}
}
},
{
$project: {
content: {
$slice: ['$array', skip, limit]
},
total: {
$size: '$array'
}
}
}
], {allowDiskUse: true})
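Applied to the question's dynamically built pipeline, the same idea might look like this (a sketch, assuming the existing qry, skipNbr, and the callback-style driver call used in the question):
// Sketch: append the count/slice stages instead of $skip/$limit,
// reusing the qry array and skipNbr computed above.
qry.push({
  $group: {
    _id: 1,
    array: { $push: '$$ROOT' }
  }
});
qry.push({
  $project: {
    content: { $slice: ['$array', skipNbr, 10] },
    total: { $size: '$array' }
  }
});
mongo.collection('Applications').aggregate(qry, { allowDiskUse: true }, function (err, docs) {
  // docs[0].content is the current page, docs[0].total the full count
  callback(docs);
});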

skipped count 0 in aggregate function

I've been stuck on this for a couple of days. I'm trying to get count: 0 where there are no documents in the given time period. This is the aggregate function I'm using at the moment:
var getCount = function(timeBlock, start, end, cb) {
Document.aggregate(
{
$match: {
time: {
$gte: new Date(start),
$lt: new Date(end)
}
}
},
{
$project: {
time: 1,
delta: { $subtract: [
new Date(end),
'$time'
]}
}
},
{
$project: {
time: 1,
delta: { $subtract: [
"$delta",
{ $mod: [
"$delta",
timeBlock
]}
]}
}
},
{
$group: {
_id: { $subtract: [
end,
"$delta"
]},
count: { $sum: 1 }
}
},
{
$project: {
time: "$_id",
count: 1,
_id: 0
}
},
{
$sort: {
time: 1
}
}, function(err, results) {
if (err) {
cb(err)
} else {
cb(null, results)
}
})
}
I tried using $cond, but with no luck
The group stage is producing documents based on grouping on your given _id and counting the number of documents from the previous stage that end up in the group. Hence, a count of zero would be the result of a document being created from 0 input documents belonging to the group. Thinking about it this way, it's clear that there's no way the aggregation pipeline can do this for you. It doesn't know what all of the "missing" time periods are and it can't invent the appropriate documents out of thin air. Reapplying your extra knowledge about the missing time periods to complete the picture at the end seems like a reasonable solution (not "hacky") if you need to have an explicit count of 0 for empty time periods.
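For instance, that merge could be as simple as pre-filling every bucket with 0 and overlaying the aggregation output; a sketch of the idea, assuming results, start, end, and timeBlock as in the question:
// Sketch: pre-fill every time block with count 0, then overlay the
// aggregation results so "missing" periods keep their zero.
// Bucket keys follow the same round-up convention as the question's pipeline.
var filled = {};
for (var t = start + timeBlock; t <= end; t += timeBlock) {
  filled[t] = 0;
}
results.forEach(function (doc) {
  filled[doc.time] = doc.count;
});
var complete = Object.keys(filled).map(function (key) {
  return { time: Number(key), count: filled[key] };
});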
As has already been said, the best thing to do here is to "merge" your results in post-processing, rather than expect "keys" that do not exist to appear, or to issue multiple queries with explicit keys that will possibly not aggregate any results and then combine them.
What has not already been said is how you actually do this, so I'll give you a MongoDB "thinking" kind of way to collect your results.
As a quick disclaimer, you could possibly employ much the same approach by "seeding" empty keys for each interval using mapReduce, or possibly even altering your data so that there is always an empty value within each possible block. Those approaches seem basically "hacky", and in the mapReduce case are not going to provide the best performance or multiple results.
What I would suggest is that working with collection results in a MongoDB way of thinking can be made simple. There is a neat little solution called NeDB, which is billed as a kind of SQLite for MongoDB. It supports a subset of the functionality and is therefore perfect for "in memory" manipulation of results with a MongoDB mindset:
var async = require('async'),
mongoose = require('mongoose'),
DataStore = require('nedb'),
Schema = mongoose.Schema;
var documentSchema = new Schema({
time: { type: Date, default: Date.now }
});
var Document = mongoose.model( "Document", documentSchema );
mongoose.connect('mongodb://localhost/test');
var getCount = function(timeBlock, start, end, callback) {
async.waterfall(
[
// Fill a blank series
function(callback) {
var db = new DataStore();
var current = start;
async.whilst(
function() { return current < end },
function(callback) {
var delta = end - current;
db.insert({ "_id": end - delta, "count": 0 },function(err,doc) {
//console.log( doc );
current += timeBlock;
callback(err);
});
},
function(err) {
callback(err,db);
}
);
},
// Get data and update
function(db,callback) {
var cursor = Document.collection.aggregate(
[
// Match documents
{ "$match": {
"time": {
"$gte": new Date(start),
"$lt": new Date(end)
}
}},
// Group. 1 step and less hacky
{ "$group": {
"_id": {
"$let": {
"vars": {
"delta": {
"$subtract": [
{ "$subtract": [ new Date(end), "$time" ] },
{ "$mod": [
{ "$subtract": [ new Date(end), "$time" ] },
timeBlock
]}
]
}
},
"in": { "$subtract": [ end, "$$delta" ] }
}
},
"count": { "$sum": 1 }
}}
],
{ "cursor": { "batchSize": 100 } }
);
cursor.on("data",function(item) {
cursor.pause();
console.log( "called" );
db.update(
{ "_id": item._id },
{ "$inc": { "count": item.count } },
{ "upsert": true },
function(err) {
cursor.resume();
}
);
});
cursor.on("end",function() {
console.log( "done" );
db.find({},function(err,result) {
callback(err,result);
});
});
}
],
function(err,result) {
callback(err,result);
}
);
}
mongoose.connection.on("open", function(err,conn) {
getCount(
1000 * 60 * 60, // each hour
new Date("2014-07-01").valueOf(), // start
new Date("2014-07-02").valueOf(), // end
function(err,result) {
if (err) throw err;
console.log( result );
}
);
});
So essentially you create each interval as an in-memory collection record and then just update those interval records with the actual data retrieved. I can't think of another way to do this that would be more simple and natural to this way of thinking.
Just a footnote, the "interval" logic is just replicated from your question, but in fact the time periods are "rounded up" where 15 minutes would appear in hour 1. It usually is the practice to round down so that everything belongs to the interval it falls in and not the next one.
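To illustrate that footnote, a rounded-down bucket key can be derived from the start of the range rather than the end; this is just a sketch of the arithmetic, not a drop-in change to the pipeline above:
// Round down: a document at start + 15 minutes lands in the bucket that
// begins at `start`, not the one that ends an hour later.
var bucketStart = function (time, start, timeBlock) {
  return start + Math.floor((time - start) / timeBlock) * timeBlock;
};
// e.g. with hourly blocks, 00:15 maps to the 00:00 bucket rather than 01:00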
This is the hacky fix I did for now:
var getCount = function(timeBlock, start, end, cb) {
Document.aggregate(
{
$match: {
time: {
$gte: new Date(start),
$lt: new Date(end)
}
}
},
{
$project: {
time: 1,
delta: { $subtract: [
new Date(end),
'$time'
]}
}
},
{
$project: {
time: 1,
delta: { $subtract: [
"$delta",
{ $mod: [
"$delta",
timeBlock
]}
]}
}
},
{
$group: {
_id: { $subtract: [
end,
"$delta"
]},
count: { $sum: 1 }
}
},
{
$project: {
time: "$_id",
count: 1,
_id: 0
}
},
{
$sort: {
time: 1
}
}, function(err, results) {
if (err) {
cb(err)
} else {
// really hacky way
var numOfTimeBlocks = ( end - start ) / timeBlock
// in case there are no 0s in the given period of time there is no need
// to iterate through all of the results
if ( results.length === numOfTimeBlocks ) {
cb(results);
} else {
var time = start;
var details = [];
var times = results.map(function(item) {
return item.time;
});
for( var i = 0; i < numOfTimeBlocks; i++) {
time += timeBlock;
var idx = times.indexOf(time);
if (idx > -1) {
details.push(results[idx]);
} else {
var documentCount = { count: 0, time: time };
details.push(documentCount);
}
}
cb(details);
}
}
})
}
I was also thinking about doing one query per time block, which gives the same result but I think is inefficient because you query the database N times.
