Insert value inside array within MongoDB documents using bulk write [duplicate] - javascript

I want to show products by their ids (56e641d4864e5b780bb992c6 and 56e65504a323ee0812e511f2) and show the price after the discount has been subtracted, if a discount is available.
I can compute the final price using aggregate, but this returns every document in the collection. How do I make it return only the matching ids?
"_id" : ObjectId("56e641d4864e5b780bb992c6"),
"title" : "Keyboard",
"discount" : NumberInt(10),
"price" : NumberInt(1000)
"_id" : ObjectId("56e65504a323ee0812e511f2"),
"title" : "Mouse",
"discount" : NumberInt(0),
"price" : NumberInt(1000)
"_id" : ObjectId("56d90714a48d2eb40cc601a5"),
"title" : "Speaker",
"discount" : NumberInt(10),
"price" : NumberInt(1000)
This is my query:
productModel.aggregate([
    {
        $project: {
            title: 1,
            price: {
                $cond: {
                    if: { $gt: ["$discount", 0] },
                    then: { $subtract: ["$price", { $divide: [{ $multiply: ["$price", "$discount"] }, 100] }] },
                    else: "$price"
                }
            }
        }
    }
], function(err, docs){
    if (err){
        console.log(err)
    } else {
        console.log(docs)
    }
})
And if I add this $in query, it returns an empty array:
productModel.aggregate([
    {
        $match: { _id: { $in: ids } }
    },
    {
        $project: {
            title: 1,
            price: {
                $cond: {
                    if: { $gt: ["$discount", 0] },
                    then: { $subtract: ["$price", { $divide: [{ $multiply: ["$price", "$discount"] }, 100] }] },
                    else: "$price"
                }
            }
        }
    }
], function(err, docs){
    if (err){
        console.log(err)
    } else {
        console.log(docs)
    }
})

Your ids variable will be constructed of "strings", and not ObjectId values.
Mongoose "autocasts" string values for ObjectId into their correct type in regular queries, but this does not happen in the aggregation pipeline, as in described in issue #1399.
Instead you must do the correct casting to type manually:
ids = ids.map(function(el) { return mongoose.Types.ObjectId(el) })
Then you can use them in your pipeline stage:
{ "$match": { "_id": { "$in": ids } } }
The reason is because aggregation pipelines "typically" alter the document structure, and therefore mongoose makes no presumption that the "schema" applies to the document in any given pipeline stage.
It is arguable that the "first" pipeline stage when it is a $match stage should do this, since indeed the document is not altered. But right now this is not how it happens.
Any values that may possibly be "strings" or at least not the correct BSON type need to be manually cast in order to match.
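Putting the pieces together, a minimal sketch of the corrected query from the question, with the id strings cast up front (older Mongoose callback style, matching the original code):
var mongoose = require('mongoose');

// cast the plain id strings to ObjectId values before using them in the pipeline
var ids = [
    "56e641d4864e5b780bb992c6",
    "56e65504a323ee0812e511f2"
].map(function(el) { return mongoose.Types.ObjectId(el); });

productModel.aggregate([
    // now $match compares ObjectId against ObjectId
    { "$match": { "_id": { "$in": ids } } },
    { "$project": {
        "title": 1,
        "price": {
            "$cond": {
                "if": { "$gt": ["$discount", 0] },
                "then": { "$subtract": ["$price", { "$divide": [{ "$multiply": ["$price", "$discount"] }, 100] }] },
                "else": "$price"
            }
        }
    }}
], function(err, docs) {
    if (err) {
        console.log(err);
    } else {
        console.log(docs); // only the Keyboard and Mouse documents, with the discounted price
    }
});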

In Mongoose, it works fine with find({_id:'606c1ceb362b366a841171dc'}).
But when using the aggregate function we have to use the mongoose object to convert the _id into an ObjectId, e.g.
$match: { "_id": mongoose.Types.ObjectId("606c1ceb362b366a841171dc") }
This will work fine.

You can simply convert your id to
let id = mongoose.Types.ObjectId(req.query.id);
and then match
{ $match: { _id: id } },

instead of:
$match: { _id: "6230415bf48824667a417d56" }
use:
$match: { _id: ObjectId("6230415bf48824667a417d56") }

Use this:
$match: { _id: { $in: [ mongoose.Types.ObjectId("56e641d4864e5b780bb992c6"), mongoose.Types.ObjectId("56e65504a323ee0812e511f2") ] } }
Because Mongoose autocasts string values for ObjectId into their correct type in regular queries, but this does not happen in the aggregation pipeline, so we need to cast to ObjectId explicitly in pipeline queries.

Related

How to delete all documents except most recent for each group in mongo? [SOLVED]

I have a collection with this data registered
{ _id: 0000120210903, iid: 00001, date: 20210903 },
{ _id: 0000220210903, iid: 00002, date: 20210903 },
{ _id: 0000120210101, iid: 00001, date: 20210101 }
I want to delete all except the document with the most recent date for each iid.
My idea is to group by iid, select the _id of the record with the max(date), and then delete all except this array of _ids. But I can't figure out how to do it.
db.getCollection('testing_data').aggregate(
    { $sort: { _id: 1 } },
    { $group: {
        _id: "$iid",
        lastId: { "$last": "$_id" }
    }},
    { $project: { _id: 0, lastId: 1 } }
)
But I don't know where to go from here. Any help is greatly appreciated.
[Solution]
To fix the problem I used an aggregation to recover, as an array, the combination of the field iid (the identifier shared between documents) and the unique _id.
Then, for each element in the array, it performs a deleteMany operation on the iid, leaving out the most recent _id. In this case I sort by _id because it includes the date, but I could also sort by the field date.
Due to the high volume of data, { allowDiskUse: true } had to be passed to the aggregate.
var ids = db.getCollection('testing_data').aggregate([
    { $sort: { _id: 1 } },
    { $group: {
        _id: "$iid",
        lastId: { "$last": "$_id" }
    }},
    { $project: { _id: 1, lastId: 1 } }
], { allowDiskUse: true }).toArray();

ids.forEach(function(x){
    db.getCollection('testing_data').deleteMany({ "iid": x._id, "_id": { $ne: x.lastId } })
});
My idea is to just collect all the _ids you want to delete in an array, and then use deleteMany with an $or filter:
db.getCollection("testing_data").find({}).toArray((err, data) => {
    let to_elim = [];      // filters for the documents to delete
    let filtering = {};    // most recent document seen so far per iid
    for (let el of data) {
        if (!filtering[el.iid]) {
            filtering[el.iid] = el;
        } else if (filtering[el.iid].date > el.date) {
            // the stored document is more recent, so this one can go
            to_elim.push({ _id: el._id });
        } else {
            // this document is more recent: mark the stored one for deletion and keep this one
            to_elim.push({ _id: filtering[el.iid]._id });
            filtering[el.iid] = el;
        }
    }
    if (to_elim.length) db.getCollection("testing_data").deleteMany({ $or: to_elim });
})
I hope that all is written correctly, because I wrote it all down on mobile.

mongodb - $lookup pipeline using COLLSCAN instead of index

I'm trying to use indexing on my $lookup pipeline but it doesn't seem to be working as intended.
Here's my query:
db.map_levels.explain().aggregate([
    {
        $lookup: {
            from: "map_level_revisions",
            pipeline: [
                {
                    $match: {
                        $expr: {
                            $eq: [
                                "$account_id",
                                ObjectId("5b66ca21d6b54f479bef62a4")
                            ]
                        }
                    }
                }
            ],
            as: "revisions"
        }
    }
])
and here's the explanation:
{
    "stages" : [
        {
            "$cursor" : {
                "query" : { },
                "queryPlanner" : {
                    "plannerVersion" : 1,
                    "namespace" : "test-creator.map_levels",
                    "indexFilterSet" : false,
                    "parsedQuery" : { },
                    "winningPlan" : {
                        "stage" : "COLLSCAN",
                        "direction" : "forward"
                    },
                    "rejectedPlans" : [ ]
                }
            }
        },
        {
            "$lookup" : {
                "from" : "map_level_revisions",
                "as" : "revisions",
                "let" : { },
                "pipeline" : [
                    {
                        "$match" : {
                            "$expr" : {
                                "$eq" : [
                                    "$account_id",
                                    ObjectId("5b66ca21d6b54f479bef62a4")
                                ]
                            }
                        }
                    }
                ]
            }
        }
    ],
    "ok" : 1
}
How do I make it use an index instead?
Just a note, the query does return the document.
The collection scan in your explain output is referring to the map_levels collection, as noted in the queryPlanner.namespace value. The $lookup stage merges data from another collection into the current pipeline. Since you haven't specified any query stages before the $lookup, the map_levels collection will be iterated using a collection scan. If an entire collection is being loaded without any filtering or sort criteria, a collection scan has less overhead than iterating an index and fetching the documents.
You can avoid the current collection scan by adding a $match stage before your $lookup (assuming you don't want to process the full map_levels collection).
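For example, a rough sketch of that approach, assuming map_levels itself has an account_id field with an index on it (the field name is only illustrative here):
db.map_levels.aggregate([
    // hypothetical filter: with an index on account_id this stage becomes an IXSCAN
    // instead of the COLLSCAN shown in the explain output above
    { $match: { account_id: ObjectId("5b66ca21d6b54f479bef62a4") } },
    { $lookup: {
        from: "map_level_revisions",
        pipeline: [
            { $match: { $expr: { $eq: ["$account_id", ObjectId("5b66ca21d6b54f479bef62a4")] } } }
        ],
        as: "revisions"
    }}
])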
How can I check the index used by $lookup?
Unfortunately query explain output does not (as at MongoDB 4.0) indicate index usage for $lookup stages. A workaround for this would be running explain using your lookup's pipeline as a top level aggregation query.
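A rough sketch of that workaround, running the lookup's inner pipeline as its own aggregation against the from collection:
db.map_level_revisions.explain().aggregate([
    // the same $match used inside the $lookup, now explainable on its own
    { $match: { $expr: { $eq: ["$account_id", ObjectId("5b66ca21d6b54f479bef62a4")] } } }
])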
There's a relevant issue to watch/upvote in the MongoDB Issue tracker: SERVER-22622: Improve $lookup explain to indicate query plan on the "from" collection.

mongodb: Trying to push a value in mongodb

I'm trying to $push a value in an array inside mongodb. This is the query:
db.user.update(
{ "_id": 5ac34a3c23f1b0343c155782 },
{ $push: { "courseId":5acd192a3ff62a287452891f}});
The first id, after _id is coming from a user model with this parameter, "_id" : ObjectId("5acb75a9e773ed2c10d5caa8").
The second id, after courseId is coming from this parameter "_id" : ObjectId("5acd192a3ff62a287452891f").
Desired user model should look like:
"_id" : ObjectId("5ac34a3c23f1b0343c155782"),
"tokens" : [ ],
"courseId" : [
"5ac37f5cd494e905b86c2433",
"<new id>"
],
Where am I going wrong?
db.user.update({ "_id": 5ac34a3c23f1b0343c155782 }, { $push: { "courseId":5acd192a3ff62a287452891f}})
You may need to use "_id": ObjectId("5ac34a3c23f1b0343c155782") instead of just the raw alphanumeric value.
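For example, a minimal sketch of the same update with the _id properly cast (mongo shell syntax):
db.user.update(
    { "_id": ObjectId("5ac34a3c23f1b0343c155782") },
    { $push: { "courseId": "5acd192a3ff62a287452891f" } }
);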
See if the following snippet works for you:
//following query will push to courseId
let modifier = {$push: { "courseId" :"5acd192a3ff62a287452891f"} };
let options = {returnOriginal: false};
db.collection('user').findOneAndUpdate({_id: ObjectId('5ac34a3c23f1b0343c155782')}, modifier, options)
.then(function(op) {
// see success or failure here in op
console.log(op);
})

Nested array query using mongoose

I'm using mongoose (with nodejs) to make my queries.
I got the following Schemas for my database model (minified ofc):
var HistorySchema = new Schema({
status : String,
time : Date
});
var TaskSchema = new Schema({
game_id : Schema.Types.ObjectId,
history : [HistorySchema]
}, {collection: 'task'});
Now I want to give an example document (an insertion) that I will use to show my problem and what I want:
(Insertion)
{
"_id" : ObjectId("5772ca87439632101510fa6b"),
"history" :
[
{
"status" : "open",
"time" : ISODate("2016-06-25T12:17:46.982Z")
},
{
"status" : "complete",
"time" : ISODate("2016-06-30T12:17:46.982Z")
}
]
}
so far so good... Now I have a given date, in this case:
ISODate("2016-06-28T12:17:46.982Z")
Now I want to get from my collection all TaskSchema objects, including only the matching HistorySchema objects from the array. So I want to exclude the non-matching entries in the history array.
I tried many things, like a $pull operation:
db.task.find(
{
"game_id": ObjectId("57711397893a97aa170aa983"),
"history.time":{
$lte: ISODate("2016-06-28T12:17:46.982Z")
}
},{$pull: {
"history": {
time: {
$gte: ISODate("2016-06-28T12:17:46.982Z")
}
}
}
})
but then I get errors like
Unsupported projection option: $pull: { history: { time: { $gte: new Date(1467116266982) } } }
Does anyone know how I could realize this query? I'm working now for days on this and couldn't find any help.
Thanks in advance!
One solution is to use the aggregation framework:
db.task.aggregate([
{$match: {_id : ObjectId("5772ca87439632101510fa6b")}},
{$unwind : "$history"},
{$match :{"history.time" :
{
$lte: ISODate("2016-06-30T12:17:46.982Z"),
$gte: ISODate("2016-06-01T12:17:46.982Z")
}
}
},
{$group:
{
_id:"$_id",
history: { $push: { status: "$history.status", time: "$history.time" } }
}
}
]);
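As an alternative sketch (not from the original answer), on MongoDB 3.2+ the same result can be had without $unwind/$group by filtering the array in place with $filter:
db.task.aggregate([
    { $match: { _id: ObjectId("5772ca87439632101510fa6b") } },
    { $project: {
        history: {
            $filter: {
                input: "$history",
                as: "h",
                // keep only the history entries at or before the given date
                cond: { $lte: ["$$h.time", ISODate("2016-06-28T12:17:46.982Z")] }
            }
        }
    }}
]);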

How to filter $match equals to object value using aggregate query in mongoose

I have the following Record in Mongo Database
{
"_id" : ObjectId("54a0d4c5bffabd6a179834eb"),
"is_afternoon_scheduled" : true,
"employee_id" : ObjectId("546f0a06c7555ae310ae925a"),
"currDate" : ISODate("2014-12-28T18:30:00Z"),
"modified_date" : ISODate("2014-12-29T04:12:53.677Z"),
"modified_by" : ObjectId("541a9c223416b36f67cfbfe8"),
"__v" : 0,
"manager_schedule" : {
"afternoon_schedule_details" : {
"event" : ObjectId("54507897cecff53914c82b6d"),
"is_afternoon_scheduled" : true
},
"modified_by" : ObjectId("541a9c223416b36f67cfbfe8"),
"modified_date" : ISODate("2014-12-29T04:13:00.432Z")
}
}
I would like to filter the aggregate with $match equal to employee_id. I am using the query below in Mongoose but I don't get any result; something is wrong with the method or the query. However, when I run this query directly in MongoDB I get the correct result. Need help with this.
Availability.aggregate()
.match( { employee_id : "546f0a06c7555ae310ae925a" } )
.group({_id : "$employee_id",count: { $sum: 1 }})
.exec(function (err, response) {
if (err) console.log(err);
res.json({"message": "success", "data": response, "status_code": "200"});
}
);
Use the function mongoose.Types.ObjectId() to obtain the ObjectId() for the string and then use it
to query in the $match stage.
var val = mongoose.Types.ObjectId("546f0a06c7555ae310ae925a");
Availability.aggregate()
.match( { employee_id : val} )
.group({_id : "$employee_id",count: { $sum: 1 }})
.exec(function (err, response) {
if (err) console.log(err);
res.json({"message": "success", "data": response,
"status_code": "200"});
}
);
It would be kind of nice if the Mongoose .match() helper here did something like what the other query and update helpers do, but I guess there is a good reason why it doesn't.
You can just throw a string like this at a regular .find() query in Mongoose because the schema defined for the model you use is applied to "cast" the types of any fields used in the query object. So it would just 'coerce' the string into an ObjectId value.
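For instance, a minimal sketch of a plain query that works with the string because the schema casts it:
// Mongoose casts the string to ObjectId using the schema before running the query
Availability.find({ employee_id: "546f0a06c7555ae310ae925a" }, function (err, docs) {
    console.log(docs);
});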
Maybe some smarts could be built in here to realize that this was the "first" pipeline stage, but the main reason this does not work in the same way is because the aggregation framework is meant to "alter" documents away from the defined schema structure they originally come with.
So you need to import the ObjectId() function and use that to cast the value properly:
var ObjectId = require('mongodb').ObjectID;
Availability.aggregate()
.match( { employee_id : ObjectId("546f0a06c7555ae310ae925a") } )
.group({_id : "$employee_id",count: { $sum: 1 }})
.exec(function (err, response) {
if (err) console.log(err);
res.json({"message": "success", "data": response, "status_code": "200"});
}
);
You need to do this manually because it cannot do it for you.
