Multiple schema references in single schema array - mongoose - javascript

Can you populate an array in a mongoose schema with references to a few different schema options?
To clarify the question a bit, say I have the following schemas:
var scenarioSchema = Schema({
_id : Number,
name : String,
guns : []
});
var ak47 = Schema({
_id : Number
//Bunch of AK specific parameters
});
var m16 = Schema({
_id : Number
//Bunch of M16 specific parameters
});
Can I populate the guns array with a bunch of ak47 OR m16? Can I put BOTH in the same guns array? Or does it require a populate ref in the guns array, like this, which limits it to a single specific type?
guns: [{ type: Schema.Types.ObjectId, ref: 'm16' }]
I know I could just have separate arrays for different gun types but that will create an insane amount of extra fields in the schema as the project scales, most of which would be left empty depending on the loaded scenario.
var scenarioSchema = Schema({
_id : Number,
name : String,
ak47s : [{ type: Schema.Types.ObjectId, ref: 'ak47' }],
m16s: [{ type: Schema.Types.ObjectId, ref: 'm16' }]
});
So back to the question, can I stick multiple schema references in a single array?

What you are looking for here is the mongoose .discriminator() method. This basically allows you to store objects of different types in the same collection, but have them as distinguishable first-class objects.
Note that the "same collection" principle here is important to how .populate() works and to the definition of the reference in the containing model, since you can really only point to "one" model for a reference; but there is some other magic that can make one model appear as many.
Example listing:
var util = require('util'),
async = require('async'),
mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.connect('mongodb://localhost/gunshow');
//mongoose.set("debug",true);
var scenarioSchema = new Schema({
"name": String,
"guns": [{ "type": Schema.Types.ObjectId, "ref": "Gun" }]
});
function BaseSchema() {
Schema.apply(this, arguments);
// Common Gun stuff
this.add({
"createdAt": { "type": Date, "default": Date.now }
});
}
util.inherits(BaseSchema, Schema);
var gunSchema = new BaseSchema();
var ak47Schema = new BaseSchema({
// Bunch of AK-47 specific stuff
});
ak47Schema.methods.shoot = function() {
return "Crack!Crack";
};
var m16Schema = new BaseSchema({
// M16 Stuff
});
m16Schema.methods.shoot = function() {
return "Blam!!"
};
var Scenario = mongoose.model("Scenario", scenarioSchema);
var Gun = mongoose.model("Gun", gunSchema );
var Ak47 = Gun.discriminator("Ak47", ak47Schema );
var M16 = Gun.discriminator("M16", m16Schema );
async.series(
[
// Cleanup
function(callback) {
async.each([Scenario,Gun],function(model,callback) {
model.remove({},callback);
},callback);
},
// Add some guns and add to scenario
function(callback) {
async.waterfall(
[
function(callback) {
async.map([Ak47,M16],function(gun,callback) {
gun.create({},callback);
},callback);
},
function(guns,callback) {
Scenario.create({
"name": "Test",
"guns": guns
},callback);
}
],
callback
);
},
// Get populated scenario
function(callback) {
Scenario.findOne().populate("guns").exec(function(err,data) {
console.log("Populated:\n%s",JSON.stringify(data,undefined,2));
// Shoot each gun for fun!
data.guns.forEach(function(gun) {
console.log("%s says %s",gun.__t,gun.shoot());
});
callback(err);
});
},
// Show the Guns collection
function(callback) {
Gun.find().exec(function(err,guns) {
console.log("Guns:\n%s", JSON.stringify(guns,undefined,2));
callback(err);
});
},
// Show magic filtering
function(callback) {
Ak47.find().exec(function(err,ak47) {
console.log("Magic!:\n%s", JSON.stringify(ak47,undefined,2));
callback(err);
});
}
],
function(err) {
if (err) throw err;
mongoose.disconnect();
}
);
And the output:
Populated:
{
"_id": "56c508069d16fab84ead921d",
"name": "Test",
"__v": 0,
"guns": [
{
"_id": "56c508069d16fab84ead921b",
"__v": 0,
"__t": "Ak47",
"createdAt": "2016-02-17T23:53:42.853Z"
},
{
"_id": "56c508069d16fab84ead921c",
"__v": 0,
"__t": "M16",
"createdAt": "2016-02-17T23:53:42.862Z"
}
]
}
Ak47 says Crack!Crack
M16 says Blam!!
Guns:
[
{
"_id": "56c508069d16fab84ead921b",
"__v": 0,
"__t": "Ak47",
"createdAt": "2016-02-17T23:53:42.853Z"
},
{
"_id": "56c508069d16fab84ead921c",
"__v": 0,
"__t": "M16",
"createdAt": "2016-02-17T23:53:42.862Z"
}
]
Magic!:
[
{
"_id": "56c508069d16fab84ead921b",
"__v": 0,
"__t": "Ak47",
"createdAt": "2016-02-17T23:53:42.853Z"
}
]
You can also uncomment the mongoose.set("debug",true) line in the listing to see how mongoose is actually constructing the calls.
So what this demonstrates is that you can apply different schemas to different first-class objects, even with different methods attached to them, just like real objects. Mongoose stores these all in a "guns" collection with the attached model, and it will contain all "types" referenced by the discriminator:
var Gun = mongoose.model("Gun", gunSchema );
var Ak47 = Gun.discriminator("Ak47", ak47Schema );
var M16 = Gun.discriminator("M16", m16Schema );
But each different "type" is also referenced with its own model in a special way. So you see that when mongoose stores and reads the object, there is a special __t field which tells it which "model" to apply, and hence which attached schema.
As one example we call the .shoot() method, which is defined differently for each model/schema. You can also still use each as a model by itself for queries or other operations, since Ak47 will automatically apply the __t value in all queries/updates.
So although the storage is in one collection, it can appear to be many collections, while also keeping the documents together for other useful operations. This is how you can apply the kind of "polymorphism" you are looking for.
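To make that automatic filtering concrete, here is a minimal sketch reusing the Gun/Ak47 models from the listing above; the date value is only a placeholder for illustration:
// Reusing the models from the listing above; "cutoff" is just an illustrative value.
var cutoff = new Date("2016-01-01");
Ak47.find({ "createdAt": { "$gte": cutoff } }).exec(function(err, rifles) {
  // Mongoose silently adds the discriminator key, so the server sees roughly
  //   guns.find({ __t: "Ak47", createdAt: { $gte: ... } })
  // and only Ak47 documents come back, even though everything lives
  // in the single "guns" collection.
  if (err) throw err;
  console.log(rifles);
});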

I'll post my solution to this question, using the same concept of discriminators.
baseSchema: the main model, containing the array field that holds multiple schemas
itemSchema: the parent schema used for the discriminators
fizzSchema & buzzSchema: the multiple schemas we want to use in the array
Model
const itemSchema = Schema({
foo: String,
}, { discriminatorKey: 'kind', _id: false});
const fizzSchema = Schema({
fizz: String,
}, { _id: false });
const buzzSchema = Schema({
buzz: String,
}, { _id: false });
const baseSchema = Schema({
items: [itemSchema],
});
baseSchema.path('items').discriminator('fizz', fizzSchema);
baseSchema.path('items').discriminator('buzz', buzzSchema);
const List = model('list', baseSchema);
Testbench
const body = {
items: [
{ foo: 'foo'},
{ foo: 'foo', fizz: 'fizz'},
{ foo: 'foo', kind: 'fizz', fizz: 'fizz'},
{ foo: 'foo', kind: 'buzz', buzz: 'buzz'},
{ kind: 'buzz', buzz: 'buzz'},
]
};
const doc = new List(body);
console.log(doc);
Output
{
items: [
{ foo: 'foo' },
{ foo: 'foo' },
{ fizz: 'fizz', foo: 'foo', kind: 'fizz' },
{ buzz: 'buzz', foo: 'foo', kind: 'buzz' },
{ buzz: 'buzz', kind: 'buzz' }
],
_id: new ObjectId("626a7b1cf2aa28008d2be5ca")
}
Can be executed here: https://replit.com/#Gabrirf/Mongoose

Related

Mongoose - Populating a nested array of objects not working

I have a collection called Orders that contains this schema:
const mongoose = require('mongoose');
const orderSchema = new mongoose.Schema({
user: {
type: mongoose.Schema.Types.ObjectId,
ref: 'User',
required: true
},
restaurant: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Restaurant',
required: true
},
dishes: [
{
dish: {
type: mongoose.Schema.Types.ObjectId,
ref: 'Dish'
},
amount: Number
}
],
price: {
type: Number,
required: true
},
comment: {
type: String,
required: false
},
status: {
type: String,
enum: ['PROCESSING', 'CANCELLED', 'COMPLETED', 'ERROR'],
default: 'PROCESSING'
},
timestamp: {
type: Date,
default: Date.now
}
})
module.exports = mongoose.model('Order', orderSchema);
Inside my router, I have this code:
let orders = await Order.find({restaurant: restaurantID, status:'PROCESSING'}).populate('dishes._id').exec()
Order.find does not throw an exception, but it isn't working either.
I want the res.body to look like this:
{
"_id": "objectID",
"user": "objectID",
"restaurant": "objectID",
"dishes": [
{
"amount": number,
"dish": {
//dish object
}
},
...
],
//other order properties
},
...
]
But for some reason the dishes array looks like this:
"dishes": [
{
"amount": 1,
"_id": "6184e848e6d1974a0569783d"
}
],
What am I doing wrong?
I know that if populate() worked, the res.body dishes array would not have a property called 'dish' and would instead have a property called _id containing the dish object, but this shouldn't be hard to change once populate() works.
EDIT:
I realised that my createOrder route could be part of the problem, since it ignores my schema and uses an id property for the ObjectID instead of dish. The array I save to the DB contains a property called id instead of dish, but shouldn't my schema throw an exception when I try to save something like this to my database?
At first glance, I think you simply have a syntax problem in the populate path.
Try
.populate('dishes').exec()
instead of
.populate('dishes._id').exec()
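For context, here is a sketch of the route query with that change applied; the commented-out nested path 'dishes.dish' (where the ref is actually declared) is another variant worth trying if plain 'dishes' still doesn't resolve:
let orders = await Order
  .find({ restaurant: restaurantID, status: 'PROCESSING' })
  .populate('dishes')           // suggested fix
  // .populate('dishes.dish')   // alternative: populate the nested ref path directly
  .exec();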

How to query for the occurrence of an element in the array in mongo DB aggregation pipeline

I'm using mongoose to connect to Mongo DB.
At first this is my schema:
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const TestSchema = new Schema({
story: String,
seenByUser: [String],
medicationStage: {
type: String,
enum: [
"no-medication",
"just-after-medication",
"towards-end-of-medication",
],
required: true,
},
});
// Compile model from schema
const TestModel = mongoose.model("Test", TestSchema);
module.exports = {
Test: TestModel,
};
Here you can see I have a field called seenByUser, which is an array of strings containing user names. I'm setting up an aggregation pipeline where, given a user name, I want to fetch all the documents where this user name does not occur in the seenByUser array, grouped by medicationStage. I'm unable to create this pipeline. Please help me out.
If I've understood correctly you can try this aggregation:
First, $match documents where yourValue does not exist in the seenByUser array.
Then $group by medicationStage. This creates a nested array (because $push pushes the whole array).
And $project to use $reduce and flatten the array.
db.collection.aggregate({
"$match": {
"seenByUser": {
"$ne": yourValue
}
}
},
{
"$group": {
"_id": "$medicationStage",
"seenByUser": {
"$push": "$seenByUser"
}
}
},
{
"$project": {
"_id": 0,
"medicationStage": "$_id",
"seenByUser": {
"$reduce": {
"input": "$seenByUser",
"initialValue": [],
"in": {
"$concatArrays": [
"$$value",
"$$this"
]
}
}
}
}
})
Example here
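A minimal sketch of running the same pipeline through the mongoose model from the question; userName is a hypothetical variable standing in for yourValue, and the call is assumed to run inside an async function:
const userName = "alice"; // hypothetical value standing in for yourValue

const grouped = await TestModel.aggregate([
  { $match: { seenByUser: { $ne: userName } } },
  { $group: {
      _id: "$medicationStage",
      seenByUser: { $push: "$seenByUser" }
  } },
  { $project: {
      _id: 0,
      medicationStage: "$_id",
      seenByUser: {
        $reduce: {
          input: "$seenByUser",
          initialValue: [],
          in: { $concatArrays: ["$$value", "$$this"] }
        }
      }
  } }
]);
console.log(grouped); // one document per medicationStage, with a flat seenByUser array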

Setting _id field to ObjectId in MongoDB before it is written to DB [duplicate]

I want to generate an ObjectID for each object present inside my array. The thing is, I'm getting the products with a .forEach statement from another server and pushing them inside my array without a schema that generates an ObjectID...
Product Schema:
const productsSchema = new mongoose.Schema({
apiKey: String,
domain: String,
totalcount: Number,
totaldone: Number,
allSKUS: Array,
allProducts: Array,
created_at: { type: Date },
updated_at: { type: Date },
}, { collection: 'products', timestamps: true });
productsSchema.plugin(uniqueValidator);
const Products = mongoose.model('Products', productsSchema);
module.exports = Products;
My Code:
const newProduct = {
apiKey: userApiProducts.apiKey,
domain: userApiProducts.domain,
totalcount: userApiProducts.totalcount,
totaldone: userApiProducts.totaldone,
allSKUS: userApiProducts.allSKUS,
allProducts: userApiProducts.allProducts // generate ObjectID for each object that gets pushed inside the Array
};
Products.findOneAndUpdate( userApiProducts.domain, newProduct, {upsert:true} , (err, existingProducts) => {
if (err) { return next(err); }
});
Output:
// Please check the ADD OBJECT ID HERE comments. This is where I want to generate a unique ObjectID before I push the data. I tried with var id = mongoose.Types.ObjectId(); but I'm afraid it will not be unique...
{
"_id" : ObjectId("58780a2c8d94cf6a32cd7530"),
"domain" : "http://example.com",
"updatedAt" : ISODate("2017-01-12T23:27:15.465Z"),
"apiKey" : "nf4fh3attn5ygkq1t",
"totalcount" : 11,
"totaldone" : 11,
"allSKUS" : [
"Primul",
"Al doilea",
"Al treilea"
],
"allProducts" : [
{
// ADD OBJECT ID HERE
"id": 1,
"sku": "Primul",
"name": "Primul",
"status": 1,
"total_images": 2,
"media_gallery_entries": [
{
"id": 1,
"media_type": "image",
"label": null,
"position": 1,
"disabled": false,
"types": [
"image",
"small_image",
"thumbnail",
"swatch_image"
],
"file": "/g/r/grafolio_angel_and_devil.png"
},
{
"id": 2,
"media_type": "image",
"label": null,
"position": 2,
"disabled": false,
"types": [],
"file": "/g/r/grafolio_angel_and_devil_thumbnail.jpg"
}
]
},
{
// ADD OBJECT ID HERE
"id": 3,
"sku": "Al doilea",
"name": "Al doilea",
"status": 1,
"total_images": 2,
"media_gallery_entries": [
{
"id": 4,
"media_type": "image",
"label": null,
"position": 2,
"disabled": false,
"types": [],
"file": "/g/r/grafolio_angel_and_devil_thumbnail_1.jpg"
},
{
"id": 5,
"media_type": "image",
"label": null,
"position": 3,
"disabled": false,
"types": [],
"file": "/b/e/before.png"
}
]
}, etc ......
],
"__v" : 0,
"createdAt" : ISODate("2017-01-12T22:58:52.524Z")
}
Is there any way of doing this without having to make a ton of DB Calls? I can't imagine saving like this
array.forEach((x)=> {
Products.save({})
})
Hope someone has already worked on something similar and found the perfect solution for this!
If you want ObjectIds added automatically, you need to define a separate schema for the array elements and set that schema's _id option to true.
Do the following:
Rename your productsSchema to CatalogueSchema (for ease of understanding).
Define a new ProductSchema for Product (the element type of allProducts).
In CatalogueSchema, define the allProducts type as [Product.schema]. This will automatically add an _id (ObjectId) to each element.
Also, you don't need to add created_at and updated_at to the schema when you set the timestamps option to true.
Catalogue Schema
const Product = require('Product_Schema_Module_Path'); // Edit
const CatalogueSchema = new mongoose.Schema({
apiKey: String,
domain: String,
totalcount: Number,
totaldone: Number,
allSKUS: Array,
allProducts: [Product.schema]
// Note the change here (Array -> [Product.schema]
// Creating a separate schema ensures automatic id (ObjectId)
}, { collection: 'catalogue', timestamps: true });
CatalogueSchema.plugin(uniqueValidator);
const Catalogue = mongoose.model('Catalogue', CatalogueSchema);
module.exports = Catalogue;
Product Schema
(New schema to ensure adding of ObjectId)
const ProductSchema = new mongoose.Schema({
id: Number,
sku: String,
name: String,
status: Number,
total_images: Number,
media_gallery_entries: Array
}, { _id: true, timestamps: true });
// _id option is true by default. You can omit it.
// If _id is set to false, it will not add ObjectId
ProductSchema.plugin(uniqueValidator);
const Product = mongoose.model('Product', ProductSchema);
module.exports = Product;
EDIT (Save Products in Catalogue)
(Also, note that you have to require the ProductSchema module in your CatalogueSchema module)
// Map userApiProducts.allProducts to array of Product documents
const products = userApiProducts.allProducts.map(product => {
return new Product(product);
})
const newProduct = {
apiKey: userApiProducts.apiKey,
domain: userApiProducts.domain,
totalcount: userApiProducts.totalcount,
totaldone: userApiProducts.totaldone,
allSKUS: userApiProducts.allSKUS,
allProducts: products
};
Catalogue
.findOneAndUpdate({ domain: userApiProducts.domain }, newProduct, { upsert:true } , (err, products) => {
// Handle error
});
To add multiple documents into Mongo you can use db.collection.insert():
https://docs.mongodb.com/manual/reference/method/db.collection.insert/
In Mongoose you can use Model.insertMany():
http://mongoosejs.com/docs/api.html#model_Model.insertMany
But keep in mind that when you have one document inside another document in Mongoose, they are not actually stored like that in Mongo. Mongo only stores the IDs of the child documents in the parent document, not their contents - and not even any info on which collection those IDs belong to.
When you use population then Mongoose actually retrieves the relevant documents from the DB in separate requests to Mongo. So, population is a concept of Mongoose. Mongo just stores the IDs, so you need to create the documents first before you can insert the IDs.
The thing that you are trying to do would be easy without using Mongoose. You can store multiple documents in one request in Mongo using your own IDs if you want and you can store another document with an array of those IDs in another request.
Of course however you do it you will get inconsistent state during the operation because Mongo doesn't support transactions.
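A rough sketch of that two-request approach with mongoose, assuming a separate Product model as in the answer above and a parent Catalogue schema whose allProducts field holds ObjectId references (rather than embedded sub-documents):
// 1. Insert the child documents first...
Product.insertMany(userApiProducts.allProducts, (err, products) => {
  if (err) { return next(err); }

  // 2. ...then store only their _ids on the parent document.
  const catalogue = {
    apiKey: userApiProducts.apiKey,
    domain: userApiProducts.domain,
    totalcount: userApiProducts.totalcount,
    totaldone: userApiProducts.totaldone,
    allSKUS: userApiProducts.allSKUS,
    allProducts: products.map(p => p._id) // references only
  };

  Catalogue.findOneAndUpdate(
    { domain: userApiProducts.domain },
    catalogue,
    { upsert: true },
    (err, doc) => { /* handle error / result */ }
  );
});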

MongoDB find() with dot notation does not work [duplicate]

I'm pretty new to Mongoose and MongoDB in general so I'm having a difficult time figuring out if something like this is possible:
Item = new Schema({
id: Schema.ObjectId,
dateCreated: { type: Date, default: Date.now },
title: { type: String, default: 'No Title' },
description: { type: String, default: 'No Description' },
tags: [ { type: Schema.ObjectId, ref: 'ItemTag' }]
});
ItemTag = new Schema({
id: Schema.ObjectId,
tagId: { type: Schema.ObjectId, ref: 'Tag' },
tagName: { type: String }
});
var query = Models.Item.find({});
query
.desc('dateCreated')
.populate('tags')
.where('tags.tagName').in(['funny', 'politics'])
.run(function(err, docs){
// docs is always empty
});
Is there a better way do this?
Edit
Apologies for any confusion. What I'm trying to do is get all Items that contain either the funny tag or politics tag.
Edit
Document without where clause:
[{
_id: 4fe90264e5caa33f04000012,
dislikes: 0,
likes: 0,
source: '/uploads/loldog.jpg',
comments: [],
tags: [{
itemId: 4fe90264e5caa33f04000012,
tagName: 'movies',
tagId: 4fe64219007e20e644000007,
_id: 4fe90270e5caa33f04000015,
dateCreated: Tue, 26 Jun 2012 00:29:36 GMT,
rating: 0,
dislikes: 0,
likes: 0
},
{
itemId: 4fe90264e5caa33f04000012,
tagName: 'funny',
tagId: 4fe64219007e20e644000002,
_id: 4fe90270e5caa33f04000017,
dateCreated: Tue, 26 Jun 2012 00:29:36 GMT,
rating: 0,
dislikes: 0,
likes: 0
}],
viewCount: 0,
rating: 0,
type: 'image',
description: null,
title: 'dogggg',
dateCreated: Tue, 26 Jun 2012 00:29:24 GMT
}, ... ]
With the where clause, I get an empty array.
With a modern MongoDB (greater than 3.2) you can use $lookup as an alternative to .populate() in most cases. This also has the advantage of actually doing the join "on the server", as opposed to .populate(), which actually runs "multiple queries" to "emulate" a join.
So .populate() is not really a "join" in the sense of how a relational database does it. The $lookup operator on the other hand, actually does the work on the server, and is more or less analogous to a "LEFT JOIN":
Item.aggregate(
[
{ "$lookup": {
"from": ItemTags.collection.name,
"localField": "tags",
"foreignField": "_id",
"as": "tags"
}},
{ "$unwind": "$tags" },
{ "$match": { "tags.tagName": { "$in": [ "funny", "politics" ] } } },
{ "$group": {
"_id": "$_id",
"dateCreated": { "$first": "$dateCreated" },
"title": { "$first": "$title" },
"description": { "$first": "$description" },
"tags": { "$push": "$tags" }
}}
],
function(err, result) {
// "tags" is now filtered by condition and "joined"
}
)
N.B. The .collection.name here actually evaluates to the "string" that is the actual name of the MongoDB collection as assigned to the model. Since mongoose "pluralizes" collection names by default and $lookup needs the actual MongoDB collection name as an argument (since it's a server operation), this is a handy trick to use in mongoose code, as opposed to "hard coding" the collection name directly.
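In other words (ItemTags being the mongoose model for the tag documents, as used above):
// The model knows its real collection name, pluralized by mongoose:
console.log(ItemTags.collection.name); // e.g. "itemtags"

// ...so the $lookup stage can be built without hard-coding that string:
// { "$lookup": { "from": ItemTags.collection.name, ... } }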
Whilst we could also use $filter on arrays to remove the unwanted items, this is actually the most efficient form due to Aggregation Pipeline Optimization for the special condition of a $lookup followed by both an $unwind and a $match condition.
This actually results in the three pipeline stages being rolled into one:
{ "$lookup" : {
"from" : "itemtags",
"as" : "tags",
"localField" : "tags",
"foreignField" : "_id",
"unwinding" : {
"preserveNullAndEmptyArrays" : false
},
"matching" : {
"tagName" : {
"$in" : [
"funny",
"politics"
]
}
}
}}
This is highly optimal as the actual operation "filters the collection to join first", then it returns the results and "unwinds" the array. Both methods are employed so the results do not break the BSON limit of 16MB, which is a constraint that the client does not have.
The only problem is that it seems "counter-intuitive" in some ways, particularly when you want the results in an array, but that is what the $group is for here, as it reconstructs to the original document form.
It's also unfortunate that we simply cannot at this time actually write $lookup in the same eventual syntax the server uses. IMHO, this is an oversight to be corrected. But for now, simply using the sequence will work and is the most viable option with the best performance and scalability.
Addendum - MongoDB 3.6 and upwards
Though the pattern shown here is fairly optimized due to how the other stages get rolled into the $lookup, it does have one failing in that the "LEFT JOIN" which is normally inherent to both $lookup and the actions of populate() is negated by the "optimal" usage of $unwind here which does not preserve empty arrays. You can add the preserveNullAndEmptyArrays option, but this negates the "optimized" sequence described above and essentially leaves all three stages intact which would normally be combined in the optimization.
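For reference, a sketch of what that $unwind stage looks like with the option set; it keeps the "LEFT JOIN" behaviour but gives up the stage coalescence described above:
// Replaces the plain { "$unwind": "$tags" } stage in the pipeline above
{ "$unwind": { "path": "$tags", "preserveNullAndEmptyArrays": true } }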
MongoDB 3.6 expands with a "more expressive" form of $lookup allowing a "sub-pipeline" expression. Which not only meets the goal of retaining the "LEFT JOIN" but still allows an optimal query to reduce results returned and with a much simplified syntax:
Item.aggregate([
{ "$lookup": {
"from": ItemTags.collection.name,
"let": { "tags": "$tags" },
"pipeline": [
{ "$match": {
"tags": { "$in": [ "politics", "funny" ] },
"$expr": { "$in": [ "$_id", "$$tags" ] }
}}
]
}}
])
The $expr used in order to match the declared "local" value with the "foreign" value is actually what MongoDB does "internally" now with the original $lookup syntax. By expressing in this form we can tailor the initial $match expression within the "sub-pipeline" ourselves.
In fact, as a true "aggregation pipeline" you can do just about anything you can do with an aggregation pipeline within this "sub-pipeline" expression, including "nesting" the levels of $lookup to other related collections.
Further usage is a bit beyond the scope of what the question here asks, but in relation to even "nested population", the new usage pattern of $lookup allows this to be much the same, and a "lot" more powerful in its full usage.
Working Example
The following gives an example using a static method on the model. Once that static method is implemented the call simply becomes:
Item.lookup(
{
path: 'tags',
query: { 'tags.tagName' : { '$in': [ 'funny', 'politics' ] } }
},
callback
)
Or enhancing to be a bit more modern even becomes:
let results = await Item.lookup({
path: 'tags',
query: { 'tagName' : { '$in': [ 'funny', 'politics' ] } }
})
Making it very similar to .populate() in structure, but it's actually doing the join on the server instead. For completeness, the usage here casts the returned data back to mongoose document instances according to both the parent and child cases.
It's fairly trivial and easy to adapt or just use as is for most common cases.
N.B. The use of async here is just for brevity of running the enclosed example. The actual implementation is free of this dependency.
const async = require('async'),
mongoose = require('mongoose'),
Schema = mongoose.Schema;
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
mongoose.connect('mongodb://localhost/looktest');
const itemTagSchema = new Schema({
tagName: String
});
const itemSchema = new Schema({
dateCreated: { type: Date, default: Date.now },
title: String,
description: String,
tags: [{ type: Schema.Types.ObjectId, ref: 'ItemTag' }]
});
itemSchema.statics.lookup = function(opt,callback) {
let rel =
mongoose.model(this.schema.path(opt.path).caster.options.ref);
let group = { "$group": { } };
this.schema.eachPath(p =>
group.$group[p] = (p === "_id") ? "$_id" :
(p === opt.path) ? { "$push": `$${p}` } : { "$first": `$${p}` });
let pipeline = [
{ "$lookup": {
"from": rel.collection.name,
"as": opt.path,
"localField": opt.path,
"foreignField": "_id"
}},
{ "$unwind": `$${opt.path}` },
{ "$match": opt.query },
group
];
this.aggregate(pipeline,(err,result) => {
if (err) callback(err);
result = result.map(m => {
m[opt.path] = m[opt.path].map(r => rel(r));
return this(m);
});
callback(err,result);
});
}
const Item = mongoose.model('Item', itemSchema);
const ItemTag = mongoose.model('ItemTag', itemTagSchema);
function log(body) {
console.log(JSON.stringify(body, undefined, 2))
}
async.series(
[
// Clean data
(callback) => async.each(mongoose.models,(model,callback) =>
model.remove({},callback),callback),
// Create tags and items
(callback) =>
async.waterfall(
[
(callback) =>
ItemTag.create([{ "tagName": "movies" }, { "tagName": "funny" }],
callback),
(tags, callback) =>
Item.create({ "title": "Something","description": "An item",
"tags": tags },callback)
],
callback
),
// Query with our static
(callback) =>
Item.lookup(
{
path: 'tags',
query: { 'tags.tagName' : { '$in': [ 'funny', 'politics' ] } }
},
callback
)
],
(err,results) => {
if (err) throw err;
let result = results.pop();
log(result);
mongoose.disconnect();
}
)
Or a little more modern for Node 8.x and above with async/await and no additional dependencies:
const { Schema } = mongoose = require('mongoose');
const uri = 'mongodb://localhost/looktest';
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const itemTagSchema = new Schema({
tagName: String
});
const itemSchema = new Schema({
dateCreated: { type: Date, default: Date.now },
title: String,
description: String,
tags: [{ type: Schema.Types.ObjectId, ref: 'ItemTag' }]
});
itemSchema.statics.lookup = function(opt) {
let rel =
mongoose.model(this.schema.path(opt.path).caster.options.ref);
let group = { "$group": { } };
this.schema.eachPath(p =>
group.$group[p] = (p === "_id") ? "$_id" :
(p === opt.path) ? { "$push": `$${p}` } : { "$first": `$${p}` });
let pipeline = [
{ "$lookup": {
"from": rel.collection.name,
"as": opt.path,
"localField": opt.path,
"foreignField": "_id"
}},
{ "$unwind": `$${opt.path}` },
{ "$match": opt.query },
group
];
return this.aggregate(pipeline).exec().then(r => r.map(m =>
this({ ...m, [opt.path]: m[opt.path].map(r => rel(r)) })
));
}
const Item = mongoose.model('Item', itemSchema);
const ItemTag = mongoose.model('ItemTag', itemTagSchema);
const log = body => console.log(JSON.stringify(body, undefined, 2));
(async function() {
try {
const conn = await mongoose.connect(uri);
// Clean data
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
// Create tags and items
const tags = await ItemTag.create(
["movies", "funny"].map(tagName =>({ tagName }))
);
const item = await Item.create({
"title": "Something",
"description": "An item",
tags
});
// Query with our static
const result = (await Item.lookup({
path: 'tags',
query: { 'tags.tagName' : { '$in': [ 'funny', 'politics' ] } }
})).pop();
log(result);
mongoose.disconnect();
} catch (e) {
console.error(e);
} finally {
process.exit()
}
})()
And from MongoDB 3.6 and upward, even without the $unwind and $group building:
const { Schema, Types: { ObjectId } } = mongoose = require('mongoose');
const uri = 'mongodb://localhost/looktest';
mongoose.Promise = global.Promise;
mongoose.set('debug', true);
const itemTagSchema = new Schema({
tagName: String
});
const itemSchema = new Schema({
title: String,
description: String,
tags: [{ type: Schema.Types.ObjectId, ref: 'ItemTag' }]
},{ timestamps: true });
itemSchema.statics.lookup = function({ path, query }) {
let rel =
mongoose.model(this.schema.path(path).caster.options.ref);
// MongoDB 3.6 and up $lookup with sub-pipeline
let pipeline = [
{ "$lookup": {
"from": rel.collection.name,
"as": path,
"let": { [path]: `$${path}` },
"pipeline": [
{ "$match": {
...query,
"$expr": { "$in": [ "$_id", `$$${path}` ] }
}}
]
}}
];
return this.aggregate(pipeline).exec().then(r => r.map(m =>
this({ ...m, [path]: m[path].map(r => rel(r)) })
));
};
const Item = mongoose.model('Item', itemSchema);
const ItemTag = mongoose.model('ItemTag', itemTagSchema);
const log = body => console.log(JSON.stringify(body, undefined, 2));
(async function() {
try {
const conn = await mongoose.connect(uri);
// Clean data
await Promise.all(Object.entries(conn.models).map(([k,m]) => m.remove()));
// Create tags and items
const tags = await ItemTag.insertMany(
["movies", "funny"].map(tagName => ({ tagName }))
);
const item = await Item.create({
"title": "Something",
"description": "An item",
tags
});
// Query with our static
let result = (await Item.lookup({
path: 'tags',
query: { 'tagName': { '$in': [ 'funny', 'politics' ] } }
})).pop();
log(result);
await mongoose.disconnect();
} catch(e) {
console.error(e)
} finally {
process.exit()
}
})()
What you are asking for isn't directly supported, but it can be achieved by adding another filter step after the query returns.
First, .populate( 'tags', null, { tagName: { $in: ['funny', 'politics'] } } ) is definitely what you need to do to filter the tags documents. Then, after the query returns, you'll need to manually filter out documents that don't have any tags docs that matched the populate criteria. Something like:
query....
.exec(function(err, docs){
docs = docs.filter(function(doc){
return doc.tags.length;
})
// do stuff with docs
});
Try replacing
.populate('tags').where('tags.tagName').in(['funny', 'politics'])
by
.populate( 'tags', null, { tagName: { $in: ['funny', 'politics'] } } )
Update: Please take a look at the comments - this answer does not correctly address the question, but it may answer other questions of users who come across it (I assume so because of the upvotes), so I will not delete this "answer":
First: I know this question is really outdated, but I searched for exactly this problem and this SO post was the Google entry #1. So I implemented the docs.filter version (accepted answer), but as I read in the mongoose v4.6.0 docs we can now simply use:
Item.find({}).populate({
path: 'tags',
match: { tagName: { $in: ['funny', 'politics'] }}
}).exec((err, items) => {
console.log(items[0].tags)
// each item's tags array now contains only tags where tagName is 'funny' or 'politics'
})
Hope this helps future search machine users.
After having the same problem myself recently, I've come up with the following solution:
First, find all ItemTags where tagName is either 'funny' or 'politics' and return an array of ItemTag _ids.
Then, find Items which contain all ItemTag _ids in the tags array
ItemTag
.find({ tagName : { $in : ['funny','politics'] } })
.lean()
.distinct('_id')
.exec((err, itemTagIds) => {
if (err) { console.error(err); }
Item.find({ tags: { $all: itemTagIds } }, (err, items) => {
console.log(items); // Items filtered by tagName
});
});
@aaronheckmann's answer worked for me, but I had to replace return doc.tags.length; with return doc.tags != null; because that field contains null if it doesn't match the conditions written inside populate.
So the final code:
query....
.exec(function(err, docs){
docs = docs.filter(function(doc){
return doc.tags != null;
})
// do stuff with docs
});

Append inside Schemaless Array

I am new to MongoDB and so far just playing around with it, but I have run into a problem: I'm having a hard time trying to append multiple objects inside a schema-less array. So far I tried $push to append multiple objects inside the array but got a Mongo error:
[MongoError: Can't use $push/$pushALL within non-array]
I don't know why I am getting this error when using $push with an array.
Schema:
EventTypeSchema = new Schema({
type: String,
eventID: {
type: Schema.Types.ObjectId,
ref: 'User'
}
});
PersonSchema = new Schema({
PersonID: {
type: Schema.Types.ObjectId,
ref: 'User'
},
Invitation: [ ] //Schema-less
});
In Controller i have Access to both EventType and Person Model
Controller:
exports.update = function(req,res){
var event = new EventType();
event.type = 'EVENT';
event.eventID = req.body.eventid;
var query = {'PersonID': req.body.personid};
var update = {$push:{'Invitation': event}};
Person.update(query,update,function(err,user){...})
};
For debugging purposes I tried to give a Mixed-type schema for the array, but didn't get it to work:
PersonSchema = new Schema({
PersonID: {
type: Schema.Types.ObjectId,
ref: 'User'
},
Invitation: [ {
type: Schema.Types.Mixed
} ]
});
When I removed $push from the update, the whole event object did get inside Invitation. The reason I created a schema-less array is that I am dealing with different types of invitations; here I only described the event invitation, but there are other types I handle, like user invitations for requests and conference invitations, so there would be a combination of different ObjectIds. I think there should be a way to append to a schema-less array in MongoDB.
EDIT:
The following is what I came up with. Not able to get it to work though.
function PortalTypes() {
Schema.apply(this,arguments);
this.add({
object_type: String,
});
}
util.inherits( PortalTypes, Schema );
var userType = new PortalTypes({
ID : {
type: Schema.Types.ObjectId,
ref : 'User'
}
});
var eventType = new PortalTypes({
ID : {
type: Schema.Types.ObjectId,
ref : 'events'
}
});
var user = new userType({ID:'dsaj3232--objectID','object_type':'user'});
user.save();
var event = new eventType({ID:'dasddehiqe98--objectID','object_type':'event'});
event.save();
Networks.Invitation.push(user,event);
How can I do something like this?
Despite your schema, that error at the top means that there is a matching document in the collection that does not have this field set as an array; it's present with another type, possibly just a string or object.
Here's a little, contrived example listing to demonstrate:
var async = require('async'),
mongoose = require('mongoose'),
Schema = mongoose.Schema;
var personSchema = new Schema({
invitation: []
});
var Person = mongoose.model( 'Person', personSchema );
mongoose.connect('mongodb://localhost/test');
async.waterfall(
[
function(callback) {
Person.remove({},function(err,num) {
callback(err);
});
},
function(callback) {
console.log( "Creating" );
var person = new Person();
person.save(function(err,person) {
if (err) callback(err);
console.log(person);
callback(err,person);
});
},
function(person,callback) {
console.log( "Updating" );
Person.findOneAndUpdate(
{ "_id": person._id },
{ "$push": { "invitation": "something" } },
function(err,doc) {
if (err) callback(err);
console.log(doc);
callback(err);
}
);
},
function(callback) {
console.log( "Upserting" );
Person.findOneAndUpdate(
{ "name": "bob" },
{ "$set": { "invitation": {} } },
{ "upsert": true },
function(err,doc) {
if(err) callback(err);
console.log(doc);
callback(err,doc);
}
);
},
function(bob,callback) {
console.log( "Failing" );
Person.findOneAndUpdate(
{ "name": "bob" },
{ "$push": { "invitation": "else" } },
function(err,doc) {
if (err) callback(err);
console.log(doc);
callback(err);
}
);
}
],
function(err) {
if (err) throw err;
console.log( "Done" );
mongoose.disconnect();
}
);
That should give results like this:
Creating
{ __v: 0, _id: 54a18afb345b4efc02f21020, invitation: [] }
Updating
{ _id: 54a18afb345b4efc02f21020,
__v: 0,
invitation: [ 'something' ] }
Upserting
{ _id: 54a18afb9997ca0c4a7eb722,
name: 'bob',
__v: 0,
invitation: [ {} ] }
Failing
/home/neillunn/scratch/persons/node_modules/mongoose/lib/utils.js:413
throw err;
^
MongoError: exception: The field 'invitation' must be an array but is of type Object
in document {_id: ObjectId('54a18afb9997ca0c4a7eb722')}
The error message is a little different since these messages were improved in MongoDB 2.6 and upwards (where this error string comes from) to be more precise about the actual problem. So in modern versions you would be told exactly what was wrong.
Despite the schema, methods like .update() ( I used .findOneAndUpdate() for convenience ) bypass the mongoose schema definition somewhat and go right to the database. So it's possible to do this and also possible you just had a document in place already, or otherwise created when a different schema definition was in place.
So that's the first problem here.
The rest of what you seem to be asking is for a "polymorphic" type of association in the array, and also where you do not wish to "embed" the whole created object in the array but just a reference to it.
Mongoose has "discriminators" to allow for this sort of thing, allowing different model types for objects to be stored within the same collection, but resolving to their own object and schema "type".
Following the current documentation example, here is an example listing of what that might look like:
var util = require('util'),
async = require('async'),
mongoose = require('mongoose'),
Schema = mongoose.Schema;
function logger(label,content) {
console.log(
"%s:\n%s\n", label, JSON.stringify( content, undefined, 4 ) );
}
function BaseSchema() {
Schema.apply(this,arguments);
this.add({
name: String,
createdAt: { type: Date, default: Date.now }
});
}
util.inherits( BaseSchema, Schema );
var personSchema = new BaseSchema(),
bossSchema = new BaseSchema({ department: String });
var companySchema = new Schema({
people: [{ type: Schema.Types.ObjectId, ref: 'Person' }]
});
var Person = mongoose.model( 'Person', personSchema ),
Boss = Person.discriminator( 'Boss', bossSchema ),
Company = mongoose.model( 'Company', companySchema );
mongoose.connect('mongodb://localhost/test');
async.waterfall(
[
function(callback) {
Company.remove({},function(err,num) {
callback(err);
});
},
function(callback) {
Person.remove({},function(err,num) {
callback(err);
});
},
function(callback) {
var person = new Person({ name: "Bob" });
person.save(function(err,person) {
logger("Person", person);
callback(err,person);
});
},
function(person,callback) {
var boss = new Boss({ name: "Ted", department: "Accounts" });
boss.save(function(err,boss) {
logger("Boss", boss);
callback(err,person,boss);
});
},
function(person,boss,callback) {
var company = new Company();
company.people.push(person,boss);
company.save(function(err,company) {
logger("Stored",company);
callback(err,company);
});
},
function(company,callback) {
Company.findById(company.id)
.populate('people')
.exec(function(err,company) {
logger("Polulated",company);
callback(err);
});
}
],
function(err) {
if (err) throw err;
mongoose.disconnect();
}
);
Which will produce output like this:
Person:
{
"__v": 0,
"name": "Bob",
"createdAt": "2014-12-29T17:53:22.418Z",
"_id": "54a1951210a7a1b603161119"
}
Boss:
{
"__v": 0,
"name": "Ted",
"department": "Accounts",
"__t": "Boss",
"createdAt": "2014-12-29T17:53:22.439Z",
"_id": "54a1951210a7a1b60316111a"
}
Stored:
{
"__v": 0,
"_id": "54a1951210a7a1b60316111b",
"people": [
"54a1951210a7a1b603161119",
"54a1951210a7a1b60316111a"
]
}
Populated:
{
"_id": "54a1951210a7a1b60316111b",
"__v": 0,
"people": [
{
"_id": "54a1951210a7a1b603161119",
"name": "Bob",
"__v": 0,
"createdAt": "2014-12-29T17:53:22.418Z"
},
{
"_id": "54a1951210a7a1b60316111a",
"name": "Ted",
"department": "Accounts",
"__v": 0,
"__t": "Boss",
"createdAt": "2014-12-29T17:53:22.439Z"
}
]
}
As you can see, there is a different structure for how Person and Boss are saved, notably the __t property as well as the other defined properties for the different objects. Both, however, are actually stored in the same "people" collection and can be queried as such.
When storing these on the Company object, only the "reference id" values are stored in the array. Whether that is what you want is debatable, but this is the difference between "referenced" and "embedded" schema models. You can see, however, that when the .populate() method is called, the objects are restored to their full form as they are read from the referenced collection.
So check your collection for existing documents that vary from your schema definition, and consider the approach as shown to represent a "polymorphic" association for different "types" of objects.
Note though that this kind of resolution is only supported under the "referenced" schema design, which can also have its drawbacks. If you want the objects stored as "embedded" within the single Company collection (for example), then you don't get this type of object resolution with varying schema types done by mongoose automatically. Resolving different types of objects would have to be done manually in your code, by a plugin you provide, or however you choose to do it.
More
Being specific to all of the purpose because there seems to be some confusion following something based on the standard documentation example, here is a more heavily commented listing:
var util = require('util'),
async = require('async'),
mongoose = require('mongoose'),
Schema = mongoose.Schema;
// Utility
function logger(label,content) {
console.log(
"%s:\n%s\n", label,
util.inspect( content, false, 8, false ) );
}
/*
* Schemas:
*
* you can use a base schema for common fields or just a plain
* definition
*/
var portalSchema = new Schema(),
userSchema = new Schema({
"name": String,
"age": Number
}),
eventSchema = new Schema({
"place": String,
"eventDate": { type: Date, default: Date.now }
});
/*
* Models
*
* there is only one "model" defined and therefore one collection only
* as everything comes from a stored __t field with the "Model" name
* defined in the discriminator
*/
var Portal = mongoose.model( 'Portal', portalSchema ),
User = Portal.discriminator( 'User', userSchema ),
Event = Portal.discriminator( 'Event', eventSchema );
/*
* Then there is the thing that is going to consume the references to the
* 'Portal' model. The array here references the "base" model.
*/
var otherSchema = new Schema({
"afield": String,
"portals": [{ type: Schema.Types.ObjectId, ref: "Portal" }]
});
var Other = mongoose.model( 'Other', otherSchema );
/*
* Meat:
*
* Let's start doing things
*/
mongoose.connect('mongodb://localhost/test');
// Just because we're passing around objects without globals or other scoping
async.waterfall(
[
// Start fresh by removing all objects in the collections
function(callback) {
Other.remove({},function(err,num) {
callback(err);
});
},
function(callback) {
Portal.remove({},function(err,num) {
callback(err);
});
},
// Create some portal things
function(callback) {
var eventObj = new Event({ "place": "here" });
eventObj.save(function(err,eventObj) {
logger("Event", eventObj);
callback(err,eventObj);
});
},
function(eventObj,callback) {
var userObj = new User({ "name": "bob" });
userObj.save(function(err,userObj) {
logger("User", userObj);
callback(err,eventObj,userObj);
});
},
// Store the references in the array for the Other model
function(eventObj,userObj,callback) {
var other = new Other({
"afield": "something"
});
other.portals.push(eventObj,userObj);
other.save(function(err,other) {
logger("Other Stored",other);
callback(err,other);
});
},
// See how it's all really stored
function(other,callback) {
Portal.find({},function(err,portals) {
logger("Portals",portals);
callback(err,other);
});
},
// But watch the magic here
function(other,callback) {
User.find({},function(err,portals) {
logger("Just Users!",portals);
callback(err,other);
});
},
// And constructed as one object by populate
function(other,callback) {
Other.findById(other.id)
.populate('portals')
.exec(function(err,other) {
logger("Other populated",other);
console.log("%s: %s",
"1st Element", other.portals[0].constructor.modelName );
console.log("%s: %s",
"2nd Element", other.portals[1].constructor.modelName );
callback(err);
});
}
],
function(err) {
// It's just a script, so clean up
if (err) throw err;
mongoose.disconnect();
}
);
That should explain some things and what "discriminators" are. Everything is stored in just "one" collection, which is bound to the base model. Everything else is defined using .discriminator() from that base. The "name" of the "class model" or "discriminator" is stored on the object. But note that it is stored in that collection only, not in the place where the objects are referenced, as that only stores the _id values. Look at the output carefully:
Event:
{ __v: 0,
place: 'here',
__t: 'Event',
_id: 54a253ec456b169310d131f9,
eventDate: Tue Dec 30 2014 18:27:40 GMT+1100 (AEDT) }
User:
{ __v: 0,
name: 'bob',
__t: 'User',
_id: 54a253ec456b169310d131fa }
Other Stored:
{ __v: 0,
afield: 'something',
_id: 54a253ec456b169310d131fb,
portals: [ 54a253ec456b169310d131f9, 54a253ec456b169310d131fa ] }
Portals:
[ { _id: 54a253ec456b169310d131f9,
place: 'here',
__v: 0,
__t: 'Event',
eventDate: Tue Dec 30 2014 18:27:40 GMT+1100 (AEDT) },
{ _id: 54a253ec456b169310d131fa,
name: 'bob',
__v: 0,
__t: 'User' } ]
Just Users!:
[ { _id: 54a253ec456b169310d131fa,
name: 'bob',
__v: 0,
__t: 'User' } ]
Other populated:
{ _id: 54a253ec456b169310d131fb,
afield: 'something',
__v: 0,
portals:
[ { _id: 54a253ec456b169310d131f9,
place: 'here',
__v: 0,
__t: 'Event',
eventDate: Tue Dec 30 2014 18:27:40 GMT+1100 (AEDT) },
{ _id: 54a253ec456b169310d131fa,
name: 'bob',
__v: 0,
__t: 'User' } ] }
1st Element: Event
2nd Element: User
So there is only one collection for all "portal" types, but there is some magic there as shown. The "others" collection only stores the _id values in its array of "portals". This is how mongoose references work: the "model" and attached schema are not stored in the data but as part of the code definition.
The "discriminator" part stores this "model name" on the field so it can be resolved to the correct type, but it's still all in the same collection, and part of the User model magic demonstrated.
Why? It's how .populate() works. Under the hood an $in operator is used with the array content, so it's all expected to be in the one place. But you can still resolve types as shown.
If you expect using separate collections, then you are doing everything manually and storing model names and querying other collections for references all by yourself.
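Roughly, the populated read in the listing above is resolved with two server queries, something like this in shell terms (the ids are taken from the output above):
// 1. Fetch the parent document
db.others.findOne({ "_id": ObjectId("54a253ec456b169310d131fb") })

// 2. Resolve all references in one shot against the single "portals"
//    collection; the __t value on each result tells mongoose which
//    discriminator model (Event or User) to apply to it
db.portals.find({
  "_id": { "$in": [
    ObjectId("54a253ec456b169310d131f9"),
    ObjectId("54a253ec456b169310d131fa")
  ] }
})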
