I was going through a Stack Overflow question on handling collections. The answer given by Brian Genisio was pretty convincing. I have tried his approach as follows:
Collection definition
var PersonCollection = Backbone.Collection.extend({
    model: Person,
    url: '/people',
    parse: function(resp, xhr) {
        this.header = resp.header;
        this.stats = resp.stats;
        return resp.people;
    }
});
Collection usage
var personCollection = new PersonCollection();
personCollection.fetch();
console.log(personCollection.header); //undefined
console.log(personCollection.status); //undefined
The collection is fetching the models perfectly, but the other assigned properties come back undefined. Please suggest a solution to fix this issue.
You need to wait until the fetch is done. Note also that parse assigns this.stats, so log personCollection.stats rather than status:
personCollection.fetch().done(() => {
    console.log(personCollection.header);
    console.log(personCollection.stats);
});
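The same timing rule applies if you prefer the success callback over the returned promise; a minimal sketch (property names follow the parse method above):
personCollection.fetch({
    success: function (collection) {
        // runs after parse, so header and stats are populated by now
        console.log(collection.header);
        console.log(collection.stats);
    }
});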
In the code below, the fetch() and sync() methods are not doing anything.
I am trying to see how the data in my localStorage gets updated, but the methods are not updating it (an example localStorage string is in the code).
Where am I going wrong?
function makeWorkingLS(collDesc, projDesc, Id, Description, ElapsedSeconds, ElapsedTime, WorkItemType){
    // Create observable object from params
    var activeTaskObject = kendo.observable({
        client: collDesc,
        project: projDesc,
        taskId: Id,
        description: Description,
        elapsedSeconds: ElapsedSeconds,
        elapsedTime: ElapsedTime,
        comment: WorkItemType
    });
    // example string in localStorage:
    // {"client":"Morken Mindy","project":"Shazbat creation engine","taskId":183,"description":"Create the Shazbat 100% efficiency engine","elapsedSeconds":296803,"elapsedTime":"82h43m","comment":"Task"}
    // Convert to JSON string for localStorage
    var activeTask = JSON.stringify(activeTaskObject);
    console.info(activeTask);
    // Write to localStorage
    window.localStorage.setItem("activeTask", activeTask);
    // Set it as the active datasource for updating to webservice
    var activeTaskDS = new kendo.data.DataSource({
        transport: {
            read: function(options){
                taskItem = JSON.parse(localStorage["activeTask"]);
            },
            update: {
                url: remUpd, // url var declared earlier in the process
                dataType: "json"
            }
        },
        schema: {
            model: {
                client: "client",
                taskId: "taskId"
            },
            data: function(){
                return taskItem;
            }
        }
    });
    activeTaskDS.fetch(function(){
        activeTaskDS.data()[0].set("client", "NOBODY");
        activeTaskDS.sync();
        console.log("activeTaskDS.data()[0] : " + activeTaskDS.data()[0]); // should read 'NOBODY' but reads 'Morken Mindy'
    });
}
Thanks in advance,
Neil.
I'm not sure what the problem is, actually, but I have to point out some important things:
AFAIK, when you customize any of the transport methods you have to pass the data to a callback on the options object:
transport: {
    read: function(options){
        taskItem = JSON.parse(localStorage["activeTask"]);
        // Tells the widget to handle that collection
        options.success(taskItem);
    }
}
In schema.data it seems that you want to pass your data through this method (correct me if I'm wrong). But this method isn't for that purpose. It is used just to tell the widget which field to read (in case of passing a string to it) or to read a property from the response, which comes in as a parameter that you are not using. Check the second example here. So this may not be the right way to read the taskItem object as data.
Speaking of the taskItem object, it seems that it's the base data of your dataSource, but it isn't defined (at least in the snippet you posted). What I mean is, if you follow step 1 you won't even need to read from that object anymore.
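Putting these points together, a rough sketch of how the DataSource could look; remUpd and the "activeTask" localStorage key come from your question, while the id/field definitions are only assumptions:
var activeTaskDS = new kendo.data.DataSource({
    transport: {
        read: function (options) {
            var taskItem = JSON.parse(window.localStorage.getItem("activeTask"));
            // hand the parsed item to the widget instead of relying on schema.data
            options.success([taskItem]);
        },
        update: {
            url: remUpd, // url var declared earlier in the process
            dataType: "json"
        }
    },
    schema: {
        model: {
            id: "taskId",
            fields: {
                client: { type: "string" },
                taskId: { type: "number" }
            }
        }
    }
});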
Please let me know if this is helpful and if you need anything more.
I am trying to bulk insert documents into MongoDB (so bypassing Mongoose and using the native driver instead as Mongoose doesn't support bulk insert of an array of documents). The reason I'm doing this is to improve the speed of writing.
I am receiving the error "RangeError: Maximum Call Stack Size Exceeded" at console.log(err) in the code below:
function _fillResponses(globalSurvey, optionsToSelectRegular, optionsToSelectPiped, responseIds, callback) {
    Response.find({'_id': {$in: responseIds}}).exec(function(err, responses) {
        if (err) { return callback(err); }
        if (globalSurvey.questions.length) {
            responses.forEach(function(response) {
                console.log("Filling response: " + response._id);
                response.answers = [];
                globalAnswers = {};
                globalSurvey.questions.forEach(function(question) {
                    ans = _getAnswer(question, optionsToSelectRegular, optionsToSelectPiped, response);
                    globalAnswers[question._id] = ans;
                    response.answers.push(ans);
                });
            });
            Response.collection.insert(responses, function(err, responsesResult) {
                console.log(err);
                callback();
            });
        } else {
            callback();
        }
    });
}
So similar to: https://stackoverflow.com/questions/24356859/mongoose-maximum-call-stack-size-exceeded
Perhaps it's something about the format of the responses array that Mongoose returns that means I can't directly insert using MongoDB natively? I've tried .toJSON() on each response but no luck.
I still get the error even with a very small amount of data but looping through and calling the Mongoose save on each document individually works fine.
EDIT: I think it is related to this issue: http://howtosjava.blogspot.com.au/2012/05/nodejs-mongoose-rangeerror-maximum-call.html
My schema for responses is:
var ResponseSchema = new Schema({
    user: {
        type: Schema.ObjectId,
        ref: 'User'
    },
    randomUUID: String,
    status: String,
    submitted: Date,
    initialEmailId: String,
    survey: String,
    answers: [AnswerSchema]
});
So, answers are sub-documents within responses. Not sure how to fix it though...
I was having this same issue and I started digging through the mongoose source code (version 3.8.14). Eventually it led me to this line within
mongoose/node_modules/mongodb/lib/mongodb/collection/core.js -> insert(...) -> insertWithWriteCommands(...) ->
mongoose/node_modules/mongodb/lib/mongodb/collection/batch/ordered.js -> bulk.insert(docs[i]) -> addToOperationsList(...) -> bson.calculateObjectSize(document, false);
var bsonSize = bson.calculateObjectSize(document, false);
Apparently, this calls BSON.calculateObjectSize, which calls calculateObjectSize, which then infinitely recurses. I wasn't able to dig that far into what caused it, but figured that it may have something to do with the mongoose wrapper binding functions to the Schema. Since I was inserting raw data into MongoDB, once I changed the bulk insert in mongoose to use a standard JavaScript object, the problem went away and bulk inserts happened correctly. You might be able to do something similar.
Essentially, my code went from
//EDIT: mongoose.model needs lowercase 'm' for getter method
var myModel = mongoose.model('MyCollection');
var toInsert = myModel();
var array = [toInsert];
myModel.collection.insert(array, {}, function(err, docs) {});
to
//EDIT: mongoose.model needs lowercase 'm' for getter method
var myModel = mongoose.model('MyCollection');
var toInsert = { // stuff in here
    name: 'john',
    date: new Date()
};
var array = [toInsert];
myModel.collection.insert(array, {}, function(err, docs) {});
Confirmed, but not a bug. Model.collection.insert() bypasses Mongoose and so you're telling the node driver to insert an object that contains mongoose internals like $__, etc. The stack overflow is probably because bson is trying to compute the size of an object that references itself indirectly.
Long story short, use Document.toObject(), that's what it's for: http://mongoosejs.com/docs/api.html#document_Document-toObject
Response.find({}).exec(function(err, responses) {
    if (err) {
        return callback(err);
    }
    if (true) {
        var toInsert = [];
        responses.forEach(function(response) {
            console.log("Filling response: " + response._id);
            response.answers = [];
            [{ name: 'test' }].forEach(function(ans) {
                response.answers.push(ans);
            });
            toInsert.push(response.toObject());
        });
        Response.collection.insert(toInsert, function(err, responsesResult) {
            console.log(err);
        });
    } else {
        callback();
    }
});
Also, the code you specified won't work even if you fix the stack overflow. Since you're trying to insert() docs that are already in the database, all the inserts will fail because of _id conflicts. You'd really be much better off just using a stream() to read the results one at a time and then save() them back into the db.
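A rough sketch of that stream-and-save approach, assuming the Mongoose 3.x-era Query#stream() API (newer releases expose a cursor() instead); _buildAnswers is a hypothetical stand-in for the answer-building loop in the question:
var stream = Response.find({'_id': {$in: responseIds}}).stream();
stream.on('data', function (response) {
    response.answers = _buildAnswers(response); // hypothetical helper
    response.save(); // updates the existing doc, so no _id conflicts
});
stream.on('error', callback);
stream.on('close', function () { callback(); });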
Guys, I've faced that weird error today. It happened because I had a schema with ref properties and tried to pass in the whole related document on create/update. I changed the argument to the _id only and that did the trick. Works like a charm. I found the answer here (scroll down to the February 21, 2013, 8:05 pm comment by gustavohenke).
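For illustration only, with hypothetical Order/User models, the difference looked roughly like this:
// passing the whole populated document triggered the RangeError
// Order.create({ user: someUser, total: 100 }, callback);
// passing only the ObjectId works
Order.create({ user: someUser._id, total: 100 }, callback);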
I have faced similar issue.
// manyvalues is an array of objects
schema.methods.somemethod = function(manyvalues, callback) {
    this.model(collection).collection.insertMany(manyvalues, callback);
};
But this caused error [RangeError: Maximum call stack size exceeded].
So I created a plain copy of manyvalues and used it as below, and it worked.
schema.methods.somemethod = function(manyvalues, callback){
    var list = JSON.parse(JSON.stringify(manyvalues)); // creates a plain deep copy
    this.model(collection).collection.insertMany(list, callback);
};
The problem may be caused if manyvalues is changed internally.
This also happens if there's a duplicate _id value. Most situations will be when you create a new record from an existing record.
Delete the _id before inserting the record, and let Mongoose/MongoDB take care of creating the id.
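A minimal sketch of that, where existingDoc and Model are placeholders for your own document and model:
// copy the document, drop its _id, and let MongoDB generate a new one
var copy = existingDoc.toObject();
delete copy._id;
Model.collection.insert(copy, function (err, result) {
    if (err) { console.log(err); }
});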
I had the same issue. Mongoose version is 5.13.14. My stack trace is:
RangeError: Maximum call stack size exceeded
at minimize (...\node_modules\mongoose\lib\document.js:3564:18)
at minimize (...\node_modules\mongoose\lib\document.js:3576:18)
at minimize (...\node_modules\mongoose\lib\document.js:3576:18)
at minimize (...\node_modules\mongoose\lib\document.js:3576:18)
at minimize (...\node_modules\mongoose\lib\document.js:3576:18)
at minimize (...\node_modules\mongoose\lib\document.js:3576:18)
at minimize (...\node_modules\mongoose\lib\document.js:3576:18)
I found 2 ways to fix the issue:
Using toObject() method:
const model = await MyModel.findOne(conditions);
return model?.toObject();
Using minimize: false in toJSON option of the schema:
export const MySchema = new Schema({
...
}, {
...
toJSON: {
getters: true,
// !!! HERE !!!
minimize: false,
},
...
});
Check for circular references in the responses object. I faced a similar issue due to circular references.
I had a similar problem: I was querying a field that didn't exist in the schema using $ne (other query operators may have a similar problem).
var TestSchema = new Schema({
    test: []
});
...
models.Test.findOne({"test2": {$ne: "t"} })...
In the example above I am testing for test2 instead of test
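In other words, the error went away once the query referenced the field that actually exists in the schema:
// test2 is not in TestSchema; query the real field instead
models.Test.findOne({ "test": { $ne: "t" } });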
I am trying to parse a multilevel JSON file, create a model, and then add that model to a Backbone collection, but I can't seem to figure out how to push the model to the collection. This should be a pretty easy problem to solve, I just can't seem to figure it out. Thanks in advance for your help. Below is my model and collection code:
var Performer = Backbone.Model.extend({
    defaults: {
        name: null,
        top5: [],
        bottom5: []
    },
    initialize: function(){
        console.log("==> NEW Performer");
        // you can add event handlers here...
    }
});

var Performers = Backbone.Collection.extend({
    url: '../json_samples/performers.json',
    model: Performer,
    parse: function(data) {
        // 'data' contains the raw JSON object
        console.log("performer collection - " + data.response.success);
        if (data.response.success) {
            _.each(data.result.performers, function(item, key, list){
                console.log("running for " + key);
                var tmpObject = {};
                tmpObject.name = key;
                tmpObject.top5 = item.top5;
                tmpObject.bottom5 = item.bottom5;
                var tmpModel = new Performer(tmpObject);
                this.models.push(tmpModel);
            });
        } else {
            console.log("Failed to load performers");
        }
    }
});
As has been said in the comments to your question, parse() is not intended to be used this way. If data.result.performers were an array, all you would have to do is return it. In your case the code will be slightly different.
var Performers = Backbone.Collection.extend({
    ...
    parse: function(resp, options) {
        return _.map(resp.result.performers, function(item, key) {
            return _.extend(item, {name: key});
        });
    }
    ...
});
On the advice side, if you have the chance to change the API server-side, you'd probably be better off treating collections of objects as arrays and not as objects. Even if it is sometimes convenient to access an object by some ad-hoc key, the data really is an array.
You'll be able to transform it later, when you need performers-by-name, with a function like Underscore's _.indexBy.
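For example, assuming the fetched collection instance is called performers, something along these lines rebuilds the keyed lookup on demand:
// build a { name: performer } map from the parsed array
var performersByName = _.indexBy(performers.toJSON(), 'name');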
I just recently started using Backbone.js and I'm working on an app now using Brunch that does a JSONP request to an external API to populate my collection and models. I'm following these previous posts (this and this) on doing JSONP requests with Backbone, but my collection still isn't getting the data for some reason.
My model (app/models/model.js):
module.exports = Backbone.Model.extend({
});
My collection (app/models/collection.js):
var Post = require('./model');
module.exports = Backbone.Collection.extend({
    model: Post,
    url: "http://somedata.com/api/posts/list/stuff",
    sync: function(method, model, options) {
        options.timeout = 10000;
        options.dataType = "jsonp";
        options.jsonp = "JSONPcallback";
        return Backbone.sync(method, model, options);
    },
    parse: function(response) {
        if (response) {
            var parsed = [];
            for (var i = 0; i < response.results.length; i++) {
                parsed.push(response.results[i][0]);
            }
            return parsed;
        }
    }
});
Then, in the initialize method in app/application.js I'm calling it by:
var Category = require('models/collection');
this.cat = new Category();
this.cat.fetch();
Now, when I look at the parse function in console.log, I see the data being fetched, so the request is going through successfully. However, when my views are rendered and I do console.log(application.cat.models) in app/views/view.js, I get nothing -- why's this happening? Is there anything wrong with the code on my model/collection?
Also, the JSONP data has the following format, which is why I loop through response.results[i][0] and return an array of those objects. That should do the trick, right?
{"results":[
{"0":{"id":xxx,"title":xxx,"link":xxx},
"description":xxx},
{"0":{"id":xxx,"title":xxx,"link":xxx},
"description":xxx},
{"0":{"id":xxx,"title":xxx,"link":xxx},
"description":xxx},...
]}
Would really appreciate any help...
I have 2 comments here:
I see that you have named both your model and collection as module.exports. A common practice is to name the model in the singular (module.export) and the collection of those models in the plural (module.exports); just common practice, nothing "wrong" otherwise.
There are two ways to get a callback when the collection is done fetching data (an asynchronous event), still considering module.exports as your collection here:
A. You could do this:
module.exports.fetch({
    success: function(data){
        console.log(JSON.stringify(data));
        // do remaining programming here
    }
});
B. You could have an event listener for reset. From the documentation here, the collection fires a reset event when it completes the fetch, so you could add an event listener on the collection like this:
module.exports.on('reset', function(data){
    console.log(JSON.stringify(data));
    // do remaining programming here
}, this);
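One caveat: in Backbone 1.0 and later, fetch() merges the response into the collection by default and fires add/change/sync events rather than reset, so the listener above only fires if you opt in:
// ask fetch() for the reset behaviour so the 'reset' handler runs
module.exports.fetch({ reset: true });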
I need to pass an id to a collection for use in the url (e.g. /user/1234/projects.json) but am not sure how to do this, an example would be wonderful.
The way my application is structured: on launch, a collection of 'users' is pulled and rendered. When a user is clicked, I want their 'documents' to be pulled from the server into a new collection and rendered in a new view. The issue is getting the user id into the documents collection so I can build the relevant URL for documents.fetch().
I think I've got it; here is an example:
// in the view's initialize function
this.collection = new Docs();
this.collection.project_id = this.options.project_id;
this.collection.fetch();

// in the collection
url: function() {
    return '/project/api/' + this.project_id + '/docs';
}
Your user collection url should be set to /user. Once that's set, your models should utilize that url in order to do their magic. I believe (not completely positive) that if a model is in a collection, calling the 'url' method will return /user/:id. So all your typical REST-ish functionality will be utilized on '/user/:id'. If you are trying to do something with a relationship (a user has many documents) it's kind of rinse and repeat. So, for your documents collection (which belongs to the user, correct?) you'd set the url to 'user_instance.url/documents'.
To show a one to many relationship with a backbone model, you'd do something like this (upgrade to backbone 0.5.1 for urlRoot):
var User = Backbone.Model.extend({
    initialize: function() {
        // note, you are passing the function url. This is important if you are
        // creating a new user that's not been sync'd to the server yet. If you
        // did something like: {user_url: this.url()} it wouldn't contain the id
        // yet... and any sync through docs would fail... even if you sync'd the
        // user model!
        this.docs = new Docs([], {user_url: this.url});
    },
    urlRoot: '/user'
});

var Doc = Backbone.Model.extend();
var Docs = Backbone.Collection.extend({
    initialize: function(models, args) {
        this.url = function() { args.user_url() + '/documents'; };
    }
});

var user = new User({id: 1234});
user.docs.fetch({ success: function() { alert('win'); } });
Why do you need to override the url property of the collection with a function? You could do:
this.collection = new Docs();
this.collection.project_id = this.options.project_id;
this.collection.url = '/project/api/' + this.options.project_id + '/docs';
this.collection.fetch();
I like the answer from Craig Monson, but to get it working I needed to fix two things:
Binding the User url method before passing it to the Docs
A return statement from the url function in Docs
Updated example:
var User = Backbone.Model.extend({
    initialize: function() {
        // note, you are passing the function url. This is important if you are
        // creating a new user that's not been sync'd to the server yet. If you
        // did something like: {user_url: this.url()} it wouldn't contain the id
        // yet... and any sync through docs would fail... even if you sync'd the
        // user model!
        this.docs = new Docs([], { user_url: this.url.bind(this) });
    },
    urlRoot: '/user'
});

var Doc = Backbone.Model.extend();
var Docs = Backbone.Collection.extend({
    initialize: function(models, args) {
        this.url = function() { return args.user_url() + '/documents'; };
    }
});

var user = new User({id: 1234});
user.docs.fetch({ success: function() { alert('win'); } });