I've been trying to update a single field using db.put(), but I can't make it work properly. Every time I update a single field by a given ID, all the other fields of that record are wiped out. Here is some example code:
var schema = {
stores: [
{
name: 'items',
keyPath: 'id'
},
{
name: 'config',
keyPath: 'id'
}
]
};
var db = new ydn.db.Storage('initial', schema);
var items = [{
id: 1,
itemId: 'GTA5',
date:'03/25/2013',
description:'Great game'
}, {
id: 2,
itemId: 'Simcity',
date:'12/01/2012',
description:'Awesome gameplay'
}];
var config = {
id: 1,
currency: 'USD'
};
db.put('items', items);
db.put('config', config);
var updateTable = function(){
var req = $.when(db.count('items'),db.values('items'),db.get('config', 1));
var disp = function(s) {
var e = document.createElement('div');
e.textContent = s;
document.body.appendChild(e);
};
req.done(function(count,r,config) {
var currency = config.currency;
if(count > 0){
var n = r.length;
for (var i = 0; i < n; i++) {
var id = r[i].id;
var itemId = r[i].itemId;
var date = r[i].date;
var description = r[i].description;
disp('ID: '+id+' itemID: '+itemId+' Currency: '+currency+' Date: '+date+' Description: '+description);
}
}
});
}
updateTable();
$('a').click(function(e){
e.preventDefault();
db.put('items',{id:2,description:'Borring'}).done(function(){
updateTable();
});
});
Here is a working example of what's happening: JSFiddle. If you click the "change" link, the specified field is updated but all the other fields become 'undefined'.
Yeah, SimCity is boring now, but Tomorrow will bring excitement again.
IndexedDB is essentially a key-document store. You have to read or write a record as a whole; it is NOT possible to update only certain field(s). Even if you only want to make a small update, you have to read and write back the whole record.
Reading and writing back the whole record is OK, but there is an important consideration for consistency: when you write back, you must ensure that the record you have was not modified in the meantime. Even though JavaScript is single-threaded, both the read and the write operations are asynchronous, and each operation may see a different database state. It seems like this should be extremely rare, but it happens more often than you would expect. For example, the user clicks, nothing seems to happen, so they click again. These user interactions are queued and, from the async database's perspective, execute in parallel.
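To make the hazard concrete, here is a minimal sketch of how two queued read-modify-write sequences can lose an update. The click handler and the votes field are hypothetical; the db storage is the one from the question.
// Two rapid clicks queue two independent read-modify-write sequences.
// Both reads can complete before either write, so both handlers modify
// the same stale snapshot and the second put() silently discards the first.
$('a.vote').click(function(e) {
    e.preventDefault();
    db.get('items', 2).done(function(item) { // read
        item.votes = (item.votes || 0) + 1;  // modify a possibly stale copy
        db.put('items', item);               // write back: last write wins
    });
});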
A common technique is using a single transaction for both operations. In YDN-DB, you can do this in three ways.
Using explicit transaction:
db.run(function(tx_db) {
    tx_db.get('items', 2).done(function(item) {
        item.description = 'boring until Tomorrow';
        tx_db.put('items', item).done(function(k) {
            updateTable();
        });
    });
}, ['items'], 'readwrite');
Using an atomic database operation:
var iter = ydn.db.ValueIterator.where('items', '=', 2);
db.open(function(cursor) {
var item = cursor.getValue();
item.description = 'boring until Tomorrow';
cursor.update(item);
}, iter, 'readwrite');
EDIT:
Using query wrapper:
db.from('items', '=', 2).patch({description: 'boring until Tomorrow'});
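Presumably the table refresh can be chained on as before; this assumes patch returns a request with a done callback like the other YDN-DB operations shown above:
db.from('items', '=', 2).patch({description: 'boring until Tomorrow'}).done(function() {
    updateTable();
});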
Related
For example:
object1(1) = {
name: 'Rhodok Sergeant',
speciality: 'Hand to hand battle'
}
then I want to update only the speciality field, into:
object1(1) = {
name: 'Rhodok Sergeant',
speciality: 'Long range battle'
}
Thank you.
This is possible using the following steps:
fetch the item first using an IDBCursor
update that item
call cursor.update to store the updated data in IndexedDB.
Example code:
// 'db' is an already-open IDBDatabase connection.
const transaction = db.transaction(['rushAlbumList'], 'readwrite');
const objectStore = transaction.objectStore('rushAlbumList');
objectStore.openCursor().onsuccess = function(event) {
    const cursor = event.target.result;
    if (cursor) {
        if (cursor.value.albumTitle === 'A farewell to kings') {
            const updateData = cursor.value;
            updateData.year = 2050;
            const request = cursor.update(updateData);
            request.onsuccess = function() {
                console.log('data updated');
            };
        }
        cursor.continue();
    }
};
Check out this link for more info - https://developer.mozilla.org/en-US/docs/Web/API/IDBCursor/update
Note: in the above code I am looping through all the records, which is not efficient if you want to fetch a particular record based on some condition. You can use IDBKeyRange or another IDB query alternative for that, as sketched below.
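For instance, here is a sketch that targets the record directly with a key range. It assumes the store has an index on albumTitle, which is not part of the original code:
// Assumes an index named 'albumTitle' exists on the store (hypothetical).
const index = objectStore.index('albumTitle');
const range = IDBKeyRange.only('A farewell to kings');
index.openCursor(range).onsuccess = function(event) {
    const cursor = event.target.result;
    if (cursor) {
        const updateData = cursor.value;
        updateData.year = 2050;
        cursor.update(updateData); // write the modified record back via the cursor
        cursor.continue();         // in case several records share the title
    }
};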
You cannot do partial updates; you can only overwrite an entire object.
Read the object into memory, change it, and then write it back, as in the sketch below.
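A minimal read-modify-write sketch with plain IndexedDB, using the example object from the question; db is assumed to be an open IDBDatabase and the store name 'soldiers' is made up:
// Read the whole object, change one field in memory, write the whole object back.
const tx = db.transaction(['soldiers'], 'readwrite'); // store name is hypothetical
const store = tx.objectStore('soldiers');
store.get(1).onsuccess = function(event) {
    const soldier = event.target.result;      // { name: 'Rhodok Sergeant', speciality: 'Hand to hand battle' }
    soldier.speciality = 'Long range battle'; // update only the field you care about
    store.put(soldier);                       // overwrites the entire record
};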
I want to copy the value from issues in one project to the issues in another project that depend on them.
This is what I have:
var entities = require('@jetbrains/youtrack-scripting-api/entities');
var workflow = require('@jetbrains/youtrack-scripting-api/workflow');
exports.rule = entities.Issue.onChange({
// TODO: give the rule a human-readable title
title: 'Date-propagation',
guard: function(ctx) {
var links = ctx.issue.links['depends on'];
return ctx.issue.isChanged("Date") || !links.added.isEmpty() || !links.removed.isEmpty();
},
action: function(ctx) {
var issue = ctx.issue;
var links = issue.links['depends on'];
function updateIssue(normalIssue){
normalIssue.fields.DueDate = issue.fields.Date.value;
}
function checkList(list){
if(list.isNotEmpty())list.forEach(function(normalIssue){updateIssue(normalIssue);}) ;
}
//checkList(links.removed);
checkList(links);
// TODO: specify what to do when a change is applied to an issue
},
requirements: {
Date: {
type: entities.Field.dateType,
},
Depend: {
type: entities.IssueLinkPrototype,
outward: 'is required for',
inward: "depends on"
}
}
});
The problem is in this line:
normalIssue.fields.DueDate = issue.fields.Date;
How should it be done?
Most probably, you do not have a 'DueDate' field on your instance (as the default field is called 'Due Date'). If so, your code line should look like this:
normalIssue.fields['Due Date'] = issue.fields.Date;
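In the rule above, the update function then becomes the following sketch. The extra requirements entry is an assumption about how the 'Due Date' field would be declared, not something from the original:
function updateIssue(normalIssue) {
    // 'Due Date' (with a space) is the default field name on a YouTrack instance.
    normalIssue.fields['Due Date'] = issue.fields.Date;
}
requirements: {
    Date: {
        type: entities.Field.dateType
    },
    // Presumably the target field needs declaring too; 'name' maps the
    // JS-friendly key to the actual field name containing a space.
    DueDate: {
        type: entities.Field.dateType,
        name: 'Due Date'
    }
    // ... Depend link prototype as before
}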
I have a pretty complex query that I am trying to write, and I can't seem to wrap my head around the best way to write it. Ultimately, I am trying to avoid writing it all out manually. What I am trying to do is take a dynamic field name, go through each document to see if that field exists, and update it if it does. The catch is that the field can occur more than once per document, since it can also exist at an embedded level, in multiple embedded documents per parent document.
Here is what a typical object would look like:
applications: {
    _id: '5368hss76sd9f',
    ProgramAreaId: 1,
    ProgramArea: 'Education',
    StrategyId: 16,
    Strategy: 'Graduate Program',
    AlternateMapping: [{
        ProgramAreaId: 2,
        ProgramArea: 'Private Sector',
        StrategyId: 13,
        Strategy: 'Higher Education Fund'
    }],
    Funding: [{
        Amount: '500000',
        StrategyId: 16,
        Strategy: 'Graduate Program'
    },
    {
        Amount: '500000',
        StrategyId: 13,
        Strategy: 'Higher Education Fund'
    }]
}
I may have several thousand of these that I will need to update at a time. The ultimate goal would be to do it in one query. I have made it work for a single field at the base level but was wondering if there was a way to make it work for all of the fields that match the dynamic name even in embedded documents.
Here is what I have tried so far:
var fieldId = obj.Type + 'Id'; //obj.Type could equal 'Strategy' or 'ProgramArea'
var field = obj.Type;
var id = 13; //This could be any number of ids and ultimately was what I want to match on.
var qry = {
    $where: function() {
        var deepIterate = function(o, value) {
            for (var field in o) {
                if (field == fieldId && o[field] == value) {
                    return true;
                }
                var found = false;
                if (typeof o[field] === 'object') {
                    found = deepIterate(o[field], value);
                    if (found) {
                        return true;
                    }
                }
            }
            return false;
        };
        return deepIterate(this, id);
    }
};
var setQry = { $set: {} };
setQry.$set[field] = obj.Name; // Field here would be 'Strategy' or 'ProgramArea' or any number of other fields I could be updating, and Name is what I want the value of that field to equal.
mongo.collection('applications').updateMany(qry, setQry, function(err, result) {
if (err)
callback(null);
else
callback(result);
});
The above query will find any 'application' that contains a field whose name equals the one I am asking for, and it will even search through embedded documents to see if that field is there. The issue with the above query is that the update only touches the field at the parent level rather than updating the matching children as well.
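As an aside: on MongoDB 3.6+, the filtered positional operator $[elem] together with arrayFilters can reach into embedded arrays, though the array names have to be known up front. A sketch, hard-coding the Funding array from the example document:
// Update 'Strategy' inside the Funding array, but only in elements whose
// StrategyId matches; you would need one updateMany per known array field.
mongo.collection('applications').updateMany(
    { 'Funding.StrategyId': id },                       // only docs containing a match
    { $set: { 'Funding.$[elem].Strategy': obj.Name } }, // names hard-coded for the sketch
    { arrayFilters: [{ 'elem.StrategyId': id }] },
    function(err, result) {
        if (err) callback(null);
        else callback(result);
    }
);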
So I think I have found the best solution to the above issue. I created the following code to accomplish it, and it works wonderfully!
var resultsArray = [];
var fieldId = obj.Type + 'Id';
var field = obj.Type;
if (coll == 'statuses') {
fieldId = "StatusId";
field = "Status";
}
var altmapField = 'AltMaps.' + fieldId,
altfundField = 'AltFunds.' + fieldId,
paymentField = 'Payments.' + fieldId,
reportField = 'Reports.' + fieldId,
crosswalkField = 'Crosswalk.' + fieldId,
regionField = 'RegionBreakdowns.' + fieldId,
sectorField = 'SectorBreakdowns.' + fieldId;
var qry = [];
// Build a $match stage with eight empty placeholder clauses,
// then fill each one in with a dynamically computed field name.
qry.push({
    $match: {
        $or: [{}, {}, {}, {}, {}, {}, {}, {}]
    }
});
qry[0].$match.$or[0][fieldId] = id;
qry[0].$match.$or[1][altmapField] = id;
qry[0].$match.$or[2][altfundField] = id;
qry[0].$match.$or[3][paymentField] = id;
qry[0].$match.$or[4][reportField] = id;
qry[0].$match.$or[5][crosswalkField] = id;
qry[0].$match.$or[6][regionField] = id;
qry[0].$match.$or[7][sectorField] = id;
var cursor = mongo.collection(collectionPrefix + 'applications').aggregate(qry, { allowDiskUse: true, explain: false }, null);
Essentially, all I did was build the query dynamically and then pass it into Mongo's aggregation, which read it like a champ.
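For illustration, with obj.Type equal to 'Strategy' and id equal to 13, the pipeline that reaches aggregate is equivalent to:
[{
    $match: {
        $or: [
            { StrategyId: 13 },
            { 'AltMaps.StrategyId': 13 },
            { 'AltFunds.StrategyId': 13 },
            { 'Payments.StrategyId': 13 },
            { 'Reports.StrategyId': 13 },
            { 'Crosswalk.StrategyId': 13 },
            { 'RegionBreakdowns.StrategyId': 13 },
            { 'SectorBreakdowns.StrategyId': 13 }
        ]
    }
}]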
What I am trying to do is get the Game ID that is created by APIService.postData for the game. I then need to take that Game ID and feed it into the Angular forEach loops so that the foreign key constraints hold true on the RESTful side.
How can I get that Game ID out of there?
P.S. I am well aware of the scope issue
this.createGame = function() {
APIService.postData('game', '', $scope.newGameData).then(function (data) {
$scope.newGameID = data.id;
});
// Looping through each added class and adding the game_id onto the object in
// order for the DB insertion to go smoothly on the RESTful side.
angular.forEach($scope.newGameData.classes, function (key, value) {
$scope.newGameData.classes[value].game_id = $scope.newGameID;
APIService.postData('game-class', '', $scope.newGameData.classes[value]);
});
// Looping through each added race and pushing the game_id onto the object in
// order for the DB insertion to go smoothly on the RESTful side.
angular.forEach($scope.newGameData.races, function (key, value) {
$scope.newGameData.races[value].game_id = $scope.newGameID;
APIService.postData('game-race', '', $scope.newGameData.races[value]);
});
$scope.newGameData = {
name: ""
};
$scope.race_counter = 0;
$scope.class_counter = 0;
$scope.newGameData.classes = [{id: $scope.class_counter}];
$scope.newGameData.races = [{id: $scope.race_counter}];
$scope.successMessage = "New 'Game' has been added!";
$scope.action = 'showGames'; // Default action.
this.getGames();
$window.scrollTo(0, 0);
};
Figured it out. The forEach loops needed to go inside the 'then' callback; the Game ID kept coming up undefined because the POST request hadn't actually finished yet. On top of that, $scope.newGameData was getting clobbered, so I assigned both arrays to their own local variables and it works great.
this.createGame = function() {
var newGameID = '';
var classData = $scope.newGameData.classes;
var raceData = $scope.newGameData.races;
APIService.postData('game', '', $scope.newGameData).then(function (data) {
newGameID = data.id;
// Looping through each added class and adding the game_id onto the object in
// order for the DB insertion to go smoothly on the RESTful side.
angular.forEach(classData, function (key, value) {
classData[value].game_id = newGameID;
APIService.postData('game-class', '', classData[value]);
});
// Looping through each added race and pushing the game_id onto the object in
// order for the DB insertion to go smoothly on the RESTful side.
angular.forEach(raceData, function (key, value) {
raceData[value].game_id = newGameID;
APIService.postData('game-race', '', raceData[value]);
});
});
$scope.newGameData = {
name: ""
};
$scope.race_counter = 0;
$scope.class_counter = 0;
$scope.newGameData.classes = [{id: $scope.class_counter}];
$scope.newGameData.races = [{id: $scope.race_counter}];
$scope.successMessage = "New 'Game' has been added!";
$scope.action = 'showGames'; // Default action.
this.getGames();
$window.scrollTo(0, 0);
};
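One possible refinement, sketched under the assumption that APIService.postData returns a promise and that $q is injected into the controller: collect the child POSTs with $q.all so follow-up work (like the success message) only runs once every insert has finished.
APIService.postData('game', '', $scope.newGameData).then(function (data) {
    var newGameID = data.id;
    var posts = [];
    angular.forEach(classData, function (cls) {
        cls.game_id = newGameID;
        posts.push(APIService.postData('game-class', '', cls));
    });
    angular.forEach(raceData, function (race) {
        race.game_id = newGameID;
        posts.push(APIService.postData('game-race', '', race));
    });
    return $q.all(posts); // resolves when every child insert has completed
}).then(function () {
    $scope.successMessage = "New 'Game' has been added!";
});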
I need to limit the number of fields sent to the client from a publish function, after applying a transform that requires access to the original doc.
I'm basically trying to avoid sending potentially huge arrays down to the client, and to run a bunch of checks so that I can return a nice, neat object to work with.
Here's the function I've got now. It works, just not the way I'd like: the field limiting only applies to what the observe callbacks receive. Is there a way to apply the projection after the observe / transform?
Meteor.publish('network', function() {
var self = this;
// get the user values initially
var user = Meteor.users.findOne(self.userId);
var followingUsers = user.following ? user.following.users || [] : [];
var followingChannels = user.following ? user.following.channels || [] : [];
var transformMedia = function(doc) {
// get the user each time to keep this publication reactive
var votesUp = doc.votes ? doc.votes.up || [] : [];
var votesDown = doc.votes ? doc.votes.down || [] : [];
var favourites = doc.votes ? doc.votes.favourites || [] : [];
doc.userActions = {
votedUp: _.contains(votesUp, doc._id) ? 1 : 0,
votedDown: _.contains(votesDown, doc._id) ? 1 : 0,
isFavourite: _.contains(favourites, doc._id) ? 1 : 0,
played: _.contains(doc.played, self.userId) ? 1 : 0,
};
return doc;
};
var networkQuery = Media.find({
$and: [
{
$and: [
{processedAt: { $exists: true} },
{processedStatus: 'successful'},
{publishStatus: 'published'}
]
},
{
// if created by this user, user they follow or channels they subscribe to
$or: [
{createdBy: self.userId },
{createdBy: { $in: followingUsers} },
{channels: { $in: followingChannels} },
]
}
// TODO : add not banned or trashed once implemented
]
}, mediaModifiers).observe({
added: function(doc) {
self.added('media', doc._id, transformMedia(doc));
},
changed: function(doc, oldDoc) {
self.changed('media', doc._id, transformMedia(doc));
},
removed: function(doc) {
self.removed('media', doc._id);
},
});
self.onStop(function() {
networkQuery.stop();
});
self.ready();
});
I had a similar issue once. I dealt with it using cursor.observe() plus a custom function (as you did), and I just added a _.pick() to filter out the unnecessary fields. Have a look at this publication code for an example (the docToPublish whitelist part especially):
var self = this;
// Modify the document we are sending to the client.
function filter(doc) {
var length = doc.item.length;
// White list the fields you want to publish.
var docToPublish = _.pick(doc, [
'someOtherField'
]);
// Add your custom fields.
docToPublish.itemLength = length;
return docToPublish;
}
var handle = myCollection.find({}, {fields: {item:1, someOtherField:1}})
// Use observe since it gives us the old and new document when something is changing.
// If this becomes a performance issue then consider using observeChanges,
// but its usually a lot simpler to use observe in cases like this.
.observe({
added: function(doc) {
self.added("myCollection", doc._id, filter(doc));
},
changed: function(newDocument, oldDocument) {
// When the item count is changing, send update to client.
if (newDocument.item.length !== oldDocument.item.length)
self.changed("myCollection", newDocument._id, filter(newDocument));
},
removed: function(doc) {
self.removed("myCollection", doc._id);
}
});
self.ready();
self.onStop(function () {
handle.stop();
});
This code is borrowed from @datacarl's answer to my topic mentioned above.
Note that if you scale up to several servers, the downside of this approach is that each server will have to run the cursor.observe() function.
You also forgot to make your publication ready and to dispose of your observers at the end of your publication (it might be because you didn't paste the whole pub). It would look like this:
self.ready();
self.onStop(function () {
networkQuery.stop();
});