Reverse sorting backbonejs - javascript

I am using the function below to sort a Backbone collection. I am sorting the collection by date (creationdate). The output I am getting has the oldest item first, but I want the latest item first and the oldest item last.
var campaignsNewCampaigns = _.sortBy(campaigns, function(campaign) {
    if (campaign.creationdate) {
        var getDate = campaign.creationdate.replace("-", "").replace("-", "").replace(":", "").replace(":", "");
        return -Number(getDate);
    }
    return 0;
});

In Backbone.js, a collection has a comparator field and a sort function.
To set your collection's sort order, add this method to your collection:
//...
sortByDate: function() {
    this.comparator = function(campaign) {
        // Assuming creationdate is stored as a Date object
        return -campaign.get('creationdate').getTime();
    };
    this.sort();
},
//...
Now, to sort your collection, call the method like this (note that sort() reorders the collection in place, so there is nothing useful to assign from the return value):
campaigns.sortByDate();
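If you prefer, the comparator can also be set when the collection is defined, so it is applied automatically on add and reset. A minimal sketch, assuming the models store creationdate as a Date object (the names here are illustrative):
var Campaigns = Backbone.Collection.extend({
    comparator: function(campaign) {
        // Negate the timestamp so the newest campaign sorts first.
        return -campaign.get('creationdate').getTime();
    }
});

var campaigns = new Campaigns(initialModels); // kept sorted newest-first on add/reset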

I got the solution for this.
if (campaign.creationdate) {
    var getDate = (new Date(campaign.creationdate)).getTime();
    return -Number(getDate);
}
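For completeness, a minimal sketch of the full call with that fix in place, assuming campaigns is a plain array of objects whose creationdate string can be parsed by new Date():
var campaignsNewCampaigns = _.sortBy(campaigns, function(campaign) {
    if (campaign.creationdate) {
        // Negative timestamp sorts the newest campaign first.
        return -(new Date(campaign.creationdate)).getTime();
    }
    return 0; // items without a creationdate keep a neutral sort key
});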

Related

Merge and create an unique case sensitive array in javascript

It's a complicated scenario, but I will try to explain as much as possible. I have one object of arrays and one array. I have to compare the selectedModels values with mappedModels: if a value matches a key of that object (case-insensitively), fetch all of its values, push them into the selected models, and combine both. I hope the example makes clear what I am trying to achieve.
var mappedModels = { 'CC605': ['cc605', 'CC605', 'cC605'], 'TC75X': ['TC75X'] };
var selectedModels = ['CC605', 'tc76'];
var desiredOutput = ["CC605", "tc76", "cc605", "cC605"];
I already wrote a solution to achieve it, but I need better code in terms of performance. Here is the solution:
function combineModelCases(selectedModels) {
    const modelCases = [];
    selectedModels.forEach(elm => {
        const existingModels = mappedModels[elm.toUpperCase()];
        if (existingModels) {
            for (const key of existingModels) {
                if (elm.toUpperCase() !== key) {
                    modelCases.push(key);
                }
            }
        }
    });
    return selectedModels.concat(modelCases);
}
Here is a Fiddle.
I am using TypeScript and Underscore.js, for your reference. Any help would be appreciated.
You could use flatMap to get a flattened array of values for each key in selectedModels. Then create a Set to get a unique collection of models, and use Array.from() to convert the set back to an array.
const mappedModels = { 'CC605': ['cc605', 'CC605', 'cC605'], 'TC75X': ['TC75X'] },
      selectedModels = ['CC605', 'tc76'];

const models = selectedModels.flatMap(m => mappedModels[m] || []),
      unique = Array.from(new Set([...selectedModels, ...models]));

console.log(unique);
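If the lookup should stay case-insensitive, as in the original toUpperCase-based code, the key can be normalised before indexing into mappedModels. A small sketch of that variant, assuming all keys of mappedModels are upper case:
// Replaces the models line above: normalise the key before the lookup.
const models = selectedModels.flatMap(m => mappedModels[m.toUpperCase()] || []);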
You can do the following:
var mappedModels = { 'CC605': ['cc605', 'CC605', 'cC605'], 'TC75X': ['TC75X'] };
var selectedModels = ['CC605', 'tc76'];
var desiredOutput;

function combineModelCases(selectedValue) {
    if (mappedModels.hasOwnProperty(selectedValue)) {
        desiredOutput = [...new Set([...mappedModels[selectedValue], ...selectedModels])];
        return desiredOutput;
    }
}

console.log(combineModelCases('CC605'));
Here is the working demo: https://jsfiddle.net/wzo4d6uy/2/

Adding new element to the object based on the comparison

I have the following objects:
var empAry= [{"empid":"101","name":"David"},{"empid":"102","name":"Sam"}..];//2000 records
var empAry2= [{"empid":"101","name":"David"},{"empid":"105","name":"Kevin"},{"empid":"109","name":"Robert"},{"empid":"110","name":"Rob"}..];//30000 records
I need to add a new element to each object in empAry and populate the new element's value based on whether that particular record exists in empAry2.
Expected output:
empAry= [{"empid":"101","name":"David","FounInempAry2":"Yes"},{"empid":"102","name":"Sam","FounInempAry2":"No"}..];//2000 records
If we can do it with jQuery, that would be good. Please help me.
It's hard to make sense of what FounInempAry2 is, since the object structures are identical in both samples. I will assume that other properties exist and will use jQuery's $.extend() to "merge" the properties.
First, it is likely most efficient to loop through the big array once and create a lookup object keyed by empid:
var tmp = {};
$.each(empAry2, function(_, item) {
    tmp[item.empid] = item;
});
This creates an object like:
{
    "101": {"empid":"101","name":"David"},
    "105": {"empid":"105","name":"Kevin"},
    ...
}
Now loop through the first array and extend each item with whatever matching object exists in tmp:
$.each(empAry, function(_, item) {
    $.extend(item, tmp[item.empid]);
});
Reference: $.extend() Docs
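If the goal is only the FounInempAry2 flag from the question, the same tmp lookup can set it directly instead of merging properties; a minimal sketch of that variant:
$.each(empAry, function(_, item) {
    // Mark each record depending on whether its empid exists in the lookup.
    item.FounInempAry2 = tmp.hasOwnProperty(item.empid) ? "Yes" : "No";
});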
Try this:
var entry = {"empid":"<some id>","name":"<some name>"};
var filter = empAry2.filter(function(o) {
    return o.empid == entry.empid;
});
entry.FounInempAry2 = (filter && filter.length > 0) ? "Yes" : "No";
empAry.push(entry); // add the flagged entry to empAry
Or
var entry = {"empid":"<some id>","name":"<some name>","FounInempAry2":"No"};
for (var i = 0, length = empAry2.length; i < length; i++) {
    if (empAry2[i].empid == entry.empid) {
        entry.FounInempAry2 = "Yes";
        break;
    }
}
empAry.push(entry); // add the flagged entry to empAry

How to use javascript to loop through keys and values and add up one key's value when the others match

I have a dataset of records that looks like this:
[{
    "d1d": "2015-05-28T00:00:00.000Z",
    "d1h": 0,
    "d15m": 0,
    "ct": 3
},
{
    "d1d": "2015-05-28T00:00:00.000Z",
    "d1h": 0,
    "d15m": 0,
    "ct": 1
}]
The ct value changes in every record. If d1d, d1h, and d15m are the same in one or more records, I need to combine those records into one with the sum of all the ct values.
I do have jQuery; can I use grep for this?
I realize the server side could do a better job of getting me this data, but I have zero control over that.
You don't have to use jQuery for this; vanilla JavaScript will do.
I'll show you two solutions to your problem.
Example 1: Abusing Array#reduce as an iterator
var intermediaryArray = [];

dataset.reduce(function(prev, curr) {
    if (prev.d1d === curr.d1d && prev.d1h === curr.d1h && prev.d15m === curr.d15m) {
        intermediaryArray.push({
            d1d: prev.d1d,
            d1h: prev.d1h,
            d15m: prev.d15m,
            ct: prev.ct + curr.ct
        });
    } else {
        // push the one that wasn't the same
        intermediaryArray.push(curr);
    }
    // return the current element so reduce has something to work on
    // for the next iteration
    return curr;
});
Example 2: Using Array#map and Array#reduce in conjunction
This example utilises underscore.js to demonstrate the logic behind what you want to do.
.map() produces the new array of grouped objects.
.groupBy() produces an object of subarrays, grouping together the objects that share the same d1d value (the grouping key).
.reduce() boils each subarray down to one value: your object with all the ct values added together.
var merged = _.map(_.groupBy(a, 'd1d'), function(subGroup) {
    return subGroup.reduce(function(prev, curr) {
        return {
            d1d: prev.d1d,
            d1h: prev.d1h,
            d15m: prev.d15m,
            ct: prev.ct + curr.ct
        };
    });
});
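Since the question asks for records to be combined only when d1d, d1h and d15m all match, the grouping key could be a composite of the three fields rather than d1d alone. A hedged sketch of that variant with Underscore:
var merged = _.map(
    _.groupBy(a, function(rec) {
        // Composite key: records merge only when all three fields match.
        return rec.d1d + '|' + rec.d1h + '|' + rec.d15m;
    }),
    function(subGroup) {
        return subGroup.reduce(function(prev, curr) {
            return {
                d1d: prev.d1d,
                d1h: prev.d1h,
                d15m: prev.d15m,
                ct: prev.ct + curr.ct
            };
        });
    }
);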
Here's one possible solution:
var dataset = [{
    "d1d": "2015-05-28T00:00:00.000Z",
    "d1h": 0,
    "d15m": 0,
    "ct": 3
},
{
    "d1d": "2015-05-28T00:00:00.000Z",
    "d1h": 0,
    "d15m": 0,
    "ct": 1
}];

function addCt(dataset) {
    var ctMap = {};
    var key, value;
    for (var ii = 0, record; record = dataset[ii]; ii++) {
        key = record.d1d + "|" + record.d1h + "|" + record.d15m;
        value = ctMap[key];
        if (!value) {
            value = 0;
        }
        value += record.ct;
        ctMap[key] = value;
    }
    return ctMap;
}

var ctMap = addCt(dataset);
console.log(ctMap);
// { "2015-05-28T00:00:00.000Z|0|0": 4 }
You may want to construct the key in a different way. You may also want to set the value to an object containing the d1d, d1h, d15m and cumulated ct values, with a single object for all matching d1d, d1h and d15m values.
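A minimal sketch of that last variant, keeping the original fields on each accumulated entry (same assumptions as above):
function mergeCt(dataset) {
    var map = {};
    for (var ii = 0, record; record = dataset[ii]; ii++) {
        var key = record.d1d + "|" + record.d1h + "|" + record.d15m;
        if (!map[key]) {
            // First record for this key: copy the grouping fields, start ct at 0.
            map[key] = { d1d: record.d1d, d1h: record.d1h, d15m: record.d15m, ct: 0 };
        }
        map[key].ct += record.ct;
    }
    // Return the merged records as an array.
    return Object.keys(map).map(function(k) { return map[k]; });
}

console.log(mergeCt(dataset));
// [ { d1d: "2015-05-28T00:00:00.000Z", d1h: 0, d15m: 0, ct: 4 } ]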

Reconstruct JSON after duplicates have been removed

I have the following JSON -
{
    "node1": [
        {
            "one": "foo",
            "two": "foo",
            "three": "foo",
            "four": "foo"
        },
        {
            "one": "bar",
            "two": "bar",
            "three": "bar",
            "four": "bar"
        },
        {
            "one": "foo",
            "two": "foo",
            "three": "foo",
            "four": "foo"
        }
    ],
    "node2": [
        {
            "link": "baz",
            "link2": "baz"
        },
        {
            "link": "baz",
            "link2": "baz"
        },
        {
            "link": "qux",
            "link2": "qux"
        }
    ]
};
I have the following javascript that will remove duplicates from the node1 section -
function groupBy(items, propertyName) {
    var result = [];
    $.each(items, function (index, item) {
        if ($.inArray(item[propertyName], result) == -1) {
            result.push(item[propertyName]);
        }
    });
    return result;
}
groupBy(catalog.node1, 'one');
However, this does not account for duplicates in node2.
The resulting JSON I require should look like this -
{
    "node1": [
        {
            "one": "foo",
            "two": "foo",
            "three": "foo",
            "four": "foo"
        },
        {
            "one": "bar",
            "two": "bar",
            "three": "bar",
            "four": "bar"
        }
    ],
    "node2": [
        {
            "link": "baz",
            "link2": "baz"
        },
        {
            "link": "qux",
            "link2": "qux"
        }
    ]
};
However, I cannot get this to work: groupBy only returns an array of strings with duplicates removed, not the restructured JSON.
You should probably look for a good implementation of a JavaScript set and use it to represent your node objects; the set data structure ensures that you only keep unique items.
On the other hand, you may try to write your own dedup algorithm. This is one example:
function dedup(data, equals) {
    if (data.length > 1) {
        return data.reduce(function(set, item) {
            var alreadyExist = set.some(function(unique) {
                return equals(unique, item);
            });
            if (!alreadyExist) {
                set.push(item);
            }
            return set;
        }, []);
    }
    return [].concat(data);
}
Unfortunately, the performance of this algorithm is not great: I think it is somewhere around O(n^2/2), since the set of unique items is scanned every time to check whether a given item already exists. This won't be a big deal if your structure is really that small, but at any rate this is where a hash-based or tree-based algorithm would probably be better.
You can also see that I have abstracted away the definition of what "equal" means, so you can provide that in a separate function. Most likely, using JSON.stringify is a bad idea because it takes time to serialize an object; a customized comparison that goes key by key would probably be better.
So, a naive (not recommended) implementation of equals could be somewhat like the one proposed in the other answer:
var equals = function(left, right) {
    return JSON.stringify(left) === JSON.stringify(right);
};
And then you could simply do:
var res = Object.keys(source).reduce(function(res, key) {
    res[key] = dedup(source[key], equals);
    return res;
}, {});
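As an illustration of the key-by-key comparison mentioned above, a minimal sketch of an equals function for flat objects like the ones in node1 and node2 (an assumption; nested values would need a recursive check):
var equals = function(left, right) {
    var leftKeys = Object.keys(left),
        rightKeys = Object.keys(right);
    if (leftKeys.length !== rightKeys.length) {
        return false;
    }
    // Every key in left must exist in right with the same (primitive) value.
    return leftKeys.every(function(key) {
        return left[key] === right[key];
    });
};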
Here is my version:
var obj = {}; // the JSON object provided in the post
var result = Object.keys(obj);
var test = result.map(function(o) {
    obj[o] = obj[o].reduce(function(a, c) {
        if (!a.some(function(item) {
            return JSON.stringify(item) === JSON.stringify(c);
        })) {
            a.push(c);
        }
        return a;
    }, []);
    return obj[o];
});
console.log(obj); // outputs the expected result
Using Array.prototype.reduce along with Array.prototype.some, I check each candidate against the items already pushed into the new array built by Array.prototype.reduce (the accumulator named a) by doing:
a.some(function(item){ return JSON.stringify(item) === JSON.stringify(c); })
Array.prototype.some will loop through this new array and compare the existing items against the new item c using JSON.stringify.
Try this:
var duplicatedDataArray = [];
var DuplicatedArray = [];

// Avoiding duplicates in array data
var givenData = { givenDataForDuplication: givenArray };
$.each(givenData.givenDataForDuplication, function (index, value) {
    if ($.inArray(value.ItemName, duplicatedDataArray) == -1) {
        duplicatedDataArray.push(value.ItemName);
        DuplicatedArray.push(value);
    }
});
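This snippet assumes each item has an ItemName property to dedup on, which the question's data does not have. A hedged sketch of the same track-what-you-have-seen idea adapted to the question's node arrays, using JSON.stringify as the key and the catalog variable from the question:
function removeDuplicates(items) {
    var seen = [];
    var unique = [];
    $.each(items, function (index, item) {
        // Use the serialized object as the dedup key.
        var key = JSON.stringify(item);
        if ($.inArray(key, seen) == -1) {
            seen.push(key);
            unique.push(item);
        }
    });
    return unique;
}

catalog.node1 = removeDuplicates(catalog.node1);
catalog.node2 = removeDuplicates(catalog.node2);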

mongodb mapreduce does incorrect reducing

I'm running into some trouble with a very simple mapreduce, and I can't figure out what I've done wrong. I'm trying to merge two collections together. The first, db.Pos, looks like this:
"chr" : "chr1", "begin" : 39401, "end" : 39442
The other collection, db.Gene, has the following format:
"chr" : "chr1", "begin" : 39401, "end" : 39442, "gene" : "GENE1"
My code looks like this:
var mapPos = function() {
    emit({chr: this.chr, begin: this.begin, end: this.end}, {gene: ""});
};

var mapGene = function() {
    emit({chr: this.chr, begin: this.begin, end: this.end}, {gene: this.gene});
};

var r = function(key, values) {
    var result = {gene: ""};
    values.forEach(function(value) {
        result.gene = value.gene;
    });
    return result;
};

res = db.Pos.mapReduce(mapPos, r, {out: {reduce: 'joined'}});
res = db.Gene.mapReduce(mapGene, r, {out: {reduce: 'joined'}});
So what I'd like to see is a collection where entries that are matching by chr, begin, and end are merged and the gene field is filled in from the db.Gene collection.
Instead, I'm getting the "gene" field in my "joined" collection updated to something other than 0 even when there is no matching doc in db.Gene that has a gene field.
What did I do wrong?
After reflection, I think you should use merge and not reduce for your out.
The reason why you don't get the correct value:
The problem occurs when the reduce is applied between the existing content of the joined collection and the result of the db.Gene mapReduce.
The reduce function doesn't know which value is the newest, so the result.gene returned is just the last value.gene in the values array.
To distinguish the value that should override the value already in the collection, you can add a flag.
res = db.Pos.mapReduce(
    function() {
        emit({chr: this.chr, begin: this.begin, end: this.end}, {gene: this.gene || ''});
    },
    function(key, values) {
        var result = {};
        values.forEach(function(value) {
            if (value) {
                result.gene = value.gene;
            }
        });
        return result;
    },
    {out: {reduce: 'joined'}}
);
res = db.Gene.mapReduce(
    function() {
        // Add an override flag here
        emit({chr: this.chr, begin: this.begin, end: this.end}, {gene: this.gene || '', override: true});
    },
    function(key, values) {
        var result = {};
        values.forEach(function(value) {
            if (value.override) {
                result.gene = value.gene;
            }
        });
        return result;
    },
    {out: {reduce: 'joined'}}
);
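Following the first suggestion, here is a sketch of the second pass using merge instead of reduce for the out option, so the document produced from db.Gene simply replaces the matching document in joined and no flag is needed. This assumes mapPos and r from the question, and that every matching key in db.Gene carries a gene value:
res = db.Pos.mapReduce(mapPos, r, {out: {reduce: 'joined'}});

// Second pass: merge overwrites matching documents in 'joined' with the
// db.Gene result, so the gene field is filled in where the keys match.
res = db.Gene.mapReduce(
    function() {
        emit({chr: this.chr, begin: this.begin, end: this.end}, {gene: this.gene});
    },
    function(key, values) {
        // Only reached if db.Gene itself has duplicate keys; keep the last gene seen.
        var result = {gene: ""};
        values.forEach(function(value) {
            result.gene = value.gene;
        });
        return result;
    },
    {out: {merge: 'joined'}}
);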
Hope it's clear :)
