I don't get why NgRx throws this error while I'm trying to send a simple object to my API. Could you give me some advice about NgRx and the reason why it refuses to serialize my object?
I tried setting strictActionSerializability to false: no error, but no object is sent to my API...
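For reference, I toggle that flag in the store's runtime checks, roughly like this (module and reducer names are simplified placeholders, not my real setup):
// app.module.ts - sketch only; "reducers" stands in for my real reducer map
import { NgModule } from '@angular/core';
import { StoreModule } from '@ngrx/store';
import { reducers } from './reducers';

@NgModule({
  imports: [
    StoreModule.forRoot(reducers, {
      runtimeChecks: {
        // the flag I toggled; with true the error below comes back
        strictActionSerializability: true,
      },
    }),
  ],
})
export class AppModule {}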
Error:
Error: Detected unserializable action at "createdPath"
How I call my action:
this.storePath.dispatch(PathActions.createPath({ createdPath }));
In the actions.ts file:
export const createPath = createAction('[BOT/GROUP] CREATE PATH', props<{ createdPath: Path }>());
And my effect:
createPath$ = createEffect(() =>
  this.actions$.pipe(
    ofType(PathActions.createPath),
    map(action => action.createdPath),
    exhaustMap((createdPath: Path) =>
      this.pathService.createPath(createdPath).pipe(
        map(createdPath => PathActions.createPathSuccess({ createdPath })),
        catchError(error => of(PathActions.createPathFailure({ error })))
      )
    )
  )
);
My object sent as JSON:
{
"monsterLevel": [],
"monsterQuantity": [],
"monsterCapture": [],
"pathAction": [
{
"actions": [
{
"order": 1,
"fightAction": {
"isAlone": false
}
},
{
"order": 2,
"moveAction": {
"direction": [
"Right",
"Bottom"
],
"toGoBank": false,
"toBackBank": false
}
}
],
"mapPos": "-14;-53"
},
{
"actions": [
{
"order": 1,
"fightAction": {
"isAlone": false
}
},
{
"order": 2,
"moveAction": {
"direction": [
"Top",
"Right"
],
"toGoBank": false,
"toBackBank": false
}
}
],
"mapPos": "-14;-52"
},
{
"actions": [
{
"order": 1,
"fightAction": {
"isAlone": false
}
},
{
"order": 2,
"moveAction": {
"direction": [
"Top",
"Left"
],
"toGoBank": false,
"toBackBank": false
}
}
],
"mapPos": "-13;-52"
},
{
"actions": [
{
"order": 1,
"fightAction": {
"isAlone": false
}
},
{
"order": 2,
"moveAction": {
"direction": [
"Left",
"Bottom"
],
"toGoBank": false,
"toBackBank": false
}
}
],
"mapPos": "-13;-53"
},
{
"actions": [
{
"order": 1,
"moveAction": {
"direction": [
"Bottom"
],
"toGoBank": true,
"toBackBank": false
}
}
],
"mapPos": "-14;-51"
},
{
"actions": [
{
"order": 1,
"moveAction": {
"direction": [
"Bottom"
],
"toGoBank": true,
"toBackBank": false
}
}
],
"mapPos": "-14;-50"
},
{
"actions": [
{
"order": 1,
"moveAction": {
"direction": [
"Bottom"
],
"toGoBank": true,
"toBackBank": false
}
}
],
"mapPos": "-14;-49"
},
{
"actions": [
{
"order": 1,
"moveAction": {
"direction": [
"Bottom"
],
"toGoBank": true,
"toBackBank": false
}
}
],
"mapPos": "-14;-48"
},
{
"actions": [
{
"order": 1,
"moveAction": {
"cellId": 150,
"toGoBank": true,
"toBackBank": false
}
},
{
"order": 2,
"zaapAction": {
"destination": "-32,-58",
"zaapId": 1,
"toBackBank": false,
"toGoBank": true
}
}
],
"mapPos": "-14;-47"
}
],
"name": "feef",
"type": 0,
"monsterQuantityMin": 0,
"monsterQuantityMax": 8,
"groupLevelMin": 0,
"groupLevelMax": 999,
"maxPod": 51,
"leaderBank": true
}
Class used:
export class Path {
  name: string;
  type: number; /* 0 fight, 1 gather */
  maxPod: number = 80;
  monsterQuantityMin: number = 0;
  monsterQuantityMax: number = 8;
  groupLevelMin: number = 0;
  groupLevelMax: number = 9999;
  isCapture: boolean = false;
  leaderBank: boolean = false;
  captureItem: number;
  monsterLevel?: SpecificMonsterLevel[];
  monsterQuantity?: SpecificMonsterQuantity[];
  monsterCapture?: CaptureMonsterQuantity[];
  pathAction: PathAction[];
}
Have a good day, and thanks for your help!
For a pure data-class object you can use:
JSON.parse(JSON.stringify(createdPath))
Otherwise, I suggest adding a toJSON() serialization method (which is automatically picked up by JSON.stringify):
export class Foo {
  private _bar: string;
  constructor() { this._bar = 'Baz'; }
  get bar(): string { return this._bar; }
  toJSON() {
    return { bar: this._bar };
  }
  static fromJSON(json) {
    ...
  }
}
Reference - Angular 2 (or 4) object serialization
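Applied to the Path class from the question, a minimal sketch of that toJSON() idea (field list abbreviated, so treat it as an illustration rather than a drop-in):
export class Path {
  name: string;
  maxPod: number = 80;
  pathAction: PathAction[] = [];
  // ...remaining fields as declared in the question

  // Picked up automatically by JSON.stringify: copies the instance's own
  // enumerable properties into a plain object literal.
  toJSON() {
    return { ...this };
  }
}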
@Andrew Allen resolved my issue by stringifying and re-parsing my object:
this.storePath.dispatch(PathActions.createPath({ createdPath: JSON.parse(JSON.stringify(createdPath)) }));
I'm trying to get the output of getQueryResults using the below code:
var AWS = require('aws-sdk');
var athena = new AWS.Athena();
const DEBUG = process.env.DEBUG;
const GLOCA_ENVID = process.env.GLOCA_ENVID;
const GLOCA_AWS_ACCOUNTID = process.env.GLOCA_AWS_ACCOUNTID;
const GLOCA_AWS_REGION = process.env.GLOCA_AWS_REGION;
exports.handler = function(event, context, callback) {
  athena.getQueryResults({
    QueryExecutionId: "a1b2c3d4-5678-90ab-cdef-EXAMPLE11111"
  }, function(err, data) {
    if (err) console.log(err);
    else {
      console.log("Body: ", data);
    }
  });
};
Below is the output:
{
UpdateCount: 0,
ResultSet: { Rows: [ [Object] ], ResultSetMetadata: { ColumnInfo: [Array] } }
}
The output should look something like this:
{
"ResultSet": {
"Rows": [
{
"Data": [
{
"VarCharValue": "date"
},
{
"VarCharValue": "location"
},
{
"VarCharValue": "browser"
},
{
"VarCharValue": "uri"
},
{
"VarCharValue": "status"
}
]
},
{
"Data": [
{
"VarCharValue": "2014-07-05"
},
{
"VarCharValue": "SFO4"
},
{
"VarCharValue": "Safari"
},
{
"VarCharValue": "/test-image-2.jpeg"
},
{
"VarCharValue": "200"
}
]
},
{
"Data": [
{
"VarCharValue": "2014-07-05"
},
{
"VarCharValue": "SFO4"
},
{
"VarCharValue": "IE"
},
{
"VarCharValue": "/test-image-2.jpeg"
},
{
"VarCharValue": "200"
}
]
}
],
"ResultSetMetadata": {
"ColumnInfo": [
{
"CatalogName": "hive",
"SchemaName": "",
"TableName": "",
"Name": "date",
"Label": "date",
"Type": "date",
"Precision": 0,
"Scale": 0,
"Nullable": "UNKNOWN",
"CaseSensitive": false
},
{
"CatalogName": "hive",
"SchemaName": "",
"TableName": "",
"Name": "location",
"Label": "location",
"Type": "varchar",
"Precision": 2147483647,
"Data": [
"Scale": 0,
"Nullable": "UNKNOWN",
"CaseSensitive": true
},
{
"CatalogName": "hive",
"SchemaName": "",
"TableName": "",
"Name": "browser",
"Label": "browser",
"Type": "varchar",
"Precision": 2147483647,
"Scale": 0,
"Nullable": "UNKNOWN",
"CaseSensitive": true
}
]
}
},
"UpdateCount": 0
}
The above output is just an example, but that is the kind of result I'm expecting. When I run this in the AWS CLI:
aws athena --region "us-west-2" get-query-results --query-execution-id a1b2c3d4-5678-90ab-cdef-EXAMPLE11111
I get the expected output, so I'm unable to understand why I can't get the same result via lambda.
Thank you so much for all the help! :)
It actually looks like the code is fine. Looking at the response, it shows that there is an Object within ResultSet.Rows. Try stringifying the result before logging, so that the handler looks like this:
exports.handler = function(event, context, callback) {
  athena.getQueryResults({
    QueryExecutionId: "a1b2c3d4-5678-90ab-cdef-EXAMPLE11111"
  }, function(err, data) {
    if (err) console.log(err);
    else {
      console.log("Body: ", JSON.stringify(data, null, 2));
    }
  });
};
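If you prefer not to stringify, another option (a sketch using Node's built-in util module; the rest is the same handler as above) is to raise the inspection depth when logging:
var util = require('util');

exports.handler = function(event, context, callback) {
  athena.getQueryResults({
    QueryExecutionId: "a1b2c3d4-5678-90ab-cdef-EXAMPLE11111"
  }, function(err, data) {
    if (err) console.log(err);
    // depth: null expands nested objects instead of printing [Object]
    else console.log("Body: ", util.inspect(data, { depth: null }));
  });
};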
// Give the title:
row.push({"ticker" : PX_Hist[j]['ticker']});

// Calculate data with different timeframe parameters
const timeframes = [5, 10, 90, 120, 250, 500];
for (let t = 0; t <= timeframes.length - 1; t++) {
  row.push({"move" : (PX_Hist[j]['px'][PX_Hist[j]['px'].length - 1]["adjusted_close"] / PX_Hist[j]['px'][PX_Hist[j]['px'].length - timeframes[t]]["adjusted_close"] - 1)});
}
I am creating the following output with this code.
[
[
{
"ticker": "JPM"
},
{ "move": 0.01405944118170499 },
{ "move": 0.0337445573294628 },
{ "move": 0.1692882281117576 },
{ "move": 0.07636499188035195 },
{ "move": 0.8151371865267423 },
{ "move": 0.4537049320855997 }
],
[
{
"ticker": "C"
},
{ "move": -0.01295986622073586 },
{ "move": 0.002689694224235595 },
{ "move": 0.05544868117343582 },
{ "move": -0.0457495911125243 },
{ "move": 0.7837535634777528 },
{ "move": 0.05665004788714745 }
],
[
{
"ticker": "C"
},
{ "move": -0.01295986622073586 },
{ "move": 0.002689694224235595 },
{ "move": 0.05544868117343582 },
{ "move": -0.0457495911125243 },
{ "move": 0.7837535634777528 },
{ "move": 0.05665004788714745 }
],
]
I need to transpose the above array to something that I can easily bind to a table like below:
[{"ticker": "JPM", "5": 0.01405944118170499,"10": 0.0337445573294628,"90":
0.1692882281117576,"120": 0.07636499188035195,"250": 0.8151371865267423,"500":
0.4537049320855997}]
Any words of advice about how to do this in an elegant way?
You can do something like this, using map and a for loop. But honestly I find this unnecessary - you could just do it from the get-go in your loop. Instead of pushing to row, you could try:
row[0][timeframes[t]] = "that long thing you have there"
const arr = [
[{
"ticker": "JPM"
},
{
"move": 0.01405944118170499
},
{
"move": 0.0337445573294628
},
{
"move": 0.1692882281117576
},
{
"move": 0.07636499188035195
},
{
"move": 0.8151371865267423
},
{
"move": 0.4537049320855997
}
],
[{
"ticker": "C"
},
{
"move": -0.01295986622073586
},
{
"move": 0.002689694224235595
},
{
"move": 0.05544868117343582
},
{
"move": -0.0457495911125243
},
{
"move": 0.7837535634777528
},
{
"move": 0.05665004788714745
}
],
[{
"ticker": "C"
},
{
"move": -0.01295986622073586
},
{
"move": 0.002689694224235595
},
{
"move": 0.05544868117343582
},
{
"move": -0.0457495911125243
},
{
"move": 0.7837535634777528
},
{
"move": 0.05665004788714745
}
],
]
const flatObj = (arr) => {
const flatObject = {};
const keys = ["ticker", "5", "10", "90", "120", "250", "500"]
for (let i = 0; i < arr.length; i++) {
for (const property in arr[i]) {
flatObject[keys[i]] = arr[i][property];
}
};
return flatObject;
}
const result = arr.map(ar => flatObj(ar))
console.log(result)
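For completeness, a rough sketch of the "build it in the loop" idea mentioned at the top of this answer, assuming the PX_Hist shape from the question:
const timeframes = [5, 10, 90, 120, 250, 500];
const rows = [];

for (let j = 0; j < PX_Hist.length; j++) {
  const px = PX_Hist[j]['px'];
  // start the row as a single flat object instead of an array of objects
  const row = { ticker: PX_Hist[j]['ticker'] };

  for (let t = 0; t < timeframes.length; t++) {
    // key each move by its timeframe ("5", "10", ...) right away
    row[timeframes[t]] =
      px[px.length - 1]['adjusted_close'] / px[px.length - timeframes[t]]['adjusted_close'] - 1;
  }

  rows.push(row); // already in the table-friendly shape from the question
}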
I'm trying to query my dataset for two purposes:
Match a term (resellable = true)
Order the results by their price, lowest to highest
Data set/doc is:
"data" : {
"resellable" : true,
"startingPrice" : 0,
"id" : "4emEe_r_x5DRCc5",
"buyNowPrice" : 0.006493, //Changes per object
"sub_title" : "test 1",
"title" : "test 1",
"category" : "Education",
}
//THREE OBJECTS WITH THE VALUES OF 0.006, 0.7, 1.05 FOR BUYNOWPRICE
I have three of these objects, each with a different buyNowPrice.
Query with agg is:
{
"query": {
"bool": {
"must": [
{
"term": {
"data.resellable": true
}
}
]
}
},
"from": 0,
"size": 5,
"aggs": {
"lowestPrice": {
"terms": {
"field": "data.buyNowPrice",
"order": {
"lowest_price": "desc"
}
},
"aggs": {
"lowest_price": {
"min": {
"field": "data.buyNowPrice"
}
},
"lowest_price_top_hits": {
"top_hits": {
"size": 5,
"sort": [
{
"data.buyNowPrice": {
"order": "desc"
}
}
]
}
}
}
}
}
}
The query works fine, and the results are the 3 objects that have resellable = true.
The issue is that the aggregation is not ordering the results by the lowest buyNowPrice.
In the results, the order of buyNowPrice is: 1.06, 0.006, 0.7 - which is not ordered properly.
Switching to desc has no effect, so I don't believe the aggregation is running at all?
EDIT:
Using the suggestion below, my query now looks like:
{
"query": {
"bool": {
"must": [
{
"term": {
"data.resellable": true
}
}
]
}
},
"from": 0,
"size": 5,
"aggs": {
"lowestPrice": {
"terms": {
"field": "data.buyNowPrice",
"order": {
"lowest_price": "asc"
}
},
"aggs": {
"lowest_price": {
"min": {
"field": "data.buyNowPrice"
}
},
"lowest_price_top_hits": {
"top_hits": {
"size": 5
}
}
}
}
}
}
With the results of the query being:
total: { value: 3, relation: 'eq' },
max_score: 0.2876821,
hits: [
{
_index: 'education',
_type: 'listing',
_id: '4emEe_r_x5DRCc5', <--- buyNowPrice of 0.006
_score: 0.2876821,
_source: [Object]
},
{
_index: 'education',
_type: 'listing',
_id: '4ee_r_x5DRCc5', <--- buyNowPrice of 1.006
_score: 0.18232156,
_source: [Object]
},
{
_index: 'education',
_type: 'listing',
_id: '4444_r_x5DRCc5', <--- buyNowPrice of 0.7
_score: 0.18232156,
_source: [Object]
}
]
}
EDIT 2:
If I remove the query for resellable = true, the aggregation sorts properly and returns the items in the proper order. But with the resellable query included, it does not.
I'm assuming this has to do with the _score property overriding the sorting from the aggregation? How would this be fixed?
You can use a bucket sort aggregation, which is a parent pipeline aggregation that sorts the buckets of its parent multi-bucket aggregation. Zero or more sort fields may be specified together with the corresponding sort order.
Here is a working example (using the same index data as given in the question), with the search query and the search result.
Search Query:
{
"query": {
"bool": {
"must": [
{
"term": {
"data.resellable": true
}
}
]
}
},
"from": 0,
"size": 5,
"aggs": {
"source": {
"terms": {
"field": "data.buyNowPrice"
},
"aggs": {
"latest": {
"top_hits": {
"_source": {
"includes": [
"data.buyNowPrice",
"data.id"
]
}
}
},
"highest_price": {
"max": {
"field": "data.buyNowPrice"
}
},
"bucket_sort_order": {
"bucket_sort": {
"sort": {
"highest_price": {
"order": "desc"
}
}
}
}
}
}
}
}
Search Result:
"buckets": [
{
"key": 1.0499999523162842,
"doc_count": 1,
"highest_price": {
"value": 1.0499999523162842
},
"latest": {
"hits": {
"total": {
"value": 1,
"relation": "eq"
},
"max_score": 0.08701137,
"hits": [
{
"_index": "stof_64364468",
"_type": "_doc",
"_id": "3",
"_score": 0.08701137,
"_source": {
"data": {
"id": "4emEe_r_x5DRCc5",
"buyNowPrice": 1.05 <-- note this
}
}
}
]
}
}
},
{
"key": 0.699999988079071,
"doc_count": 1,
"highest_price": {
"value": 0.699999988079071
},
"latest": {
"hits": {
"total": {
"value": 1,
"relation": "eq"
},
"max_score": 0.08701137,
"hits": [
{
"_index": "stof_64364468",
"_type": "_doc",
"_id": "2",
"_score": 0.08701137,
"_source": {
"data": {
"id": "4emEe_r_x5DRCc5",
"buyNowPrice": 0.7 <-- note this
}
}
}
]
}
}
},
{
"key": 0.006000000052154064,
"doc_count": 1,
"highest_price": {
"value": 0.006000000052154064
},
"latest": {
"hits": {
"total": {
"value": 1,
"relation": "eq"
},
"max_score": 0.08701137,
"hits": [
{
"_index": "stof_64364468",
"_type": "_doc",
"_id": "1",
"_score": 0.08701137,
"_source": {
"data": {
"id": "4emEe_r_x5DRCc5",
"buyNowPrice": 0.006 <-- note this
}
}
}
]
}
}
}
]
Update 1:
If you modify your search query as below:
{
"query": {
"bool": {
"must": [
{
"term": {
"data.resellable": true
}
}
]
}
},
"aggs": {
"lowestPrice": {
"terms": {
"field": "data.buyNowPrice",
"order": {
"lowest_price": "asc" <-- change the order here
}
},
"aggs": {
"lowest_price": {
"min": {
"field": "data.buyNowPrice"
}
},
"lowest_price_top_hits": {
"top_hits": {
"size": 5
}
}
}
}
}
}
Running the above search query, you will also get the required results.
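If you are running this from Node, a hedged sketch with the @elastic/elasticsearch client, v7.x style (the index name is a placeholder, not from the question):
const { Client } = require('@elastic/elasticsearch');
const client = new Client({ node: 'http://localhost:9200' });

async function lowestPriceFirst() {
  const { body } = await client.search({
    index: 'listings', // placeholder index name
    body: {
      query: { bool: { must: [{ term: { 'data.resellable': true } }] } },
      aggs: {
        lowestPrice: {
          terms: {
            field: 'data.buyNowPrice',
            order: { lowest_price: 'asc' } // buckets ordered by the min sub-aggregation
          },
          aggs: {
            lowest_price: { min: { field: 'data.buyNowPrice' } },
            lowest_price_top_hits: { top_hits: { size: 5 } }
          }
        }
      }
    }
  });
  return body.aggregations.lowestPrice.buckets;
}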
I'm new to MongoDB. I have read the documentation and I'm trying to insert a page document with referenced ids of other documents, but validation fails.
These are my schema validation rules:
db.createCollection("page", {
capped: true,
size: 4500000000,
max: 6,
validator: {
$jsonSchema: {
bsonType: "object",
required: [ "title", "url", "elements" ],
additionalProperties: false,
properties: {
title: {
bsonType: "object",
required: [ "content_id" ],
properties: {
content_id: {
bsonType: "objectId"
}
}
},
url: {
bsonType: "string"
},
elements: {
bsonType: "array",
items: {
bsonType: "object",
required: [ "order" , "element_id" ],
properties: {
order: {
bsonType: "int"
},
element_id: {
bsonType: "objectId"
}
}
}
}
}
}
}
});
And this is what I try to insert (I put the future ObjectId in a variable; the content and element variables already hold the ids I need):
var page1 = ObjectId();
db.page.insertOne(
{
"_id": page1,
"title": {
"content_id": content5
},
"url": "/home",
"elements": [
{
"order": 1,
"element_id": element1
},
{
"order": 2,
"element_id": element2
},
{
"order": 3,
"element_id": element3
},
{
"order": 4,
"element_id": element4
}
]
}
);
Why do I get this error? I don't understand what the problem is. Is this schema consistent with what I'm trying to insert?
2020-04-07T18:55:35.513+0200 E QUERY [js] WriteError({
"index" : 0,
"code" : 121,
"errmsg" : "Document failed validation",
"op" : {
"_id" : ObjectId("5e8c72698d808f037e6adede"),
"title" : {
"content_id" : ObjectId("5e8c72128d808f037e6aded6")
},
"url" : "/home",
"elements" : [
{
"order" : 1,
"element_id" : ObjectId("5e8c724d8d808f037e6adeda")
},
{
"order" : 2,
"element_id" : ObjectId("5e8c724d8d808f037e6adedb")
},
{
"order" : 3,
"element_id" : ObjectId("5e8c724d8d808f037e6adedc")
},
{
"order" : 4,
"element_id" : ObjectId("5e8c724d8d808f037e6adedd")
}
]
}
}) :
WriteError({
"index" : 0,
"code" : 121,
"errmsg" : "Document failed validation",
"op" : {
"_id" : ObjectId("5e8c72698d808f037e6adede"),
"title" : {
"content_id" : ObjectId("5e8c72128d808f037e6aded6")
},
"url" : "/home",
"elements" : [
{
"order" : 1,
"element_id" : ObjectId("5e8c724d8d808f037e6adeda")
},
{
"order" : 2,
"element_id" : ObjectId("5e8c724d8d808f037e6adedb")
},
{
"order" : 3,
"element_id" : ObjectId("5e8c724d8d808f037e6adedc")
},
{
"order" : 4,
"element_id" : ObjectId("5e8c724d8d808f037e6adedd")
}
]
}
})
WriteError#src/mongo/shell/bulk_api.js:458:48
mergeBatchResults#src/mongo/shell/bulk_api.js:855:49
executeBatch#src/mongo/shell/bulk_api.js:919:13
Bulk/this.execute#src/mongo/shell/bulk_api.js:1163:21
DBCollection.prototype.insertOne#src/mongo/shell/crud_api.js:264:9
#(shell):1:1
Thank you for your answers.
Okay, I found the solution: the problem was the additionalProperties rule. If it is set to false, you have to declare the _id property in the schema before inserting data, because otherwise _id is considered an additional property.
The bsonType: "int" can also cause an error (the shell stores plain numeric literals as doubles), so use number instead.
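As an alternative, if you want to keep bsonType: "int", a sketch of the insert using the legacy mongo shell's NumberInt() wrapper, reusing the variables from the question:
var page1 = ObjectId();
db.page.insertOne({
  "_id": page1,
  "title": { "content_id": content5 },
  "url": "/home",
  "elements": [
    // NumberInt() forces a 32-bit integer, which satisfies bsonType: "int"
    { "order": NumberInt(1), "element_id": element1 },
    { "order": NumberInt(2), "element_id": element2 }
  ]
});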
So with this validator rule I can insert my data:
db.createCollection("page", {
capped: true,
size: 4500000000,
max: 6,
validator: {
$jsonSchema: {
bsonType: "object",
required: [ "_id", "title", "url", "elements" ],
additionalProperties: false,
properties: {
_id: {
bsonType: "objectId"
},
title: {
bsonType: "object",
required: [ "content_id" ],
properties: {
content_id: {
bsonType: "objectId"
}
}
},
url: {
bsonType: "string"
},
elements: {
bsonType: "array",
items: {
bsonType: "object",
required: [ "order" , "element_id" ],
properties: {
order: {
bsonType: "number"
},
element_id: {
bsonType: "objectId"
}
}
}
}
}
}
}
});
I have this kind of data structure from an API, and I've been asked to group it accordingly.
INPUT
{
0: {
id: 0,
name: 'foo',
categories: [
'Category001',
'Category002/sub-category001'
]
},
1: {
id: 1,
name: 'bar',
categories: [
'Category002/sub-category001'
]
},
2: {
id: 2,
name: 'bazz',
categories: [
'Category001',
'Category002',
'Category003'
]
},
3: {
id: 3,
name: 'rem',
categories: [
'Category001/sub-category002/nth-category008',
'Category001/sub-category004',
'Category003/sub-category001'
]
}
}
DESIRED OUTPUT
{
0: {
"name": "Category001",
"isCategory": true,
"children": [
{
"id": 0,
"name": "foo",
"categoryPath": "Category001",
"isCategory": false
},
{
"id": 2,
"name": "bazz",
"categoryPath": "Category001",
"isCategory": false
},
{
"name": "sub-category004",
"categoryPath": "Category001/sub-category004",
"children": [
{
"id": 3,
"name": "rem",
"isCategory": false,
}
],
"isCategory": true
},
{
"name": "sub-category002",
"categoryPath": "Category001/sub-category002",
"children": [
{
"name": "sub-category008",
"categoryPath": "Category001/sub-category002/nth-category008",
"children": [
{
"id": 3,
"name": "rem",
"isCategory": false
}
],
"isCategory": true
},
],
"isCategory": true
},
{
"name": "sub-category002",
"categoryPath": "Category001/sub-category002",
"isCategory": true
}
],
"categoryPath": ""
},
1: {
"name": "Category002",
"isCategory": true,
"children": [
{
"id": 2,
"name": "bazz",
"categoryPath": "Category002",
"isCategory": false
},
{
"name": "sub-category001",
"categoryPath": "Category002/sub-category001",
"children": [
{
"id": 0,
"name": "foo",
"isCategory": false,
},
{
"id": 1,
"name": "bar",
"isCategory": false,
}
],
"isCategory": true
}
],
"categoryPath": ""
},
2: {
"name": "Category003",
"isCategory": true,
"children": [
{
"id": 2,
"name": "bazz",
"categoryPath": "Category002",
"isCategory": false
},
{
"name": "sub-category001",
"categoryPath": "Category003/sub-category001",
"children": [
{
"id": 0,
"name": "foo",
"isCategory": false,
}
],
"isCategory": true
}
],
"categoryPath": ""
}
}
Question
Is there an easy way of doing it in lodash?
A simple groupBy won't do it though, LOL:
var groups = _.chain(items)
.groupBy('categories')
.pairs()
.value();
Custom processing cannot be avoided here in addition to the lodash functions. The following is an attempt to use lodash as much as possible:
var transformed = _(input)
.transform(function (result, item) {
_(item.categories)
.map(function (categoryPath) {
return categoryPath.split('/');
})
.each(function (categoryPathParts) {
var dict = result;
var par;
var fullpath = _.reduce(categoryPathParts, function (path, category, i) {
path += (i > 0 ? '/' : '') + category;
if (!(par = _.find(dict, 'name', category))) {
dict.push(par = {
name: category,
categoryPath: (i > 0 ? path : ''),
isCategory: true,
children: []
});
}
dict = _.find(dict, 'name', category).children;
return path;
}, "")
par.children.push({
id: item.id,
name: item.name,
isCategory: false,
categoryPath: fullpath,
});
}).value();
}, [])
.transform(function (resObj, resCat, i) {
resObj[i] = resCat;
}, {});
var input = {
0: {
id: 0,
name: 'foo',
categories: [
'Category001',
'Category002/sub-category001']
},
1: {
id: 1,
name: 'bar',
categories: [
'Category002/sub-category001']
},
2: {
id: 2,
name: 'bazz',
categories: [
'Category001',
'Category002',
'Category003']
},
3: {
id: 3,
name: 'rem',
categories: [
'Category001/sub-category002/nth-category008',
'Category001/sub-category004',
'Category003/sub-category001']
}
};
var transformed = _(input)
.transform(function (result, item) {
_(item.categories)
.map(function (categoryPath) {
return categoryPath.split('/');
})
.each(function (categoryPathParts) {
var dict = result;
var par;
var fullpath = _.reduce(categoryPathParts, function (path, category, i) {
path += (i > 0 ? '/' : '') + category;
if (!(par = _.find(dict, 'name', category))) {
dict.push(par = {
name: category,
categoryPath: (i > 0 ? path : ''),
isCategory: true,
children: []
});
}
dict = _.find(dict, 'name', category).children;
return path;
}, "")
par.children.push({
id: item.id,
name: item.name,
isCategory: false,
categoryPath: fullpath,
});
}).value();
}, [])
.transform(function (resObj, resCat, i) {
resObj[i] = resCat;
}, {});
document.getElementById('resultArea').textContent = JSON.stringify(transformed, null, 2);
textarea {
width: 100%;
}
<script src="http://cdnjs.cloudflare.com/ajax/libs/lodash.js/3.10.0/lodash.min.js"></script>
<textarea id="resultArea" rows="111" ></textarea>