convert object of array to object javascript - javascript

I have an object that includes arrays of arrays, like the one below
objArray= { hh:[['a','b'],['c','d']],jj:[['x','y'],['z','w']]}
and what I want is here to change array to object:
convertedObject
{
hh:[
{id:'a', name:'b'},
{id:'c', name:'d'}
],
jj:[
{id:'x', name:'y'},
{id:'z', name:'w'}
],
}
I wrote 2 functions to do this but it needs a little change
// Converts an array of [id, name] pairs into an array of { id, name } objects.
function convertToObjects(arr) {
  return Object.values(arr).map(([id, name]) => ({ id, name }));
}
// Converts each array-of-pairs value of the input object while PRESERVING
// its keys. The previous version mapped over Object.keys(...), which returns
// an array — so the result was indexed 0, 1, ... instead of keyed by
// 'hh', 'jj', etc. (exactly the wrong output shown below).
function convertChoicesToObjects(choicesArray) {
  const result = {};
  for (const key of Object.keys(choicesArray)) {
    result[key] = convertToObjects(choicesArray[key]);
  }
  return result;
}
const convertedObject=convertChoicesToObjects(objArray)
My function output is:
{
0:[
{id:'a', name:'b'},
{id:'c', name:'d'}
],
1:[
{id:'x', name:'y'},
{id:'z', name:'w'}
],
}

iterate over keys and use map
// Walk each key of the source object and rebuild its pair-arrays as objects.
const objArray = {"hh": [["a", "b"], ["c", "d"]], "jj": [["x", "y"], ["z", "w"]]};
const output = {};
for (const key of Object.keys(objArray)) {
  output[key] = objArray[key].map(([first, second]) => ({ "id": first, "name": second }));
}
console.log(output);

You could use map and forEach methods.
// Declare the object: the original assigned to an undeclared name, creating
// an implicit global (and a ReferenceError in strict mode / ES modules).
const objArray = { a: [['a', 'b'], ['c', 'd']], b: [['x', 'y'], ['z', 'w']] };
// Rewrite each value in place: every [id, name] pair becomes { id, name }.
Object.keys(objArray).forEach((key) => {
  objArray[key] = objArray[key].map(([id, name]) => ({ id, name }));
});
console.log(objArray);

The output can be achieved using a simple for...in loop together with the .map() method of arrays:
const input = {
  a: [['a', 'b'], ['c', 'd']],
  b: [['x', 'y'], ['z', 'w']]
};
// Transforms each [id, name] pair array into { id, name } objects,
// preserving the object's keys.
const transform = (input) => {
  const output = {};
  // `const` added: the original wrote `for (key in input)`, which leaks an
  // implicit global `key` (and throws in strict mode / ES modules).
  for (const key in input) {
    output[key] = input[key].map(([id, name]) => ({ id, name }));
  }
  return output;
};
console.log(transform(input));

You can use reduce()
const objArray = { a:[['a','b'],['c','d']],b:[['x','y'],['z','w']]};
// Outer fold walks the keys; for each key the pair-arrays are converted
// one at a time into { id, name } objects.
const data = Object.keys(objArray).reduce((acc, key) => {
  const converted = [];
  for (const pair of objArray[key]) {
    converted.push({ id: pair[0], name: pair[1] });
  }
  acc[key] = converted;
  return acc;
}, {});
console.log(data);

You could build new object with Object.fromEntries.
This approach uses another array for the wanted keys of the objects.
// One declaration per binding with `const`, instead of a comma-chained
// `var` list (easy to misread and function-scoped).
const data = { a: [['a', 'b'], ['c', 'd']], b: [['x', 'y'], ['z', 'w']] };
// The target property names, in positional order.
const keys = ['id', 'name'];
const result = Object.fromEntries(
  Object
    .entries(data)
    .map(([k, v]) => [
      k,
      // Zip each tuple with `keys` to build { id: ..., name: ... }.
      v.map(a => Object.fromEntries(keys.map((key, i) => [key, a[i]])))
    ])
);
console.log(result);
.as-console-wrapper { max-height: 100% !important; top: 0; }

You can write programs like little stories using prgm -
const myConvert = (o = {}) =>
prgm // given input object, o
( o // starting with o
, Object.entries // get its entries
, map (convert1) // then map over them using convert1
, Object.fromEntries // then make a new object
)
const convert1 = ([ key, values ]) =>
prgm // given input key and values
( values // starting with values
, map (([ id, name ]) => ({ id, name })) // map over arrays creating objs
, r => [ key, r ] // then create a key/result pair
)
const input =
{ a: [ ['a','b']
, ['c','d']
]
, b: [ ['x','y']
, ['z','w']
]
}
console.log
( myConvert (input)
)
// => { ... }
To make this possible, we need -
const prgm = (x, ...fs) =>
fs .reduce ((r, f) => f (r), x)
const map = f => xs =>
xs .map (x => f (x))
But perhaps a better name for myConvert is objectMap. To make it generic, we will make the conversion function convert1 a parameter. And since there's no need to modify the keys of the input object, we will only call the conversion function on the object's values -
const identity = x =>
x
const objectMap = (f = identity) => (o = {}) =>
prgm // given mapper, f, and object, o
( o // starting with o
, Object.entries // get its entries
, map (([ k, v ]) => [ k, f (v) ]) // transform each v using f
, Object.fromEntries // then make a new object
)
Now using generic function objectMap, we can write myConvert as a specialization. This isolates the unique essence of your transformation and detangles it from the rest of your program -
const myConvert =
objectMap // using generic objectMap
( map (([ id, name ]) => ({ id, name })) // convert arrays to objects
)
const input =
{ a: [ ['a','b']
, ['c','d']
]
, b: [ ['x','y']
, ['z','w']
]
}
console.log
( myConvert (input)
)
// => { ... }
Hopefully this shows the power of thinking about your programs from different perspectives. Run the snippet below to confirm the results in your browser -
// Left-to-right function pipeline: threads x through every function in fs.
const prgm = (x, ...fs) => fs.reduce((acc, fn) => fn(acc), x);

// Curried Array#map.
const map = (f) => (xs) => xs.map((x) => f(x));

// Returns its argument unchanged.
const identity = (x) => x;

// Maps only the VALUES of an object, keeping its keys intact.
const objectMap = (f = identity) => (o = {}) =>
  prgm(
    o,
    Object.entries,
    map(([key, value]) => [key, f(value)]),
    Object.fromEntries
  );

// ---

// Specialisation: every [id, name] pair becomes an { id, name } object.
const myConvert = objectMap(map(([id, name]) => ({ id, name })));

const input = {
  a: [['a', 'b'], ['c', 'd']],
  b: [['x', 'y'], ['z', 'w']],
};

console.log(myConvert(input));

Related

Javascript groupBy implementation only produces 1 group

I am trying to implement my own groupBy method and, everything I see says this should work, but I only get 1 group when I use it with an array, even though the grouping is fine. What am I missing:
// Merges an array of { key: [values] } objects into one object.
// NOTE(review): Object.keys(a) takes keys only from the accumulator, so any
// key that first appears in a later object `b` is silently dropped — this is
// why groupBy below loses groups (see the accepted answer's fix).
const merge = (array) => array.reduce((a, b) => Object.keys(a).map(key => {
return {
[key]: a[key].concat(b[key] || [])
};
}).reduce(((a,b) => Object.assign({},a,b))))
// NOTE(review): extending Array.prototype is generally discouraged; a
// standalone groupBy(arr, fn) helper avoids surprising other code.
Array.prototype.groupBy = function (grouper) {
// One single-key object per element: { groupKey: [element] }.
const groups = this.map(e => {
return {
[grouper(e)]: [e]
};
})
console.log("Groups:\n",JSON.stringify(groups))
return merge(groups)
}
const one = {
1: [1,2,3],
0: [4,5,6]
}
const two = {
1: [7,8,9],
0: [10,11,12]
}
const three = {
1: [13],
0: [16]
}
const array1 = merge([one,two,three])
console.log("case1:\n",JSON.stringify(array1,null,4))
const array2 = [1,2,3,4,5,6,7,9,10].groupBy(e => e % 2)
console.log("case2:\n",JSON.stringify(array2,null,4))
Outputs below, expected is 'case1':
case1:
{
"0": [
4,
5,
6,
10,
11,
12,
16
],
"1": [
1,
2,
3,
7,
8,
9,
13
]
}
Groups:
[{"1":[1]},{"0":[2]},{"1":[3]},{"0":[4]},{"1":[5]},{"0":[6]},{"1":[7]},{"1":[9]},{"0":[10]}]
case2:
{
"1": [
1,
3,
5,
7,
9
]
}
The first reduce in your merge method has a dependency on the keys of the first object in the array.
objs.reduce((a, b) => Object
.keys(a)
// ^-- Takes only the keys from `a`
.map(key => ({ [key]: a[key].concat(b[key] || []) })
// ^^^^^^-- only merges in those keys from `b`
)
To see the issue in action, take away the 0 or 1 key from your one object.
To fix it without deviating from your current approach too much, you could make sure you take both keys from a and b:
objs.reduce((a, b) => Object
.keys(Object.assign({}, a, b))
// etc...
)
It still feels a bit wasteful to first map to key-value-pair type objects and then merge those.
Final solution (removes another bug):
Array.prototype.groupBy = function (grouper) {
  // Union of OWN keys across all argument objects. The spread is essential:
  // the original `Object.assign({}, objs)` copied the ARRAY's indices
  // ("0", "1", ...) instead of the objects' keys, so every real group key
  // was lost and each merge step produced empty buckets.
  const keysOf = (...objs) => Object.keys(Object.assign({}, ...objs))
  // One single-key object per element: { groupKey: [element] }.
  const groups = this.map(e => {
    return {
      [grouper(e)]: [e]
    };
  })
  // Fold the singletons together, concatenating per-key arrays; missing
  // keys on either side default to [] so no group is ever dropped.
  const merge = (array) => array.reduce((a, b) =>
    keysOf(a, b).map(key => {
      return {
        [key]: (a[key] || []).concat(b[key] || [])
      };
    }).reduce((a, b) => Object.assign({}, a, b)))
  return merge(groups)
}
const array2 = [1,2,3,4,5,6,7,9,10].groupBy(e => e % 2)
console.log("case2:\n",JSON.stringify(array2,null,2))
#user3297291 points out the issue. I would recommend a different merge altogether. First we write merge2 helper which destructively merges b into a -
// Destructively folds the entries of `b` into `a`: for each key of `b`,
// appends b's value array onto a's existing array, or installs it when the
// key is absent. NOTE: `a` is mutated and returned — callers that want
// purity seed the fold with a fresh {}.
function merge2 (a, b)
{ for (const [k, v] of Object.entries(b))
if (a[k])
a[k] = [ ...a[k], ...v]
else
a[k] = v
return a
}
Now you can write merge to accept any number of objects. Since it initialises the reduce with a fresh {}, no input objects will be mutated -
const merge = (...all) =>
all.reduce(merge2, {})
Now groupBy works the way you write it, simply applying the mapped elements to merge -
const groupBy = (arr, f) =>
merge(...arr.map(v => ({ [f(v)]: [v] })))
const result =
groupBy([1,2,3,4,5,6,7,9,10], e => e % 2)
Expand the snippet below to verify the result in your own browser -
// Folds the entries of `source` into `target` (mutating `target`):
// arrays stored under the same key are concatenated.
function merge2(target, source) {
  for (const [key, values] of Object.entries(source)) {
    target[key] = target[key] ? [...target[key], ...values] : values;
  }
  return target;
}

// Merges any number of { key: [values] } objects into a FRESH object,
// so none of the inputs are mutated.
const merge = (...all) => all.reduce(merge2, {});

// Groups the array's elements by the key computed by `f`.
const groupBy = (arr, f) => merge(...arr.map((v) => ({ [f(v)]: [v] })));

const result = groupBy([1, 2, 3, 4, 5, 6, 7, 9, 10], (e) => e % 2);
console.log(JSON.stringify(result));
{"0":[2,4,6,10],"1":[1,3,5,7,9]}
If you want to make merge2 using a pure functional expression, you can write it as -
// Pure-expression variant: fold b's entries into a with reduce.
// Note Object.assign(r, ...) still mutates the accumulator (which is `a`).
const merge2 = (a, b) =>
  Object.entries(b).reduce(
    (acc, [key, values]) =>
      Object.assign(acc, {
        [key]: acc[key] ? [...acc[key], ...values] : values,
      }),
    a
  );
You could skip the whole merge song and dance and write groupBy in a more direct way -
// Applies f to v; lets an intermediate value be named inside an expression.
const call = (f, v) =>
  f(v)

// Direct groupBy: one reduce pass, building a fresh object at each step so
// the accumulator is never shared.
const groupBy = (arr, f) =>
  arr.reduce((acc, value) => {
    const key = f(value);
    const bucket = acc[key] ? [...acc[key], value] : [value];
    return { ...acc, [key]: bucket };
  }, {});
const result =
groupBy([1,2,3,4,5,6,7,9,10], e => e % 2)
console.log(JSON.stringify(result))
{"0":[2,4,6,10],"1":[1,3,5,7,9]}
Another option is to use Map as it was designed, and convert to an Object after -
// Applies f to v; lets an intermediate value be named inside an expression.
const call = (f, v) =>
  f(v)

// Groups with a Map (built for dynamic keys), then converts the result to
// an array of single-key plain objects, preserving insertion order.
const groupBy = (arr, f) => {
  const buckets = arr.reduce((m, value) => {
    const key = f(value);
    // Map#set returns the map, so the accumulator threads through.
    return m.set(key, m.has(key) ? m.get(key).concat([value]) : [value]);
  }, new Map());
  return Array.from(buckets.entries(), ([key, values]) => ({ [key]: values }));
};

const result = groupBy([1, 2, 3, 4, 5, 6, 7, 9, 10], (e) => e % 2);
console.log(JSON.stringify(result));

Javascript filter but return keys rather than values

This function works fine but I don't want it to return the values, but the keys:
// `search` is reassigned on the next line, so it must be declared with
// `let` — reassigning a `const` binding throws a TypeError.
let search = [2342,1900,1800,2310]
search = search.filter(function (el) { return el > 2200; });
So it should return [0,3] rather than [2342,2310]
You can get the keys, then use them to access the array, and filter based on that:
const search = [2342,1900,1800,2310];
// Every index whose element clears the threshold, converted back to a
// number (Object.keys yields indices as strings).
const result = [];
for (const key of Object.keys(search)) {
  if (search[key] > 2200) {
    result.push(Number(key));
  }
}
console.dir(result)
Or, for something more interesting:
const search = [2342,1900,1800,2310];

// Compose functions left to right.
const pipe = (...fns) => (x) => {
  let value = x;
  for (const fn of fns) value = fn(value);
  return value;
};

// Curried array helpers.
const filter = (f) => (a) => a.filter(f);
const map = (f) => (a) => a.map(f);

// Object -> [[key, value], ...]
const toPairs = (o) => Object.keys(o).map((k) => [k, o[k]]);

// Element access / comparison / conversion primitives.
const take = (i) => (a) => a[i];
const gte = (a) => (b) => b >= a;
const toNumber = (x) => Number(x);

// "Is this value at least 2200?"
const bigEnough = gte(2200);

// Pair -> is its value (index 1) big enough?
const itemOneIsBigEnough = pipe(take(1), bigEnough);

// Pair -> its key (index 0), as a number.
const getTheFirstItemAsANumber = pipe(take(0), toNumber);

// Pairs -> keep the big values -> their keys as numbers.
const doTheThing = pipe(
  toPairs,
  filter(itemOneIsBigEnough),
  map(getTheFirstItemAsANumber)
);

const result = doTheThing(search);
console.dir(result);
The easiest way is to just Array#reduce the items:
const search = [2342,1900,1800,2310]
// Single reduce pass: keep the index whenever the element passes the test.
const result = search.reduce(
  (indexes, value, index) => value > 2200 ? [...indexes, index] : indexes,
  []
);
console.log(result);
You could get the keys and filter by value check.
// Array#keys() yields the indices 0..length-1; keep those whose element
// passes the value check.
const search = [2342, 1900, 1800, 2310];
const indices = Array.from(search.keys()).filter((i) => search[i] > 2200);
console.log(indices);
// Map each element to its index (or undefined), then drop the undefined
// slots. Note: this pattern only works because undefined is never a
// wanted result value here.
let search = [2342, 1900, 1800, 2310]
search = search
  .map((el, i) => (el > 2200 ? i : undefined))
  .filter((el) => el !== undefined);
console.log(search);

converting lodash's pickby in to javascript

how can I make lodash's pickby function in javascript? I have found the following one in "you don't need lodash"
// Keeps every OWN property whose value is not null, false, or undefined.
// (Unlike lodash's truthiness default, 0, '' and NaN are kept — the checks
// are explicit.)
function pickBy(object) {
  const obj = {};
  for (const key in object) {
    // for...in also walks inherited enumerable properties; restrict to own
    // keys so a polluted/derived prototype can't leak into the result.
    if (!Object.hasOwn(object, key)) continue;
    const value = object[key];
    if (value !== null && value !== false && value !== undefined) {
      obj[key] = value;
    }
  }
  return obj;
}
but wondering other implementations
You could add a predicate function, as _.pickBy describes and use the entries and filter the data and build a new object.
// Keeps the entries whose value satisfies `predicate` (default: truthy).
function pickBy(object, predicate = v => v) {
  // Seed Object.assign with an explicit {} target: when no entry survives
  // the filter, the spread is empty and Object.assign() would be called
  // with no arguments, which throws — e.g. pickBy({}) crashed before
  // this fix (the failure pointed out further down the page).
  return Object.assign(
    {},
    ...Object
      .entries(object)
      .filter(([, v]) => predicate(v))
      .map(([k, v]) => ({ [k]: v }))
  );
}
To create _.pickBy(), you can use for...of with Object.entries(). If the predicate returns a truthy answer for the value, assign the key and value to the result object.
Note: if you need _.pickBy() and you don't want to import the entire lodash package, you can import just the pickBy module.
// pickBy: copy the own enumerable entries whose value satisfies the
// predicate (default: truthiness) into a new object.
function pickBy(object, predicate = v => v) {
  const picked = {};
  Object.entries(object).forEach(([key, value]) => {
    if (predicate(value)) {
      picked[key] = value;
    }
  });
  return picked;
}
console.log(pickBy({ a: 1, b: 0, c: 3 }));
console.log(pickBy({ a: 1, b: 0, c: 3 }, v => v < 3 ));
Now days you can get the entries, filter by the value, and convert back to an object with Object.fromEntries():
// Entries -> filter by value -> rebuild the object.
const pickBy = (object, predicate = v => v) => {
  const kept = Object.entries(object).filter((entry) => predicate(entry[1]));
  return Object.fromEntries(kept);
};
console.log(pickBy({ a: 1, b: 0, c: 3 }));
console.log(pickBy({ a: 1, b: 0, c: 3 }, v => v < 3 ));
Being
var object = { 'a': 1, 'b': '2', 'c': 3 };
and callback a method which is applied just to the values of the object (_.isNumber):
Object.keys(obj).reduce((prev, x) => callback(obj[x]) ? { ...prev, [x] : obj[x] } : prev , {});
Nina's solution fails when the object is {}.
Here is mine:
// Single fold: each entry that satisfies the predicate is spread into a
// fresh accumulator; the {} seed makes pickBy({}) safe.
const pickBy = (object, predicate = v => v) =>
  Object.entries(object).reduce(
    (acc, [key, value]) => (predicate(value) ? { ...acc, [key]: value } : acc),
    {}
  );

Reversing an Object.entries conversion

I am using Object.entries in order to get some values out of a nested object and filter it.
obj = Object.entries(obj)
.filter(([k, v]) => {
return true; // some irrelevant conditions here
});
My object ends up as an array of arrays, of keys and vals.
[['key1', val]['key2', val]['key3', val]]
Is there a straightforward way to map these back into an object? The original object structure is:
{ key:val, key2:val2, key3:val3 }
Sure, just use .reduce to assign to a new object:
const input = { key:'val', key2:'val2', key3:'val3' };
// Filter the entries (the condition is a stand-in), then rebuild an object
// by assigning each surviving pair onto the accumulator.
const kept = Object.entries(input).filter(([k, v]) => true); // some irrelevant conditions here
const output = kept.reduce((accum, [k, v]) => {
  accum[k] = v;
  return accum;
}, {});
console.log(output);
In modern browsers, you can also use Object.fromEntries which makes this even easier - you can just pass an array of entries, and it'll create the object from those entries.
const input = { key:'val', key2:'val2', key3:'val3' };
// Object.fromEntries rebuilds the object straight from filtered entries.
const survivingEntries = Object.entries(input).filter(([k, v]) => {
  return true; // some irrelevant conditions here
});
const output = Object.fromEntries(survivingEntries);
console.log(output);
For new browsers, use Object.fromEntries:
Object.fromEntries(arr);
For older js, it can still be a one liner.
arr.reduce((acc,[k,v])=>(acc[k]=v,acc),{})
Example:
Object.entries(sampleObject) // Turn object to array
.reduce((acc,[k,v])=>(acc[k]=v,acc),{}) // Turn it back to object.
Using Object.assign with a map that maps [k,v] => {[k]: v}
For example, the code below will only keep keys beginning with key
var obj = {
  key: 1,
  key2: 2,
  key3: 3,
  removed: 4,
  alsoRemoved: 5
}
// Keep only the entries whose key starts with "key": each survivor becomes
// a tiny { k: v } object, and Object.assign merges them into one result.
const singletons = Object.entries(obj)
  .filter(([k, v]) => k.startsWith('key'))
  .map(([k, v]) => ({ [k]: v }));
obj = Object.assign({}, ...singletons);
console.log(obj);
Using reduce with deconstruction and comma operator:
const input = { key:'val', key2:'val2', key3:'val3' };
// Same filter-then-rebuild, with an explicit-body reducer instead of the
// comma operator.
const output = Object.entries(input)
  .filter(([k, v]) => {
    return true; // some irrelevant conditions here
  })
  .reduce((acc, [k, v]) => {
    acc[k] = v;
    return acc;
  }, {});
console.log(output);
which should give the same functionality as CertainPerformance's answer with a bit more concise syntax
let entries = Object.entries({e: 'e', q: 'q'});
// Seed reduce with {}: without an initial value, reduce throws a TypeError
// on an empty entries array (and otherwise mutates the first mapped
// singleton via Object.assign).
let reverse = entries.map(([t, r]) => ({[t]: r})).reduce((pv, cv) => Object.assign(pv, cv), {});
console.log(reverse);
If you know exactly which entries you want to exclude, you can use object deconstruction combined with spreading:
// Drops the statically-known unwanted keys via rest destructuring; the
// remaining entries are returned as a fresh object.
function clean(obj) {
  const { unwanted1: _drop1, unwanted2: _drop2, ...wanted } = obj;
  return { ...wanted };
}
For some cases, this might be the cleanest solution.
// Rebuilds an object from an array of [key, value] pairs — the inverse of
// Object.entries.
function undoEntries(entered) {
  const output = {};
  for (const [key, value] of entered) {
    output[key] = value;
  }
  return output;
}
// Example
const obj = { a: 1, b: 2, c: 3};
const input = Object.entries(obj);
const output = undoEntries(input);
console.log(output);

Recursive post order tree traversal without creating new nodes

I want to define a generalized tail recursive tree traversal that works for all kinds of multi-way trees. This works fine with pre-order and level-order, but I'm having trouble to implement post order traversals. Here is the multi-way tree I am working with:
Desired order: EKFBCGHIJDA
As long as I don't care about tail recursion post order traversal is easy:
// Post-order depth-first traversal of a [value, children] node: visit
// every child first, then log the node's own value.
const postOrder = ([value, children]) => {
  for (const child of children) {
    postOrder(child);
  }
  console.log(`${value}`);
};
// Node: a value plus variadic children, encoded as [value, childArray].
const Node = (x, ...xs) => ([x, xs]);
// The example multi-way tree; its post-order is EKFBCGHIJDA.
const tree = Node("a",
Node("b",
Node("e"),
Node("f",
Node("k"))),
Node("c"),
Node("d",
Node("g"),
Node("h"),
Node("i"),
Node("j")));
// Prints one letter per line: e k f b c g h i j d a.
postOrder(tree);
The tail recursive approach, on the other hand, is quite cumbersome:
// Tail-recursive post-order traversal. `p` and `q` are the property names
// (here the array indices 0 and 1) of a node's value and children fields.
const postOrder = (p, q) => node => {
// `stack` holds the work remaining after the current descent.
const rec = ({[p]: x, [q]: forest}, stack) => {
if (forest.length > 0) {
const [node, ...forest_] = forest;
// Re-queue the remaining siblings, then the parent re-wrapped as a LEAF
// node — this is the node re-creation the question wants to eliminate.
stack.unshift(...forest_, Node(x));
return rec(node, stack);
}
else {
// Leaf (or an exhausted parent): emit its value, then resume from stack.
console.log(x);
if (stack.length > 0) {
const node = stack.shift();
return rec(node, stack);
}
else return null;
}
};
return rec(node, []);
};
// Node: a value plus variadic children, encoded as [value, childArray].
const Node = (x, ...xs) => ([x, xs]);
const tree = Node("a",
Node("b",
Node("e"),
Node("f",
Node("k"))),
Node("c"),
Node("d",
Node("g"),
Node("h"),
Node("i"),
Node("j")));
// Field selectors 0 (value) and 1 (children) match the encoding above.
postOrder(0, 1) (tree);
In particular, I'd like to avoid creating new nodes so that I can traverse arbitrary trees without having to know anything about their constructors. Is there a way to do this and still remain tail recursive?
stack-safe
My first answer solves this problem by writing our own functional iterator protocol. Admittedly, I was eager to share this approach as it's something I've explored in the past. Writing your own data structures is really fun and it can yield creative solutions to your problem - and you'd be bored if I gave out the easy answers first, wouldn't you?
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) =>
{
const loop = (acc, [ node = Empty, ...nodes ], cont) =>
isEmpty (node)
? cont (acc)
: ???
return loop (acc, [ node ], identity)
}
const postOrderValues = (node = Empty) =>
postOrderFold ((acc, node) => [ ...acc, Node.value (node) ], [], node)
console.log (postOrderValues (tree))
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
Full solution included below for other readers...
// [value, children] node constructor plus named field accessors.
const Node = (x, ...xs) => [x, xs];
Node.value = ([value]) => value;
Node.children = ([, children]) => children;

// Unique sentinel marking "no node".
const Empty = Symbol();
const isEmpty = (x) => x === Empty;

const identity = (x) => x;

// Tail-recursive (CPS) post-order fold: a node's children are folded
// before the node itself, and the continuation threads the accumulator
// through the remaining siblings.
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) => {
  const loop = (result, [head = Empty, ...rest], cont) => {
    if (isEmpty(head)) {
      return cont(result);
    }
    return loop(result, Node.children(head), (childResult) =>
      loop(f(childResult, head), rest, cont));
  };
  return loop(acc, [node], identity);
};

// Collects the node values in post-order.
const postOrderValues = (node = Empty) =>
  postOrderFold((acc, n) => [...acc, Node.value(n)], [], node);

const tree =
  Node("a",
    Node("b",
      Node("e"),
      Node("f",
        Node("k"))),
    Node("c"),
    Node("d",
      Node("g"),
      Node("h"),
      Node("i"),
      Node("j")));

console.log(postOrderValues(tree));
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
mutual recursion
Somehow it's your questions that allow me to canvas my most inspired works. Back in the headspace of tree traversals, I came up with this sort of pseudo-applicative sum type Now and Later.
Later does not have a proper tail call but I thought the solution was too neat not to share it
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) =>
{
const Now = node =>
(acc, nodes) =>
loop (f (acc, node), nodes)
const Later = node =>
(acc, nodes) =>
loop (acc, [ ...Node.children (node) .map (Later), Now (node), ...nodes ])
const loop = (acc, [ reducer = Empty, ...rest ]) =>
isEmpty (reducer)
? acc
: reducer (acc, rest)
// return loop (acc, [ ...Node.children (node) .map (Later), Now (node) ])
// or more simply ...
return Later (node) (acc, [])
}
Mutual recursion demonstration
// [value, children] node with named accessors.
const Node = (x, ...xs) => [x, xs];
Node.value = ([value]) => value;
Node.children = ([, children]) => children;

// Sentinel for "nothing here".
const Empty = Symbol();
const isEmpty = (x) => x === Empty;

// Post-order fold via two mutually recursive reducer builders:
//   Later(node) — expand: schedule the node's children (each as Later)
//                 ahead of a Now(node) that finally folds the node itself;
//   Now(node)   — fold the node into the accumulator, then continue.
// `loop` pops the next reducer off the work list and applies it.
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) => {
  const Now = (n) => (result, pending) =>
    loop(f(result, n), pending);
  const Later = (n) => (result, pending) =>
    loop(result, [...Node.children(n).map(Later), Now(n), ...pending]);
  const loop = (result, [reducer = Empty, ...rest]) =>
    isEmpty(reducer) ? result : reducer(result, rest);
  return Later(node)(acc, []);
};

// Collects the node values in post-order.
const postOrderValues = (node = Empty) =>
  postOrderFold((acc, n) => [...acc, Node.value(n)], [], node);

const tree =
  Node("a",
    Node("b",
      Node("e"),
      Node("f",
        Node("k"))),
    Node("c"),
    Node("d",
      Node("g"),
      Node("h"),
      Node("i"),
      Node("j")));

console.log(postOrderValues(tree));
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
We start by writing Node.value and Node.children which get the two values from your Node
// -- Node -----------------------------------------------
const Node = (x, ...xs) =>
[ x, xs ]
Node.value = ([ value, _ ]) =>
value
Node.children = ([ _, children ]) =>
children
Next, we create a generic Iterator type. This one imitates the native iterable behavior, only our iterators are persistent (immutable)
// -- Empty ----------------------------------------------
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
// -- Iterator -------------------------------------------
const Yield = (value = Empty, it = Iterator ()) =>
isEmpty (value)
? { done: true }
: { done: false, value, next: it.next }
const Iterator = (next = Yield) =>
({ next })
const Generator = function* (it = Iterator ())
{
while (it = it.next ())
if (it.done)
break
else
yield it.value
}
Lastly, we can implement PostorderIterator
const PostorderIterator = (node = Empty, backtrack = Iterator (), visit = false) =>
Iterator (() =>
visit
? Yield (node, backtrack)
: isEmpty (node)
? backtrack.next ()
: Node.children (node)
.reduceRight ( (it, node) => PostorderIterator (node, it)
, PostorderIterator (node, backtrack, true)
)
.next ())
And we can see it working with your tree here
// -- Demo ---------------------------------------------
const tree =
Node ("a",
Node ("b",
Node ("e"),
Node ("f",
Node ("k"))),
Node ("c"),
Node ("d",
Node ("g"),
Node ("h"),
Node ("i"),
Node ("j")));
const postOrderValues =
Array.from (Generator (PostorderIterator (tree)), Node.value)
console.log (postOrderValues)
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
Program demonstration
// -- Node ----------------------------------------------
// [value, children] encoding with named accessors.
const Node = (x, ...xs) => [x, xs];
Node.value = ([value]) => value;
Node.children = ([, children]) => children;

// -- Empty ---------------------------------------------
// Unique sentinel for "no node / no value".
const Empty = Symbol();
const isEmpty = (x) => x === Empty;

// -- Iterator ------------------------------------------
// Persistent iterator protocol: Yield packages one result (Empty value
// means "done"), Iterator wraps a next-thunk, and Generator adapts the
// whole thing to a native generator.
const Yield = (value = Empty, it = Iterator()) =>
  isEmpty(value) ? { done: true } : { done: false, value, next: it.next };

const Iterator = (next = Yield) => ({ next });

const Generator = function* (it = Iterator()) {
  while ((it = it.next())) {
    if (it.done) break;
    yield it.value;
  }
};

// Post-order: push every child ahead of a deferred re-visit of the node
// itself (visit = true); `backtrack` says where to resume after a subtree.
const PostorderIterator = (node = Empty, backtrack = Iterator(), visit = false) =>
  Iterator(() => {
    if (visit) return Yield(node, backtrack);
    if (isEmpty(node)) return backtrack.next();
    const revisit = PostorderIterator(node, backtrack, true);
    return Node.children(node)
      .reduceRight((it, child) => PostorderIterator(child, it), revisit)
      .next();
  });

// -- Demo --------------------------------------------
const tree =
  Node("a",
    Node("b",
      Node("e"),
      Node("f",
        Node("k"))),
    Node("c"),
    Node("d",
      Node("g"),
      Node("h"),
      Node("i"),
      Node("j")));

const postOrderValues =
  Array.from(Generator(PostorderIterator(tree)), Node.value);

console.log(postOrderValues);
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
The variadic children field makes the algorithm a little more complicated compare to a Node type that only has left and right fields
The simplified implementation of these iterators makes them a bit easier to compare. Writing support for variadic children in the other iterators is left as an exercise to the reader
// -- Node ---------------------------------------------
// Binary-tree node: value plus optional left/right subtrees (Empty when
// absent). Contrast with the variadic [value, children] node used earlier.
const Node = (value, left = Empty, right = Empty) =>
({ value, left, right })
// -- Iterators ----------------------------------------
// Pre-order: yield the node first, then descend left, then right; the
// `backtrack` iterator says where to resume once a subtree is exhausted.
const PreorderIterator = (node = Empty, backtrack = Iterator ()) =>
Iterator (() =>
isEmpty (node)
? backtrack.next ()
: Yield (node,
PreorderIterator (node.left,
PreorderIterator (node.right, backtrack))))
// In-order: left subtree, then the node itself (the `visit` flag marks the
// deferred re-visit), then the right subtree.
const InorderIterator = (node = Empty, backtrack = Iterator (), visit = false) =>
Iterator (() =>
visit
? Yield (node, backtrack)
: isEmpty (node)
? backtrack.next ()
: InorderIterator (node.left,
InorderIterator (node,
InorderIterator (node.right, backtrack), true)) .next ())
// Post-order: both subtrees first, the node itself last.
const PostorderIterator = (node = Empty, backtrack = Iterator (), visit = false) =>
Iterator (() =>
visit
? Yield (node, backtrack)
: isEmpty (node)
? backtrack.next ()
: PostorderIterator (node.left,
PostorderIterator (node.right,
PostorderIterator (node, backtrack, true))) .next ())
And a very special LevelorderIterator, just because I think you can handle it
// Breadth-first: yield the node and enqueue its children; when the current
// node is Empty, pop the next one from the FIFO queue.
const LevelorderIterator = (node = Empty, queue = Queue ()) =>
Iterator (() =>
isEmpty (node)
? queue.isEmpty ()
? Yield ()
: queue.pop ((x, q) =>
LevelorderIterator (x, q) .next ())
: Yield (node,
LevelorderIterator (Empty,
queue.push (node.left) .push (node.right))))
// -- Queue ---------------------------------------------
// Persistent two-list (front/back) queue. `pop` takes a continuation `k`
// that receives the popped element and the remaining queue.
const Queue = (front = Empty, back = Empty) => ({
isEmpty: () =>
isEmpty (front),
push: x =>
front
? Queue (front, Pair (x, back))
: Queue (Pair (x, front), back),
// When the front list runs dry, the reversed back list becomes the front.
pop: k =>
front ? front.right ? k (front.left, Queue (front.right, back))
: k (front.left, Queue (List (back) .reverse () .pair, Empty))
: k (undefined, undefined)
})
// -- List ----------------------------------------------
// Cons-list utilities over Pair cells (left = head, right = tail).
const List = (pair = Empty) => ({
pair:
pair,
reverse: () =>
List (List (pair) .foldl ((acc, x) => Pair (x, acc), Empty)),
foldl: (f, acc) =>
{
while (pair)
(acc = f (acc, pair.left), pair = pair.right)
return acc
}
})
// -- Pair ----------------------------------------------
// Minimal cons cell.
const Pair = (left, right) =>
({ left, right })
Over-engineered? Guilty. You can swap out the interfaces above for nothing but JavaScript primitives. Here we trade the lazy stream for an eager array of values
// Eager, primitives-only variant of the lazy iterator: each call builds a
// thunk; forcing the thunk returns the post-order list of NODES (the
// values are projected out by the caller below).
const postOrderValues = (node = Empty, backtrack = () => [], visit = false) =>
() => visit
? [ node, ...backtrack () ]
: isEmpty (node)
? backtrack ()
: Node.children (node)
.reduceRight ( (bt, node) => postOrderValues (node, bt)
, postOrderValues (node, backtrack, true)
)
()
// Force the thunk, then map each node to its value.
postOrderValues (tree) () .map (Node.value)
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]

Categories