Recursive post order tree traversal without creating new nodes - javascript

I want to define a generalized tail recursive tree traversal that works for all kinds of multi-way trees. This works fine with pre-order and level-order, but I'm having trouble implementing post-order traversal. Here is the multi-way tree I am working with:
Desired order: EKFBCGHIJDA
As long as I don't care about tail recursion post order traversal is easy:
// Post-order traversal: recurse into every child first, then log the node's own value.
const postOrder = ([value, children]) => {
  for (const child of children) {
    postOrder(child);
  }
  console.log(`${value}`);
};
// Node: builds a multi-way tree node as a [value, children] pair.
const Node = (x, ...xs) => ([x, xs]);
// Sample tree; expected post-order output: E K F B C G H I J D A.
const tree = Node("a",
Node("b",
Node("e"),
Node("f",
Node("k"))),
Node("c"),
Node("d",
Node("g"),
Node("h"),
Node("i"),
Node("j")));
postOrder(tree);
The tail recursive approach, on the other hand, is quite cumbersome:
// Tail-recursive post-order traversal, parameterised by the positions of the
// value field (p) and children field (q) so it works with different node layouts.
const postOrder = (p, q) => node => {
// rec walks one node; `stack` holds the work still to be done.
const rec = ({[p]: x, [q]: forest}, stack) => {
if (forest.length > 0) {
// Descend into the first child; queue the remaining children plus a fresh
// childless Node(x) so the parent's value is emitted only after all of its
// children - this is the node re-creation the question wants to avoid.
const [node, ...forest_] = forest;
stack.unshift(...forest_, Node(x));
return rec(node, stack);
}
else {
// Leaf (or re-queued parent): emit the value, then continue with the stack.
console.log(x);
if (stack.length > 0) {
const node = stack.shift();
return rec(node, stack);
}
else return null;
}
};
return rec(node, []);
};
// Node constructor and demo tree (same shape as in the first example).
const Node = (x, ...xs) => ([x, xs]);
const tree = Node("a",
Node("b",
Node("e"),
Node("f",
Node("k"))),
Node("c"),
Node("d",
Node("g"),
Node("h"),
Node("i"),
Node("j")));
// 0 = index of the value field, 1 = index of the children field.
postOrder(0, 1) (tree);
In particular, I'd like to avoid creating new nodes so that I can traverse arbitrary trees without having to know anything about their constructors. Is there a way to do this and still remain tail recursive?

stack-safe
My first answer solves this problem by writing our own functional iterator protocol. Admittedly, I was eager to share this approach as it's something I've explored in the past. Writing your own data structures is really fun and it can yield creative solutions to your problem - and you'd be bored if I gave out the easy answers first, wouldn't you?
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) =>
{
const loop = (acc, [ node = Empty, ...nodes ], cont) =>
isEmpty (node)
? cont (acc)
: ???
return loop (acc, [ node ], identity)
}
const postOrderValues = (node = Empty) =>
postOrderFold ((acc, node) => [ ...acc, Node.value (node) ], [], node)
console.log (postOrderValues (tree))
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
Full solution included below for other readers...
// A tree node is a two-element array: [value, children].
const Node = (value, ...children) => [value, children];

// Accessors so traversal code never touches the array layout directly.
Node.value = (node) => node[0];
Node.children = (node) => node[1];

// Unique sentinel marking "no node" / end of input.
const Empty = Symbol();
const isEmpty = (x) => x === Empty;

// identity: returns its argument unchanged (used as the base continuation).
const identity = (x) => x;
// tail recursive
// postOrderFold: folds f over the tree in post order, using an explicit
// continuation (`cont`) so every recursive call is in tail position (CPS).
// NOTE(review): most JS engines do not implement tail-call elimination, so
// very deep trees may still overflow the call stack - verify for the target runtime.
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) =>
{
// loop handles the first node of the work list, then `nodes` (its pending
// siblings), finally handing the accumulator to `cont`.
const loop = (acc, [ node = Empty, ...nodes ], cont) =>
isEmpty (node)
? cont (acc)
// Fold the children first; only then combine the node itself (post order).
: loop (acc, Node.children (node), nextAcc =>
loop (f (nextAcc, node), nodes, cont))
return loop (acc, [ node ], identity)
}
// Collects the node values in post order into an array.
const postOrderValues = (node = Empty) =>
postOrderFold ((acc, node) => [ ...acc, Node.value (node) ], [], node)
const tree =
Node("a",
Node("b",
Node("e"),
Node("f",
Node("k"))),
Node("c"),
Node("d",
Node("g"),
Node("h"),
Node("i"),
Node("j")))
console.log (postOrderValues (tree))
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
mutual recursion
Somehow it's your questions that allow me to canvass my most inspired work. Back in the headspace of tree traversals, I came up with a sort of pseudo-applicative sum type with two variants, Now and Later.
Later does not have a proper tail call, but I thought the solution was too neat not to share it.
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
// postOrderFold via two mutually recursive "reducers":
// Later(node) expands a node into [its children as Later, Now(node)];
// Now(node) actually folds the node's value into the accumulator.
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) =>
{
const Now = node =>
(acc, nodes) =>
loop (f (acc, node), nodes)
const Later = node =>
(acc, nodes) =>
// Children are scheduled before Now(node), which yields post order.
loop (acc, [ ...Node.children (node) .map (Later), Now (node), ...nodes ])
// loop pops the next reducer off the work list and applies it.
const loop = (acc, [ reducer = Empty, ...rest ]) =>
isEmpty (reducer)
? acc
: reducer (acc, rest)
// return loop (acc, [ ...Node.children (node) .map (Later), Now (node) ])
// or more simply ...
return Later (node) (acc, [])
}
Mutual recursion demonstration
const Node = (x, ...xs) =>
[ x, xs ]
Node.value = ([ value, _ ]) =>
value
Node.children = ([ _, children ]) =>
children
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
const postOrderFold = (f = (a, b) => a, acc = null, node = Empty) =>
{
const Now = node =>
(acc, nodes) =>
loop (f (acc, node), nodes)
const Later = node =>
(acc, nodes) =>
loop (acc, [ ...Node.children (node) .map (Later), Now (node), ...nodes ])
const loop = (acc, [ reducer = Empty, ...rest ]) =>
isEmpty (reducer)
? acc
: reducer (acc, rest)
// return loop (acc, [ ...Node.children (node) .map (Later), Now (node) ])
// or more simply ...
return Later (node) (acc, [])
}
const postOrderValues = (node = Empty) =>
postOrderFold ((acc, node) => [ ...acc, Node.value (node) ], [], node)
const tree =
Node("a",
Node("b",
Node("e"),
Node("f",
Node("k"))),
Node("c"),
Node("d",
Node("g"),
Node("h"),
Node("i"),
Node("j")))
console.log (postOrderValues (tree))
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]

We start by writing Node.value and Node.children which get the two values from your Node
// -- Node -----------------------------------------------
const Node = (x, ...xs) =>
[ x, xs ]
Node.value = ([ value, _ ]) =>
value
Node.children = ([ _, children ]) =>
children
Next, we create a generic Iterator type. This one imitates the native iterable behavior, only our iterators are persistent (immutable)
// -- Empty ----------------------------------------------
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
// -- Iterator -------------------------------------------
// Yield: one step of a persistent iterator - either { done: true } or a
// value plus the thunk that produces the following step.
const Yield = (value = Empty, it = Iterator ()) =>
isEmpty (value)
? { done: true }
: { done: false, value, next: it.next }
// Iterator wraps a `next` thunk returning a Yield step; the default
// iterator is immediately exhausted.
const Iterator = (next = Yield) =>
({ next })
// Bridges the persistent iterator to the native iterable protocol.
const Generator = function* (it = Iterator ())
{
while (it = it.next ())
if (it.done)
break
else
yield it.value
}
Lastly, we can implement PostorderIterator
// PostorderIterator: lazily yields nodes in post order. `backtrack` is the
// iterator to resume once this subtree is exhausted; `visit` marks the
// second encounter with a node (after its children), when it is yielded.
const PostorderIterator = (node = Empty, backtrack = Iterator (), visit = false) =>
Iterator (() =>
visit
? Yield (node, backtrack)
: isEmpty (node)
? backtrack.next ()
// Chain the children right-to-left so the leftmost child runs first,
// ending with a visit=true iterator for the node itself.
: Node.children (node)
.reduceRight ( (it, node) => PostorderIterator (node, it)
, PostorderIterator (node, backtrack, true)
)
.next ())
And we can see it working with your tree here
// -- Demo ---------------------------------------------
const tree =
Node ("a",
Node ("b",
Node ("e"),
Node ("f",
Node ("k"))),
Node ("c"),
Node ("d",
Node ("g"),
Node ("h"),
Node ("i"),
Node ("j")));
const postOrderValues =
Array.from (Generator (PostorderIterator (tree)), Node.value)
console.log (postOrderValues)
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
Program demonstration
// -- Node ----------------------------------------------
const Node = (x, ...xs) =>
[ x, xs ]
Node.value = ([ value, _ ]) =>
value
Node.children = ([ _, children ]) =>
children
// -- Empty ---------------------------------------------
const Empty =
Symbol ()
const isEmpty = x =>
x === Empty
// -- Iterator ------------------------------------------
const Yield = (value = Empty, it = Iterator ()) =>
isEmpty (value)
? { done: true }
: { done: false, value, next: it.next }
const Iterator = (next = Yield) =>
({ next })
const Generator = function* (it = Iterator ())
{
while (it = it.next ())
if (it.done)
break
else
yield it.value
}
const PostorderIterator = (node = Empty, backtrack = Iterator (), visit = false) =>
Iterator (() =>
visit
? Yield (node, backtrack)
: isEmpty (node)
? backtrack.next ()
: Node.children (node)
.reduceRight ( (it, node) => PostorderIterator (node, it)
, PostorderIterator (node, backtrack, true)
)
.next ())
// -- Demo --------------------------------------------
const tree =
Node ("a",
Node ("b",
Node ("e"),
Node ("f",
Node ("k"))),
Node ("c"),
Node ("d",
Node ("g"),
Node ("h"),
Node ("i"),
Node ("j")));
const postOrderValues =
Array.from (Generator (PostorderIterator (tree)), Node.value)
console.log (postOrderValues)
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]
The variadic children field makes the algorithm a little more complicated compared to a Node type that only has left and right fields.
The simplified implementation of these iterators makes them a bit easier to compare. Writing support for variadic children in the other iterators is left as an exercise to the reader
// -- Node ---------------------------------------------
// Binary-tree Node: missing children default to the Empty sentinel.
const Node = (value, left = Empty, right = Empty) =>
({ value, left, right })
// -- Iterators ----------------------------------------
// Pre-order: yield the node first, then the left subtree, then the right.
const PreorderIterator = (node = Empty, backtrack = Iterator ()) =>
Iterator (() =>
isEmpty (node)
? backtrack.next ()
: Yield (node,
PreorderIterator (node.left,
PreorderIterator (node.right, backtrack))))
// In-order: left subtree, then the node itself (the visit = true pass), then right.
const InorderIterator = (node = Empty, backtrack = Iterator (), visit = false) =>
Iterator (() =>
visit
? Yield (node, backtrack)
: isEmpty (node)
? backtrack.next ()
: InorderIterator (node.left,
InorderIterator (node,
InorderIterator (node.right, backtrack), true)) .next ())
// Post-order: left subtree, right subtree, then the node itself.
const PostorderIterator = (node = Empty, backtrack = Iterator (), visit = false) =>
Iterator (() =>
visit
? Yield (node, backtrack)
: isEmpty (node)
? backtrack.next ()
: PostorderIterator (node.left,
PostorderIterator (node.right,
PostorderIterator (node, backtrack, true))) .next ())
And a very special LevelorderIterator, just because I think you can handle it
// Level-order (BFS): yield the node, enqueue its children, then pull the
// next node from the queue.
// NOTE(review): correctness depends on the Queue treating the Empty sentinel
// as "empty"; Empty is a truthy Symbol, so plain truthiness checks inside
// Queue/List would break this iterator - verify.
const LevelorderIterator = (node = Empty, queue = Queue ()) =>
Iterator (() =>
isEmpty (node)
? queue.isEmpty ()
? Yield ()
: queue.pop ((x, q) =>
LevelorderIterator (x, q) .next ())
: Yield (node,
LevelorderIterator (Empty,
queue.push (node.left) .push (node.right))))
// -- Queue ---------------------------------------------
// Persistent FIFO queue built from two stacks (front/back). Lists are chains
// of Pair cells terminated by the Empty sentinel.
// FIX: the original tested emptiness with plain truthiness (`front ?`,
// `while (pair)`), but Empty is a Symbol and Symbols are truthy, so push
// never filled `front`, isEmpty stayed true, and foldl misread the
// terminator. All emptiness checks now go through isEmpty.
const Queue = (front = Empty, back = Empty) => ({
  isEmpty: () =>
    isEmpty (front),
  push: x =>
    isEmpty (front)
      ? Queue (Pair (x, front), back)   // first element goes straight to front
      : Queue (front, Pair (x, back)),  // later elements pile up on back
  pop: k =>
    isEmpty (front)
      ? k (undefined, undefined)
      : isEmpty (front.right)
        // front is exhausted after this pop: refill it from the reversed back
        ? k (front.left, Queue (List (back) .reverse () .pair, Empty))
        : k (front.left, Queue (front.right, back))
})
// -- List ----------------------------------------------
// Minimal cons-list helpers over Pair chains terminated by Empty.
const List = (pair = Empty) => ({
  pair:
    pair,
  reverse: () =>
    List (List (pair) .foldl ((acc, x) => Pair (x, acc), Empty)),
  // Left fold; consumes the captured `pair` chain cell by cell.
  foldl: (f, acc) =>
  {
    while (!isEmpty (pair)) {
      acc = f (acc, pair.left)
      pair = pair.right
    }
    return acc
  }
})
// -- Pair ----------------------------------------------
const Pair = (left, right) =>
  ({ left, right })
Over-engineered? Guilty. You can swap out the interfaces above for nothing but JavaScript primitives. Here we trade the lazy stream for an eager array of values
// Eager variant: instead of a lazy iterator, build the post-order array of
// nodes directly. `backtrack` is a thunk producing the rest of the result;
// `visit` marks the second (emitting) encounter with a node.
const postOrderValues = (node = Empty, backtrack = () => [], visit = false) =>
() => visit
? [ node, ...backtrack () ]
: isEmpty (node)
? backtrack ()
// Chain children right-to-left, ending with a visit=true thunk for the node.
: Node.children (node)
.reduceRight ( (bt, node) => postOrderValues (node, bt)
, postOrderValues (node, backtrack, true)
)
()
postOrderValues (tree) () .map (Node.value)
// [ 'e', 'k', 'f', 'b', 'c', 'g', 'h', 'i', 'j', 'd', 'a' ]

Related

Javascript groupBy implementation only produces 1 group

I am trying to implement my own groupBy method and, everything I see says this should work, but I only get 1 group when I use it with an array, even though the grouping is fine. What am I missing:
// merge: combines an array of { key: [values] } objects into one object.
// BUG (the subject of this question): Object.keys(a) only looks at the
// accumulator's keys, so keys first appearing in a later object are dropped.
const merge = (array) => array.reduce((a, b) => Object.keys(a).map(key => {
return {
[key]: a[key].concat(b[key] || [])
};
}).reduce(((a,b) => Object.assign({},a,b))))
// NOTE(review): extending Array.prototype is generally discouraged; kept
// as-is because this is the code under discussion.
Array.prototype.groupBy = function (grouper) {
const groups = this.map(e => {
return {
[grouper(e)]: [e]
};
})
console.log("Groups:\n",JSON.stringify(groups))
return merge(groups)
}
const one = {
1: [1,2,3],
0: [4,5,6]
}
const two = {
1: [7,8,9],
0: [10,11,12]
}
const three = {
1: [13],
0: [16]
}
const array1 = merge([one,two,three])
console.log("case1:\n",JSON.stringify(array1,null,4))
const array2 = [1,2,3,4,5,6,7,9,10].groupBy(e => e % 2)
console.log("case2:\n",JSON.stringify(array2,null,4))
Outputs below, expected is 'case1':
case1:
{
"0": [
4,
5,
6,
10,
11,
12,
16
],
"1": [
1,
2,
3,
7,
8,
9,
13
]
}
Groups:
[{"1":[1]},{"0":[2]},{"1":[3]},{"0":[4]},{"1":[5]},{"0":[6]},{"1":[7]},{"1":[9]},{"0":[10]}]
case2:
{
"1": [
1,
3,
5,
7,
9
]
}
The first reduce in your merge method has a dependency on the keys of the first object in the array.
objs.reduce((a, b) => Object
.keys(a)
// ^-- Takes only the keys from `a`
.map(key => ({ [key]: a[key].concat(b[key] || []) })
// ^^^^^^-- only merges in those keys from `b`
)
To see the issue in action, take away the 0 or 1 key from your one object.
To fix it without deviating from your current approach too much, you could make sure you take both keys from a and b:
objs.reduce((a, b) => Object
.keys(Object.assign({}, a, b))
// etc...
)
It still feels a bit wasteful to first map to key-value-pair type objects and then merge those.
Final solution (removes another bug):
// Groups array elements by the key returned from `grouper`, e.g.
// [1,2,3].groupBy(e => e % 2) -> { "0": [2], "1": [1, 3] }.
// NOTE(review): extending Array.prototype is discouraged in general; the
// interface is kept exactly as posted.
Array.prototype.groupBy = function (grouper) {
// Union of the keys of all given objects.
// FIX: spread objs into Object.assign - the original passed the array
// itself, which copied its INDICES ("0", "1", ...) instead of the objects'
// keys, and only appeared to work because e % 2 produces keys 0 and 1.
const keysOf = (...objs) => Object.keys(Object.assign({}, ...objs))
const groups = this.map(e => {
return {
[grouper(e)]: [e]
};
})
// Guard: reduce without a seed throws on an empty array.
if (groups.length === 0) return {}
const merge = (array) => array.reduce((a, b) =>
keysOf(a, b).map(key => {
return {
[key]: (a[key] || []).concat(b[key] || [])
};
}).reduce((a, b) => Object.assign({}, a, b)))
return merge(groups)
}
const array2 = [1,2,3,4,5,6,7,9,10].groupBy(e => e % 2)
console.log("case2:\n",JSON.stringify(array2,null,2))
#user3297291 points out the issue. I would recommend a different merge altogether. First we write merge2 helper which destructively merges b into a -
// Destructively folds b's entries into a: arrays stored under the same key
// are concatenated, new keys are copied across. Returns the mutated `a`.
function merge2 (a, b) {
  for (const key of Object.keys(b)) {
    const values = b[key];
    a[key] = a[key] ? [...a[key], ...values] : values;
  }
  return a;
}
Now you can write merge to accept any number of objects. Since it initialises the reduce with a fresh {}, no input objects will be mutated -
const merge = (...all) =>
all.reduce(merge2, {})
Now groupBy works the way you write it, simply applying the mapped elements to merge -
const groupBy = (arr, f) =>
merge(...arr.map(v => ({ [f(v)]: [v] })))
const result =
groupBy([1,2,3,4,5,6,7,9,10], e => e % 2)
Expand the snippet below to verify the result in your own browser -
// Destructively merge b into a, concatenating arrays stored under the same key.
function merge2 (a, b)
{ for (const [k, v] of Object.entries(b))
if (a[k])
a[k] = [ ...a[k], ...v]
else
a[k] = v
return a
}
// Fold any number of objects into a fresh {}, so no input object is mutated.
const merge = (...all) =>
all.reduce(merge2, {})
// Group by f: each element becomes a singleton { key: [v] }, then all are merged.
const groupBy = (arr, f) =>
merge(...arr.map(v => ({ [f(v)]: [v] })))
const result =
groupBy([1,2,3,4,5,6,7,9,10], e => e % 2)
console.log(JSON.stringify(result))
{"0":[2,4,6,10],"1":[1,3,5,7,9]}
If you want to make merge2 using a pure functional expression, you can write it as -
// Pure-expression form of merge2: fold b's entries onto a with reduce.
// Still mutates `a` (Object.assign targets it), exactly like the loop form.
const merge2 = (a, b) =>
  Object.entries(b).reduce(
    (r, [k, v]) =>
      Object.assign(r, { [k]: r[k] ? [...r[k], ...v] : v }),
    a
  )
You could skip the whole merge song and dance and write groupBy in a more direct way -
// call: applies f to v (lets an expression bind f(v) to a name).
const call = (f, v) => f(v)
// Single-pass groupBy: fold the array, copying the accumulator and appending
// each value to the bucket keyed by f(value).
const groupBy = (arr, f) =>
  arr.reduce((acc, value) => {
    const key = f(value)
    const bucket = acc[key] ? [...acc[key], value] : [value]
    return { ...acc, [key]: bucket }
  }, {})
const result =
  groupBy([1, 2, 3, 4, 5, 6, 7, 9, 10], e => e % 2)
console.log(JSON.stringify(result))
{"0":[2,4,6,10],"1":[1,3,5,7,9]}
Another option is to use Map as it was designed, and convert to an Object after -
const call = (f, v) =>
  f(v)
// Group with a Map (safe for arbitrary keys), then convert to a plain object.
// FIX: the original finished with Array.from(m.entries(), ([k, v]) => ({ [k]: v })),
// which produces an ARRAY of one-key objects rather than the single merged
// object every other variant in this answer returns; Object.fromEntries
// performs the intended conversion.
const groupBy = (arr, f) =>
  call
    ( m =>
        Object.fromEntries (m.entries ())
    , arr.reduce
        ( (r, v) =>
            call
              ( k =>
                  r.set
                    ( k
                    , r.has(k)
                      ? r.get(k).concat([ v ])
                      : [ v ]
                    )
              , f(v)
              )
        , new Map
        )
    )
const result =
  groupBy([1,2,3,4,5,6,7,9,10], e => e % 2)
console.log(JSON.stringify(result))

convert object of array to object javascript

I have an object includes arrays of arrays like below
objArray= { hh:[['a','b'],['c','d']],jj:[['x','y'],['z','w']]}
and what I want is here to change array to object:
convertedObject
{
hh:[
{id:'a', name:'b'},
{id:'c', name:'d'}
],
jj:[
{id:'x', name:'y'},
{id:'z', name:'w'}
],
}
I wrote 2 functions to do this, but they need a little change:
// Convert one array of [id, name] pairs into an array of { id, name } objects.
function convertToObjects(arr) {
  return Object.values(arr).map(e => {
    return { id: e[0], name: e[1] };
  });
}
// Convert every property of the input object.
// FIX: keep the original keys (hh, jj, ...) as the question's desired output
// shows - the original .map over keys returned an array indexed 0, 1, ...
function convertChoicesToObjects(choicesArray) {
  return Object.fromEntries(
    Object.keys(choicesArray).map((key) => [key, convertToObjects(choicesArray[key])])
  );
}
const convertedObject=convertChoicesToObjects(objArray)
My function output is:
{
0:[
{id:'a', name:'b'},
{id:'c', name:'d'}
],
1:[
{id:'x', name:'y'},
{id:'z', name:'w'}
],
}
iterate over keys and use map
// Walk every key and rebuild its array of pairs as { id, name } objects.
const objArray = {"hh": [["a", "b"], ["c", "d"]], "jj": [["x", "y"], ["z", "w"]]};
const output = {};
for (const key of Object.keys(objArray)) {
  output[key] = objArray[key].map(([id, name]) => ({ id, name }));
}
console.log(output);
You could use map and forEach methods.
objArray= { a:[['a','b'],['c','d']],b:[['x','y'],['z','w']]}
Object.keys(objArray).forEach((key) => {
objArray[key] = objArray[key].map(([id, name]) => ({id, name}));
});
console.log(objArray);
The output can be achieved using a simple for...in loop and the .map() method of arrays:
const input = {
  a: [['a', 'b'], ['c', 'd']],
  b: [['x', 'y'], ['z', 'w']]
};
// Rebuild each property's array of [id, name] pairs as { id, name } objects.
const transform = (input) => {
  const output = {};
  // FIX: declare the loop variable - the original `for (key in input)`
  // leaked `key` as an implicit global and throws under strict mode / ES modules.
  for (const key in input) {
    output[key] = input[key].map(([id, name]) => ({ id, name }));
  }
  return output;
};
console.log(transform(input));
You can use reduce()
// Same transformation, folded into an accumulator object key by key.
const objArray = { a: [['a', 'b'], ['c', 'd']], b: [['x', 'y'], ['z', 'w']] };
const data = Object.keys(objArray).reduce((acc, key) => {
  const rows = [];
  for (const [id, name] of objArray[key]) {
    rows.push({ id, name });
  }
  acc[key] = rows;
  return acc;
}, {});
console.log(data);
You could build new object with Object.fromEntries.
This approach uses another array for the wanted keys of the objects.
// Build the result with Object.fromEntries; `keys` names the object fields
// assigned to each tuple position.
// Idiom fix: one `const` per binding instead of a comma-chained `var` list.
const data = { a: [['a', 'b'], ['c', 'd']], b: [['x', 'y'], ['z', 'w']] };
const keys = ['id', 'name'];
const result = Object.fromEntries(
  Object
    .entries(data)
    .map(([k, v]) => [
      k,
      // Pair each tuple element with its field name, then build the object.
      v.map(a => Object.fromEntries(keys.map((k, i) => [k, a[i]])))
    ])
);
console.log(result);
.as-console-wrapper { max-height: 100% !important; top: 0; }
You can write programs like little stories using prgm -
const myConvert = (o = {}) =>
prgm // given input object, o
( o // starting with o
, Object.entries // get its entries
, map (convert1) // then map over them using convert1
, Object.fromEntries // then make a new object
)
const convert1 = ([ key, values ]) =>
prgm // given input key and values
( values // starting with values
, map (([ id, name ]) => ({ id, name })) // map over arrays creating objs
, r => [ key, r ] // then create a key/result pair
)
const input =
{ a: [ ['a','b']
, ['c','d']
]
, b: [ ['x','y']
, ['z','w']
]
}
console.log
( myConvert (input)
)
// => { ... }
To make this possible, we need -
// prgm: left-to-right pipeline - feed x through each function in turn.
const prgm = (x, ...fs) => {
  let result = x;
  for (const f of fs) {
    result = f(result);
  }
  return result;
};
// map: curried Array#map, applying f to one element at a time.
const map = (f) => (xs) => xs.map((x) => f(x));
But perhaps a better name for myConvert is objectMap. To make it generic, we will make the conversion function convert1 a parameter. And since there's no need to modify the keys of the input object, we will only call the conversion function on the object's values -
const identity = x =>
x
// objectMap: lift a function over an object's VALUES, keeping keys intact.
// With the default identity mapper it is a shallow rebuild of the object.
const objectMap = (f = identity) => (o = {}) =>
prgm // given mapper, f, and object, o
( o // starting with o
, Object.entries // get its entries
, map (([ k, v ]) => [ k, f (v) ]) // transform each v using f
, Object.fromEntries // then make a new object
)
Now using generic function objectMap, we can write myConvert as a specialization. This isolates the unique essence of your transformation and detangles it from the rest of your program -
const myConvert =
objectMap // using generic objectMap
( map (([ id, name ]) => ({ id, name })) // convert arrays to objects
)
const input =
{ a: [ ['a','b']
, ['c','d']
]
, b: [ ['x','y']
, ['z','w']
]
}
console.log
( myConvert (input)
)
// => { ... }
Hopefully this shows the power of thinking about your programs from different perspectives. Run the snippet below to confirm the results in your browser -
const prgm = (x, ...fs) =>
fs .reduce ((r, f) => f (r), x)
const map = f => xs =>
xs .map (x => f (x))
const identity = x =>
x
const objectMap = (f = identity) => (o = {}) =>
prgm
( o
, Object.entries
, map (([ k, v ]) => [ k, f (v) ])
, Object.fromEntries
)
// ---
const myConvert =
objectMap
( map (([ id, name ]) => ({ id, name }))
)
const input =
{ a: [ ['a','b']
, ['c','d']
]
, b: [ ['x','y']
, ['z','w']
]
}
console.log
( myConvert (input)
)

Ramda: How can I make this imperative reducer more declarative?

I have the following reducer function:
The first argument to the reducers is the aggregated value, and the second argument is the next value. The below reducer function is reducing over the same reaction argument but aggregating the state$ value. Each reducer function yields a new aggregated value.
/**
* Applies all the reducers to create a state object.
*/
/**
 * Applies all the reducers to create a state object.
 */
// Each reducer receives the state built so far plus the same `reaction`
// input and returns the next state; the calls are threaded sequentially,
// starting from an empty object.
function reactionReducer(reaction: ReactionObject): ReactionObject {
let state$ = reactionDescriptionReducer({}, reaction);
state$ = reactionDisabledReducer(state$, reaction);
state$ = reactionIconReducer(state$, reaction);
state$ = reactionOrderReducer(state$, reaction);
state$ = reactionStyleReducer(state$, reaction);
state$ = reactionTitleReducer(state$, reaction);
state$ = reactionTooltipReducer(state$, reaction);
state$ = reactionVisibleReducer(state$, reaction);
return state$;
}
const state = reactionReducer(value);
The above works but the function is fixed with the list of reducers. It seems like I should be able to do something like this with RamdaJS.
const state = R.????({}, value, [reactionDescriptionReducer
reactionDisabledReducer,
reactionIconReducer,
reactionOrderReducer,
reactionStyleReducer,
reactionTitleReducer,
reactionTooltipReducer,
reactionVisibleReducer]);
I am new to RamdaJS so forgive me if this is a noob question.
How can I execute a chain of reducers using just RamdaJS?
and constructs a new reducer, (r, x) => ..., by combining the two (2) input reducers, f and g -
// Compose two reducers: run f first, then feed its result (with the same x) to g.
const and = (f, g) => (state, x) => {
  const afterF = f(state, x);
  return g(afterF, x);
};
all, by use of and, constructs a new reducer by combining an arbitrary number of reducers -
// identity reducer: returns the state unchanged.
const identity = (x) => x;
// all: chain any number of reducers into a single reducer, pairwise via `and`.
const all = (f = identity, ...more) => {
  let combined = f;
  for (const g of more) {
    combined = and(combined, g);
  }
  return combined;
};
Define myReducer using all -
const myReducer =
all
( reactionDisabledReducer
, reactionIconReducer
, reactionOrderReducer
// ...
)
Given a mocked implementation for these three (3) reducers -
const reactionDisabledReducer = (s, x) =>
x < 0
? { ...s, disabled: true }
: s
const reactionIconReducer = (s, x) =>
({ ...s, icon: `${x}.png` })
const reactionOrderReducer = (s, x) =>
x > 10
? { ...s, error: "over 10" }
: s
Run myReducer to see the outputs
const initState =
{ foo: "bar" }
myReducer (initState, 10)
// { foo: 'bar', icon: '10.png' }
myReducer (initState, -1)
// { foo: 'bar', disabled: true, icon: '-1.png' }
myReducer (initState, 100)
// { foo: 'bar', icon: '100.png', error: 'over 10' }
Expand the snippet below to verify the results in your browser -
const identity = x =>
x
const and = (f, g) =>
(r, x) => g (f (r, x), x)
const all = (f, ...more) =>
more .reduce (and, f)
const reactionDisabledReducer = (s, x) =>
x < 0
? { ...s, disabled: true }
: s
const reactionIconReducer = (s, x) =>
({ ...s, icon: `${x}.png` })
const reactionOrderReducer = (s, x) =>
x > 10
? { ...s, error: "over 10" }
: s
const myReducer =
all
( reactionDisabledReducer
, reactionIconReducer
, reactionOrderReducer
// ...
)
const initState =
{ foo: "bar" }
console .log (myReducer (initState, 10))
// { foo: 'bar', icon: '10.png' }
console .log (myReducer (initState, -1))
// { foo: 'bar', disabled: true, icon: '-1.png' }
console .log (myReducer (initState, 100))
// { foo: 'bar', icon: '100.png', error: 'over 10' }
You can choose whatever names you like for and and all. I could see them as part of a reducer module, like reducer.and and reducer.all
One option of utilising Ramda here would be to make use of the fact that it supports passing functions as a monad instance to R.chain (otherwise known as the Reader monad).
This lets you sequence a number of functions together that share some common environment - in your case, reaction.
We can make use of R.pipeWith(R.chain) to allow composing a series of these functions that take some input (e.g. your $state threading through each function) and returns a function that takes the environment, producing a result to pass on to the next function in the pipeline.
// Some mock functions to demonstrate
// Each mock reducer copies the incoming state ({...state} destructures a
// shallow copy) and adds one field derived from the shared `reaction`.
const reactionDescriptionReducer = ({...state}, reaction) =>
({ description: reaction, ...state })
const reactionDisabledReducer = ({...state}, reaction) =>
({ disabled: reaction, ...state })
const reactionIconReducer = ({...state}, reaction) =>
({ icon: reaction, ...state })
// effectively `R.pipeK`
// Kleisli composition over the function (Reader) monad - requires Ramda's R.
const kleisli = R.pipeWith(R.chain)
// we need the functions going into chain to be curried
const curried = f => a => b => f(a, b)
// finally, compose the series of functions together
// Applying ({}) supplies the initial state; the result is a function of the
// shared `reaction` environment.
const reactReducer = kleisli([
curried(reactionDescriptionReducer),
curried(reactionDisabledReducer),
curried(reactionIconReducer)
])({})
// and if all goes well...
console.log(
reactReducer("someCommonReactionValue")
)
<script src="//cdnjs.cloudflare.com/ajax/libs/ramda/0.26.1/ramda.min.js"></script>
My first attempt would not involve Ramda at all, just a simple:
// makeReducer: combine reducers into one function that threads a fresh {}
// through each reducer, passing the same input x every time.
const makeReducer = (...fns) => (x) => {
  let state = {};
  for (const reducer of fns) {
    state = reducer(state, x);
  }
  return state;
};
const fn = makeReducer(
  (state$, reaction) => ({ ...state$, foo: `<<-${reaction.foo}->>` }),
  (state$, reaction) => ({ ...state$, bar: `=*=${reaction.bar}=*=` }),
  (state$, reaction) => ({ ...state$, baz: `-=-${reaction.baz}-=-` })
);
console.log(
  fn({ foo: 'a', bar: 'b', baz: 'c' })
); //~> {foo: '<<-a->>', bar: '=*=b=*=', baz: '-=-c-=-'}
While you could choose to use Ramda's reduce and flip, it doesn't seem as though they'll add much here.

Javascript filter but return keys rather than values

This function works fine, but I want it to return the keys rather than the values:
const search = [2342,1900,1800,2310]
// NOTE(review): reassigning a `const` binding throws a TypeError at runtime -
// this line only works if the declaration above is changed to `let`.
search = search.filter(function (el) { return el > 2200; });
So it should return [0,3] rather than [2342,2310]
You can get the keys, then use them to access the array, and filter based on that:
// Array indices come back from Object.keys as strings, so filter on the
// looked-up value and convert the surviving keys to numbers.
const search = [2342, 1900, 1800, 2310];
const qualifyingKeys = Object.keys(search).filter((key) => search[key] > 2200);
const result = qualifyingKeys.map((key) => Number(key));
console.dir(result);
Or, for something more interesting:
const search = [2342,1900,1800,2310];
// pipe: left-to-right function composition.
const pipe = (...fns) => x => fns.reduce((v, f) => f(v), x);
// Curried wrappers around the array methods.
const filter = f => a => a.filter(f);
const map = f => a => a.map(f);
// toPairs: object -> [key, value] pairs (here: array index/value pairs).
const toPairs = o => Object.keys(o)
.map(k => [k, o[k]]);
const take = i => a => a[i];
const gte = a => b => b >= a;
const toNumber = x => Number(x);
// NOTE(review): gte(2200) means >= 2200, while the original filter used
// el > 2200; the results agree here only because no element equals 2200.
const bigEnough = gte(2200);
// A pair qualifies when its value (index 1) is big enough.
const itemOneIsBigEnough = pipe(
take(1),
bigEnough
);
// Extract the key (index 0) of a pair as a number.
const getTheFirstItemAsANumber = pipe(take(0), toNumber);
const doTheThing = pipe(
toPairs,
filter(
itemOneIsBigEnough
),
map(getTheFirstItemAsANumber),
);
const result = doTheThing(search);
console.dir(result);
The easiest way is to just Array#reduce the items:
// Collect, in one pass, the indices whose element exceeds the threshold.
const search = [2342, 1900, 1800, 2310];
const result = [];
search.forEach(function (el, index) {
  if (el > 2200) {
    result.push(index);
  }
});
console.log(result);
You could get the keys and filter by value check.
// search.keys() yields the numeric indices; keep those whose value qualifies.
const search = [2342, 1900, 1800, 2310];
const indices = Array.from(search.keys()).filter((i) => search[i] > 2200);
console.log(indices);
// Map qualifying elements to their index (others become undefined), then
// drop the undefined slots.
let search = [2342, 1900, 1800, 2310];
search = search
  .map((el, i) => (el > 2200 ? i : undefined))
  .filter((el) => el !== undefined);
console.log(search);

converting lodash's pickby in to javascript

How can I implement lodash's pickBy function in plain JavaScript? I found the following one in "You Don't Need Lodash":
// pickBy: copy every property whose value is not null, false, or undefined.
// Note that 0, '' and NaN are deliberately kept (unlike a plain truthy test).
function pickBy(object) {
  const result = {};
  for (const key in object) {
    const value = object[key];
    if (value !== null && value !== false && value !== undefined) {
      result[key] = value;
    }
  }
  return result;
}
but wondering other implementations
You could add a predicate function, as _.pickBy describes, then take the entries, filter them by the predicate, and build a new object from the result.
// pickBy: keep the entries whose value satisfies `predicate`
// (default: truthy values).
// FIX: seed Object.assign with {} - the original spread the filtered entries
// directly, so pickBy({}) called Object.assign() with no arguments and threw.
function pickBy(object, predicate = v => v) {
  return Object.assign(
    {},
    ...Object
      .entries(object)
      .filter(([, v]) => predicate(v))
      .map(([k, v]) => ({ [k]: v }))
  );
}
To create _.pickBy(), you can use for...of with Object.entries(). If the predicate returns a truthy answer for the value, assign the key and value to the result object.
Note: if you need _.pickBy(), and you don't want to entire lodash package, you can import the pickBy module.
// pickBy: keep the entries whose value satisfies `predicate`
// (default: truthy values).
function pickBy(object, predicate = v => v) {
  const picked = {};
  Object.entries(object).forEach(([key, value]) => {
    if (predicate(value)) {
      picked[key] = value;
    }
  });
  return picked;
}
console.log(pickBy({ a: 1, b: 0, c: 3 }));
console.log(pickBy({ a: 1, b: 0, c: 3 }, v => v < 3 ));
Nowadays you can get the entries, filter by the value, and convert back to an object with Object.fromEntries():
// One-expression pickBy: filter the entries, then rebuild the object.
const pickBy = (object, predicate = v => v) => {
  const kept = Object.entries(object).filter(([, value]) => predicate(value));
  return Object.fromEntries(kept);
};
console.log(pickBy({ a: 1, b: 0, c: 3 }));
console.log(pickBy({ a: 1, b: 0, c: 3 }, v => v < 3 ));
Being
var object = { 'a': 1, 'b': '2', 'c': 3 };
// NOTE(review): this snippet is fragmentary - it reads `obj` although the
// variable above is named `object`, and `callback` is assumed to be defined
// elsewhere (e.g. _.isNumber). It keeps only the entries whose value passes
// `callback`, accumulating into a fresh object via reduce.
Object.keys(obj).reduce((prev, x) => callback(obj[x]) ? { ...prev, [x] : obj[x] } : prev , {});
Nina's solution fails when the object is {}.
Here is mine:
// pickBy that also handles {}: filter the entries, then fold them into a
// fresh object (the {} seed makes the empty case safe).
const pickBy = (object, predicate = v => v) =>
  Object.entries(object)
    .filter(([, value]) => predicate(value))
    .reduce((acc, [key, value]) => ({ ...acc, [key]: value }), {});

Categories