I have a JavaScript object e.g.:
const testFixture = {
a: [
{b:1},
{b:2},
{b:3},
],
b: {c: {d: 44, e: "foo", f: [1,2,3]}},
c: 3,
d: false,
f: "Blah",
}
I'd like to have a function I could pass this object to that would mutate it to remove random properties from it, so that I can test whether the thing that uses this object displays an error state, rather than silently erroring.
Edit:
To be clear, I mean any deeply nested property. e.g. it might remove a.b.c.d.e.f[1] or a[2].b
Edit 2:
Here's a buggy solution I'm working on based on ideas from Eureka and mkaatman's answers.
It seems to be changing key names to "undefined" and changing numbers to {}, neither of which I was expecting. Not sure why.
var testFixture2 = {
a: [{
b: 1, c: 2
},
{
b: 2, c: 2
},
{
b: 3, c: 2, d: "bar"
},
],
b: {
c: {
d: 44,
e: "foo",
f: [1, 2, 3]
}
},
c: 3,
d: false,
f: "Blah"
};
function getRandomIndex(max) {
return Math.floor(Math.random() * max);
}
function chaosMonkey(thing) {
if (typeof thing === "object") {
console.log("object", Object.keys(thing).length, thing);
const newlyDeformedObject = { ...thing};
// Make a list of all the keys
const keys = Object.keys(thing);
// Choose one at random
const iKey = getRandomIndex(keys.length);
let target = newlyDeformedObject[keys[iKey]];
const shouldDelete = getRandomIndex(3) === 0;
if (shouldDelete) {
delete target;
console.log("Object deleted", keys[iKey]);
} else {
console.log("+++ Going deeper", thing);
newlyDeformedObject[keys[iKey]] = chaosMonkey({ ...newlyDeformedObject[keys[iKey]] });
}
return newlyDeformedObject;
} else if (typeof thing === "array") {
console.log(array);
const iKey = getRandomIndex(thing.length);
const shouldDelete = getRandomIndex(3) === 0;
if (shouldDelete) {
delete array[iKey];
console.log("Array deleted", iKey);
} else {
array[iKey] = chaosMonkey(array[iKey]);
return array;
}
} else {
//#todo do something bad based on type e.g. number -> NaN, string -> '', but these are less likely to break something
delete thing;
return;
}
}
console.log(JSON.stringify(chaosMonkey(testFixture2), null, 2));
NB: the chances of any object key or array item being recursed into are equal, in order to make modifications equally likely anywhere in the object.
Edit 3:
Additional Requirement:
It MUST always remove at least one thing.
Bonus points for:
ways to control the number of things that get deleted
any way to limit which properties get deleted or recursed into.
i.e. allow/deny lists, where:
allowRemovalList = properties that it's ok to remove
denyRemovalList = properties that it's not ok to remove
(It could be that you have some properties that it's ok to remove entirely, but they should not be recursed into and inner parts of them removed.)
NB: Originally I asked for whitelist/blacklist but this caused confusion (and I wouldn't want anyone copying this code to be surprised when they use it) and some answers have implemented it so that blacklist = properties to always remove. I won't penalise any answer for that (and it's trivial to change anyway).
I took a stab at it because I thought the question was interesting and unique. This is a bit sloppy but maybe it's a start if someone else is wondering how to do this in the future.
const testFixture = {
a: [{
b: 1
},
{
b: 2
},
{
b: 3
},
],
b: {
c: {
d: 44,
e: "foo",
f: [1, 2, 3]
}
},
c: 3,
d: false,
f: "Blah"
};
function getRandomInt(max) {
return Math.floor(Math.random() * max);
}
function chaosMonkey(object) {
console.log(Object.keys(object).length, object);
const newlyDeformedObject = { ...object
};
Object.keys(object).forEach((item, index) => {
const shouldDelete = getRandomInt(2);
console.log(index, shouldDelete);
if (shouldDelete) {
delete newlyDeformedObject[item];
} else {
if (typeof newlyDeformedObject[item] === "object") {
console.log("+++ Going deeper", { ...newlyDeformedObject[item]
});
newlyDeformedObject[item] = chaosMonkey({ ...newlyDeformedObject[item]
});
}
}
});
return newlyDeformedObject;
}
console.log(chaosMonkey(testFixture));
Assuming you mean random properties of the root of the object (not properties of properties or properties of array elements)
const testFixture = {
a: [{
b: 1
},
{
b: 2
},
{
b: 3
},
],
b: {
c: {
d: 44,
e: "foo",
f: [1, 2, 3]
}
},
c: 3,
d: false,
f: "Blah",
}
// Make a list of all the keys
const keys = Object.keys(testFixture);
// Choose one at random
const iKey = Math.floor(Math.random() * keys.length);
// (For simplicity we are making the assumption that there will always be at least one key)
const deleteKey = keys[iKey]
// Build a new object, that has the all the properties of the old one, except the property selected for deletion.
const out = {};
keys.forEach(key => {
if (key !== deleteKey) {
out[key] = testFixture[key]
}
})
console.log(out)
Modifying the OP's code, to achieve deep deletion
Great that you have joined in the coding and shown a nearly-complete answer! That is much more likely to engage people's curiosity. I think your code is nearly there: just make sure to do your deletion directly from the target object, otherwise you only attempt to delete the temporary variable "target", which has no effect.
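As a quick illustration of the difference (a minimal sketch; obj is just a throwaway example object): the delete operator removes properties from objects, but applied to a plain variable it does nothing useful.
const obj = { x: 1, y: 2 };
let target = obj.x;   // copies the value into a separate variable
delete target;        // does NOT touch obj; outside strict mode this just returns false
console.log(obj);     // { x: 1, y: 2 }
delete obj.x;         // removes the property from the object itself
console.log(obj);     // { y: 2 }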
Does the below do what you want? (Only one line changed)
var testFixture2 = {
a: [{
b: 1,
c: 2
},
{
b: 2,
c: 2
},
{
b: 3,
c: 2,
d: "bar"
},
],
b: {
c: {
d: 44,
e: "foo",
f: [1, 2, 3]
}
},
c: 3,
d: false,
f: "Blah"
};
function getRandomIndex(max) {
return Math.floor(Math.random() * max);
}
function chaosMonkey(thing) {
if (typeof thing === "object") {
console.log("object", Object.keys(thing).length, thing);
const newlyDeformedObject = { ...thing
};
// Make a list of all the keys
const keys = Object.keys(thing);
// Choose one at random
const iKey = getRandomIndex(keys.length);
let target = newlyDeformedObject[keys[iKey]];
const shouldDelete = getRandomIndex(3) === 0;
if (shouldDelete) {
// In this line below, we delete the property from "newlyDeformedObject", not just delete the variable "target"
delete newlyDeformedObject[keys[iKey]];
console.log("Object deleted", keys[iKey]);
} else {
console.log("+++ Going deeper", thing);
newlyDeformedObject[keys[iKey]] = chaosMonkey({ ...newlyDeformedObject[keys[iKey]]
});
}
return newlyDeformedObject;
} else if (typeof thing === "array") {
console.log(array);
const iKey = getRandomIndex(thing.length);
const shouldDelete = getRandomIndex(3) === 0;
if (shouldDelete) {
delete array[iKey];
console.log("Array deleted", iKey);
} else {
array[iKey] = chaosMonkey(array[iKey]);
return array;
}
} else {
//#todo do something bad based on type e.g. number -> NaN, string -> '', but these are less likely to break something
delete thing;
return;
}
}
console.log(JSON.stringify(chaosMonkey(testFixture2), null, 2));
Here is a solution that features whitelisting and blacklisting, where blacklisting takes priority over whitelisting:
const testFixture = {
a: [{ b: 1 }, { b: 2 }, { b: 3 }],
b: { c: { d: 44, e: "foo", f: [1, 2, 3] } },
c: 3,
d: false,
f: "Blah",
};
const whiteList = [
["a", "2", "b"],
["b", "c", "e"],
];
const blackList = [
["a", "1", "b"],
["b", "c", "d"],
];
// Partial match because if a sub-property is whitelisted, the full path has to remain untouched
const isInWhiteList = (input) =>
whiteList.some((x) =>
input.reduce((acc, cur, i) => cur === x[i] && acc, true)
);
// Exact match
const isInBlackList = (input) =>
blackList.some(
(x) =>
x.length === input.length &&
input.reduce((acc, cur, i) => cur === x[i] && acc, true)
);
const chaosMonkey = (
input,
chanceOfBeingDeleted = 0.2, // Probability of property deletion
deep = true, // Remove only the deepest properties? If set to false, removes intermediate ones as well
path = []
) => {
if (typeof input !== "object") return;
const propsToDelete = [];
const itemsDeletedArr = [];
// Calculate properties to delete
for (const item in input) {
const currentPath = [...path, item];
if (
(isInBlackList(currentPath) ||
(!isInWhiteList(currentPath) &&
Math.random() < chanceOfBeingDeleted)) &&
(!deep || typeof input[item] !== "object")
) {
propsToDelete.push(item);
} else {
const itemsDeleted = chaosMonkey(
input[item],
chanceOfBeingDeleted,
deep,
currentPath
);
itemsDeletedArr.push(itemsDeleted);
}
}
// Delete properties
if (input instanceof Array) {
// Delete indexes in reverse direction to prevent indexes shifting
propsToDelete.reverse().forEach((x) => input.splice(x, 1));
} else {
propsToDelete.forEach((x) => delete input[x]);
}
// Has deleted at least one property?
return (
!!propsToDelete.length ||
itemsDeletedArr.reduce((acc, cur) => acc || cur, false)
);
};
// Optionally pass a chance of being deleted as second parameter
while (!chaosMonkey(testFixture)) {
console.log("chaosMonkey didn't change anything, retrying");
}
console.log(testFixture);
I have a very different solution that, depending on your needs, might be clever or not very useful.
JavaScript lets you define a Proxy to act as the interface between an object and whatever script is using that object. For example:
const obj = { a: 2 }
const proxy = new Proxy(obj, {
get: () => 3
})
console.log(obj.a) // 2
console.log(proxy.a) // 3
So I'm proposing a Russian doll of proxies, so that every time something tries to access any property of your object, deeply nested or not, there is some chance of receiving undefined instead.
const obj = {
a: [
{ b: 1 },
{ b: 2 },
{ b: 3 },
],
b: {
c: {
d: 44,
e: "foo",
f: [1, 2, 3],
},
},
c: 3,
d: false,
f: "Blah",
}
function makeRandomlyInaccessible(object, likelihood) {
const handler = {
get(target, key, receiver) {
if (
key in target
&& Math.random() < likelihood
) {
return undefined
}
const result = Reflect.get(target, key, receiver)
if (typeof result === "object") {
return new Proxy(result, handler)
}
return result
},
}
return new Proxy(object, handler)
}
const monkeyObj = makeRandomlyInaccessible(obj, .1)
console.log(monkeyObj.a[1].b) // 2
console.log(monkeyObj.a[1].b) // 2
console.log(monkeyObj.a[1].b) // undefined
console.log(monkeyObj.a[1].b) // 2
console.log(monkeyObj.a[1].b) // Uncaught TypeError: Cannot read property '1' of undefined
The get method I'm defining in this proxy handler is called a "trap". This one traps every attempt to get a property value, but there are many other traps you could play with depending on what you need or want to try.
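For example, a handler could also trap the in operator via has, so that even existence checks become flaky. A minimal sketch (the 0.1 likelihood and the flakyHas name are just illustrative):
const flakyHas = new Proxy({ a: 1, b: 2 }, {
  // "has" traps `key in object` checks
  has(target, key) {
    if (Math.random() < 0.1) return false
    return Reflect.has(target, key)
  },
})
console.log("a" in flakyHas) // usually true, occasionally false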
If you need the object to be stable (meaning that random properties are missing, but if they are found they are always found, and if they are undefined they are always undefined), you could just memoize the results of the traps like this:
function makeRandomlyInaccessible(object, likelihood) {
const trapMap = new Map()
const handler = {
get(target, key) {
if (!trapMap.has(target)) {
trapMap.set(target, {})
}
const traps = trapMap.get(target)
if (key in traps) {
return traps[key]
}
if (
!(key in traps)
&& Math.random() < likelihood
) {
traps[key] = undefined
return undefined
}
const result = target[key]
if (typeof result === "object" && result !== null) {
traps[key] = new Proxy(result, handler)
} else {
traps[key] = result
}
return traps[key]
},
}
return new Proxy(object, handler)
}
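A minimal usage sketch of this memoized version (reusing obj from above; stableMonkeyObj is just an illustrative name): whatever a property resolves to on first access, it keeps resolving to on every later access.
const stableMonkeyObj = makeRandomlyInaccessible(obj, .1)
const first = stableMonkeyObj.b
console.log(first === stableMonkeyObj.b) // true: the cached proxy (or undefined) is returned every time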
This approach lets you define lists to customize the behavior, where each property is identified by a "path to the property" (for example "b.c.f[1]"), as well as an exact deleteCount.
doNotRemove: properties specified here must not be removed, which implies that all of their parents must not be removed either
mustRemove: properties specified here must be removed
canRemoveButNotModify: properties specified here can be removed, but all of their children must not be removed
This approach could easily be tweaked to use a "delete ratio" instead of an absolute count.
The basic principle is:
we recursively go over the entire object (breadth first) and construct a list of "path to a property", with a JS-like grammar (for example "b.c.f[1]")
we match the obtained list with doNotRemove, mustRemove, and canRemoveButNotModify (thanks to a few very basic utility functions) to get a list of properties to delete
we delete the properties in the list
const obj = {
a: [
{ b: 1 },
{ b: 2 },
{ b: 3 },
],
b: {
c: {
d: 44,
e: "foo",
f: [1, 2, 3],
},
},
c: 3,
d: false,
f: "Blah",
}
deleteRandomProps(obj, {
deleteCount: 5,
doNotRemove: [
'a[1]',
'b.c',
],
mustRemove: [
'a[2]',
'b.c.e',
],
canRemoveButNotModify: [
"b.c.f"
],
})
console.log('result', obj)
function deleteRandomProps(obj, {
deleteCount = 0, // number of deletions
doNotRemove = [], // prevent deletion of property and all its children, of the shape "a[0].b"
mustRemove = [], // force deletion of property, of the shape "a[0].b"
canRemoveButNotModify = [], // can delete property entirely but not modify its children, of the shape "a[0].b"
}) {
// list all possible property paths
const listOfAllPaths = listPaths(obj)
// prevent deletion of doNotRemove items
doNotRemove
.flatMap(path => allPathsThatLeadToPath(path))
.forEach(path => {
// remove all of doNotRemove from full list
removeItemFromList(path, listOfAllPaths)
// remove all of doNotRemove from mustRemove
removeItemFromList(path, mustRemove)
})
// prevent deletion of items that are children of canRemoveButNotModify items
canRemoveButNotModify
.forEach(path => {
// remove from full list
removeChildPaths(path, listOfAllPaths)
// remove from mustRemove
removeChildPaths(path, mustRemove)
})
// remove from list all properties that are children of a property in mustRemove
mustRemove.forEach(path => {
removeItemFromList(path, listOfAllPaths)
removeChildPaths(path, listOfAllPaths)
})
// start from mustRemove and add until deleteCount is reached
const deletions = [...mustRemove]
while (deletions.length < deleteCount && listOfAllPaths.length > 0) {
const path = removeRandomItemFromList(listOfAllPaths)
// remove from list all properties that are children of the one we're deleting
removeItemFromList(path, listOfAllPaths)
removeChildPaths(path, listOfAllPaths)
// remove from deletions all properties that are children of the new one
removeItemFromList(path, deletions)
removeChildPaths(path, deletions)
deletions.push(path)
}
// delete properties from object
console.log('deleting props', deletions)
deletions.forEach(path => {
deleteFromPath(obj, path)
})
return obj
}
// create a list of all possible property paths, of the shape "a[0].b"
function listPaths(obj, prefix = '') {
if (typeof obj !== 'object') {
return []
}
const pureKeys = Object.keys(obj)
const keys = prefix
? pureKeys.map(key => `${prefix}.${key}`)
: pureKeys
const values = Object.values(obj)
const more = []
values.forEach((value, index) => {
if (Array.isArray(value)) {
value.forEach((item, i) => {
const newKey = `${keys[index]}[${i}]`
more.push(newKey)
more.push(...listPaths(item, newKey))
})
} else if (typeof value === 'object') {
more.push(...listPaths(value, keys[index]))
}
})
return [...keys, ...more]
}
// recursively find property based on list of keys, of the shape ["a", "0", "b"]
function findFromArray(obj, array) {
if (array.length === 0) {
return obj
}
const [key, ...rest] = array
if (key in obj) {
return findFromArray(obj[key], rest)
}
}
// turn path into list of keys ("a[0].b" => ["a", "0", "b"])
function pathToParticles(path) {
const regex = /([a-z0-9]+)/gi // "+" so multi-character keys stay intact
const particles = path.match(regex)
return particles || []
}
// deletes a property based on property path, of the shape "a[0].b"
function deleteFromPath(obj, path) {
const particles = pathToParticles(path)
const last = particles.pop()
const lastObject = findFromArray(obj, particles)
if (lastObject) {
delete lastObject[last]
}
}
// turn path into list of path that lead to it ("a[0].b" => ["a", "a[0]", "a[0].b"])
function allPathsThatLeadToPath(path) {
const regex = /[\[\.]/gi
const list = [path]
let match
while ((match = regex.exec(path)) !== null) {
list.push(path.substring(0, match.index))
}
return list
}
// remove item from an array
function removeItemFromList(item, list) {
const index = list.indexOf(item)
if (index !== -1) {
list.splice(index, 1)
}
return list
}
// remove from list all property paths that are children of a property path
// for example "a[0]" will remove "a[0].b", "a[0].b.c", ... but not "a[0]" itself
function removeChildPaths(path, list) {
for(let i = list.length; i > 0; i--) {
const item = list[i-1]
if (item !== path && item.startsWith(path)) {
list.splice(i-1, 1)
}
}
}
// remove a random prop from a list
function removeRandomItemFromList(list) {
const index = Math.floor(Math.random() * list.length)
const [item] = list.splice(index, 1)
return item
}
I went for a simpler approach. My function builds a list of all key-value pairs and selects one at random each time it runs. For the sake of this example, I log the path of the removed key along with its value each time the function runs, then run it again until all keys are successfully removed.
Solution #1
const testFixture = {
a: [{b:1},{b:2},{b:3},],
b: {c: {d: 44, e: "foo", f: [1,2,3]}},
c: 3,
d: false,
f: "Blah",
};
const chaosMonkey = obj => {
const getObjKeysPairs = (obj, parent = false) => {
const objKeysPairs = [];
for (const [key, value] of Object.entries(obj)) {
const parentKeyPair = parent ? parent + "." + key : key;
objKeysPairs.push([obj, key, parentKeyPair]);
if (typeof value === 'object' && value !== null) {
objKeysPairs.push(...getObjKeysPairs(value, parentKeyPair));
}
}
return objKeysPairs;
};
const objKeyPairs = getObjKeysPairs(obj),
[object, key, path] = objKeyPairs[Math.floor(Math.random() * objKeyPairs.length)];
console.log("Deleting " + path + " === " + object[key]);
delete object[key];
};
while (Object.keys(testFixture).length) {
chaosMonkey(testFixture);
}
console.log("testFixture:", testFixture);
if (Object.keys(testFixture).length === 0) console.log("testFixture emptied successfully");
This solution works as-is and deletes keys recursively for both arrays and objects. With the delete operator, however, array items are deleted but not entirely removed: an empty slot is left behind at that index.
This does not cause any errors, even silent ones, because empty slots are not reported as key-value pairs, so they are never selected again after the initial deletion.
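To make the array behavior concrete, here is a small sketch of delete versus splice on a plain array (the letters are just placeholder values):
const withDelete = ["a", "b", "c"];
delete withDelete[1];                 // leaves an empty slot; length stays 3
console.log(withDelete.length);       // 3
console.log(Object.keys(withDelete)); // ["0", "2"] -- the empty slot has no key
const withSplice = ["a", "b", "c"];
withSplice.splice(1, 1);              // removes the element entirely
console.log(withSplice);              // ["a", "c"]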
NOTE: The below explanation is for the first version of this solution. It is still valuable and so I am not removing it, but please see the older version of this solution to see its relevance.
One JS concept worthy of a deeper explanation is this:
if ([Object, Array].includes(value.constructor)) { /* ... */ }
Every data type in JavaScript is built upon classes. Until recently, classes were defined using a function-based (constructor function) syntax. ES6+ introduced the class declaration, which makes creating new classes arguably much easier and is essentially syntactic sugar over the classical syntax.
Each class's constructor can be referenced by its name like a variable (without quotes) or by its name as a string. Here is an example of this:
class CustomObject extends Object {
constructor() {
super();
}
}
const myVar = new CustomObject();
myVar.constructor === CustomObject; // true
myVar.constructor.name === "CustomObject"; // true
Another important concept here is the prototype chain. Every object and type in JavaScript is a prototyped instance of another object. If you traverse the prototype chain, you can evaluate any ancestor prototype of a given object, even of simple strings or numbers, until you reach the most primitive object, which has no prototype of its own and is ultimately inherited by all types in JavaScript. Here are a few examples of that:
(5).constructor === (5).__proto__?.constructor; // true
(5).__proto__.constructor; // => Number
(5).__proto__.__proto__?.constructor; // => Object
(5).__proto__.__proto__.__proto__?.constructor; // => undefined
"string".constructor === "string".__proto__?.constructor; // true
"string".__proto__.constructor; // => String
"string".__proto__.__proto__?.constructor; // => Object
"string".__proto__.__proto__.__proto__?.constructor; // => undefined
class CustomObject extends Object { constructor() { super(); }}
const myVar = new CustomObject();
myVar.__proto__.constructor; // => CustomObject
myVar.__proto__.__proto__?.constructor; // => Object
myVar.__proto__.__proto__.__proto__?.constructor; // => undefined
In your example, this is all very useful for us for the simple purpose of checking whether the value of one of the object's values, regardless of depth, is a nested object or array. Like the examples above, arrays and objects also have unique constructors. The constructor for arrays is Array and the constructor for objects is Object. I am simply checking to see if the value being evaluated is either one of those types and if so evaluate its own child properties/elements via recursion.
Recursion, if you aren't already familiar with it, is the process by which a function calls itself, often passing information along so that the function can reach whatever depth is necessary to gather all the information available. This is exactly what we need in your case to build the complete list of all properties in the main object.
Lastly, my purpose in using [Object, Array].includes(...) is simply that it is a bit easier than writing value.constructor === Object || value.constructor === Array. Using the includes Array.prototype method lets us check whether the value being evaluated, value.constructor, is equal to either Object or Array.
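For instance, a few quick checks of that condition (a small sketch; the literal values are only illustrative):
[Object, Array].includes({}.constructor);     // true
[Object, Array].includes([].constructor);     // true
[Object, Array].includes("text".constructor); // false (String)
[Object, Array].includes((5).constructor);    // false (Number)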
There is a spec floating around out there for use of the bitwise OR operator | to do something like this so we can write these more naturally like this: value.constructor === Object | Array. However, this is purely conceptual at this point and may only work in specific use-cases such as XHTTP requests and fetch.
Solution #2
In my second solution below, I add an additional check at the end of the function to test whether the containing object is an array and, if so, use the splice Array.prototype method to completely remove the array element rather than only deleting it, which would leave an empty slot behind at the deleted index.
Here is how that would work:
const testFixture = {
a: [{b:1},{b:2},{b:3},],
b: {c: {d: 44, e: "foo", f: [1,2,3]}},
c: 3,
d: false,
f: "Blah",
};
const chaosMonkey = obj => {
const getObjKeysPairs = (obj, parent = false) => {
const objKeysPairs = [];
for (const [key, value] of Object.entries(obj)) {
const parentKeyPair = parent ? parent + "." + key : key;
objKeysPairs.push([obj, key, parentKeyPair]);
if (typeof value === 'object' && value !== null) {
objKeysPairs.push(...getObjKeysPairs(value, parentKeyPair));
}
}
return objKeysPairs;
};
const objKeyPairs = getObjKeysPairs(obj),
[object, key, path] = objKeyPairs[Math.floor(Math.random() * objKeyPairs.length)];
console.log("Deleting " + path + " === " + object[key]);
delete object[key];
if (Array.isArray(object)) object.splice(key, 1);
};
while (Object.keys(testFixture).length) {
chaosMonkey(testFixture);
}
console.log("testFixture:", testFixture);
if (Object.keys(testFixture).length === 0) console.log("testFixture emptied successfully");
Here is an answer I'm not entirely happy with. It does randomly remove leaf properties from an object, and it does that fairly well.
But -- and while the question did not specify this, I think it's an interesting possible extension -- this version only removes leaf nodes; it might be preferable to also remove other branches. While I did think through some possibilities here, nothing really seemed to gel. It's not a matter of implementation, but of coming up with good requirements. Do we treat all paths equally? What does it mean to remove a branch when we want to keep some of its subbranches? Should we weight the chances of removal higher for leaves than for heavy-weight branches? And so on.
But, for what this does do, I'm pretty happy with this solution. It basically converts an object to an array of pairs representing paths to a node and that node's value. Then we randomly delete some entries from that array and reconstitute an object from the remaining values.
It looks like this:
// utility functions
const pathEntries = (obj) =>
Object (obj) === obj
? Object .entries (obj) .flatMap (
([k, x]) => pathEntries (x) .map (([p, v]) => [[Array.isArray(obj) ? Number(k) : k, ... p], v])
)
: [[[], obj]]
const setPath = ([p, ...ps]) => (v) => (o) =>
p == undefined ? v : Object .assign (
Array .isArray (o) || Number.isInteger (p) ? [] : {},
{...o, [p]: setPath (ps) (v) ((o || {}) [p])}
)
const hydrate = (xs) =>
xs .reduce ((a, [p, v]) => setPath (p) (v) (a), {})
// helper function
const randomRemove = (xs, count = 1, i = Math .floor (Math .random () * xs .length)) =>
count <= 0
? [...xs]
: randomRemove ([... xs .slice (0, i), ... xs .slice (i + 1)], count - 1)
// main function
const chaosMonkey = (o, count = 1) =>
hydrate (
randomRemove (pathEntries (o), count)
)
// sample data
const testFixture = {a: [{b: 1}, {b: 2}, {b: 3}], b: {c: {d: 44, e: "foo", f: [1, 2, 3]}}, c: 3, d: false, f: "Blah"}
// demo
console .log (chaosMonkey (testFixture, 3))
This is built on three utility functions that I've used often on StackOverflow:
pathEntries creates an array of items such as [['a', 2, 'b'], 3], which describes the property b of the element with index 2 in the array stored in the a property of the root object, and notes that it has value 3.
setPath takes a path and value like that and a base object and sets the corresponding value along that path, creating new nodes as needed.
hydrate simply runs setPath for each of an array of such entries, building a new minimal object with those values.
We also create a simple helper function randomRemove which removes random elements from an array. This is a simple recursion on the number of items to remove, returning the original array if the count is zero, otherwise removing one random element and recurring with one less than count.
On top of those, we write our chaosMonkey function, which simply calls pathEntries on the object, calls randomRemove on the result, and then hydrate to build a new object.
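To see how those pieces fit together, here is a small sketch of what pathEntries yields, and hydrate rebuilds, for a tiny object (the values are only illustrative):
pathEntries ({a: [{b: 1}], c: 3})
//=> [[['a', 0, 'b'], 1], [['c'], 3]]
hydrate (pathEntries ({a: [{b: 1}], c: 3}))
//=> {a: [{b: 1}], c: 3}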
Besides the large question mentioned above, there are two possible changes I imagine we might consider. Most importantly, this will allow us to remove entries in the middle of our arrays, leaving us with sparse arrays. We may want to avoid that. This is a naive fix for that problem:
const chaosMonkey = (o, count = 1, entries = pathEntries (o)) =>
  hydrate ([
    // keep every entry whose final key is an array index...
    ... entries .filter (([p]) => Number .isInteger (p [p .length - 1])),
    // ...and randomly remove only from the remaining entries
    ... randomRemove (entries .filter (([p]) => !Number .isInteger (p [p .length - 1])), count)
  ])
which avoids removing any array elements at all. You can see it in this snippet:
// utility functions
const pathEntries = (obj) =>
Object (obj) === obj
? Object .entries (obj) .flatMap (
([k, x]) => pathEntries (x) .map (([p, v]) => [[Array.isArray(obj) ? Number(k) : k, ... p], v])
)
: [[[], obj]]
const setPath = ([p, ...ps]) => (v) => (o) =>
p == undefined ? v : Object .assign (
Array .isArray (o) || Number.isInteger (p) ? [] : {},
{...o, [p]: setPath (ps) (v) ((o || {}) [p])}
)
const hydrate = (xs) =>
xs .reduce ((a, [p, v]) => setPath (p) (v) (a), {})
// helper function
const randomRemove = (xs, count = 1, i = Math .floor (Math .random () * xs .length)) =>
count <= 0 ? [...xs] : randomRemove ([... xs .slice (0, i), ... xs .slice (i + 1)], count - 1)
// main function
const chaosMonkey = (o, count = 1, entries = pathEntries (o)) =>
  hydrate ([
    // keep every entry whose final key is an array index...
    ... entries .filter (([p]) => Number .isInteger (p [p .length - 1])),
    // ...and randomly remove only from the remaining entries
    ... randomRemove (entries .filter (([p]) => !Number .isInteger (p [p .length - 1])), count)
  ])
// sample data
const testFixture = {a: [{b: 1}, {b: 2}, {b: 3}], b: {c: {d: 44, e: "foo", f: [1, 2, 3]}}, c: 3, d: false, f: "Blah"}
// demo
console .log (chaosMonkey (testFixture, 3))
We might consider a more sophisticated version of this that actually does delete array entries but doesn't leave the holes. While we could extend this technique to handle that, it might be more effort than it's worth, and perhaps we would instead write a more sophisticated hydrate.
Second, this requires us to specify the number of paths to delete. It seems more in the spirit of the problem to make that number also random, or to consider a fraction of the paths involved, or do something still more interesting. I think these would mostly be simple variants of the above, and I leave that to the reader.
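For instance, a fraction-based variant could be as small as this sketch (chaosMonkeyFraction and ratio are just illustrative names, built on the functions above; Math .max keeps the requirement of removing at least one thing):
const chaosMonkeyFraction = (o, ratio = 0.25) =>
  chaosMonkey (o, Math .max (1, Math .floor (pathEntries (o) .length * ratio)))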
Given two objects with different but known keys - which may or may not be set - what is a good way to combine them into one object using specific keymaps for both?
Say, for example, obj1 can have keys itemA, itemB, and itemC. Another obj2 can have keys item_a, item_b, and item_c, and takes precedence. The keys refer to the same final keys a, b, and c respectively.
This is what I came up with, but it feels unnecessarily convoluted.
// Given these objects:
const obj1 = { itemA: 1, itemB: 2, itemC: undefined }
const obj2 = { item_a: undefined, item_b: 20, item_c: 30 }
// Desired result: { a: 1, b: 20, c: 30 }
const filter = obj => Object.fromEntries(
Object.entries(obj).filter(entry => entry[1])
)
const serialized1 = (object =>
({ itemA:a, itemB:b, itemC:c } = object)
&& filter({a, b, c}))(obj1)
const serialized2 = (object =>
({ item_a:a, item_b:b, item_c:c } = object)
&& filter({a, b, c}))(obj2)
const finalObject = { ...serialized1, ...serialized2 }
console.log(finalObject)
// { a: 1, b: 20, c: 30 }
I'm roughly trying to have a keymap for each object for which foreign keys map to my local keys, which then merge.
Note: The mentioned key names are just examples. In reality they can be arbitrary, for example one of im-dimensions and img-res being stored in a key called size.
Using JSON.stringify and JSON.parse, we can achieve that:
const obj1 = { itemA: 1, itemB: 2 };
const obj2 = { item_b: 20, item_c: 30 };
let format=(obj)=>{
return JSON.parse(JSON.stringify(obj).toLowerCase().replace(/\"(\w*)([a-c]{1}\":)/g,"\"$2"));
}
let obj3={...format(obj1),...format(obj2)}
console.log(obj3);
I found a solution. You can create a map from the oddly named keys to their preferred names, and "rename" them into a new object using ES6 computed property names (part of the object initializer syntax). Basically, that means you can use a variable as a key name, like so:
const obj = {
[key]: value
}
So with our map, we can create a new key with the value from our old object:
const obj = {
[map[key]]: obj[key]
}
In order to automate this for every key in an object, we can use the reduce function in conjunction with the spread syntax:
const mapper = (map, obj) =>
Object.keys(obj)
.filter(key => Boolean(obj[key]))
.reduce((acc, key) =>
({
...acc,
...{ [map[key]]: obj[key] }
}), {})
This is a very versatile and reusable function. All we need is to supply the map and the input object. Here's a full example based on the data from the original question:
// Given these objects:
const obj1 = { itemA: 1, itemB: 2, itemC: undefined }
const obj2 = { item_a: undefined, item_b: 20, item_c: 30 }
// Desired result: { a: 1, b: 20, c: 30 }
// ------
// Map keys to predefined attributes
const map = {
itemA: 'a',
item_a: 'a',
itemB: 'b',
item_b: 'b',
itemC: 'c',
item_c: 'c'
}
const mapper = (map, obj) =>
Object.keys(obj)
.filter(key => Boolean(obj[key]))
.reduce((acc, key) =>
({
...acc,
...{ [map[key]]: obj[key] }
}), {})
const keymap = mapper.bind(null, map)
const attributes = { ...keymap(obj1), ...keymap(obj2) }
console.log(attributes)
// { a: 1, b: 20, c: 30 }
This question already has answers here:
One-liner to take some properties from object in ES 6
(12 answers)
Closed 6 years ago.
Using object rest destructuring, it is straightforward to blacklist properties of an object, as in the following example:
const original = {
a: 1,
b: 2,
c: 3,
evil: "evil",
ugly: "ugly",
};
const { evil, ugly, ...sanitized } = original;
console.log(sanitized); // prints { a: 1, b: 2, c: 3 }
I wonder if there is a similarly terse way to do the same with a whitelist of properties (in the example: { a, b, c }). Very often I have to convert a subset of the available properties to JSON, and such functionality would make the code much more readable and safer.
I found a similar question, but it is not exactly the same problem:
Is there a more terse way to map properties of one object to another in ES6/ES2015?
Edit: It is a pity that the following code doesn't work: because the assignment expression evaluates to its right-hand side, it returns the original object instead of the filtered one.
const sanitized = {a, b, c} = original;
// sanitized === original
For this purpose I use 2 helper functions
export const pickProps = (object, ...props) => (
props.reduce((a, x) => {
if (object.hasOwnProperty(x)) a[x] = object[x];
return a;
}, {})
);
export const omitProps = (object, ...props) => {
const no = {...object};
props.forEach(p => delete no[p]);
return no;
};
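A quick usage sketch with the example object from the question (expected output shown in comments):
const original = { a: 1, b: 2, c: 3, evil: "evil", ugly: "ugly" };
console.log(pickProps(original, "a", "b", "c"));  // { a: 1, b: 2, c: 3 }
console.log(omitProps(original, "evil", "ugly")); // { a: 1, b: 2, c: 3 }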
You can also do
const original = {
a: 1,
b: 2,
c: 3,
evil: "evil",
ugly: "ugly",
};
const { a, b, c } = original;
const filtered = { a, b, c };
I don't think your way of "blacklisting" is ideal, because it unnecessarily assigns original.evil to evil and original.ugly to ugly.
You can try this approach:
const blacklistFilter = (obj, blacklist) => Object.entries(obj)
.filter(([key, value]) => !blacklist.includes(key))
.reduce((obj, [key, value]) => (obj[key] = value, obj), {})
const whitelistFilter = (obj, whitelist) => Object.entries(obj)
.filter(([key, value]) => whitelist.includes(key))
.reduce((obj, [key, value]) => (obj[key] = value, obj), {})
const original = {
a: 1
,b: 2
,c: 3
,evil: 'evil'
,ugly: 'ugly'
}
console.log(blacklistFilter(original, ['evil', 'ugly']))
console.log(whitelistFilter(original, ['a', 'b', 'c']))
Object.entries() returns an array of the object's [key, value] pairs, the filter() method keeps only the keys that are not blacklisted (or that are whitelisted), and the reduce() method converts the remaining [key, value] pairs back into an object (a similar method to the one in this answer).