If we wanted to implement a good copy function, it is important to understand that in the first version:
{ ...memo, [key]: target[key] }
you are copying `memo` into a brand-new temporary object on each iteration and then throwing that object away. Whereas:
base[prop] = value
is just an ordinary assignment — no copy is made.
Since this is in a loop in reduce, the accumulated result is copied again and again in all iterations.
In the first iteration the accumulator has 0 properties, so there is a cost 0, but in the following there is a cost:
1 + 2 + 3 + 4 + ... + n = n?
Where n
is the number of keys.
This sum is an additive analogue of the factorial and was named the "termial" function by Donald Knuth. It is also the sum of the terms of an arithmetic progression, which gives the closed form:
n? = (n² + n) / 2
So the first version has quadratic complexity, O(n²), in the number of keys, while the second has linear complexity, O(n).
In practice, the copy of the first form (spread in the loop) will always be slower even for a small number of keys and more and more the more keys there are.
I wrote the following script to test the effects on my own computer:
// Benchmark fixtures.
// (The original declared an unused `let r;` here — removed.)

// A small fixed-shape object: 8 keys known up front, so the engine can
// give it one stable hidden class ("properly typed"). The values are
// deliberately of mixed types.
const properlyTypedObjectWith8Keys = {
  key7: false,
  key1: "here",
  key2: "there",
  key3: "yey",
  key4: new String(),
  key5: 123,
  key6: /polymorphic/,
  key8: {}
};

const numKeys = 10e6;
const propertyValue = "stuff here";

// A dynamically-built object with randomly generated keys.
// NOTE(review): Math.round(Math.random() * 1000) only yields the integers
// 0..1000, so despite 10e6 loop iterations this object ends up with at
// most 1001 distinct keys — not the "1 million keys" the page headings
// claim. Use an index-based key ("random-key" + i) if truly unique keys
// are wanted (beware: that would make the quadratic benchmark infeasible).
const nonProperlyTypedObjectWith8Keys = {};
for (let i = 0; i < numKeys; i++) {
  nonProperlyTypedObjectWith8Keys[
    "random-key" + Math.round(Math.random() * 1000)
  ] = propertyValue;
}
// Returns a reducer that merges one property of `o` into the accumulator
// by spreading the whole accumulator into a fresh object on every call
// (O(size of accumulator) work per key).
function copyingMerge(o) {
  return (base, prop) => ({ ...base, [prop]: o[prop] });
}
// Returns a reducer that writes one property of `o` straight onto the
// accumulator and hands the same object back (constant work per key).
function nonCopyingMerge(o) {
  return (acc, key) => {
    acc[key] = o[key];
    return acc;
  };
}
// Builds a fresh object by folding every own enumerable key of `object`
// through the reducer produced by `merge(object)`.
function runMerge(object, merge) {
  const reducer = merge(object);
  const keys = Object.keys(object);
  return keys.reduce(reducer, {});
}
// Minimal assertion helper used by the test functions below: throws when
// the given condition is falsy, otherwise does nothing.
function assert(bool) {
  if (bool) {
    return;
  }
  throw new Error("assertion failed");
}
// Verifies that the spread-based merge reproduces all 8 keys and values
// of the fixed-shape object.
function testCopyingMerge() {
  const source = properlyTypedObjectWith8Keys;
  const result = runMerge(source, copyingMerge);
  const keys = Object.keys(result);
  assert(keys.length === 8);
  for (const key of keys) {
    assert(result[key] === source[key]);
  }
}
// Verifies that the direct-assignment merge reproduces all 8 keys and
// values of the fixed-shape object.
function testNonCopyingMerge() {
  const source = properlyTypedObjectWith8Keys;
  const result = runMerge(source, nonCopyingMerge);
  const keys = Object.keys(result);
  assert(keys.length === 8);
  for (const key of keys) {
    assert(result[key] === source[key]);
  }
}
// Checks the spread-based merge against the dynamically-built object:
// same key count, same values.
function testCopyingMergeWithUntyped() {
  const source = nonProperlyTypedObjectWith8Keys;
  const result = runMerge(source, copyingMerge);
  const keys = Object.keys(result);
  assert(keys.length === Object.keys(source).length);
  for (const key of keys) {
    assert(result[key] === source[key]);
  }
}
// Checks the direct-assignment merge against the dynamically-built
// object: same key count, same values.
function testNonCopyingMergeWithUntyped() {
  const source = nonProperlyTypedObjectWith8Keys;
  const result = runMerge(source, nonCopyingMerge);
  const keys = Object.keys(result);
  assert(keys.length === Object.keys(source).length);
  for (const key of keys) {
    assert(result[key] === source[key]);
  }
}
// Times `iterations` calls of `fn` and writes the average duration (in
// milliseconds) into the DOM element whose id equals `name`. Rethrows
// any error raised by `fn` after logging it.
function runBenchmark(iterations, name, fn) {
  const start = Date.now();
  try {
    let i = 0;
    while (i < iterations) {
      fn();
      i += 1;
    }
  } catch (err) {
    console.error(name, "failed", err.stack);
    throw err;
  }
  const elapsed = Date.now() - start;
  const average = elapsed / iterations;
  const target = document.querySelector('#' + name);
  // Same output as the original `[..].join(' ')` construction.
  target.innerText = `${name} levou ${average}ms em média`;
}
// 8-key object: cheap enough to run each strategy a million times.
runBenchmark(1e6, "copying-result", testCopyingMerge);
runBenchmark(1e6, "noncopying-result", testNonCopyingMerge);
// This is so slow that it can't run a million times.
runBenchmark(100, "copying-result-big", testCopyingMergeWithUntyped);
runBenchmark(100, "noncopying-result-big", testNonCopyingMergeWithUntyped);
/* Basic page styling for the benchmark report. */
body {
  font-family: sans-serif;
}
<!-- Result placeholders: runBenchmark() writes the average timing into each
     div, looked up by id. Headings are Portuguese for "with/without spread
     operator, 8 keys / 1 million keys". -->
<h1>Com operador spread, 8 chaves</h1>
<div id="copying-result"></div>
<h1>Sem operador spread, 8 chaves</h1>
<div id="noncopying-result"></div>
<h1>Com operador spread, 1 milhão de chaves</h1>
<div id="copying-result-big"></div>
<h1>Sem operador spread, 1 milhão de chaves</h1>
<div id="noncopying-result-big"></div>
It is executable in your own browser.
With 8 keys there is a ~5x difference. With 10e6 keys there is a ~150x difference.
Whether or not this matters for the way you generally build objects will depend on your use case. Usually copying objects is not a problem — even millions of copies should be reasonably cheap when the objects have few keys.
However, in the context of writing a good-quality copy function, there seems to be no reason to prefer a quadratic algorithm over a linear one, especially since both are equally legible.
The question starts from mistaken premises. The two code snippets in the upper half are not equivalent, although they do "similar" things: the first makes a shallow copy, the second does not, exactly. The two at the bottom both make shallow copies; neither makes a deep copy (that requires recursion). The only real difference between the two functions is that the use of spread in the first is entirely unnecessary. The fair comparison would be spread without reduce (as in
const copy = { ...object };)
, vs reduce without spread.– bfavaretto
Hello, @bfavaretto, I agree yes. The example is a shallow copy and only illustrates the cost of the quadratic copy. So I mention that the proper way to write this would be just with a spread or assign. The second has exactly the same output as the first even though it is less efficient.
– yamadapc
– yamadapc
I didn’t quite understand your argument, the test I would do is this: https://jsperf.com/shallow-copy-spread-vs-reduce
– bfavaretto
Yes, but in the discussion there was manipulation.
– yamadapc
Your example is different. The question is given the possibility of manipulation which is the most efficient way to copy the object.
– yamadapc