JavaScript `reduce` 性能
JavaScript `reduce` performance
我最近花了一些时间研究 transducer(函数式编程中的一种工具,旨在在不损失代码可读性/灵活性的前提下提高性能)。当我开始测试它们的实际速度时,得到了一些非常令人失望的结果。考虑:
// --- element-level helpers shared by every variant below ---
const inc = x => x + 1;
const isEven = x => x % 2 === 0;
// simplest, shortest way I would be comfortable with if performance wasn't an issue
// (allocates an intermediate array between filter and map)
const mapFilter = xs => xs.filter(isEven).map(inc);
// transducers way
// function composition
const compose = (...fns) => x => fns.reduceRight((y, f) => f(y), x);
// map/filter as transducers: each takes a reducer ("step") and returns a new reducer
const map = f => step => (a, c) => step(a, f(c));
const filter = p => step => (a, c) => (p(c) ? step(a, c) : a);
// basic reducer for building array
const build = (acc, x) => {
acc.push(x);
return acc;
};
// transducer, it doesn't create intermediate arrays hence should theoretically be faster
// NOTE: the composed reducer is rebuilt on EVERY call, i.e. on every benchmark
// iteration — that repeated allocation/composition is part of what gets measured
const transducers = xs =>
xs.reduce(compose(filter(isEven), map(inc))(build), []);
// native loop for comparison
const nativeLoop = data => {
const result = [];
const l = data.length;
for (let i = 0; i < l; i++) {
const x = data[i];
if (isEven(x)) result.push(inc(x));
}
return result;
};
// input: 1000 copies of 1 — all odd, so every variant returns an empty array
const data = Array(1000).fill(1);
// [name, fn] pairs in the shape Benchmark.Suite#add expects
const base = ["simplest, chained map and filter", () => mapFilter(data)];
const alternative = ["composed transducers", () => transducers(data)];
const alternative2 = ["native loop", () => nativeLoop(data)];
/* console.log(Benchmark) */
console.log("Running benchmarks....");
// Benchmark is a global provided by benchmark.js (loaded via the <script> tags below)
const suite = new Benchmark.Suite();
suite
.add(...base)
.add(...alternative)
.add(...alternative2)
.on("cycle", function(event) {
// prints one line of timing stats per finished benchmark
console.log(String(event.target));
})
.on("complete", function() {
console.log("Fastest is " + this.filter("fastest").map("name").join(", "));
})
// run async
.run({ async: true });
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/benchmark/2.1.4/benchmark.min.js"></script>
我预期的性能排序是
原生循环 > transducer > 链式 map/filter
然而,除了原生循环远快于其他方法之外,令我非常惊讶的是:reduce/transduce 方法竟然比创建中间数组的链式 map/filter 慢得多(在 Chrome 中大约慢一个数量级)。有人能解释一下造成这种结果的原因吗?
您的基准测试有误,因为您在每次运行时都重新构建了一个新的 transducer 链。
// --- element-level helpers shared by every variant below ---
const inc = x => x + 1;
const isEven = x => x % 2 === 0;
// simplest, shortest way I would be comfortable with if performance wasn't an issue
// (allocates an intermediate array between filter and map)
const mapFilter = xs => xs.filter(isEven).map(inc);
// transducers way
// function composition
const compose = (...fns) => x => fns.reduceRight((y, f) => f(y), x);
// map/filter as transducers: each takes a reducer ("step") and returns a new reducer
const map = f => step => (a, c) => step(a, f(c));
const filter = p => step => (a, c) => (p(c) ? step(a, c) : a);
// basic reducer for building array
const build = (acc, x) => {
acc.push(x);
return acc;
};
// transducer, it doesn't create intermediate arrays hence should theoretically be faster
// KEY DIFFERENCE from the question's version: the composed reducer is built
// ONCE here, outside the benchmarked function, instead of on every call
const reducer = compose(filter(isEven), map(inc))(build);
const transducers = xs => xs.reduce(reducer, []);
// native loop for comparison
const nativeLoop = data => {
const result = [];
const l = data.length;
for (let i = 0; i < l; i++) {
const x = data[i];
if (isEven(x)) result.push(inc(x));
}
return result;
};
// input: 1000 copies of 1 — all odd, so every variant returns an empty array
const data = Array(1000).fill(1);
// [name, fn] pairs in the shape Benchmark.Suite#add expects
const base = ["simplest, chained map and filter", () => mapFilter(data)];
const alternative = ["composed transducers", () => transducers(data)];
const alternative2 = ["native loop", () => nativeLoop(data)];
/* console.log(Benchmark) */
console.log("Running benchmarks....");
// Benchmark is a global provided by benchmark.js (loaded via the <script> tags below)
const suite = new Benchmark.Suite();
suite
.add(...base)
.add(...alternative)
.add(...alternative2)
.on("cycle", function(event) {
// prints one line of timing stats per finished benchmark
console.log(String(event.target));
})
.on("complete", function() {
console.log("Fastest is " + this.filter("fastest").map("name").join(", "));
})
// run async
.run({ async: true });
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/benchmark/2.1.4/benchmark.min.js"></script>
如您所见,transducer 确实比链式 map
和 filter
方法更快。
基准测试有缺陷。
reducer 不需要做任何工作。
- 创建一个全部为 1(即全是奇数)的统一数组。
- 然后对每个元素运行 isEven 函数
- 结果总是返回一个空数组
我们实际上是在对"返回空数组"的性能进行基准测试。
如果我们用真实数据预填充数组,原生方法将获胜。
不过 Aadit 说得对:在两种 transducer 实现中,他的实现更快。
// Pre-fill the benchmark input with 1000 random digits (0-9) so isEven
// gets a realistic mix of accepted and rejected elements.
const data = Array.from({ length: 1000 }, () => Math.floor(Math.random() * 10));
我最近花了一些时间研究 transducer(函数式编程中的一种工具,旨在在不损失代码可读性/灵活性的前提下提高性能)。当我开始测试它们的实际速度时,得到了一些非常令人失望的结果。考虑:
// --- element-level helpers shared by every variant below ---
const inc = x => x + 1;
const isEven = x => x % 2 === 0;
// simplest, shortest way I would be comfortable with if performance wasn't an issue
// (allocates an intermediate array between filter and map)
const mapFilter = xs => xs.filter(isEven).map(inc);
// transducers way
// function composition
const compose = (...fns) => x => fns.reduceRight((y, f) => f(y), x);
// map/filter as transducers: each takes a reducer ("step") and returns a new reducer
const map = f => step => (a, c) => step(a, f(c));
const filter = p => step => (a, c) => (p(c) ? step(a, c) : a);
// basic reducer for building array
const build = (acc, x) => {
acc.push(x);
return acc;
};
// transducer, it doesn't create intermediate arrays hence should theoretically be faster
// NOTE: the composed reducer is rebuilt on EVERY call, i.e. on every benchmark
// iteration — that repeated allocation/composition is part of what gets measured
const transducers = xs =>
xs.reduce(compose(filter(isEven), map(inc))(build), []);
// native loop for comparison
const nativeLoop = data => {
const result = [];
const l = data.length;
for (let i = 0; i < l; i++) {
const x = data[i];
if (isEven(x)) result.push(inc(x));
}
return result;
};
// input: 1000 copies of 1 — all odd, so every variant returns an empty array
const data = Array(1000).fill(1);
// [name, fn] pairs in the shape Benchmark.Suite#add expects
const base = ["simplest, chained map and filter", () => mapFilter(data)];
const alternative = ["composed transducers", () => transducers(data)];
const alternative2 = ["native loop", () => nativeLoop(data)];
/* console.log(Benchmark) */
console.log("Running benchmarks....");
// Benchmark is a global provided by benchmark.js (loaded via the <script> tags below)
const suite = new Benchmark.Suite();
suite
.add(...base)
.add(...alternative)
.add(...alternative2)
.on("cycle", function(event) {
// prints one line of timing stats per finished benchmark
console.log(String(event.target));
})
.on("complete", function() {
console.log("Fastest is " + this.filter("fastest").map("name").join(", "));
})
// run async
.run({ async: true });
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/benchmark/2.1.4/benchmark.min.js"></script>
我预期的性能排序是
原生循环 > transducer > 链式 map/filter
然而,除了原生循环远快于其他方法之外,令我非常惊讶的是:reduce/transduce 方法竟然比创建中间数组的链式 map/filter 慢得多(在 Chrome 中大约慢一个数量级)。有人能解释一下造成这种结果的原因吗?
您的基准测试有误,因为您在每次运行时都重新构建了一个新的 transducer 链。
// --- element-level helpers shared by every variant below ---
const inc = x => x + 1;
const isEven = x => x % 2 === 0;
// simplest, shortest way I would be comfortable with if performance wasn't an issue
// (allocates an intermediate array between filter and map)
const mapFilter = xs => xs.filter(isEven).map(inc);
// transducers way
// function composition
const compose = (...fns) => x => fns.reduceRight((y, f) => f(y), x);
// map/filter as transducers: each takes a reducer ("step") and returns a new reducer
const map = f => step => (a, c) => step(a, f(c));
const filter = p => step => (a, c) => (p(c) ? step(a, c) : a);
// basic reducer for building array
const build = (acc, x) => {
acc.push(x);
return acc;
};
// transducer, it doesn't create intermediate arrays hence should theoretically be faster
// KEY DIFFERENCE from the question's version: the composed reducer is built
// ONCE here, outside the benchmarked function, instead of on every call
const reducer = compose(filter(isEven), map(inc))(build);
const transducers = xs => xs.reduce(reducer, []);
// native loop for comparison
const nativeLoop = data => {
const result = [];
const l = data.length;
for (let i = 0; i < l; i++) {
const x = data[i];
if (isEven(x)) result.push(inc(x));
}
return result;
};
// input: 1000 copies of 1 — all odd, so every variant returns an empty array
const data = Array(1000).fill(1);
// [name, fn] pairs in the shape Benchmark.Suite#add expects
const base = ["simplest, chained map and filter", () => mapFilter(data)];
const alternative = ["composed transducers", () => transducers(data)];
const alternative2 = ["native loop", () => nativeLoop(data)];
/* console.log(Benchmark) */
console.log("Running benchmarks....");
// Benchmark is a global provided by benchmark.js (loaded via the <script> tags below)
const suite = new Benchmark.Suite();
suite
.add(...base)
.add(...alternative)
.add(...alternative2)
.on("cycle", function(event) {
// prints one line of timing stats per finished benchmark
console.log(String(event.target));
})
.on("complete", function() {
console.log("Fastest is " + this.filter("fastest").map("name").join(", "));
})
// run async
.run({ async: true });
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/benchmark/2.1.4/benchmark.min.js"></script>
如您所见,transducer 确实比链式 map
和 filter
方法更快。
基准测试有缺陷。 reducer 不需要做任何工作。
- 创建一个全部为 1(即全是奇数)的统一数组。
- 然后对每个元素运行 isEven 函数
- 结果总是返回一个空数组
我们实际上是在对"返回空数组"的性能进行基准测试。
如果我们用真实数据预填充数组,原生方法将获胜。不过 Aadit 说得对:在两种 transducer 实现中,他的实现更快。
// Pre-fill the benchmark input with 1000 random digits (0-9) so isEven
// gets a realistic mix of accepted and rejected elements.
const data = Array.from({ length: 1000 }, () => Math.floor(Math.random() * 10));