Why doesn't JavaScript "await" wait for the correct amount of time?
This code does not behave as expected:
function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

function diff(expected) {
    let x = expected - Date.now();
    if (x > 0) {
        return `earlier ${x} ms`;
    } else if (x < 0) {
        return `late ${-x} ms`;
    } else {
        return `just in time`;
    }
}

start = Date.now();

async function demo() {
    let loop_count = 5;
    for (let i = 0; i < loop_count; i++) {
        console.log(diff(start + i * 1000) + `: Waited ${i} seconds...`);
        await sleep(i * 1000);
    }
    console.log(diff(start + loop_count * 1000) + ': Done');
}

demo();
The output is:
$ node test.js
just in time: Waited 0 seconds...
earlier 993 ms: Waited 1 seconds...
earlier 993 ms: Waited 2 seconds...
late 10 ms: Waited 3 seconds...
late 2011 ms: Waited 4 seconds...
late 5013 ms: Done
Where does this difference come from?
Because you are measuring it wrong. First, you execute console.log(diff(start + i * 1000) + `: Waited ${i} seconds...`); before await sleep(i * 1000);. That measurement has to happen after the sleep (the updated code below does this).
The other problem is that you never change the start value: you always use the same start as the reference point for every iteration.
So for i > 1 you get wrong results: you have already waited 1 second, so at i = 2 the total time elapsed since start is (1 + 2) seconds, at i = 3 it is (1 + 2 + 3) seconds, and so on. This is also where the numbers in your output come from: when "Waited 4 seconds..." is printed, the loop has actually slept 0 + 1 + 2 + 3 = 6 seconds while diff expects 4, hence roughly 2 seconds late.
Updating the code so that the seconds slept in previous iterations are also included in the calculation gives the expected result:
function sleep(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}

function diff(expected) {
    let x = expected - Date.now();
    if (x > 0) {
        return `earlier ${x} ms`;
    } else if (x < 0) {
        return `late ${-x} ms`;
    } else {
        return `just in time`;
    }
}

// Sum of 0 + 1 + ... + num: the total seconds slept up to and including iteration num.
function sumUp(num) {
    let res = 0;
    for (let i = 0; i <= num; i++) {
        res += i;
    }
    return res;
}

const start = Date.now();

async function demo() {
    let loop_count = 5;
    for (let i = 0; i < loop_count; i++) {
        await sleep(i * 1000);
        console.log(diff(start + sumUp(i) * 1000) + `: Waited ${i} seconds...`);
    }
    // The last iteration is i = loop_count - 1, so that is the total slept.
    console.log(diff(start + sumUp(loop_count - 1) * 1000) + ': Done');
}

demo();
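As a side note (an observation, not part of the fix itself), sumUp(num) is just the num-th triangular number, so the helper could also be written in closed form:

// Equivalent closed form: 0 + 1 + ... + num = num * (num + 1) / 2
function sumUp(num) {
    return num * (num + 1) / 2;
}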
The result will always be late, because setTimeout waits at least the given time. With the way you were measuring, the timer overshoot of every call piled up.
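If you need each iteration to stay aligned with a wall-clock target despite that drift, one common pattern is to sleep until an absolute deadline instead of for a relative duration. A minimal sketch (the sleepUntil helper is an illustration, not part of the code above):

// Sketch: resolve at an absolute timestamp; clamp the delay to 0 if the deadline has already passed.
function sleepUntil(deadline) {
    return new Promise(resolve => setTimeout(resolve, Math.max(0, deadline - Date.now())));
}

async function demo() {
    const start = Date.now();
    for (let i = 1; i <= 5; i++) {
        // Each deadline is computed from start, so setTimeout's
        // per-call overshoot does not accumulate across iterations.
        await sleepUntil(start + i * 1000);
        console.log(`tick ${i}: ${Date.now() - start} ms since start`);
    }
}

demo();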