putObject() 在大于 ~1MB 的文件上超时
putObject() times out on files larger than ~1MB
尝试使用 putObject()
函数将 CSV 文件上传到我的 S3 存储桶时,如果文件大于大约 1 MB,它将超时。我能够成功上传的最大 CSV 文件是 1048 KB(25500 行),我尝试上传它的 6 次中只有 4 次成功。任何小于 1 MB 的文件都可以非常快速地上传而不会失败。一旦达到 ~1 MB 阈值,什么会导致函数挂起并超时?
相关代码如下:
// Question code: uploads the local file `fileName` to S3 via putObject().
// NOTE(review): putObject() issues a single PUT; with a stream Body the
// SDK cannot buffer/retry parts, which is presumably why uploads near
// ~1MB hit the 120000ms socket timeout reported below — confirm.
function writeToS3(){
// `bucketName`, `fileName`, `s3`, `util` and `fs` come from the enclosing scope.
var params = {
Bucket: bucketName,
Key: fileName,
// Streams the file from disk; the same name is reused as the object key.
Body: fs.createReadStream(fileName)
}
s3.putObject(params, function(err, data){
if(err){
// Logs, then rethrows — the error escapes the async callback and
// crashes the process (see the stack trace in the question).
console.log('Error uploading data to S3 bucket: ' + err);
throw err;
} else{
console.log("Data uploaded to S3 bucket: ", util.inspect(data));
}
});
}
错误信息如下:
Error uploading data to S3 bucket: TimeoutError: Connection timed out after 120000ms
C:\Users6782\documents\workspace-sts\lf-rti-file-copier-sql\node_modules\aws-sdk-proxy\node_modules\aws-sdk\lib\request.js:31
throw err;
^
TimeoutError: Connection timed out after 120000ms
at ClientRequest.<anonymous> (C:\Users6782\documents\workspace-sts\lf-rti-file-copier-sql\node_modules\aws-sdk-proxy\node_modules\aws-sdk\lib\http\node.js:83:34)
at ClientRequest.g (events.js:292:16)
at emitNone (events.js:86:13)
at ClientRequest.emit (events.js:185:7)
at Socket.emitTimeout (_http_client.js:630:10)
at Socket.g (events.js:292:16)
at emitNone (events.js:86:13)
at Socket.emit (events.js:185:7)
at Socket._onTimeout (net.js:338:8)
at ontimeout (timers.js:386:11)
at tryOnTimeout (timers.js:250:5)
at Timer.listOnTimeout (timers.js:214:5)
npm ERR! Windows_NT 10.0.14393
npm ERR! argv "C:\Program Files\nodejs\node.exe" "C:\Program Files\nodejs\node_modules\npm\bin\npm-cli.js" "run" "test" "65056"
npm ERR! node v6.11.4
npm ERR! npm v3.10.10
npm ERR! code ELIFECYCLE
npm ERR! lf-rti-file-copier@1.0.0 test: `serverless invoke local -f dataRefresh -s dev -d "65056"`
npm ERR! Exit status 1
npm ERR!
npm ERR! Failed at the lf-rti-file-copier@1.0.0 test script 'serverless invoke local -f dataRefresh -s dev -d "65056"'.
npm ERR! Make sure you have the latest version of node.js and npm installed.
npm ERR! If you do, this is most likely a problem with the lf-rti-file-copier package,
npm ERR! not with npm itself.
npm ERR! Tell the author that this fails on your system:
npm ERR! serverless invoke local -f dataRefresh -s dev -d "65056"
npm ERR! You can get information on how to open an issue for this project with:
npm ERR! npm bugs lf-rti-file-copier
npm ERR! Or if that isn't available, you can get their info via:
npm ERR! npm owner ls lf-rti-file-copier
npm ERR! There is likely additional logging output above.
我遇到过类似的问题,建议你改用 upload 方法。
https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
Uploads an arbitrarily sized buffer, blob, or stream, using
intelligent concurrent handling of parts if the payload is large
enough.
这是一个如何转换您的代码的示例:
// Uploads the local file `fileName` to S3 via s3.upload(), which
// (unlike putObject) splits large payloads into concurrent multipart
// uploads and retries individual parts.
function writeToS3() {
  // Read with NO encoding so fs.readFile yields a raw Buffer.
  // Reading as 'utf8' and re-encoding with Buffer.from(contents, 'binary')
  // (latin1) would corrupt any non-ASCII or binary bytes in the file.
  fs.readFile(fileName, function (err, contents) {
    if (err) {
      console.log('Error during reading file: ' + err);
      throw err;
    } else {
      var params = {
        Bucket: bucketName,
        Key: fileName,
        Body: contents // a Buffer is accepted directly as the Body
      };
      s3.upload(params, function (err, data) {
        if (err) {
          console.log('Error uploading data to S3 bucket: ' + err);
          throw err;
        } else {
          console.log("Data uploaded to S3 bucket: ", util.inspect(data));
        }
      });
    }
  });
}
更新:
另外,请看一下 Bluebird 库的 promisify 方法,
它可以帮助你把所有回调风格的方法转换成 Promise,使代码更具可读性。
更改超时
Lambda 控制面板中有一个地方可以更改 Timeout
下面是该设置的屏幕截图,请将超时(Timeout)改为 5 分钟。
尝试使用 putObject()
函数将 CSV 文件上传到我的 S3 存储桶时,如果文件大于大约 1 MB,它将超时。我能够成功上传的最大 CSV 文件是 1048 KB(25500 行),我尝试上传它的 6 次中只有 4 次成功。任何小于 1 MB 的文件都可以非常快速地上传而不会失败。一旦达到 ~1 MB 阈值,什么会导致函数挂起并超时?
相关代码如下:
// Question code: uploads the local file `fileName` to S3 via putObject().
// NOTE(review): putObject() issues a single PUT; with a stream Body the
// SDK cannot buffer/retry parts, which is presumably why uploads near
// ~1MB hit the 120000ms socket timeout reported below — confirm.
function writeToS3(){
// `bucketName`, `fileName`, `s3`, `util` and `fs` come from the enclosing scope.
var params = {
Bucket: bucketName,
Key: fileName,
// Streams the file from disk; the same name is reused as the object key.
Body: fs.createReadStream(fileName)
}
s3.putObject(params, function(err, data){
if(err){
// Logs, then rethrows — the error escapes the async callback and
// crashes the process (see the stack trace in the question).
console.log('Error uploading data to S3 bucket: ' + err);
throw err;
} else{
console.log("Data uploaded to S3 bucket: ", util.inspect(data));
}
});
}
错误信息如下:
Error uploading data to S3 bucket: TimeoutError: Connection timed out after 120000ms
C:\Users6782\documents\workspace-sts\lf-rti-file-copier-sql\node_modules\aws-sdk-proxy\node_modules\aws-sdk\lib\request.js:31
throw err;
^
TimeoutError: Connection timed out after 120000ms
at ClientRequest.<anonymous> (C:\Users6782\documents\workspace-sts\lf-rti-file-copier-sql\node_modules\aws-sdk-proxy\node_modules\aws-sdk\lib\http\node.js:83:34)
at ClientRequest.g (events.js:292:16)
at emitNone (events.js:86:13)
at ClientRequest.emit (events.js:185:7)
at Socket.emitTimeout (_http_client.js:630:10)
at Socket.g (events.js:292:16)
at emitNone (events.js:86:13)
at Socket.emit (events.js:185:7)
at Socket._onTimeout (net.js:338:8)
at ontimeout (timers.js:386:11)
at tryOnTimeout (timers.js:250:5)
at Timer.listOnTimeout (timers.js:214:5)
npm ERR! Windows_NT 10.0.14393
npm ERR! argv "C:\Program Files\nodejs\node.exe" "C:\Program Files\nodejs\node_modules\npm\bin\npm-cli.js" "run" "test" "65056"
npm ERR! node v6.11.4
npm ERR! npm v3.10.10
npm ERR! code ELIFECYCLE
npm ERR! lf-rti-file-copier@1.0.0 test: `serverless invoke local -f dataRefresh -s dev -d "65056"`
npm ERR! Exit status 1
npm ERR!
npm ERR! Failed at the lf-rti-file-copier@1.0.0 test script 'serverless invoke local -f dataRefresh -s dev -d "65056"'.
npm ERR! Make sure you have the latest version of node.js and npm installed.
npm ERR! If you do, this is most likely a problem with the lf-rti-file-copier package,
npm ERR! not with npm itself.
npm ERR! Tell the author that this fails on your system:
npm ERR! serverless invoke local -f dataRefresh -s dev -d "65056"
npm ERR! You can get information on how to open an issue for this project with:
npm ERR! npm bugs lf-rti-file-copier
npm ERR! Or if that isn't available, you can get their info via:
npm ERR! npm owner ls lf-rti-file-copier
npm ERR! There is likely additional logging output above.
我遇到过类似的问题,建议你改用 upload 方法。
https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
Uploads an arbitrarily sized buffer, blob, or stream, using intelligent concurrent handling of parts if the payload is large enough.
这是一个如何转换您的代码的示例:
// Uploads the local file `fileName` to S3 via s3.upload(), which
// (unlike putObject) splits large payloads into concurrent multipart
// uploads and retries individual parts.
function writeToS3() {
  // Read with NO encoding so fs.readFile yields a raw Buffer.
  // Reading as 'utf8' and re-encoding with Buffer.from(contents, 'binary')
  // (latin1) would corrupt any non-ASCII or binary bytes in the file.
  fs.readFile(fileName, function (err, contents) {
    if (err) {
      console.log('Error during reading file: ' + err);
      throw err;
    } else {
      var params = {
        Bucket: bucketName,
        Key: fileName,
        Body: contents // a Buffer is accepted directly as the Body
      };
      s3.upload(params, function (err, data) {
        if (err) {
          console.log('Error uploading data to S3 bucket: ' + err);
          throw err;
        } else {
          console.log("Data uploaded to S3 bucket: ", util.inspect(data));
        }
      });
    }
  });
}
更新:
另外,请看一下 Bluebird 库的 promisify 方法,
它可以帮助你把所有回调风格的方法转换成 Promise,使代码更具可读性。
更改超时
Lambda 控制面板中有一个地方可以更改 Timeout
下面是该设置的屏幕截图,请将超时(Timeout)改为 5 分钟。