Uploading Assets to s3 using Gulp
I am currently trying to upload my website's assets to Amazon S3 using the aws-sdk and gulp, but so far I have only managed to upload a single file, with the following code:
gulp.task('publish', function() {
    var AWS = require('aws-sdk'),
        fs = require('fs');

    AWS.config.accessKeyId = 'access_id';
    AWS.config.secretAccessKey = 'secret_key';
    AWS.config.region = 'eu-central-1';

    var fileStream = fs.createReadStream('folder/filename');

    fileStream.on('error', function (err) {
        if (err) { throw err; }
    });

    fileStream.on('open', function () {
        var s3 = new AWS.S3();
        s3.putObject({
            Bucket: 'bucket_name',
            Key: 'assets/filename',
            Body: fileStream,
            ACL: 'public-read'
        }, function (err) {
            if (err) { throw err; }
            else { console.log("Upload successful"); }
        });
    });
});
Since I am neither a Node.js nor a JS developer, I have no idea how to upload all of my assets into the assets folder of my S3 bucket.
Ideally, it would be neat to apply the operation I used for a single file to every file. How can this be done?
Found the solution to my problem. With this code, I finally managed to upload all of my assets to my bucket with the proper ACL. Hope this saves others from spending as much time as I did on such a silly problem.
/*
 * Dependencies
 */
var gulp = require('gulp');
var AWS = require('aws-sdk');
var fs = require('fs');
var walk = require('walk');

/*
 * Declaration of global variables
 */
var isPaused = false;

/*
 * Bucket access information
 */
AWS.config.accessKeyId = 'access_keyid';
AWS.config.secretAccessKey = 'secret_access_key';
AWS.config.region = 'region';

/*
 * Publishing function: uses a stream to push the files to the AWS bucket
 */
function publishit(filename) {
    var file = filename.substring('./'.length);
    var key = file.substring('src/'.length);
    var fileStream = fs.createReadStream(file);
    isPaused = true;

    // Check if there is an error on the file
    fileStream.on('error', function (err) {
        if (err) { throw err; }
    });

    // Action to take once the file is opened
    fileStream.on('open', function () {
        var s3 = new AWS.S3();
        // Upload the stream to the bucket
        s3.putObject({
            Bucket: 'bucket_name',
            Key: key,
            Body: fileStream,
            ACL: 'public-read'
        }, function (err) {
            // Show the error if there is any
            if (err) { throw err; }
            // If everything went successfully, print which file is being uploaded
            else { console.log("Uploading asset " + file); }
            // Close the stream to avoid leaks and socket timeouts
            fileStream.close();
            // Set 'isPaused' back to false to continue uploading the other assets
            isPaused = false;
        });
    });
}

gulp.task('assets', function() {
    var files = [];
    // Walker options (first arg is the folder you want to upload)
    var walker = walk.walk('./assets', { followLinks: false });

    walker.on('file', function(root, stat, next) {
        // Add this file to the list of files
        files.push(root + '/' + stat.name);
        next();
    });

    // Action after every file has been added to 'files'
    walker.on('end', function() {
        for (var filename in files) {
            // Publish every file added to 'files'
            publishit(files[filename]);
            // Wait for one push to the server to finish before calling the next one
            function waitForIt() {
                if (isPaused) {
                    setTimeout(function() { waitForIt(); }, 100);
                }
            }
        }
    });
});
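A note on the waiting logic: waitForIt is never actually invoked, and setTimeout would not block the loop anyway, so the uploads above simply run concurrently (which still works for a reasonable number of files). Below is a minimal alternative sketch that chains the uploads through the SDK callback, so each file finishes before the next one starts. The bucket name, region, and the ./assets folder are placeholders taken from the snippets above, and the key is simply the path relative to the project root; adjust both to your own layout.

var gulp = require('gulp');
var AWS = require('aws-sdk');
var fs = require('fs');
var path = require('path');

// Credentials and region: either set them on AWS.config as above,
// or let the SDK pick them up from the environment / shared config.
AWS.config.region = 'eu-central-1';
var s3 = new AWS.S3();

// Recursively collect every file path under a directory
function listFiles(dir) {
    return fs.readdirSync(dir).reduce(function (acc, name) {
        var full = path.join(dir, name);
        return acc.concat(fs.statSync(full).isDirectory() ? listFiles(full) : [full]);
    }, []);
}

// Upload the files one after another by recursing in the upload callback
function uploadAll(files, done) {
    if (files.length === 0) { return done(); }
    var file = files[0];
    s3.upload({
        Bucket: 'bucket_name',                              // placeholder
        Key: path.relative('.', file).replace(/\\/g, '/'),  // e.g. assets/img/logo.png
        Body: fs.createReadStream(file),
        ACL: 'public-read'
    }, function (err) {
        if (err) { return done(err); }
        console.log('Uploaded ' + file);
        uploadAll(files.slice(1), done);
    });
}

gulp.task('assets-sequential', function (done) {
    uploadAll(listFiles('./assets'), done);
});

Unlike putObject, s3.upload (the SDK's managed uploader) accepts a stream of unknown length, which makes it a good fit for this kind of task. For a pure gulp workflow there are also plugins such as gulp-awspublish that wrap this pattern in a stream.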