This plugin will upload all built assets to S3
$ npm i webpack-s3-plugin
Note: This plugin requires Node.js > 0.12.0
I notice a lot of people are setting the directory option when the files are part of their build. Please don't set directory if you're uploading your build: with directory set, the plugin reads the files from disk after compilation instead of taking them from the build output.
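If you do have files that are not produced by the compilation (say, a folder of static assets), directory is the right fit. A minimal sketch, assuming a hypothetical public/ folder:

var S3Plugin = require('webpack-s3-plugin')

var config = {
  plugins: [
    new S3Plugin({
      // Upload a pre-existing folder from disk instead of the build output
      directory: 'public',
      s3Options: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
      },
      s3UploadOptions: {
        Bucket: 'MyBucket'
      }
    })
  ]
}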
You can also use a credentials file from AWS.
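Since the aws-sdk reads the shared credentials file (~/.aws/credentials) automatically when no keys are supplied, a sketch like the following should work; this assumes the credentials file is already populated:

// Sketch: credentials come from ~/.aws/credentials via the aws-sdk,
// so no accessKeyId/secretAccessKey are passed here
new S3Plugin({
  s3Options: {
    region: 'us-west-1'
  },
  s3UploadOptions: {
    Bucket: 'MyBucket'
  }
})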
var S3Plugin = require('webpack-s3-plugin')

var config = {
  plugins: [
    new S3Plugin({
      // Exclude uploading of html
      exclude: /.*\.html$/,
      // s3Options are required
      s3Options: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
        region: 'us-west-1'
      },
      s3UploadOptions: {
        Bucket: 'MyBucket'
      },
      cdnizerOptions: {
        defaultCDNBase: 'http://asdf.ca'
      }
    })
  ]
}
var config = {
  plugins: [
    new S3Plugin({
      // Only upload css and js (anchored so e.g. .json doesn't match)
      include: /\.(css|js)$/,
      // s3Options are required
      s3Options: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
      },
      s3UploadOptions: {
        Bucket: 'MyBucket'
      }
    })
  ]
}
import gitsha from 'gitsha'

var addSha = function() {
  return new Promise(function(resolve, reject) {
    gitsha(__dirname, function(error, output) {
      if (error)
        reject(error)
      else
        // Resolve to the first 5 characters of the sha
        resolve(output.slice(0, 5))
    })
  })
}

var config = {
  plugins: [
    new S3Plugin({
      s3Options: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
      },
      s3UploadOptions: {
        Bucket: 'MyBucket'
      },
      basePathTransform: addSha
    })
  ]
}

// Will output to /${mySha}/${fileName}
var config = {
  plugins: [
    new S3Plugin({
      s3Options: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
      },
      s3UploadOptions: {
        Bucket: 'MyBucket'
      },
      cloudfrontInvalidateOptions: {
        DistributionId: process.env.CLOUDFRONT_DISTRIBUTION_ID,
        Items: ["/*"]
      }
    })
  ]
}
var config = {
  plugins: [
    new S3Plugin({
      s3Options: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
      },
      s3UploadOptions: {
        Bucket: 'MyBucket',
        // Upload options can be functions of the file name
        ContentEncoding(fileName) {
          if (/\.gz/.test(fileName))
            return 'gzip'
        },
        ContentType(fileName) {
          if (/\.js/.test(fileName))
            return 'application/javascript'
          else
            return 'text/plain'
        }
      }
    })
  ]
}
- exclude: Regex to match for excluded content
- include: Regex to match for included content
- s3Options: Provide keys for the extension of s3Config (credentials, region, etc.)
- s3UploadOptions: Provide upload options for putObject
- basePath: Provide the namespace for uploaded files on S3
- directory: Provide a directory to upload (if not supplied, js/css from the compilation will be uploaded)
- htmlFiles: Html files to cdnize (defaults to all in the output directory)
- cdnizerCss: Config for css cdnizer (check below)
- noCdnizer: Disable cdnizer (defaults to true if no cdnizerOptions are passed)
- cdnizerOptions: Options to pass to cdnizer
- basePathTransform: Transform the base path to add a folder name; can return a promise or a string (see the sketch after this list)
- progress: Enable progress bar (defaults to true)
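As a quick illustration of the two basePathTransform forms mentioned above, here is a minimal sketch (the 'v2' folder name is made up):

// Returning a plain string...
basePathTransform: function() {
  return 'v2'
}

// ...or a Promise that resolves to one
basePathTransform: function() {
  return Promise.resolve('v2')
}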
All contributions are welcome. Please make a pull request and make sure things still pass after running npm run test
For tests you will need to either have the environment variables set or set up a .env file. There's a .env.sample, so you can cp .env.sample .env and fill it in. Make sure to add any new environment variables.
WARNING: The test suite generates random files for certain checks. Ensure you delete any leftover files from your bucket.
npm run test - Run the test suite (you must have the .env file set up)
npm run build - Run the build
- Thanks to @Omer for fixing credentials from ~/.aws/credentials
- Thanks to @lostjimmy for pointing out path.sep for Windows compatibility