gulp-s3-upload
Comparing version 1.4.2 to 1.4.3
# Changelog
## Version 1.4.3
* Fix [issue #26](http://github.com/clineamb/gulp-s3-upload/issues/23)
* Move things into the `s3.headObject` call to prevent mutable variable errors.
* Clean up some comments.
* Update the Readme to clarify the `config` hash in the config section.
## Version 1.4.2
@@ -4,0 +11,0 @@
index.js
@@ -28,3 +28,3 @@ var es = require('event-stream')
// *NEW* in v1.1.0 - Async File Uploading
// Async File Uploading
@@ -108,2 +108,4 @@ stream = es.map(function (file, callback) {
// *Note: `options.Metadata` is not filtered out later.
// === manualContentEncoding ===========================
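The note about `options.Metadata` and the `manualContentEncoding` section header above refer to two plugin options; a hedged sketch of how they might be set (option names follow the plugin's Readme, the bucket name and values are invented for illustration):

```js
// Illustrative only: metadataMap and manualContentEncoding passed as
// plugin options (names per the plugin's Readme; values are made up).
var s3Options = {
    Bucket: 'my-example-bucket',        // made-up bucket name
    metadataMap: {                      // copied onto options.Metadata
        'uploaded-by': 'gulp'
    },
    manualContentEncoding: function (keyname) {
        // Return a ContentEncoding per key, e.g. for pre-gzipped assets;
        // returning null leaves ContentEncoding unset (cf. the isNull check below).
        return /\.gz$/.test(keyname) ? 'gzip' : null;
    }
};
```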
@@ -121,38 +123,4 @@ // Similar to metadataMap to put global / individual
// === maps.ParamNames =================================
// This is a new mapper object that, if given in the
// options as `maps.ParamName`, and is a function, will
// run the given function and map that param data, given
// that the return value of the `maps.ParamName` function
// returns the appropriate type for that given putObject Param
// { Bucket: ... maps: { 'CacheControl': function()..., 'Expires': function()... }, etc. }
// See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
// *** NEW in 1.3 ***
if(!_.isUndefined(options.maps)) {
    _.each(options.maps, function(mapRoutine, ParamName) {
        if(_.isFunction(mapRoutine)) {
            options[ParamName] = mapRoutine(keyname);
        }
    });
}
// === ETag Hash Comparison =============================
// *NEW* in 1.1; do a local hash comparison to reduce
// the overhead from calling upload anyway.
// Add the option for a different algorithm, JIC for
// some reason the algorithm is not MD5.
// Available algorithms are those available w/ the default
// node `crypto` module. (run `crypto.getHashes()`)
if(!options.etag_hash) {
    // If not defined, default to md5
    options.etag_hash = 'md5';
}
hash = hasha(file._contents, {'algorithm': options.etag_hash});
// *Note: `options.Metadata` is not filtered out later.
// Check the file that's up in the bucket already
_s3.headObject({
@@ -165,2 +133,17 @@ 'Bucket': the_bucket,
// === ETag Hash Comparison =============================
// Do a local hash comparison to reduce
// the overhead from calling upload anyway.
// Add the option for a different algorithm, JIC for
// some reason the algorithm is not MD5.
// Available algorithms are those available w/ the default
// node `crypto` module. (run `crypto.getHashes()`)
if(!options.etag_hash) {
    // If not defined, default to md5
    options.etag_hash = 'md5';
}
hash = hasha(file._contents, {'algorithm': options.etag_hash});
if(head_err && head_err.statusCode !== 404) {
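The ETag comments above describe skipping the upload when a locally computed hash matches the object already in the bucket. A rough sketch of that comparison, not the plugin's exact code (`head_data` and `hash` are the names used in the surrounding hunk; S3 reports the ETag wrapped in double quotes):

```js
// Sketch: strip the quotes S3 puts around the ETag, then compare it
// with the hash computed above via hasha.
var remote_etag = (head_data && head_data.ETag) ?
        head_data.ETag.replace(/"/g, '') : null;

if (remote_etag === hash) {
    // Contents are unchanged; skip the putObject call for this file.
    return callback(null, file);
}
// Otherwise fall through and build objOpts for the upload.
```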
@@ -194,3 +177,3 @@ return callback(new gutil.PluginError(PLUGIN_NAME, "S3 headObject Error: " + head_err.stack));
if(!_.isNull(metadata)) {
if(!_.isNull(content_encoding)) {
    // existing objOpts.ContentEncoding gets overwritten
@@ -200,2 +183,21 @@ objOpts.ContentEncoding = content_encoding;
// === maps.ParamNames =================================
// This is a new mapper object that, if given in the
// options as `maps.ParamName`, and is a function, will
// run the given function and map that param data, given
// that the return value of the `maps.ParamName` function
// returns the appropriate type for that given putObject Param
// { Bucket: ... maps: { 'CacheControl': function()..., 'Expires': function()... }, etc. }
// See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
// This will end up overwriting old Metadata and ContentEncoding
// if they were included in the maps hash.
if(!_.isUndefined(options.maps)) {
    _.each(options.maps, function(mapRoutine, ParamName) {
        if(_.isFunction(mapRoutine)) {
            objOpts[ParamName] = mapRoutine(keyname);
        }
    });
}
if (options.uploadNewFilesOnly && !head_data || !options.uploadNewFilesOnly) {
@@ -202,0 +204,0 @@
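Taken together, the `maps`, `etag_hash`, and `uploadNewFilesOnly` pieces above correspond to gulpfile options roughly like the sketch below (the bucket name, glob, and per-key logic are invented for illustration):

```js
var gulp = require('gulp');
var s3   = require('gulp-s3-upload')(/* AWS config hash, see the Readme */);

gulp.task('upload', function () {
    return gulp.src('./dist/**')
        .pipe(s3({
            Bucket: 'my-example-bucket',   // made-up bucket name
            ACL: 'public-read',
            // Each maps.<ParamName> function receives the keyname; its
            // return value is assigned to that putObject param.
            maps: {
                CacheControl: function (keyname) {
                    return /\.html$/.test(keyname)
                        ? 'max-age=60'
                        : 'max-age=31536000';
                },
                Expires: function (keyname) {
                    return new Date(Date.now() + 24 * 60 * 60 * 1000);
                }
            },
            etag_hash: 'md5',            // default; any `crypto` hash name works
            uploadNewFilesOnly: true     // only upload keys not already in the bucket
        }));
});
```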
package.json
{
    "name": "gulp-s3-upload",
    "version": "1.4.2",
    "version": "1.4.3",
    "description": "A gulp task to asynchronously upload/update assets to an AWS S3 Bucket.",
@@ -5,0 +5,0 @@ "main": "index.js",
README.md
# gulp-s3-upload
__Version 1.4.2__
__Version 1.4.3__
@@ -26,5 +26,19 @@ Use for uploading assets to Amazon S3 servers.
...where config is something like...
```js
var config = {
    accessKeyId: "YOURACCESSKEY",
    secretAccessKey: "YOURACCESSSECRET"
}
// ...or...
var config = JSON.parse(fs.readFileSync('private/awsaccess.json'));
```
The optional `config` argument can include any option (like `region`) available in the [AWS Config Constructor](http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Config.html#constructor-property). By default all settings are undefined.
Per AWS best practices, the recommended approach for loading credentials is to use the shared credentials file (`~/.aws/credentials`). You can also set the `aws_access_key_id` and `aws_secret_access_key` environment variables or specify values directly in the gulpfile via the `accessKeyId` and `secretAccessKey` options. If you have multiple profiles configured in your AWS credentials file, you can specify the profile name inline with the call to gulp.
**Per AWS best practices**, the recommended approach for loading credentials is to use the shared credentials file (`~/.aws/credentials`). You can also set the `aws_access_key_id` and `aws_secret_access_key` environment variables or specify values directly in the gulpfile via the `accessKeyId` and `secretAccessKey` options. If you have multiple profiles configured in your AWS credentials file, you can specify the profile name inline with the call to gulp.
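As a quick illustration of the paragraph above (the region value is arbitrary; the require-with-config wiring follows the plugin's usage section):

```js
// The config hash accepts any AWS.Config constructor option, e.g. region.
var config = {
    accessKeyId: "YOURACCESSKEY",
    secretAccessKey: "YOURACCESSSECRET",
    region: "us-east-1"                  // arbitrary example region
};

// The module exports a function that takes the config and returns the
// actual gulp plugin.
var s3 = require('gulp-s3-upload')(config);
```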
@@ -31,0 +45,0 @@ ```sh