Socket
Socket
Sign inDemoInstall

grunt-s3

Package Overview
Dependencies
Maintainers
1
Versions
14
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

grunt-s3 - npm Package Compare versions

Comparing version 0.2.0-alpha.2 to 0.2.0-alpha.3

test/common.js

18

Gruntfile.js

@@ -8,4 +8,5 @@ var path = require('path');

nodeunit: {
all: ['test/upload.js', 'test/download.js', 'test/s3Task.js']
all: ['test/upload.js', 'test/download.js', 'test/delete.js', 'test/s3Task.js', 'test/sync.js']
},
clean: [ 's3/'],
s3: {

@@ -19,3 +20,4 @@ options: {

secure: false,
access: 'public-read'
access: 'public-read',
style: 'path'
},

@@ -27,3 +29,3 @@ test: {

options: {
key: "custom"
key: 'custom'
}

@@ -34,3 +36,6 @@ },

src: path.join(process.cwd(), 'test', 'files', '**', '*.txt'),
rel: path.join(process.cwd(), 'test', 'files')
rel: path.join(process.cwd(), 'test', 'files'),
options: {
bucket: 'overridden'
}
}]

@@ -43,4 +48,7 @@ }

grunt.loadNpmTasks('grunt-contrib-nodeunit');
grunt.registerTask('test', ['jshint', 'nodeunit']);
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.registerTask('test', ['clean', 'jshint', 'nodeunit']);
grunt.loadTasks(__dirname + '/tasks');
};

@@ -1,5 +0,5 @@

{
{
"name": "grunt-s3",
"description": "A grunt task to automate moving files to/from Amazon S3.",
"version": "0.2.0-alpha.2",
"version": "0.2.0-alpha.3",
"author": "Aaron Forsander (https://github.com/pifantastic)",

@@ -23,3 +23,4 @@ "homepage": "https://github.com/pifantastic/grunt-s3",

"scripts": {
"test": "grunt test"
"test": "grunt test",
"fakes3": "fakes3 -r s3 -p 1337"
},

@@ -45,4 +46,6 @@ "engines": {

"grunt-contrib-nodeunit": "~0.1.1",
"libyaml": "~0.2.1"
"libyaml": "~0.2.1",
"grunt-contrib-clean": "~0.5.0",
"rimraf": "~2.2.2"
}
}

@@ -37,3 +37,3 @@ [![Build Status](https://secure.travis-ci.org/pifantastic/grunt-s3.png?branch=master)](https://travis-ci.org/pifantastic/grunt-s3)

The following are the default options available to each target.
A quick reference of options

@@ -53,3 +53,4 @@ * **key** - (*string*) An Amazon S3 credentials key

* **upload** - (*array*) An array of objects, each object representing a file upload and containing a `src`
and a `dest`. Any of the above values may also be overriden.
and a `dest`. Any of the above values may also be overridden. Passing `rel:DIR` will cause the filenames to be
expanded so that wild cards are not passed to the source name.
* **download** - (*array*) An array of objects, each object representing a file download and containing a

@@ -59,2 +60,5 @@ `src` and a `dest`. Any of the above values may also be overriden.

the above values may also be overridden.
* **sync** - (*array*) An array of objects, each object containing a `src` and `dest`. Default behavior is to
only upload new files (that don't exist). Adding `verify:true` forces an MD5 hash and Modified time check prior
to overwriting the server files.
* **debug** - (*boolean*) If true, no transfers with S3 will occur, will print all actions for review by user

@@ -64,11 +68,21 @@

Template strings in grunt will allow you to easily include values from other files. The below example
demonstrates loading AWS settings from another file, where grunt-aws.json is just a JSON key:value file like package.json. (Special thanks to @nanek)
This is **important because you should never check in your S3 credentials to github! Load them from an external file that is outside of the repo.**
```javascript
grunt.initConfig({
aws: grunt.file.readJSON('~/grunt-aws.json'),
s3: {
options: {
key: 'YOUR KEY',
secret: 'YOUR SECRET',
bucket: 'my-bucket',
access: 'public-read'
key: '<%= aws.key %>',
secret: '<%= aws.secret %>',
bucket: '<%= aws.bucket %>',
access: 'public-read',
headers: {
// Two Year cache policy (1000 * 60 * 60 * 24 * 730)
"Cache-Control": "max-age=630720000, public",
"Expires": new Date(Date.now() + 63072000000).toUTCString()
}
},

@@ -86,3 +100,3 @@ dev: {

dest: 'documents/important.txt',
gzip: true
options: { gzip: true }
},

@@ -126,2 +140,24 @@ {

}
],
sync: [
{
// only upload this document if it does not exist already
src: 'important_document.txt',
dest: 'documents/important.txt',
options: { gzip: true }
},
{
// make sure this document is newer than the one on S3 and replace it
verify: true,
src: 'passwords.txt',
dest: 'documents/ignore.txt'
},
{
src: path.join(variable.to.release, "build/cdn/js/**/*.js"),
dest: "jsgz",
// make sure the wildcard paths are fully expanded in the dest
rel: path.join(variable.to.release, "build/cdn/js"),
options: { gzip: true }
}
]

@@ -151,23 +187,2 @@ }

#### Grunt template strings
(Special thanks to @nanek)
Template strings in grunt will allow you to easily include values from other files. The below example
demonstrates loading aws settings from another file.
```javascript
grunt.initConfig({
aws: grunt.file.readJSON('grunt-aws.json'),
s3: {
key: '<%= aws.key %>',
secret: '<%= aws.secret %>',
bucket: '<%= aws.bucket %>',
access: 'public-read'
}
}
```
Where grunt-aws.json is just a json key:value file like package.json.
#### Environment variables

@@ -174,0 +189,0 @@

@@ -9,20 +9,20 @@ /*jshint esnext:true*/

// Core.
const util = require('util');
const crypto = require('crypto');
const fs = require('fs');
const path = require('path');
const url = require('url');
const zlib = require('zlib');
var util = require('util');
var crypto = require('crypto');
var fs = require('fs');
var path = require('path');
var url = require('url');
var zlib = require('zlib');
// Npm.
const knox = require('knox');
const mime = require('mime');
const deferred = require('underscore.deferred');
var knox = require('knox');
var mime = require('mime');
var deferred = require('underscore.deferred');
var Tempfile = require('temporary/lib/file');
// Local
const common = require('./common');
var common = require('./common');
// Avoid warnings.
const existsSync = ('existsSync' in fs) ? fs.existsSync : path.existsSync;
var existsSync = ('existsSync' in fs) ? fs.existsSync : path.existsSync;

@@ -32,18 +32,22 @@ /**

*/
const MSG_UPLOAD_SUCCESS = '↗'.blue + ' Uploaded: %s (%s)';
const MSG_DOWNLOAD_SUCCESS = '↙'.yellow + ' Downloaded: %s (%s)';
const MSG_DELETE_SUCCESS = '✗'.red + ' Deleted: %s';
const MSG_COPY_SUCCESS = '→'.cyan + ' Copied: %s to %s';
var MSG_UPLOAD_SUCCESS = '↗'.blue + ' Uploaded: %s (%s)';
var MSG_DOWNLOAD_SUCCESS = '↙'.yellow + ' Downloaded: %s (%s)';
var MSG_DELETE_SUCCESS = '✗'.red + ' Deleted: %s';
var MSG_COPY_SUCCESS = '→'.cyan + ' Copied: %s to %s';
var MSG_SKIP_SUCCESS = '→'.cyan + ' File Exists, skipped: %s';
var MSG_SKIP_MATCHES = '→'.cyan + ' File Matches, skipped: %s';
var MSG_SKIP_OLDER = '→'.cyan + ' File is Old, skipped: %s';
const MSG_UPLOAD_DEBUG = '↗'.blue + ' Upload: ' + '%s'.grey + ' to ' + '%s:%s'.cyan;
const MSG_DOWNLOAD_DEBUG = '↙'.yellow + ' Download: ' + '%s:%s'.cyan + ' to ' + '%s'.grey;
const MSG_DELETE_DEBUG = '✗'.red + ' Delete: ' + '%s:%s'.cyan;
const MSG_COPY_DEBUG = '→'.cyan + ' Copy: ' + '%s'.cyan + ' to ' + '%s:%s'.cyan;
var MSG_UPLOAD_DEBUG = '↗'.blue + ' Upload: ' + '%s'.grey + ' to ' + '%s:%s'.cyan;
var MSG_DOWNLOAD_DEBUG = '↙'.yellow + ' Download: ' + '%s:%s'.cyan + ' to ' + '%s'.grey;
var MSG_DELETE_DEBUG = '✗'.red + ' Delete: ' + '%s:%s'.cyan;
var MSG_COPY_DEBUG = '→'.cyan + ' Copy: ' + '%s'.cyan + ' to ' + '%s:%s'.cyan;
var MSG_SKIP_DEBUG = '→'.cyan + ' Sync: ' + '%s:%s'.cyan;
const MSG_ERR_NOT_FOUND = '¯\\_(ツ)_/¯ File not found: %s';
const MSG_ERR_UPLOAD = 'Upload error: %s (%s)';
const MSG_ERR_DOWNLOAD = 'Download error: %s (%s)';
const MSG_ERR_DELETE = 'Delete error: %s (%s)';
const MSG_ERR_COPY = 'Copy error: %s to %s';
const MSG_ERR_CHECKSUM = '%s error: expected hash: %s but found %s for %s';
var MSG_ERR_NOT_FOUND = '¯\\_(ツ)_/¯ File not found: %s';
var MSG_ERR_UPLOAD = 'Upload error: %s (%s)';
var MSG_ERR_DOWNLOAD = 'Download error: %s (%s)';
var MSG_ERR_DELETE = 'Delete error: %s (%s)';
var MSG_ERR_COPY = 'Copy error: %s to %s';
var MSG_ERR_CHECKSUM = '%s error: expected hash: %s but found %s for %s';

@@ -71,3 +75,21 @@ exports.init = function (grunt) {

// Deep-clone the task options so later per-target tweaks never mutate the
// caller's (possibly shared) config object. Treats a missing argument as {}.
var makeOptions = exports.makeOptions = function(opts) {
  return _.clone(opts || {}, true);
};
/**
 * Create an S3 client from the given task options.
 *
 * Only the connection-related keys are forwarded to knox; everything else
 * (debug flags, upload lists, etc.) is ignored by the client.
 *
 * @param {Object} options Task options containing the S3 connection settings.
 * @returns {Object} A knox client instance.
 */
var makeClient = exports.makeClient = function(options) {
  var clientOptions = _.pick(options, [
    'region', 'endpoint', 'port', 'key', 'secret', 'access', 'bucket', 'secure', 'headers', 'style'
  ]);
  return knox.createClient(clientOptions);
};
/**
* Publishes the local file at src to the s3 dest.

@@ -86,7 +108,8 @@ *

var dfd = new _.Deferred();
var options = _.clone(opts, true);
var options = makeOptions(opts);
var prettySrc = path.relative(process.cwd(), src);
// Make sure the local file exists.
if (!existsSync(src)) {
return dfd.reject(makeError(MSG_ERR_NOT_FOUND, src));
return dfd.reject(makeError(MSG_ERR_NOT_FOUND, prettySrc));
}

@@ -101,8 +124,6 @@

// Pick out the configuration options we need for the client.
var client = knox.createClient(_(options).pick([
'region', 'endpoint', 'port', 'key', 'secret', 'access', 'bucket', 'secure'
]));
var client = makeClient(options);
if (options.debug) {
return dfd.resolve(util.format(MSG_UPLOAD_DEBUG, path.relative(process.cwd(), src), client.bucket, dest)).promise();
return dfd.resolve(util.format(MSG_UPLOAD_DEBUG, prettySrc, client.bucket, dest)).promise();
}

@@ -120,3 +141,3 @@

if (err || res.statusCode !== 200) {
cb(makeError(MSG_ERR_UPLOAD, src, err || res.statusCode));
cb(makeError(MSG_ERR_UPLOAD, prettySrc, err || res.statusCode));
}

@@ -127,3 +148,3 @@ else {

if (err) {
cb(makeError(MSG_ERR_UPLOAD, src, err));
cb(makeError(MSG_ERR_UPLOAD, prettySrc, err));
}

@@ -139,7 +160,7 @@ else {

if (remoteHash === localHash) {
var msg = util.format(MSG_UPLOAD_SUCCESS, src, localHash);
var msg = util.format(MSG_UPLOAD_SUCCESS, prettySrc, localHash);
cb(null, msg);
}
else {
cb(makeError(MSG_ERR_CHECKSUM, 'Upload', localHash, remoteHash, src));
cb(makeError(MSG_ERR_CHECKSUM, 'Upload', localHash, remoteHash, prettySrc));
}

@@ -149,2 +170,3 @@ }

}
res.resume();
});

@@ -177,3 +199,3 @@ };

.on('error', function (err) {
dfd.reject(makeError(MSG_ERR_UPLOAD, src, err));
dfd.reject(makeError(MSG_ERR_UPLOAD, prettySrc, err));
})

@@ -183,2 +205,3 @@ .on('close', function () {

src = tmp.path;
prettySrc += ' (gzip)';
upload(function (err, msg) {

@@ -226,8 +249,6 @@ // Clean up the temp file.

var dfd = new _.Deferred();
var options = _.clone(opts);
var options = makeOptions(opts);
// Pick out the configuration options we need for the client.
var client = knox.createClient(_(options).pick([
'region', 'endpoint', 'port', 'key', 'secret', 'access', 'bucket'
]));
var client = makeClient(options);

@@ -299,8 +320,6 @@ if (options.debug) {

var dfd = new _.Deferred();
var options = _.clone(opts);
var options = makeOptions(opts);
// Pick out the configuration options we need for the client.
var client = knox.createClient(_(options).pick([
'region', 'endpoint', 'port', 'key', 'secret', 'access', 'bucket'
]));
var client = makeClient(options);

@@ -346,8 +365,6 @@ if (options.debug) {

var dfd = new _.Deferred();
var options = _.clone(opts);
var options = makeOptions(opts);
// Pick out the configuration options we need for the client.
var client = knox.createClient(_(options).pick([
'region', 'endpoint', 'port', 'key', 'secret', 'access', 'bucket'
]));
var client = makeClient(options);

@@ -371,3 +388,98 @@ if (options.debug) {

/**
 * Publishes the local file at src to the s3 dest, but only after checking if the file exists or doesn't match.
 *
 * Default behavior: if a file already exists at `dest` (HEAD returns 200), the
 * upload is skipped. With `options.verify` set, the remote copy is additionally
 * compared by md5 hash and then by modification time, and is replaced only when
 * the local file differs AND is newer.
 *
 * Verifies that the upload was successful by comparing an md5 checksum of
 * the local and remote versions. Also checks if the file exists first, both by filename or by hash and mtime
 *
 * @param {String} src The local path to the file to upload.
 * @param {String} dest The s3 path, relative to the bucket, to which the src
 * is uploaded.
 * @param {Object} [options] An object containing options which override any
 * option declared in the global s3 config.
 * @returns {Promise} A deferred promise; resolved with a status message string,
 * rejected with an Error describing the failure.
 */
exports.sync = function (src, dest, opts) {
var dfd = new _.Deferred();
var options = makeOptions(opts);
// Path relative to cwd, used only for human-readable messages.
var prettySrc = path.relative(process.cwd(), src);
// Pick out the configuration options we need for the client.
var client = makeClient(options);
// Debug mode: report what would happen without touching S3 at all.
if (options.debug) {
return dfd.resolve(util.format(MSG_SKIP_DEBUG, client.bucket, prettySrc)).promise();
}
// Check for the file on s3
// verify was truthy, so we need to make sure that this file is actually the file it thinks it is
client.headFile( dest, function(err, res) {
var upload;
// If the file was not found, then we should be able to continue with a normal upload procedure
if (res && res.statusCode === 404) {
upload = exports.upload( src, dest, opts);
// pass through the dfd state
return upload.then( dfd.resolve, dfd.reject );
}
// Any other non-200 response (or transport error) is fatal.
// NOTE(review): this reuses MSG_ERR_DOWNLOAD even though the failing call is
// a HEAD request, not a download — the wording may be misleading in logs.
if (!res || err || res.statusCode !== 200 ) {
return dfd.reject(makeError(MSG_ERR_DOWNLOAD, prettySrc, err || res.statusCode));
}
// we do not wish to overwrite a file that exists by verifying we have a newer one in place
if( !options.verify ) {
// the file exists so do nothing with that
return dfd.resolve(util.format(MSG_SKIP_SUCCESS, prettySrc));
}
// the file exists so let's check to make sure it's the right file, if not, we'll update it
// Read the local file so we can get its md5 hash.
fs.readFile(src, function (err, data) {
var remoteHash, localHash;
if (err) {
return dfd.reject(makeError(MSG_ERR_UPLOAD, prettySrc, err));
}
// The etag head in the response from s3 has double quotes around
// it. Strip them out.
// NOTE(review): assumes the ETag is a plain md5 of the object, which does not
// hold for S3 multipart uploads — verify if large files are expected here.
remoteHash = res.headers.etag.replace(/"/g, '');
// Get an md5 of the local file so we can verify the upload.
localHash = crypto.createHash('md5').update(data).digest('hex');
if (remoteHash === localHash) {
// the file exists and is the same so do nothing with that
return dfd.resolve(util.format(MSG_SKIP_MATCHES, prettySrc));
}
// Hashes differ: fall back to an mtime comparison before overwriting.
fs.stat( src, function(err, stats) {
var remoteWhen, localWhen, upload;
if (err) {
return dfd.reject(makeError(MSG_ERR_UPLOAD, prettySrc, err));
}
// which one is newer? if local is newer, we should upload it
remoteWhen = new Date(res.headers['last-modified'] || "0"); // earliest date possible if no header is returned
localWhen = new Date(stats.mtime || "1"); // make second earliest date possible if mtime isn't set
if ( localWhen <= remoteWhen ) {
// Local copy is not newer than the remote one — keep the remote file.
return dfd.resolve(util.format(MSG_SKIP_OLDER, prettySrc));
}
// default is that local is newer, only upload when it is
upload = exports.upload( src, dest, opts);
// pass through the dfd state
upload.then( dfd.resolve, dfd.reject );
});
});
}).end();
return dfd.promise();
};
return exports;
};

@@ -33,2 +33,10 @@ var path = require('path');

config.sync.forEach(function (sync) {
var syncFiles = self._parseUploadFiles(sync, config);
syncFiles.forEach(function (syncFile) {
transfers.push(s3.sync.bind(s3, syncFile.file, syncFile.dest, syncFile.upload));
});
});
config.download.forEach(function (download) {

@@ -81,4 +89,4 @@ transfers.push(s3.download.bind(s3, download.src, download.dest, _(download).defaults(config)));

// Put the key, secret and bucket information into the upload for knox
_.extend(upload, config);
// Put the key, secret and bucket information into the upload for knox.
var fileConfig = _.extend({}, config, upload.options || {});

@@ -110,3 +118,3 @@ // If there is only 1 file and it matches the original file wildcard,

dest: dest,
upload: upload
upload: fileConfig
};

@@ -133,2 +141,3 @@ });

debug: false,
verify: false,
maxOperations: 0,

@@ -144,3 +153,4 @@ encodePaths: false

del: [],
copy: []
copy: [],
sync: []
};

@@ -147,0 +157,0 @@

@@ -7,7 +7,20 @@

var _ = grunt.util._;
var async = grunt.util.async;
var s3Config = grunt.config("s3"),
config = _.extend({}, s3Config.options, s3Config.test.options);
var s3Config = grunt.config("s3")
, common = require('./common')
, config = common.config;
module.exports = {
setUp: function(cb) {
async.series([
common.clean,
function(done) {
s3.upload(__dirname + '/files/a.txt', 'a.txt', common.config).done(done);
}
], function() {
cb();
});
},
testDownload : function (test) {

@@ -17,3 +30,3 @@ test.expect(1);

var dest = __dirname + '/files/a.txt';
var src = __dirname + '/../s3/127/a.txt/.fakes3_metadataFFF/content';
var src = __dirname + '/../s3/127/test/a.txt/.fakes3_metadataFFF/content';

@@ -33,3 +46,3 @@ s3.download('a.txt', dest, config)

var dest = __dirname + '/files/b.txt.debug';
var src = __dirname + '/../s3/127/b.txt/.fakes3_metadataFFF/content';
var src = __dirname + '/../s3/127/test/b.txt/.fakes3_metadataFFF/content';

@@ -36,0 +49,0 @@ var debugConfig = _.defaults({}, config, {debug: true});

@@ -1,1 +0,1 @@

a
a

@@ -11,4 +11,5 @@ var path = require('path');

var s3Config = grunt.config('s3');
var config = _.extend({}, s3Config.options, s3Config.test_S3Task.options);
var s3Config = grunt.config("s3")
, common = require('./common')
, config = common.config;

@@ -39,2 +40,3 @@ var makeMockTask = function (taskDef) {

module.exports = {
setUp: common.clean,
run: function (test) {

@@ -94,2 +96,5 @@ var taskDef = new _.Deferred();

// Overrides are correct.
test.equal(uploadFiles[0].upload.bucket, 'overridden');
// File paths in root

@@ -96,0 +101,0 @@ test.equal(uploadFiles[0].file, path.join(process.cwd(), 'test', 'files', 'a.txt'));

@@ -10,13 +10,16 @@

var s3Config = grunt.config("s3"),
config = _.extend({}, s3Config.options, s3Config.test.options);
var s3Config = grunt.config("s3")
, common = require('./common')
, config = common.config;
module.exports = {
setUp: common.clean,
testUpload : function (test) {
test.expect(2);
async.waterfall([
async.series([
function (cb) {
var src = __dirname + '/files/a.txt';
var dest = __dirname + '/../s3/127/a.txt/.fakes3_metadataFFF/content';
var dest = __dirname + '/../s3/127/test/a.txt/.fakes3_metadataFFF/content';

@@ -40,5 +43,3 @@ s3.upload(src, 'a.txt', config)

}
], function () {
test.done();
});
], test.done);
},

@@ -49,6 +50,6 @@

async.waterfall([
async.series([
function (cb) {
var src = __dirname + '/files/b.txt';
var dest = __dirname + '/../s3/127/b.txt/.fakes3_metadataFFF/metadata';
var dest = __dirname + '/../s3/127/test/b.txt/.fakes3_metadataFFF/metadata';

@@ -59,3 +60,3 @@ var headerConfig = _.defaults({}, config, { headers : {'Content-Type' : '<3'} });

.always(function () {
var meta = yaml.parse(grunt.file.read(dest))
var meta = yaml.parse(grunt.file.read(dest));
test.ok(meta[0][':content_type'] === new Buffer('<3').toString('base64'), 'Headers are preserved.');

@@ -65,5 +66,3 @@ cb(null);

}
], function () {
test.done();
});
], test.done);
},

@@ -75,3 +74,3 @@

var src = __dirname + '/files/c.txt';
var dest = __dirname + '/../s3/127/c.txt/.fakes3_metadataFFF/content';
var dest = __dirname + '/../s3/127/test/c.txt/.fakes3_metadataFFF/content';

@@ -78,0 +77,0 @@ var debugConfig = _.defaults({}, config, { debug: true });

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc