gulp-awspublish - npm Package Compare versions

Comparing version 0.0.2 to 0.0.3

test/mocha.opts

History.md

+0.0.2 / 2014-02-04
+==================
+* fix gz upload
+* update logging padding
+* add cache feature
+* Update README.md
 0.0.1 / 2014-02-03
 ==================


lib/index.js

@@ -93,2 +93,3 @@ var Readable = require('stream').Readable,
 file.path += 'gz';
+file.s3.path += 'gz';
 file.contents = buf;
 cb(err, file);
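This one-line addition is the "fix gz upload" entry from History.md: before it, only the local vinyl path was renamed, so the key used for the S3 upload kept its original name and did not match the gzipped file. A minimal sketch of the transform this hunk appears to sit in, assuming a zlib callback shape; the function name and the surrounding code are illustrative, not the package's verbatim source:

var zlib = require('zlib');

// illustrative shape of the gzip step around lines 93-96 of lib/index.js
function gzipContents(file, cb) {
  zlib.gzip(file.contents, function (err, buf) {
    if (err) return cb(err);
    file.path += 'gz';    // rename the local file: hello.txt -> hello.txtgz
    file.s3.path += 'gz'; // 0.0.3 fix: rename the s3 key to match
    file.contents = buf;  // swap in the compressed buffer
    cb(err, file);
  });
}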


package.json

 {
   "name": "gulp-awspublish",
-  "version": "0.0.2",
+  "version": "0.0.3",
   "description": "A plugin for Gulp",
   "keywords": [

README.md

@@ -1,4 +1,3 @@
 # gulp-awspublish
-[![NPM version][npm-image]][npm-url] [![Build Status][travis-image]][travis-url] [![Coverage Status](coveralls-image)](coveralls-url) [![Dependency Status][depstat-image]][depstat-url]
+[![NPM version][npm-image]][npm-url] [![Dependency Status][depstat-image]][depstat-url]

@@ -24,3 +23,4 @@ > awspublish plugin for [gulp](https://github.com/wearefractal/gulp)
 // publish all js files
-// Cache-Control headers will be added on top of other headers
+// Set Content-Length, Content-Type and Cache-Control headers
+// Set x-amz-acl to public-read by default
 var js = gulp.src('./public/*.js')

@@ -31,2 +31,3 @@ .pipe(publisher.publish(headers));
 // Content-Encoding headers will be added on top of other headers
+// uploaded files will have a jsgz extension
 var jsgz = gulp.src('./public/*.js')

@@ -36,5 +37,5 @@ .pipe(awspublish.gzip())
-// sync content of s3 bucket with listing of published files
-// cache s3 etags to avoid unnecessary request next time
-// print progress with reportr
+// sync content of s3 bucket with files in the stream
+// cache s3 etags locally to avoid unnecessary request next time
+// print progress with reporter
 publisher
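Taken together, the snippets in this README diff assemble into a small gulpfile. A hedged sketch of the full flow as of 0.0.3, assuming the publisher is created with awspublish.create using knox-style key/secret/bucket options and the reporter with awspublish.reporter(), as the API sections below suggest; the create() option values and the cache/reporter pipe order are assumptions:

var gulp = require('gulp'),
    es = require('event-stream'),
    awspublish = require('gulp-awspublish');

// placeholder credentials; option names assumed from the README of this era
var publisher = awspublish.create({ key: '...', secret: '...', bucket: '...' });
var headers = { 'Cache-Control': 'max-age=315360000, no-transform, public' };

// publish all js files with Content-Length, Content-Type, Cache-Control
var js = gulp.src('./public/*.js')
  .pipe(publisher.publish(headers));

// gzip then publish; uploaded files get a jsgz extension
var jsgz = gulp.src('./public/*.js')
  .pipe(awspublish.gzip())
  .pipe(publisher.publish(headers));

// sync the bucket with the merged stream, cache etags, print progress
publisher
  .sync(es.merge(js, jsgz))
  .pipe(awspublish.cache())
  .pipe(awspublish.reporter());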

@@ -51,7 +52,7 @@ .sync(es.merge(js, jsgz)))
-create a gzip through stream, that gzip files and add Content-Encoding headers
+create a through stream, that gzip files and add Content-Encoding headers
 ### awspublish.cache()
-through stream that create or update an .awspublish cache file with the list
+create a through stream that create or update an .awspublish cache file with the list
 of key value pair (s3.path/s3.etag)
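The test suite further down reads .awspublish with JSON.parse and asserts that an s3 path is an own property, so the cache file appears to be a flat JSON map from s3.path to etag. An illustrative example; the etag value below is made up:

// .awspublish (illustrative contents)
{
  "/test/hello.txt": "5eb63bbbe01eeed093cb22bb8f5acdc3"
}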

@@ -67,10 +68,10 @@
 create a through stream, that push files to s3.
-Publish take a header hash as argument to override or add other s3 headers.
-if there is an .awspublish cache file, we first check against it to see
-if the file is in the cache we dont upload the file,
+Publish take a header hash that add or override existing s3 headers.
+if there is an .awspublish cache file, we first compare disk file etag
+with the one in the cache, if etags match we dont query amazon
+and file.s3.state is set to 'cache'
 we then make a header query and compare the remote etag with the local one
-if etag5 match we don't upload the file and file.s3.state is set to 'skip'
+if etags match we don't upload the file and file.s3.state is set to 'skip'
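Put together with the 'update' case in the hunk below, this paragraph describes a three-step decision. A sketch of that logic, assuming the local etag is an MD5 of the file contents (which matches S3's etag for simple uploads); decideState, cachedEtag and headRemote are hypothetical names, not the package's API:

var crypto = require('crypto');

function md5(buf) {
  return crypto.createHash('md5').update(buf).digest('hex');
}

// hypothetical sketch of the publish state decision described above
function decideState(file, cachedEtag, headRemote, cb) {
  var localEtag = md5(file.contents);

  // 1. etag matches the .awspublish cache entry: don't query amazon
  if (cachedEtag === localEtag) {
    file.s3.state = 'cache';
    return cb(null, file);
  }

  // 2. make a head query and compare the remote etag with the local one
  headRemote(file.s3.path, function (err, remoteEtag) {
    if (err) return cb(err);
    if (!remoteEtag) file.s3.state = 'create';                 // no remote file
    else if (remoteEtag === localEtag) file.s3.state = 'skip'; // same content
    else file.s3.state = 'update';                             // remote differs
    cb(null, file);
  });
}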

@@ -80,5 +81,6 @@ if there is a remote file.s3.state is set to 'update'
-Files that get out of the stream get extra properties
-s3.path: s3 path of this file
-s3.state: publish state (create, update, cache or skip)
+Files that go through the stream get extra properties
+s3.path: s3 path
+s3.etag: file etag
+s3.state: publication state (create, update, cache or skip)
 s3.headers: s3 headers for this file
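Because these properties travel with the file objects, a downstream step can branch on them. A hedged snippet using es.map from event-stream (already used in this README); the logUploads name is illustrative:

var es = require('event-stream');

// log only the files that were actually uploaded
var logUploads = es.map(function (file, cb) {
  if (file.s3.state === 'create' || file.s3.state === 'update') {
    console.log('uploaded', file.s3.path, file.s3.etag);
  }
  cb(null, file);
});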

@@ -99,3 +101,3 @@
-create a reporter that logs to console each file state (delete, add, update, skip) and s3 path
+create a reporter that logs s3.path and s3.state (delete, create, update, cache, skip)

@@ -110,9 +112,4 @@
-[travis-url]: http://travis-ci.org/pgherveou/gulp-awspublish
-[travis-image]: https://secure.travis-ci.org/pgherveou/gulp-awspublish.png?branch=master
-[coveralls-url]: https://coveralls.io/r/pgherveou/gulp-awspublish
-[coveralls-image]: https://coveralls.io/repos/pgherveou/gulp-awspublish/badge.png
 [depstat-url]: https://david-dm.org/pgherveou/gulp-awspublish
 [depstat-image]: https://david-dm.org/pgherveou/gulp-awspublish.png

@@ -27,177 +27,213 @@ /* global describe, before, it */

// test suite as of 0.0.3 (new side of this hunk)

  try { fs.unlinkSync('.awspublish'); } catch (err) {}
  publisher.client.deleteMultiple([
    '/test/hello.txt',
    '/test/hello.txtgz'
  ], done);
});

describe('Publish', function() {

  it('should produce gzip file with s3 headers', function (done) {
    var gzip = awspublish.gzip();
    var srcFile = new gutil.File({
      path: '/test/hello.txt',
      base: '/',
      contents: new Buffer('hello world')
    });

    gzip.write(srcFile);
    gzip
      .pipe(es.writeArray(function(err, files) {
        expect(err).not.to.exist;
        expect(files).to.have.length(1);
        expect(files[0]).to.not.eq(srcFile);
        expect(files[0].path).to.eq(srcFile.path + 'gz');
        expect(files[0].s3.path).to.eq('/test/hello.txtgz');
        expect(files[0].s3.headers['Content-Encoding']).to.eq('gzip');

        // compare uncompressed to srcFile
        zlib.unzip(files[0].contents, function(err, buf) {
          var newFileContent = buf.toString('utf8', 0, buf.length),
              srcFileContent = srcFile.contents.toString('utf8', 0, srcFile.contents.length);
          expect(newFileContent).to.eq(srcFileContent);
          done();
        });
      }));
    gzip.end();
  });

  it('should upload gzip file', function (done) {
    var gzip = awspublish.gzip(),
        stream = gzip.pipe(publisher.publish());

    gzip.write(new gutil.File({
      path: '/test/hello.txt',
      base: '/',
      contents: new Buffer('hello world')
    }));

    stream
      .pipe(es.writeArray(function(err, files) {
        expect(err).not.to.exist;
        expect(files).to.have.length(1);
        publisher.client.headFile('/test/hello.txtgz', function(err, res) {
          expect(res.headers.etag).to.exist;
          done(err);
        });
      }));
    gzip.end();
  });

  it('should create new file on s3 with headers', function (done) {
    var headers = {
      'Cache-Control': 'max-age=315360000, no-transform, public'
    };

    var stream = publisher.publish(headers);
    stream.write(new gutil.File({
      path: '/test/hello.txt',
      base: '/',
      contents: new Buffer('hello world')
    }));

    stream
      .pipe(es.writeArray(function(err, files) {
        expect(err).not.to.exist;
        expect(files).to.have.length(1);
        expect(files[0].s3.path).to.eq('/test/hello.txt');
        expect(files[0].s3.state).to.eq('create');
        expect(files[0].s3.headers['Cache-Control']).to.eq(headers['Cache-Control']);
        expect(files[0].s3.headers['x-amz-acl']).to.eq('public-read');
        expect(files[0].s3.headers['Content-Type']).to.eq('text/plain');
        expect(files[0].s3.headers['Content-Length']).to.eq(files[0].contents.length);
        publisher.client.headFile('/test/hello.txt', function(err, res) {
          expect(res.headers.etag).to.exist;
          done(err);
        });
      }));
    stream.end();
  });

  it('should update exsiting file on s3', function (done) {
    var stream = publisher.publish();
    stream.pipe(es.writeArray(function(err, files) {
      expect(err).not.to.exist;
      expect(files).to.have.length(1);
      expect(files[0].s3.state).to.eq('update');
      done(err);
    }));

    stream.write(new gutil.File({
      path: '/test/hello.txt',
      base: '/',
      contents: new Buffer('hello world 2')
    }));
    stream.end();
  });

  it('should skip file update', function (done) {
    var stream = publisher.publish();
    stream.pipe(es.writeArray(function(err, files) {
      expect(err).not.to.exist;
      expect(files).to.have.length(1);
      expect(files[0].s3.state).to.eq('skip');
      done(err);
    }));

    stream.write(new gutil.File({
      path: '/test/hello.txt',
      base: '/',
      contents: new Buffer('hello world 2')
    }));
    stream.end();
  });

  it('should add cache file', function (done) {
    expect(fs.existsSync('.awspublish')).to.be.false;

    var stream = publisher.publish(),
        cache = stream.pipe(awspublish.cache());

    stream.write(new gutil.File({
      path: '/test/hello.txt',
      base: '/',
      contents: new Buffer('hello world 2')
    }));

    cache.on('finish', function() {
      var cache = JSON.parse(fs.readFileSync('.awspublish', 'utf8'));
      expect(cache).to.have.ownProperty('/test/hello.txt');
      done();
    });

    stream.end();
  });

  it('should mark file as cached', function (done) {
    var stream = publisher.publish();
    stream.pipe(es.writeArray(function(err, files) {
      expect(err).not.to.exist;
      expect(files).to.have.length(1);
      expect(files[0].s3.state).to.eq('cache');
      done(err);
    }));

    stream.write(new gutil.File({
      path: '/test/hello.txt',
      base: '/',
      contents: new Buffer('hello world 2')
    }));
    stream.end();
  });
});

describe('Sync', function() {

  // remove files
  before(function(done) {
    publisher.client.deleteMultiple([
      '/test/hello.txt',
      '/test/hello.txtgz'
    ], done);
  });

  // add some dummy file
  ['bar', 'foo', 'bim', 'boum'].forEach(function (name) {
    var filename = name + '.txt',
        headers = {'Content-Type': 'text/plain'};
    before(function(done) {
      publisher.client.putBuffer(name, filename, headers, done);
    });
  });

  it('should sync bucket with published data', function(done) {
    var stream = gutil.noop();

    publisher
      .sync(stream)
      .pipe(es.writeArray(function(err, arr) {
        expect(err).to.not.exist;
        var deleted = arr.filter(function (file) {
          return file.s3 && file.s3.state === 'delete';
        }).map(function (file) {
          return file.s3.path;
        }).sort().join(' ');

        expect(deleted).to.eq('boum.txt foo.txt');
        done(err);
      }));

    stream.write({ s3: { path: 'bim.txt' } });
    stream.write({ s3: { path: 'bar.txt' } });
    stream.end();
  });
});

