archiver - npm Package Compare versions

Comparing version 0.4.10 to 0.5.0-alpha

examples/pack-bulk.js

benchmark/simple/pack-zip.js

@@ -48,6 +48,2 @@ var fs = require('fs');

archive
.addFile(fs.createReadStream(file), { name: 'large file' })
.finalize();
var bench = streamBench({

@@ -59,2 +55,6 @@ logReport: true,

archive.pipe(bench);
archive.pipe(bench);
archive
.file(file, { name: 'large file' })
.finalize();
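
The benchmark now uses the new `file()` method in place of the deprecated `addFile()`. A minimal sketch of the migration based on the lines above (the file path is illustrative):

```js
var fs = require('fs');
var archiver = require('archiver');

var archive = archiver('zip');
var file = 'path/to/large-file.bin'; // illustrative path

// 0.4.x style: stream the file in yourself via the deprecated addFile()
// archive.addFile(fs.createReadStream(file), { name: 'large file' });

// 0.5.0-alpha style: hand the path to file(), which wraps it in a lazystream
archive
  .file(file, { name: 'large file' })
  .finalize();
```
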
/**
* node-archiver
*
* Copyright (c) 2012-2013 Chris Talkington, contributors.
* Copyright (c) 2012-2014 Chris Talkington, contributors.
* Licensed under the MIT license.
* https://github.com/ctalkington/node-archiver/blob/master/LICENSE-MIT
*/
var ArchiverCore = require('./modules/core');
var formatModules = {};
var ArchiverTar = require('./archiver/tar');
var ArchiverZip = require('./archiver/zip');
var archiver = module.exports = function(format, options) {
return archiver.create(format, options);
};
var archiver = module.exports = function(type, options) {
if (type === 'zip') {
return new ArchiverZip(options);
} else if (type === 'tar') {
return new ArchiverTar(options);
archiver.create = function(format, options) {
if (formatModules[format]) {
var inst = new ArchiverCore(options);
inst.setModule(new formatModules[format](options));
return inst;
} else {
throw new Error('Unknown archive type');
throw new Error('unknown format: ' + format);
}
};
archiver.create = archiver;
archiver.registerFormat = function(format, module) {
if (module && typeof module === 'function' && typeof module.prototype.append === 'function') {
formatModules[format] = module;
archiver.createTar = function(options) {
return new ArchiverTar(options);
// backwards compat
var compatName = 'create' + format.charAt(0).toUpperCase() + format.slice(1);
archiver[compatName] = function(options) {
return archiver.create(format, options);
};
} else {
throw new Error('format module invalid: ' + format);
}
};
archiver.createZip = function(options) {
return new ArchiverZip(options);
};
archiver.registerFormat('zip', require('./modules/zip'));
archiver.registerFormat('tar', require('./modules/tar'));
archiver.registerFormat('json', require('./modules/json'));
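
The new lib/index.js replaces the hard-wired zip/tar switch with a format registry: `registerFormat()` validates the module, stores it, and generates a backwards-compatible `create<Format>()` helper, while `create()` wires the module into an `ArchiverCore` instance. A minimal sketch of using the resulting factory (output filename illustrative):

```js
var fs = require('fs');
var archiver = require('archiver');

// Generic factory introduced in 0.5.0-alpha; archiver('zip') still works too.
var archive = archiver.create('zip');

archive.pipe(fs.createWriteStream('output.zip')); // illustrative destination

archive
  .append('hello world', { name: 'hello.txt' })
  .finalize();
```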

@@ -0,3 +1,10 @@

/**
* node-archiver
*
* Copyright (c) 2012-2014 Chris Talkington, contributors.
* Licensed under the MIT license.
* https://github.com/ctalkington/node-archiver/blob/master/LICENSE-MIT
*/
var inherits = require('util').inherits;
var Transform = require('stream').Transform || require('readable-stream/transform');
var Transform = require('stream').Transform || require('readable-stream').Transform;

@@ -9,3 +16,3 @@ var util = require('./');

this.checksum = util.crc32.createCRC32();
this.checksum = util.crc32();
this.digest = null;

@@ -12,0 +19,0 @@

/**
* node-archiver
*
* Copyright (c) 2012-2013 Chris Talkington, contributors.
* Copyright (c) 2012-2014 Chris Talkington, contributors.
* Licensed under the MIT license.
* https://github.com/ctalkington/node-archiver/blob/master/LICENSE-MIT
*/
var lookup = [

@@ -76,9 +75,16 @@ 0x00000000, 0x77073096, 0xee0e612c, 0x990951ba,

function CRC32() {
function CRC32(data) {
if (!(this instanceof CRC32))
return new CRC32(data);
this.crc = ~0;
if (data) {
this.update(data);
}
}
CRC32.prototype.update = function(buf) {
for (var i = 0; i < buf.length; i++) {
this.crc = (this.crc >>> 8) ^ lookup[(this.crc ^ buf[i]) & 0xff];
CRC32.prototype.update = function(data) {
for (var i = 0; i < data.length; i++) {
this.crc = (this.crc >>> 8) ^ lookup[(this.crc ^ data[i]) & 0xff];
}

@@ -93,6 +99,2 @@

exports.createCRC32 = function() {
return new CRC32();
};
exports.CRC32 = CRC32;
module.exports = CRC32;
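
The CRC32 helper drops `createCRC32()` in favour of exporting the constructor directly, callable without `new` and optionally seeded with initial data. A short sketch of the new usage, matching the values asserted in the updated test suite below (the require path is the package-internal one shown in the tests):

```js
var crc32 = require('archiver/lib/util/crc32'); // internal path, per the test diff

// Callable without `new`; passing data runs update() immediately.
var sum = crc32('testing checksum');
console.log(sum.crc);                                      // 323269802

// Incremental use still works via update()/digest().
console.log(crc32().update('testing checksum').digest()); // -323269803
```
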
/**
* node-archiver
*
* Copyright (c) 2012-2013 Chris Talkington, contributors.
* Copyright (c) 2012-2014 Chris Talkington, contributors.
* Licensed under the MIT license.
* https://github.com/ctalkington/node-archiver/blob/master/LICENSE-MIT
*/
var fs = require('fs');

@@ -15,13 +14,7 @@ var path = require('path');

util._ = require('lodash');
util.crc32 = require('./crc32');
util.lazystream = require('lazystream');
util.file = require('file-utils');
var objectTypes = {
'boolean': false,
'function': true,
'object': true,
'number': false,
'string': false,
'undefined': false
};
util.cleanBuffer = function(length) {

@@ -59,12 +52,2 @@ var buf = new Buffer(length);

util.convertDateTimeDos = function(input) {
return new Date(
((input >> 25) & 0x7f) + 1980,
((input >> 21) & 0x0f) - 1,
(input >> 16) & 0x1f,
(input >> 11) & 0x1f,
(input >> 5) & 0x3f,
(input & 0x1f) << 1);
};
util.convertDateTimeEpoch = function(input) {

@@ -96,84 +79,106 @@ input = input * 1000;

util.defaults = function(object, source) {
object = object || {};
var index;
var iterable = object;
var result = iterable;
// this is slightly different from lodash version
util.defaults = function(object, source, guard) {
var args = arguments;
var argsIndex = 0;
var argsLength = args.length;
args[0] = args[0] || {};
while (++argsIndex < argsLength) {
iterable = args[argsIndex];
return util._.defaults.apply(util._, args);
};
if (iterable && objectTypes[typeof iterable]) {
var ownIndex = -1;
var ownProps = objectTypes[typeof iterable] && util.keys(iterable);
var length = ownProps ? ownProps.length : 0;
util.epochDateTime = function(d) {
d = (d instanceof Date) ? d : new Date();
while (++ownIndex < length) {
index = ownProps[ownIndex];
if (typeof result[index] === 'undefined' || result[index] == null) {
result[index] = iterable[index];
} else if (util.isObject(result[index]) && util.isObject(iterable[index])) {
result[index] = util.defaults(result[index], iterable[index]);
}
}
}
}
return Math.round(d / 1000);
};
return result;
util.isStream = function(source) {
return (source instanceof stream.Stream);
};
util.dosDateTime = function(d, utc) {
d = (d instanceof Date) ? d : new Date();
utc = utc || false;
util.lazyReadStream = function(filepath) {
return new util.lazystream.Readable(function() {
return fs.createReadStream(filepath);
});
};
var year = (utc === true) ? d.getUTCFullYear() : d.getFullYear();
// reusing bits of grunt's multi-task source normalization
util.normalizeFilesArray = function(data) {
var files = [];
if (year < 1980) {
return (1<<21) | (1<<16);
data.forEach(function(obj) {
var prop;
if ('src' in obj || 'dest' in obj) {
files.push(obj);
}
});
if (files.length === 0) {
return [];
}
var val = {
year: year,
month: (utc === true) ? d.getUTCMonth() : d.getMonth(),
date: (utc === true) ? d.getUTCDate() : d.getDate(),
hours: (utc === true) ? d.getUTCHours() : d.getHours(),
minutes: (utc === true) ? d.getUTCMinutes() : d.getMinutes(),
seconds: (utc === true) ? d.getUTCSeconds() : d.getSeconds()
};
files = util._(files).chain().forEach(function(obj) {
if (!('src' in obj) || !obj.src) { return; }
// Normalize .src properties to flattened array.
if (Array.isArray(obj.src)) {
obj.src = util._.flatten(obj.src);
} else {
obj.src = [obj.src];
}
}).map(function(obj) {
// Build options object, removing unwanted properties.
var expandOptions = util._.extend({}, obj);
delete expandOptions.src;
delete expandOptions.dest;
return ((val.year-1980) << 25) | ((val.month+1) << 21) | (val.date << 16) |
(val.hours << 11) | (val.minutes << 5) | (val.seconds / 2);
};
// Expand file mappings.
if (obj.expand) {
return util.file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) {
// Copy obj properties to result.
var result = util._.extend({}, obj);
// Make a clone of the orig obj available.
result.orig = util._.extend({}, obj);
// Set .src and .dest, processing both as templates.
result.src = mapObj.src;
result.dest = mapObj.dest;
// Remove unwanted properties.
['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) {
delete result[prop];
});
return result;
});
}
util.epochDateTime = function(d) {
d = (d instanceof Date) ? d : new Date();
// Copy obj properties to result, adding an .orig property.
var result = util._.extend({}, obj);
// Make a clone of the orig obj available.
result.orig = util._.extend({}, obj);
return Math.round(d / 1000);
};
if ('src' in result) {
// Expose an expand-on-demand getter method as .src.
Object.defineProperty(result, 'src', {
enumerable: true,
get: function fn() {
var src;
if (!('result' in fn)) {
src = obj.src;
// If src is an array, flatten it. Otherwise, make it into an array.
src = Array.isArray(src) ? util._.flatten(src) : [src];
// Expand src files, memoizing result.
fn.result = util.file.expand(expandOptions, src);
}
return fn.result;
}
});
}
util.keys = function(object) {
if (!util.isObject(object)) {
return [];
}
if ('dest' in result) {
result.dest = obj.dest;
}
return Object.keys(object);
};
return result;
}).flatten().value();
util.isObject = function(value) {
// check if the value is the ECMAScript language type of Object
// http://es5.github.com/#x8
// and avoid a V8 bug
// http://code.google.com/p/v8/issues/detail?id=2291
return !!(value && objectTypes[typeof value]);
return files;
};
util.isStream = function(source) {
return (source instanceof stream.Stream);
};
util.octalDateTime = function(d) {

@@ -209,12 +214,5 @@ d = (d instanceof Date) ? d : new Date();

util.sanitizeFilePath = function(filepath) {
filepath = filepath || '';
filepath = path.normalize(filepath);
filepath = util.unixifyPath(filepath);
while (filepath.substring(0, 1) === '/') {
filepath = filepath.substring(1);
}
return filepath;
util.sanitizePath = function() {
var filepath = path.join.apply(path, arguments);
return filepath.replace(/\\/g, '/').replace(/:/g, '').replace(/^\/+/, '');
};

@@ -279,4 +277,5 @@

util.unixifyPath = function(filepath) {
util.unixifyPath = function() {
var filepath = path.join.apply(path, arguments);
return filepath.replace(/\\/g, '/');
};
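
The path helpers are also reworked: both `sanitizePath()` and `unixifyPath()` now accept multiple segments and join them via `path.join`, with `sanitizePath()` additionally stripping drive colons and leading slashes. A small sketch of the resulting behaviour, matching the assertions in the updated utils tests further down:

```js
// internal module; required here only to illustrate the new helpers
var util = require('archiver/lib/util');

console.log(util.sanitizePath('\\this/path//file.txt'));    // 'this/path/file.txt'
console.log(util.sanitizePath('/this/path/file.txt'));      // 'this/path/file.txt'
console.log(util.sanitizePath('c:\\this\\path\\file.txt')); // 'c/this/path/file.txt'

console.log(util.unixifyPath('this\\path', 'file.txt'));    // 'this/path/file.txt'
```
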
{
"name": "archiver",
"version": "0.4.10",
"version": "0.5.0-alpha",
"description": "Creates Archives (ZIP) via Node Streams.",

@@ -25,3 +25,3 @@ "homepage": "https://github.com/ctalkington/node-archiver",

"engines": {
"node": ">= 0.6.3"
"node": ">= 0.8.0"
},

@@ -33,8 +33,11 @@ "scripts": {

"dependencies": {
"readable-stream": "~1.0.2",
"iconv-lite" : "~0.2.11"
"readable-stream": "~1.1.9",
"zip-stream": "~0.1.0",
"lazystream": "~0.1.0",
"file-utils": "~0.1.5",
"lodash": "~2.4.1"
},
"devDependencies": {
"chai": "~1.7.1",
"mocha": "~1.12.0",
"chai": "~1.8.1",
"mocha": "~1.16.0",
"rimraf": "~2.2.0",

@@ -41,0 +44,0 @@ "mkdirp": "~0.3.5",

@@ -1,4 +0,4 @@

# Archiver v0.4.10 [![Build Status](https://secure.travis-ci.org/ctalkington/node-archiver.png?branch=master)](http://travis-ci.org/ctalkington/node-archiver)
# Archiver v0.5.0-alpha [![Build Status](https://secure.travis-ci.org/ctalkington/node-archiver.png?branch=master)](http://travis-ci.org/ctalkington/node-archiver)
Creates Archives (Zip, Tar) via Node Streams. Depends on Node's built-in zlib module for compression available since version 0.6.3.
Creates Archives (Zip, Tar) via Node Streams.

@@ -15,16 +15,49 @@ ## Install

#### #create(type, options)
#### create(format, options)
Creates an Archiver instance based on the type (ie zip/tar) passed. Can be passed to `Archiver` for convenience.
Creates an Archiver instance based on the format (zip, tar, etc) passed. Parameters can be passed directly to `Archiver` constructor for convenience.
#### registerFormat(format, module)
Registers an archive format. Format modules are essentially transform streams with a few required methods. They will be further documented once a formal spec is in place.
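
Concretely, `registerFormat()` (see the new lib/index.js earlier in this diff) only checks that the module is a constructor whose prototype has an `append` function, then generates a `create<Format>()` helper for it. A hypothetical skeleton of such a module, purely illustrative of the shape rather than a real format; the constructor name, format name, and append internals are assumptions:

```js
var inherits = require('util').inherits;
var Transform = require('stream').Transform || require('readable-stream').Transform;

// Hypothetical format module: a transform stream with an append() method.
function LinesFormat(options) {
  Transform.call(this, options);
}
inherits(LinesFormat, Transform);

LinesFormat.prototype._transform = function(chunk, encoding, callback) {
  callback(null, chunk);
};

// Assumed signature, mirroring the documented instance append(input, data, callback).
LinesFormat.prototype.append = function(source, data, callback) {
  this.write(String(source) + '\n');
  callback();
};

var archiver = require('archiver');
archiver.registerFormat('lines', LinesFormat); // also adds archiver.createLines()
```
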
### Instance Methods
#### #append(input, data, callback(err))
#### append(input, data, callback(err))
Appends a file to the instance. Input can be in the form of a text string, buffer, or stream. When the instance has received, processed, and emitted the input, the callback is fired.
Appends an input source (text string, buffer, or stream) to the instance. When the instance has received, processed, and emitted the input, the callback is fired.
Replaces `#addFile` which is in the depreciation stage and set to be remove in next release.
Replaced `#addFile` in v0.5.
#### #finalize(callback(err, bytes))
```js
archive.append('string', { name:'string.txt' });
archive.append(new Buffer('string'), { name:'buffer.txt' });
archive.append(fs.createReadStream('mydir/file.txt'), { name:'stream.txt' });
```
#### bulk(mappings)
Appends multiple files from passed array of src-dest file mappings, based on [Grunt's "Files Array" format](http://gruntjs.com/configuring-tasks#files-array-format). A lazystream wrapper is used to prevent issues with open file limits.
[Globbing patterns](http://gruntjs.com/configuring-tasks#globbing-patterns) and [multiple properties](http://gruntjs.com/configuring-tasks#building-the-files-object-dynamically) are supported through use of the [file-utils](https://github.com/SBoudrias/file-utils) package, based on Grunt's file utilities. Please note that multiple src files to single dest file (ie concat) is not supported.
The `data` property can be set (per src-dest mapping) to define file data for matched files.
```js
archive.bulk([
{ src: ['mydir/**'], data: { date: new Date() } },
{ expand: true, cwd: 'mydir', src: ['**'], dest: 'newdir' }
]);
```
#### file(filepath, data, callback(err))
Appends a file given its filepath. Uses a lazystream wrapper to prevent issues with open file limits.
```js
archive.file('mydir/file.txt', { name:'file.txt' });
```
#### finalize(callback(err, bytes))
Finalizes the instance. When the instance's stream has finished emitting, the callback is fired. This generally doesn't correspond to the end of the destination stream; though a solution to track the destination stream may come in a future release.
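
For example, a minimal sketch using the documented callback:

```js
archive.finalize(function(err, bytes) {
  if (err) {
    throw err;
  }
  console.log(bytes + ' total bytes written');
});
```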

@@ -66,2 +99,6 @@

#### mode `number`
Sets the file permissions. (experimental)
## Tar

@@ -89,2 +126,6 @@

#### mode `number`
Sets the file permissions. Defaults to 0664.
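
Like the other entry options, `mode` is presumably passed in the data object alongside `name`; a hedged sketch, with an illustrative octal value:

```js
archive.append('#!/bin/sh\necho hello\n', { name: 'bin/hello.sh', mode: 0755 });
```
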
## Things of Interest

@@ -91,0 +132,0 @@

/*global before,describe,it */
var fs = require('fs');
var PassThrough = require('stream').PassThrough || require('readable-stream/passthrough');
var WriteStream = fs.createWriteStream;

@@ -27,3 +28,3 @@ var assert = require('chai').assert;

describe('Archiver', function() {
var ArchiverCore = require('../lib/archiver/core');
var ArchiverCore = require('../lib/modules/core');

@@ -47,25 +48,16 @@ describe('#_normalizeSource(source)', function() {

describe('#_normalizeStream(source)', function() {
var core = new ArchiverCore();
});
it('should normalize older unbuffered streams', function() {
var noBufferStream = new UnBufferedStream();
var normalized = core._normalizeStream(noBufferStream);
assert.instanceOf(normalized, PassThrough);
});
});
describe('core', function() {
});
describe('#file', function() {
var actual;
before(function(done) {
var archive = archiver('json');
var testStream = new WriteStream('tmp/file.json');
describe('tar', function() {
describe('#append', function() {
it('should append Buffer sources', function(done) {
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/buffer.tar');
testStream.on('close', function() {
assert.equal(testStream.digest, 'bc84fec33e7a4f6c8777cabd0beba503a7bce331');
actual = common.readJSON('tmp/file.json');
done();

@@ -77,46 +69,30 @@ });

archive
.append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate })
.file('test/fixtures/test.txt', { name: 'test.txt', date: testDate })
.file('test/fixtures/test.txt')
.finalize();
});
it('should append Stream sources', function(done) {
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/stream.tar');
it('should append filepath', function() {
assert.isArray(actual);
assert.propertyVal(actual[0], 'name', 'test.txt');
assert.propertyVal(actual[0], 'date', '2013-01-03T14:26:38.000Z');
assert.propertyVal(actual[0], 'crc32', 585446183);
assert.propertyVal(actual[0], 'size', 19);
});
testStream.on('close', function() {
assert.equal(testStream.digest, 'b3bf662968c87989431a25b2f699eae213392e82');
done();
});
archive.pipe(testStream);
archive
.append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate })
.finalize();
it('should fallback to filepath when no name is set', function() {
assert.isArray(actual);
assert.propertyVal(actual[1], 'name', 'test/fixtures/test.txt');
});
});
it('should append multiple sources', function(done) {
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/multiple.tar');
describe('#bulk', function() {
var actual;
testStream.on('close', function() {
assert.equal(testStream.digest, '0c4e2a79d0d2c41ae5eb2e1e70d315a617583e4d');
done();
});
before(function(done) {
var archive = archiver('json');
var testStream = new WriteStream('tmp/bulk.json');
archive.pipe(testStream);
archive
.append('string', { name: 'string.txt', date: testDate })
.append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate2 })
.append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate })
.finalize();
});
it('should use prefix for deep paths', function(done) {
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/feature-prefix.tar');
testStream.on('close', function() {
assert.equal(testStream.digest, 'c1efbfbdc9a49979a6e02b4009003de533fcda48');
actual = common.readJSON('tmp/bulk.json');
done();

@@ -127,28 +103,12 @@ });

var deepPath = 'vvmbtqhysigpregbdrc/pyqaznbelhppibmbykz/';
deepPath += 'qcbclwjhktiazmhnsjt/kpsgdfyfkarbvnlinrt/';
deepPath += 'holobndxfccyecblhcc/';
deepPath += deepPath;
archive
.append('deep path', { name: deepPath + 'file.txt', date: testDate })
.bulk([
{ expand: true, cwd: 'test/fixtures', src: 'directory/**' }
])
.finalize();
});
it('should append zero length sources', function(done) {
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/zerolength.tar');
testStream.on('close', function() {
assert.equal(testStream.digest, 'f4f7b53f8ee4c7124298695bffbacfa9e9c0a99f');
done();
});
archive.pipe(testStream);
archive
.append('', { name: 'string.txt', date: testDate })
.append(new Buffer(0), { name: 'buffer.txt', date: testDate })
.append(fs.createReadStream('test/fixtures/empty.txt'), { name: 'stream.txt', date: testDate })
.finalize();
it('should append multiple files', function() {
assert.isArray(actual);
assert.lengthOf(actual, 3);
});

@@ -160,14 +120,11 @@ });

describe('zip', function() {
describe('tar', function() {
describe('#append', function() {
it('should append Buffer sources', function(done) {
var archive = archiver('zip', {
forceUTC: true
});
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/buffer.tar');
var testStream = new WriteHashStream('tmp/buffer.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, '9c14aaaab831cad774d0dfaf665ae6da8e33577c');
assert.equal(testStream.digest, 'bc84fec33e7a4f6c8777cabd0beba503a7bce331');
done();

@@ -184,10 +141,7 @@ });

it('should append Stream sources', function(done) {
var archive = archiver('zip', {
forceUTC: true
});
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/stream.tar');
var testStream = new WriteHashStream('tmp/stream.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, 'd7e3970142a06d4a87fbd6458284eeaf8f5de907');
assert.equal(testStream.digest, 'b3bf662968c87989431a25b2f699eae213392e82');
done();

@@ -204,10 +158,7 @@ });

it('should append multiple sources', function(done) {
var archive = archiver('zip', {
forceUTC: true
});
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/multiple.tar');
var testStream = new WriteHashStream('tmp/multiple.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, 'dac10ec60ee700ea07a90bca3e6d1a8db2670a9b');
assert.equal(testStream.digest, '0c4e2a79d0d2c41ae5eb2e1e70d315a617583e4d');
done();

@@ -221,16 +172,12 @@ });

.append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate2 })
.append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate2 })
.append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream-store.txt', date: testDate, store: true })
.append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate })
.finalize();
});
it('should support STORE for Buffer sources', function(done) {
var archive = archiver('zip', {
forceUTC: true
});
it('should use prefix for deep paths', function(done) {
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/feature-prefix.tar');
var testStream = new WriteHashStream('tmp/buffer-store.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, '09305770a3272cbcd7c151ee267cb1b0075dd29e');
assert.equal(testStream.digest, 'c1efbfbdc9a49979a6e02b4009003de533fcda48');
done();

@@ -241,96 +188,18 @@ });

archive
.append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate, store: true })
.finalize();
});
var deepPath = 'vvmbtqhysigpregbdrc/pyqaznbelhppibmbykz/';
deepPath += 'qcbclwjhktiazmhnsjt/kpsgdfyfkarbvnlinrt/';
deepPath += 'holobndxfccyecblhcc/';
deepPath += deepPath;
it('should support STORE for Stream sources', function(done) {
var archive = archiver('zip', {
forceUTC: true
});
var testStream = new WriteHashStream('tmp/stream-store.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, '999f407f3796b551d91608349a06521b8f80f229');
done();
});
archive.pipe(testStream);
archive
.append(fs.createReadStream('test/fixtures/test.txt'), { name: 'stream.txt', date: testDate, store: true })
.append('deep path', { name: deepPath + 'file.txt', date: testDate })
.finalize();
});
it('should support archive and file comments', function(done) {
var archive = archiver.createZip({
comment: 'this is a zip comment',
forceUTC: true
});
var testStream = new WriteHashStream('tmp/comments.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, 'ea7911cbe2508682c2a17d30b366ac33527ba84f');
done();
});
archive.pipe(testStream);
archive
.append(binaryBuffer(20000), {name: 'buffer.txt', date: testDate, comment: 'this is a file comment'})
.finalize();
});
it('should STORE files when compression level is zero', function(done) {
var archive = archiver('zip', {
forceUTC: true,
zlib: {
level: 0
}
});
var testStream = new WriteHashStream('tmp/store-level0.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, '09305770a3272cbcd7c151ee267cb1b0075dd29e');
done();
});
archive.pipe(testStream);
archive
.append(binaryBuffer(20000), { name: 'buffer.txt', date: testDate })
.finalize();
});
it('should properly handle accented characters in filenames', function(done) {
var archive = archiver('zip', {
forceUTC: true
});
var testStream = new WriteHashStream('tmp/accentedchars-filenames.zip');
testStream.on('close', function() {
assert.equal(testStream.digest, '69194ccb7175d7fcfcb06c8cb0ed2c429dadb9f9');
done();
});
archive.pipe(testStream);
archive
.append(binaryBuffer(20000), { name: 'àáâãäçèéêëìíîïñòóôõöùúûüýÿ.txt', date: testDate })
.append(binaryBuffer(20000), { name: 'ÀÁÂÃÄÇÈÉÊËÌÍÎÏÑÒÓÔÕÖÙÚÛÜÝ.txt', date: testDate2 })
.finalize();
});
it('should append zero length sources', function(done) {
var archive = archiver('zip', {
forceUTC: true
});
var testStream = new WriteHashStream('tmp/zerolength.zip');
var archive = archiver('tar');
var testStream = new WriteHashStream('tmp/zerolength.tar');
testStream.on('close', function() {
assert.equal(testStream.digest, '638e64b5b5769d2ad989a153ace568a0279cf6b6');
assert.equal(testStream.digest, 'f4f7b53f8ee4c7124298695bffbacfa9e9c0a99f');
done();

@@ -337,0 +206,0 @@ });

@@ -8,4 +8,3 @@ /*global before,describe,it */

var tar = require('../lib/headers/tar');
var zip = require('../lib/headers/zip');
var tar = require('../lib/modules/tar/headers');

@@ -137,204 +136,2 @@ var testDate = new Date('Jan 03 2013 14:26:38 GMT');

describe('zip', function() {
var fileFixture = fs.readFileSync('test/fixtures/headers/zip-file.bin');
var fileDescriptorFixture = fs.readFileSync('test/fixtures/headers/zip-filedescriptor.bin');
var centralDirectoryFixture = fs.readFileSync('test/fixtures/headers/zip-centralheader.bin');
var centralFooterFixture = fs.readFileSync('test/fixtures/headers/zip-centralfooter.bin');
describe('#encode(type, object)', function() {
describe('type->file', function() {
var actual = zip.encode('file', {
name: 'test.txt',
filenameLength: 8,
date: testDate,
comment: '',
mode: null,
store: true,
lastModifiedDate: 1109619539,
versionMadeBy: 20,
versionNeededToExtract: 20,
flags: 2056,
compressionMethod: 0,
uncompressedSize: 0,
compressedSize: 0,
offset: 0
});
it('should return an instance of Buffer', function() {
assert.instanceOf(actual, Buffer);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, fileFixture);
});
});
describe('type->fileDescriptor', function() {
var actual = zip.encode('fileDescriptor', {
crc32: 585446183,
uncompressedSize: 19,
compressedSize: 19,
});
it('should return an instance of Buffer', function() {
assert.instanceOf(actual, Buffer);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, fileDescriptorFixture);
});
});
describe('type->centralDirectory', function() {
var actual = zip.encode('centralDirectory', {
name: 'test.txt',
filenameLength: 8,
date: testDate,
store: true,
comment: '',
mode: null,
lastModifiedDate: 1109619539,
versionMadeBy: 20,
versionNeededToExtract: 20,
flags: 2056,
compressionMethod: 0,
uncompressedSize: 19,
compressedSize: 19,
offset: 0,
crc32: 585446183
});
it('should return an instance of Buffer', function() {
assert.instanceOf(actual, Buffer);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, centralDirectoryFixture);
});
});
describe('type->centralFooter', function() {
var actual = zip.encode('centralFooter', {
directoryRecordsDisk: 1,
directoryRecords: 1,
centralDirectorySize: 56,
centralDirectoryOffset: 73,
comment: ''
});
it('should return an instance of Buffer', function() {
assert.instanceOf(actual, Buffer);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, centralFooterFixture);
});
});
});
describe('#decode(type, buffer)', function() {
describe('type->file', function() {
var actual = zip.decode('file', fileFixture);
it('should return an object', function() {
assert.isObject(actual);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, {
signature: 67324752,
versionNeededToExtract: 20,
flags: 2056,
compressionMethod: 0,
lastModifiedDate: 1109619539,
crc32: 0,
compressedSize: 0,
uncompressedSize: 0,
filenameLength: 8,
extraFieldLength: 0,
name: 'test.txt',
extraField: null
});
});
});
describe('type->fileDescriptor', function() {
var actual = zip.decode('fileDescriptor', fileDescriptorFixture);
it('should return an object', function() {
assert.isObject(actual);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, {
signature: 134695760,
crc32: 585446183,
uncompressedSize: 19,
compressedSize: 19,
});
});
});
describe('type->centralDirectory', function() {
var actual = zip.decode('centralDirectory', centralDirectoryFixture);
it('should return an object', function() {
assert.isObject(actual);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, {
signature: 33639248,
versionMadeBy: 20,
versionNeededToExtract: 20,
flags: 2056,
compressionMethod: 0,
lastModifiedDate: 1109619539,
crc32: 585446183,
compressedSize: 19,
uncompressedSize: 19,
filenameLength: 8,
extraFieldLength: 0,
commentLength: 0,
diskNumberStart: 0,
internalFileAttributes: 0,
externalFileAttributes: 0,
offset: 0,
name: 'test.txt',
extraField: null,
comment: null
});
});
});
describe('type->centralFooter', function() {
var actual = zip.decode('centralFooter', centralFooterFixture);
it('should return an object', function() {
assert.isObject(actual);
});
it('should match provided fixture', function() {
assert.deepEqual(actual, {
signature: 101010256,
diskNumber: 0,
diskNumberStart: 0,
directoryRecordsDisk: 1,
directoryRecords: 1,
centralDirectorySize: 56,
centralDirectoryOffset: 73,
commentLength: 0,
comment: null
});
});
});
});
});
});

@@ -7,3 +7,3 @@ var crypto = require('crypto');

var Readable = require('stream').Readable || require('readable-stream');
var Writable = require('stream').Writable || require('readable-stream/writable');
var Writable = require('stream').Writable || require('readable-stream').Writable;

@@ -65,2 +65,17 @@ function adjustDateByOffset(d, offset) {

function readJSON(filepath) {
var contents;
try {
contents = fs.readFileSync(String(filepath));
contents = JSON.parse(contents);
} catch(e) {
contents = null;
}
return contents;
}
module.exports.readJSON = readJSON;
function UnBufferedStream() {

@@ -67,0 +82,0 @@ this.readable = true;

@@ -12,3 +12,2 @@ /*global describe,it */

var ChecksumStream = require('../lib/util/ChecksumStream');
var DeflateRawChecksum = require('../lib/util/DeflateRawChecksum');
var crc32 = require('../lib/util/crc32');

@@ -20,4 +19,2 @@ var utils = require('../lib/util');

var testDate = new Date('Jan 03 2013 14:26:38 GMT');
var testDateDos = 1109607251;
var testDateDosUTC = 1109619539;
var testDateEpoch = 1357223198;

@@ -62,63 +59,30 @@ var testDateOctal = 12071312436;

describe('crc32', function() {
describe('CRC32', function() {
describe('crc32(data)', function() {
it('should initialize CRC32 instance based on data', function() {
var actual = crc32('testing checksum');
describe('#update(data)', function() {
it('should update crc32 based on data', function() {
var actual = crc32.createCRC32().update('testing checksum');
assert.equal(actual.crc, 323269802);
});
assert.equal(actual.crc, 323269802);
});
});
describe('#digest()', function() {
it('should return digest of crc32', function() {
var actual = crc32.createCRC32().update('testing checksum').digest();
describe('#update(data)', function() {
it('should update CRC32 based on data', function() {
var actual = crc32().update('testing checksum');
assert.equal(actual, -323269803);
});
assert.equal(actual.crc, 323269802);
});
});
describe('createCRC32()', function() {
it('should return an instance of CRC32', function() {
assert.instanceOf(crc32.createCRC32(), crc32.CRC32);
});
});
describe('#digest()', function() {
it('should return digest of CRC32', function() {
var actual = crc32().update('testing checksum').digest();
});
describe('DeflateRawChecksum', function() {
it('should checksum data while writing', function(done) {
var deflate = new DeflateRawChecksum();
deflate.on('end', function() {
assert.equal(deflate.digest, -270675091);
done();
assert.equal(actual, -323269803);
});
deflate.write(testBuffer);
deflate.end();
});
it('should calculate data size while writing', function(done) {
var deflate = new DeflateRawChecksum();
deflate.on('end', function() {
assert.equal(deflate.rawSize, 20000);
done();
});
deflate.write(testBuffer);
deflate.end();
});
});
describe('index', function() {

@@ -148,10 +112,2 @@

describe('convertDateTimeDos(input)', function() {
it('should convert DOS input into an instance of Date', function() {
var actual = adjustDateByOffset(utils.convertDateTimeDos(testDateDosUTC), testTimezoneOffset);
assert.deepEqual(actual, testDate);
});
});
describe('convertDateTimeEpoch(input)', function() {

@@ -185,3 +141,3 @@ it('should convert epoch input into an instance of Date', function() {

describe('defaults(object, source)', function() {
describe('defaults(object, source, guard)', function() {
it('should default when object key is missing', function() {

@@ -197,55 +153,4 @@ var actual = utils.defaults({ value1: true }, {

});
it('should default when object key contains null value', function() {
var actual = utils.defaults({ value1: null }, {
value1: true,
value2: true
});
assert.deepEqual(actual, {
value1: true,
value2: true
});
});
it('should not default when object value is zero', function() {
var actual = utils.defaults({ value1: 0 }, {
value1: 1
});
assert.deepEqual(actual, {
value1: 0
});
});
it('should support defaulting multiple levels', function() {
var actual = utils.defaults({
level1: {
value1: 0
}
}, {
level1: {
value2: 2
}
});
assert.deepEqual(actual, {
level1: {
value1: 0,
value2: 2
}
});
});
});
describe('dosDateTime(date, utc)', function() {
it.skip('should convert date into its DOS representation', function() {
assert.deepEqual(utils.dosDateTime(testDate), testDateDos);
});
it('should convert date (forcing UTC) into its DOS representation', function() {
assert.equal(utils.dosDateTime(testDate, true), testDateDosUTC);
});
});
describe('epochDateTime(date)', function() {

@@ -281,5 +186,7 @@ it('should convert date into its epoch representation', function() {

describe('sanitizeFilePath(filepath)', function() {
describe('sanitizePath(filepath)', function() {
it('should sanitize filepath', function() {
assert.equal(utils.sanitizeFilePath('\\this/path//file.txt'), 'this/path/file.txt');
assert.equal(utils.sanitizePath('\\this/path//file.txt'), 'this/path/file.txt');
assert.equal(utils.sanitizePath('/this/path/file.txt'), 'this/path/file.txt');
assert.equal(utils.sanitizePath('c:\\this\\path\\file.txt'), 'c/this/path/file.txt');
});

@@ -286,0 +193,0 @@ });
