unzipper - npm Package Compare versions

Comparing version 0.9.15 to 0.10.0

.circleci/config.yml

(68 changed lines; diff not shown)

lib/extract.js

```diff
@@ -5,36 +5,18 @@ module.exports = Extract;
 var Writer = require('fstream').Writer;
-var util = require('util');
 var path = require('path');
+var stream = require('stream');
+var duplexer2 = require('duplexer2');
+var Promise = require('bluebird');
 
-util.inherits(Extract, Parse);
-
 function Extract (opts) {
   if (!(this instanceof Extract))
     return new Extract(opts);
 
-  var self = this;
-  var finishCb;
-  var pending = 0;
-  var _final = typeof this._final === 'function' ? this._final : undefined;
-
-  function checkFinished() {
-    if (pending === 0 && finishCb) {
-      _final ? _final(finishCb) : finishCb();
-    }
-  }
-
-  this._final = function(cb) {
-    finishCb = cb;
-    checkFinished();
-  };
-
   // make sure path is normalized before using it
   opts.path = path.normalize(opts.path);
 
-  Parse.call(self,opts);
+  var parser = new Parse(opts);
 
-  self.on('entry', function(entry) {
-    if (entry.type == 'Directory') return;
+  var outStream = new stream.Writable({objectMode: true});
+  outStream._write = function(entry, encoding, cb) {
+    if (entry.type == 'Directory') return cb();
 
     // to avoid zip slip (writing outside of the destination), we resolve
@@ -45,3 +27,3 @@ // the target path, and make sure it's nested in the intended
     if (extractPath.indexOf(opts.path) != 0) {
-      return;
+      return cb();
     }
@@ -51,14 +33,26 @@
-    pending += 1;
     entry.pipe(writer)
-      .on('error',function(e) {
-        self.emit('error',e);
-        pending -= 1;
-        checkFinished();
-      })
-      .on('close', function() {
-        pending -= 1;
-        checkFinished();
-      });
-  });
+      .on('error', cb)
+      .on('close', cb);
+  };
+
+  var extract = duplexer2(parser,outStream);
+  parser.once('crx-header', function(crxHeader) {
+    extract.crxHeader = crxHeader;
+  });
+
+  parser
+    .pipe(outStream)
+    .on('finish',function() {
+      extract.emit('close');
+    });
+
+  extract.promise = function() {
+    return new Promise(function(resolve, reject) {
+      extract.on('finish', resolve);
+      extract.on('error',reject);
+    });
+  };
+
+  return extract;
 }
```
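Taken together, the rewrite drops the `pending`/`checkFinished` bookkeeping in favor of a `duplexer2` pair around `Parse` and a writable object stream, and it exposes a `promise()` helper that settles when extraction finishes. A minimal usage sketch against the new API (paths are hypothetical):

```js
var fs = require('fs');
var unzipper = require('unzipper');

// Extract() now returns a duplex stream; pipe a zipfile into it and
// wait on promise(), which resolves on 'finish' and rejects on 'error'.
fs.createReadStream('path/to/archive.zip')         // hypothetical input
  .pipe(unzipper.Extract({ path: 'output/path' })) // hypothetical output dir
  .promise()
  .then(function() { console.log('extraction finished'); })
  .catch(function(e) { console.error('extraction failed', e); });
```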

lib/Open/directory.js

```diff
@@ -8,2 +8,4 @@ var binary = require('binary');
 var Buffer = require('../Buffer');
+var path = require('path');
+var Writer = require('fstream').Writer;
 
@@ -13,17 +15,49 @@ var signature = Buffer.alloc(4);
-module.exports = function centralDirectory(source) {
+function getCrxHeader(source) {
+  var sourceStream = source.stream(0).pipe(PullStream());
+
+  return sourceStream.pull(4).then(function(data) {
+    var signature = data.readUInt32LE(0);
+    if (signature === 0x34327243) {
+      var crxHeader;
+      return sourceStream.pull(12).then(function(data) {
+        crxHeader = binary.parse(data)
+          .word32lu('version')
+          .word32lu('pubKeyLength')
+          .word32lu('signatureLength')
+          .vars;
+      }).then(function() {
+        return sourceStream.pull(crxHeader.pubKeyLength + crxHeader.signatureLength);
+      }).then(function(data) {
+        crxHeader.publicKey = data.slice(0,crxHeader.pubKeyLength);
+        crxHeader.signature = data.slice(crxHeader.pubKeyLength);
+        crxHeader.size = 16 + crxHeader.pubKeyLength + crxHeader.signatureLength;
+        return crxHeader;
+      });
+    }
+  });
+}
+
+module.exports = function centralDirectory(source, options) {
   var endDir = PullStream(),
       records = PullStream(),
       self = this,
+      tailSize = (options && options.tailSize) || 80,
+      crxHeader,
       vars;
 
+  if (options && options.crx)
+    crxHeader = getCrxHeader(source);
+
   return source.size()
     .then(function(size) {
-      source.stream(size-40).pipe(endDir);
+      source.stream(Math.max(0,size-tailSize)).pipe(endDir);
       return endDir.pull(signature);
     })
     .then(function() {
-      return endDir.pull(22);
+      return Promise.props({directory: endDir.pull(22), crxHeader: crxHeader});
     })
-    .then(function(data) {
+    .then(function(d) {
+      var data = d.directory;
+      var startOffset = d.crxHeader && d.crxHeader.size || 0;
+
       vars = binary.parse(data)
@@ -40,4 +74,32 @@ .word32lu('signature')
+      vars.offsetToStartOfCentralDirectory += startOffset;
       source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);
 
+      vars.extract = function(opts) {
+        if (!opts || !opts.path) throw new Error('PATH_MISSING');
+        return vars.files.then(function(files) {
+          return Promise.map(files, function(entry) {
+            if (entry.type == 'Directory') return;
+
+            // to avoid zip slip (writing outside of the destination), we resolve
+            // the target path, and make sure it's nested in the intended
+            // destination, or not extract it otherwise.
+            var extractPath = path.join(opts.path, entry.path);
+            if (extractPath.indexOf(opts.path) != 0) {
+              return;
+            }
+            var writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });
+
+            return new Promise(function(resolve, reject) {
+              entry.stream(opts.password)
+                .on('error',reject)
+                .pipe(writer)
+                .on('close',resolve)
+                .on('error',reject);
+            });
+          }, {concurrency: opts.concurrency || 1});
+        });
+      };
+
       vars.files = Promise.mapSeries(Array(vars.numberOfRecords),function() {
@@ -65,2 +127,4 @@ return records.pull(46).then(function(data) {
+          vars.offsetToLocalFileHeader += startOffset;
           return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
@@ -78,4 +142,5 @@ vars.pathBuffer = fileNameBuffer;
          vars.comment = comment;
+          vars.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(vars.path)) ? 'Directory' : 'File';
           vars.stream = function(_password) {
-            return unzip(source, vars.offsetToLocalFileHeader,_password);
+            return unzip(source, vars.offsetToLocalFileHeader,_password, vars);
           };
@@ -82,0 +147,0 @@ vars.buffer = function(_password) {
```

lib/Open/index.js

```diff
@@ -11,3 +11,3 @@ var fs = require('fs');
 module.exports = {
-  buffer: function(buffer) {
+  buffer: function(buffer, options) {
     var source = {
@@ -23,5 +23,5 @@ stream: function(offset, length) {
     };
-    return directory(source);
+    return directory(source, options);
   },
-  file: function(filename) {
+  file: function(filename, options) {
     var source = {
@@ -42,16 +42,16 @@ stream: function(offset,length) {
     };
-    return directory(source);
+    return directory(source, options);
   },
-  url: function(request,opt) {
-    if (typeof opt === 'string')
-      opt = {url: opt};
-    if (!opt.url)
+  url: function(request, params, options) {
+    if (typeof params === 'string')
+      params = {url: params};
+    if (!params.url)
       throw 'URL missing';
-    opt.headers = opt.headers || {};
+    params.headers = params.headers || {};
 
     var source = {
       stream : function(offset,length) {
-        var options = Object.create(opt);
-        options.headers = Object.create(opt.headers);
+        var options = Object.create(params);
+        options.headers = Object.create(params.headers);
         options.headers.range = 'bytes='+offset+'-' + (length ? length : '');
@@ -62,3 +62,3 @@ return request(options);
       return new Promise(function(resolve,reject) {
-        var req = request(opt);
+        var req = request(params);
        req.on('response',function(d) {
@@ -72,6 +72,6 @@ req.abort();
-    return directory(source);
+    return directory(source, options);
   },
-  s3 : function(client,params) {
+  s3 : function(client,params, options) {
     var source = {
@@ -97,5 +97,4 @@ size: function() {
-    return directory(source);
+    return directory(source, options);
   }
 };
```

lib/unzip.js

```diff
@@ -14,3 +14,3 @@ var Promise = require('bluebird');
-module.exports = function unzip(source,offset,_password) {
+module.exports = function unzip(source,offset,_password, directoryVars) {
   var file = PullStream(),
@@ -48,2 +48,5 @@ entry = Stream.PassThrough(),
       vars.extra = parseExtraField(extraField, vars);
+      // Ignore local file header vars if the directory vars are available
+      if (directoryVars && directoryVars.compressedSize) vars = directoryVars;
+
       if (vars.flags & 0x01) checkEncryption = file.pull(12)
@@ -50,0 +53,0 @@ .then(function(header) {
```

lib/parse.js

```diff
@@ -46,2 +46,5 @@ var util = require('util');
+    if (signature === 0x34327243) {
+      return self._readCrxHeader();
+    }
     if (signature === 0x04034b50) {
@@ -67,2 +70,19 @@ return self._readFile();
+Parse.prototype._readCrxHeader = function() {
+  var self = this;
+  return self.pull(12).then(function(data) {
+    self.crxHeader = binary.parse(data)
+      .word32lu('version')
+      .word32lu('pubKeyLength')
+      .word32lu('signatureLength')
+      .vars;
+    return self.pull(self.crxHeader.pubKeyLength + self.crxHeader.signatureLength);
+  }).then(function(data) {
+    self.crxHeader.publicKey = data.slice(0,self.crxHeader.pubKeyLength);
+    self.crxHeader.signature = data.slice(self.crxHeader.pubKeyLength);
+    self.emit('crx-header',self.crxHeader);
+    return self._readRecord();
+  });
+};
+
 Parse.prototype._readFile = function () {
@@ -84,4 +104,6 @@ var self = this;
+    if (self.crxHeader) vars.crxHeader = self.crxHeader;
+
     return self.pull(vars.fileNameLength).then(function(fileNameBuffer) {
-      fileName = fileNameBuffer.toString('utf8');
+      var fileName = fileNameBuffer.toString('utf8');
+      var entry = Stream.PassThrough();
@@ -88,0 +110,0 @@ var __autodraining = false;
```
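The new `crx-header` event added here is what `Extract` (see lib/extract.js above) forwards as `extract.crxHeader`. A small sketch of listening for it directly when streaming a Chrome extension (the file name is hypothetical):

```js
var fs = require('fs');
var unzipper = require('unzipper');

fs.createReadStream('extension.crx') // hypothetical .crx file
  .pipe(unzipper.Parse())
  .on('crx-header', function(crxHeader) {
    // parsed fields: version, pubKeyLength, signatureLength,
    // plus the publicKey and signature buffers
    console.log('crx version:', crxHeader.version);
  })
  .on('entry', function(entry) {
    entry.autodrain(); // discard file contents in this sketch
  });
```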

package.json

```diff
 {
   "name": "unzipper",
-  "version": "0.9.15",
+  "version": "0.10.0",
   "description": "Unzip cross-platform streaming API ",
@@ -5,0 +5,0 @@ "author": "Evan Oxfeld <eoxfeld@gmail.com>",
```

README.md

```diff
@@ -30,2 +30,4 @@ [![NPM Version][npm-image]][npm-url]
+Chrome extension files (.crx) are zipfiles with an [extra header](http://www.adambarth.com/experimental/crx/docs/crx.html) at the start of the file. Unzipper will parse .crx files with the streaming methods (`Parse` and `ParseOne`). The `Open` methods will check for `crx` headers and parse crx files, but only if you provide `crx: true` in options.
+
 ## Installation
```
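A sketch of both modes described above (the .crx file name is hypothetical):

```js
var fs = require('fs');
var unzipper = require('unzipper');

// Streaming: Parse recognizes the crx magic bytes on its own.
fs.createReadStream('extension.crx')
  .pipe(unzipper.Parse())
  .on('entry', function(entry) {
    console.log(entry.path);
    entry.autodrain();
  });

// Open: crx handling is opt-in; {crx: true} shifts all file offsets
// by the size of the crx header.
unzipper.Open.file('extension.crx', { crx: true })
  .then(function(directory) {
    directory.files.forEach(function(file) {
      console.log(file.path);
    });
  });
```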

```diff
@@ -197,3 +199,5 @@
-### Open.file([path])
+The last argument is an optional `options` object where you can specify `tailSize` (default 80 bytes), i.e. how many bytes should be read at the end of the zipfile to locate the endOfCentralDirectory. This location can vary depending on the zip64 extensible data sector size. Additionally, you can supply the option `crx: true`, which will check for a crx header and parse the file accordingly by shifting all file offsets by the length of the crx header.
+
+### Open.file([path], [options])
 Returns a Promise to the central directory information with methods to extract individual files. `start` and `end` options are used to avoid reading the whole file.
```
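For example, a sketch of raising `tailSize` for a zip64 archive whose end-of-central-directory record sits further from the end of the file (the path and value are illustrative):

```js
var unzipper = require('unzipper');

// Read a larger tail so the endOfCentralDirectory can still be located.
unzipper.Open.file('path/to/archive.zip', { tailSize: 1024 })
  .then(function(directory) {
    directory.files.forEach(function(file) {
      console.log(file.path, file.uncompressedSize);
    });
  });
```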

```diff
@@ -218,3 +222,3 @@
-### Open.url([requestLibrary], [url | options])
+### Open.url([requestLibrary], [url | params], [options])
 This function will return a Promise to the central directory information from a URL pointing to a zipfile. Range-headers are used to avoid reading the whole file. Unzipper does not ship with a request library, so you will have to provide it as the first argument.
```
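A sketch using the `request` library and a placeholder URL; the trailing options object takes the same `tailSize`/`crx` settings as the other `Open` methods:

```js
var request = require('request');
var unzipper = require('unzipper');

// example.com/archive.zip is a placeholder URL.
unzipper.Open.url(request, { url: 'https://example.com/archive.zip' }, { tailSize: 80 })
  .then(function(directory) {
    directory.files.forEach(function(file) {
      console.log(file.path);
    });
  });
```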

```diff
@@ -261,3 +265,3 @@
-### Open.s3([aws-sdk], [params])
+### Open.s3([aws-sdk], [params], [options])
 This function will return a Promise to the central directory information from a zipfile on S3. Range-headers are used to avoid reading the whole file. Unzipper does not ship with the aws-sdk, so you have to provide an instantiated client as the first argument. The params object requires `Bucket` and `Key` to fetch the correct file.
```
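A sketch with placeholder `Bucket`/`Key` values and a hypothetical region:

```js
var AWS = require('aws-sdk');
var unzipper = require('unzipper');

var s3Client = new AWS.S3({ region: 'us-east-1' }); // hypothetical region

unzipper.Open.s3(s3Client, { Bucket: 'my-bucket', Key: 'archive.zip' })
  .then(function(directory) {
    directory.files.forEach(function(file) {
      console.log(file.path);
    });
  });
```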

```diff
@@ -286,3 +290,3 @@
-### Open.buffer(buffer)
+### Open.buffer(buffer, [options])
 If you already have the zip file in-memory as a buffer, you can open the contents directly.
```
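A sketch that reads a zipfile into memory and then pulls the first entry's contents as a buffer (the path is hypothetical):

```js
var fs = require('fs');
var unzipper = require('unzipper');

var buffer = fs.readFileSync('path/to/archive.zip'); // hypothetical path

unzipper.Open.buffer(buffer)
  .then(function(directory) {
    return directory.files[0].buffer(); // read the first entry into memory
  })
  .then(function(content) {
    console.log(content.toString());
  });
```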

````diff
@@ -305,3 +309,14 @@
+### Open.[method].extract()
+The directory object returned from `Open.[method]` provides an `extract` method which extracts all the files to a specified `path`, with an optional `concurrency` (default: 1).
+
+Example (with concurrency of 5):
+
+```js
+unzip.Open.file('path/to/archive.zip')
+  .then(d => d.extract({path: '/extraction/path', concurrency: 5}));
+```
+
 ## Licenses
 See LICENCE
````