unzipper - npm Package Compare versions

Comparing version 0.11.6 to 0.12.1

eslint.config.mjs

lib/BufferStream.js

@@ -1,20 +1,19 @@

var Promise = require('bluebird');
var Stream = require('stream');
const Stream = require('stream');
module.exports = function(entry) {
return new Promise(function(resolve,reject) {
var chunks = [];
var bufferStream = Stream.Transform()
.on('finish',function() {
return new Promise(function(resolve, reject) {
const chunks = [];
const bufferStream = Stream.Transform()
.on('finish', function() {
resolve(Buffer.concat(chunks));
})
.on('error',reject);
bufferStream._transform = function(d,e,cb) {
.on('error', reject);
bufferStream._transform = function(d, e, cb) {
chunks.push(d);
cb();
};
entry.on('error',reject)
entry.on('error', reject)
.pipe(bufferStream);
});
};
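
lib/Decrypt.js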

@@ -1,13 +1,18 @@

var bigInt = require('big-integer');
var Stream = require('stream');
const Int64 = require("node-int64");
let Stream = require("stream");
var table;
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require("readable-stream");
let table;
function generateTable() {
var poly = 0xEDB88320,c,n,k;
const poly = 0xEDB88320;
let c, n, k;
table = [];
for (n = 0; n < 256; n++) {
c = n;
for (k = 0; k < 8; k++)
c = (c & 1) ? poly ^ (c >>> 1) : c = c >>> 1;
for (k = 0; k < 8; k++) c = c & 1 ? poly ^ (c >>> 1) : (c = c >>> 1);
table[n] = c >>> 0;

@@ -17,42 +22,65 @@ }

function crc(ch,crc) {
if (!table)
generateTable();
function crc(ch, crc) {
if (!table) generateTable();
if (ch.charCodeAt)
ch = ch.charCodeAt(0);
if (ch.charCodeAt) ch = ch.charCodeAt(0);
return (bigInt(crc).shiftRight(8).and(0xffffff)).xor(table[bigInt(crc).xor(ch).and(0xff)]).value;
const l = (crc.readUInt32BE() >> 8) & 0xffffff;
const r = table[(crc.readUInt32BE() ^ (ch >>> 0)) & 0xff];
return (l ^ r) >>> 0;
}
function Decrypt() {
if (!(this instanceof Decrypt))
return new Decrypt();
this.key0 = 305419896;
this.key1 = 591751049;
this.key2 = 878082192;
function multiply(a, b) {
const ah = (a >> 16) & 0xffff;
const al = a & 0xffff;
const bh = (b >> 16) & 0xffff;
const bl = b & 0xffff;
const high = (ah * bl + al * bh) & 0xffff;
return ((high << 16) >>> 0) + al * bl;
}
Decrypt.prototype.update = function(h) {
this.key0 = crc(h,this.key0);
this.key1 = bigInt(this.key0).and(255).and(4294967295).add(this.key1)
this.key1 = bigInt(this.key1).multiply(134775813).add(1).and(4294967295).value;
this.key2 = crc(bigInt(this.key1).shiftRight(24).and(255), this.key2);
function Decrypt() {
if (!(this instanceof Decrypt)) return new Decrypt();
this.key0 = Buffer.allocUnsafe(4);
this.key1 = Buffer.allocUnsafe(4);
this.key2 = Buffer.allocUnsafe(4);
this.key0.writeUInt32BE(0x12345678, 0);
this.key1.writeUInt32BE(0x23456789, 0);
this.key2.writeUInt32BE(0x34567890, 0);
}
Decrypt.prototype.update = function (h) {
this.key0.writeUInt32BE(crc(h, this.key0));
this.key1.writeUInt32BE(
((this.key0.readUInt32BE() & 0xff & 0xFFFFFFFF) +
this.key1.readUInt32BE()) >>> 0
);
const x = new Int64(
(multiply(this.key1.readUInt32BE(), 134775813) + 1) & 0xFFFFFFFF
);
const b = Buffer.alloc(8);
x.copy(b, 0);
b.copy(this.key1, 0, 4, 8);
this.key2.writeUInt32BE(
crc(((this.key1.readUInt32BE() >> 24) & 0xff) >>> 0, this.key2)
);
};
Decrypt.prototype.decryptByte = function(c) {
var k = bigInt(this.key2).or(2);
c = c ^ bigInt(k).multiply(bigInt(k^1)).shiftRight(8).and(255);
Decrypt.prototype.decryptByte = function (c) {
const k = (this.key2.readUInt32BE() | 2) >>> 0;
c = c ^ ((multiply(k, (k ^ 1 >>> 0)) >> 8) & 0xff);
this.update(c);
return c;
};
Decrypt.prototype.stream = function() {
var stream = Stream.Transform(),
self = this;
stream._transform = function(d,e,cb) {
for (var i = 0; i<d.length;i++) {
Decrypt.prototype.stream = function () {
const stream = Stream.Transform(),
self = this;
stream._transform = function (d, e, cb) {
for (let i = 0; i < d.length; i++) {
d[i] = self.decryptByte(d[i]);

@@ -63,8 +91,6 @@ }

};
return stream;
};
module.exports = Decrypt;
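
lib/extract.js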
module.exports = Extract;
var Parse = require('./parse');
var Writer = require('fstream').Writer;
var path = require('path');
var stream = require('stream');
var duplexer2 = require('duplexer2');
var Promise = require('bluebird');
const Parse = require('./parse');
const fs = require('fs-extra');
const path = require('path');
const stream = require('stream');
const duplexer2 = require('duplexer2');

@@ -14,15 +13,13 @@ function Extract (opts) {

var parser = new Parse(opts);
const parser = new Parse(opts);
var outStream = new stream.Writable({objectMode: true});
outStream._write = function(entry, encoding, cb) {
const outStream = new stream.Writable({objectMode: true});
outStream._write = async function(entry, encoding, cb) {
if (entry.type == 'Directory') return cb();
// to avoid zip slip (writing outside of the destination), we resolve
// the target path, and make sure it's nested in the intended
// destination, or not extract it otherwise.
// NOTE: Need to normalize to forward slashes for UNIX OS's to properly
// NOTE: Need to normalize to forward slashes for UNIX OS's to properly
// ignore the zip slipped file entirely
var extractPath = path.join(opts.path, entry.path.replace(/\\/g, '/'));
const extractPath = path.join(opts.path, entry.path.replace(/\\/g, '/'));
if (extractPath.indexOf(opts.path) != 0) {

@@ -32,4 +29,12 @@ return cb();

const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });
if (entry.type == 'Directory') {
await fs.ensureDir(extractPath);
return cb();
}
await fs.ensureDir(path.dirname(extractPath));
const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : fs.createWriteStream(extractPath);
entry.pipe(writer)

@@ -40,3 +45,3 @@ .on('error', cb)

var extract = duplexer2(parser,outStream);
const extract = duplexer2(parser, outStream);
parser.once('crx-header', function(crxHeader) {

@@ -48,10 +53,10 @@ extract.crxHeader = crxHeader;

.pipe(outStream)
.on('finish',function() {
.on('finish', function() {
extract.emit('close');
});
extract.promise = function() {
return new Promise(function(resolve, reject) {
extract.on('close', resolve);
extract.on('error',reject);
extract.on('error', reject);
});

@@ -58,0 +63,0 @@ };
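lib/NoopStream.js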

@@ -1,3 +1,3 @@

var Stream = require('stream');
var util = require('util');
const Stream = require('stream');
const util = require('util');
function NoopStream() {

@@ -10,6 +10,6 @@ if (!(this instanceof NoopStream)) {

util.inherits(NoopStream,Stream.Transform);
util.inherits(NoopStream, Stream.Transform);
NoopStream.prototype._transform = function(d,e,cb) { cb() ;};
NoopStream.prototype._transform = function(d, e, cb) { cb() ;};
module.exports = NoopStream;
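
lib/Open/directory.js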

@@ -1,21 +1,21 @@

var PullStream = require('../PullStream');
var unzip = require('./unzip');
var Promise = require('bluebird');
var BufferStream = require('../BufferStream');
var parseExtraField = require('../parseExtraField');
var path = require('path');
var Writer = require('fstream').Writer;
var parseDateTime = require('../parseDateTime');
var parseBuffer = require('../parseBuffer');
const PullStream = require('../PullStream');
const unzip = require('./unzip');
const BufferStream = require('../BufferStream');
const parseExtraField = require('../parseExtraField');
const path = require('path');
const fs = require('fs-extra');
const parseDateTime = require('../parseDateTime');
const parseBuffer = require('../parseBuffer');
const Bluebird = require('bluebird');
var signature = Buffer.alloc(4);
signature.writeUInt32LE(0x06054b50,0);
const signature = Buffer.alloc(4);
signature.writeUInt32LE(0x06054b50, 0);
function getCrxHeader(source) {
var sourceStream = source.stream(0).pipe(PullStream());
const sourceStream = source.stream(0).pipe(PullStream());
return sourceStream.pull(4).then(function(data) {
var signature = data.readUInt32LE(0);
const signature = data.readUInt32LE(0);
if (signature === 0x34327243) {
var crxHeader;
let crxHeader;
return sourceStream.pull(12).then(function(data) {

@@ -30,3 +30,3 @@ crxHeader = parseBuffer.parse(data, [

}).then(function(data) {
crxHeader.publicKey = data.slice(0,crxHeader.pubKeyLength);
crxHeader.publicKey = data.slice(0, crxHeader.pubKeyLength);
crxHeader.signature = data.slice(crxHeader.pubKeyLength);

@@ -42,3 +42,3 @@ crxHeader.size = 16 + crxHeader.pubKeyLength +crxHeader.signatureLength;

function getZip64CentralDirectory(source, zip64CDL) {
var d64loc = parseBuffer.parse(zip64CDL, [
const d64loc = parseBuffer.parse(zip64CDL, [
['signature', 4],

@@ -54,6 +54,6 @@ ['diskNumber', 4],

var dir64 = PullStream();
const dir64 = PullStream();
source.stream(d64loc.offsetToStartOfCentralDirectory).pipe(dir64);
return dir64.pull(56)
return dir64.pull(56);
}

@@ -63,3 +63,3 @@

function parseZip64DirRecord (dir64record) {
var vars = parseBuffer.parse(dir64record, [
const vars = parseBuffer.parse(dir64record, [
['signature', 4],

@@ -81,13 +81,13 @@ ['sizeOfCentralDirectory', 8],

return vars
return vars;
}
module.exports = function centralDirectory(source, options) {
var endDir = PullStream(),
records = PullStream(),
tailSize = (options && options.tailSize) || 80,
sourceSize,
crxHeader,
startOffset,
vars;
const endDir = PullStream();
const records = PullStream();
const tailSize = (options && options.tailSize) || 80;
let sourceSize,
crxHeader,
startOffset,
vars;

@@ -101,4 +101,4 @@ if (options && options.crx)

source.stream(Math.max(0,size-tailSize))
.on('error', function (error) { endDir.emit('error', error) })
source.stream(Math.max(0, size-tailSize))
.on('error', function (error) { endDir.emit('error', error); })
.pipe(endDir);

@@ -109,6 +109,6 @@

.then(function() {
return Promise.props({directory: endDir.pull(22), crxHeader: crxHeader});
return Bluebird.props({directory: endDir.pull(22), crxHeader: crxHeader});
})
.then(function(d) {
var data = d.directory;
const data = d.directory;
startOffset = d.crxHeader && d.crxHeader.size || 0;

@@ -135,4 +135,4 @@

// Offset to zip64 CDL is 20 bytes before normal CDR
const zip64CDLSize = 20
const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize)
const zip64CDLSize = 20;
const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize);
const zip64CDLStream = PullStream();

@@ -143,6 +143,6 @@

return zip64CDLStream.pull(zip64CDLSize)
.then(function (d) { return getZip64CentralDirectory(source, d) })
.then(function (d) { return getZip64CentralDirectory(source, d); })
.then(function (dir64record) {
vars = parseZip64DirRecord(dir64record)
})
vars = parseZip64DirRecord(dir64record);
});
} else {

@@ -165,20 +165,26 @@ vars.offsetToStartOfCentralDirectory += startOffset;

return vars.files.then(function(files) {
return Promise.map(files, function(entry) {
if (entry.type == 'Directory') return;
return Bluebird.map(files, async function(entry) {
// to avoid zip slip (writing outside of the destination), we resolve
// the target path, and make sure it's nested in the intended
// destination, or not extract it otherwise.
var extractPath = path.join(opts.path, entry.path);
const extractPath = path.join(opts.path, entry.path);
if (extractPath.indexOf(opts.path) != 0) {
return;
}
var writer = opts.getWriter ? opts.getWriter({path: extractPath}) : Writer({ path: extractPath });
if (entry.type == 'Directory') {
await fs.ensureDir(extractPath);
return;
}
await fs.ensureDir(path.dirname(extractPath));
const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : fs.createWriteStream(extractPath);
return new Promise(function(resolve, reject) {
entry.stream(opts.password)
.on('error',reject)
.on('error', reject)
.pipe(writer)
.on('close',resolve)
.on('error',reject);
.on('close', resolve)
.on('error', reject);
});

@@ -189,5 +195,5 @@ }, { concurrency: opts.concurrency > 1 ? opts.concurrency : 1 });

vars.files = Promise.mapSeries(Array(vars.numberOfRecords),function() {
return records.pull(46).then(function(data) {
var vars = vars = parseBuffer.parse(data, [
vars.files = Bluebird.mapSeries(Array(vars.numberOfRecords), function() {
return records.pull(46).then(function(data) {
const vars = parseBuffer.parse(data, [
['signature', 4],

@@ -212,38 +218,38 @@ ['versionMadeBy', 2],

vars.offsetToLocalFileHeader += startOffset;
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
vars.offsetToLocalFileHeader += startOffset;
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
vars.pathBuffer = fileNameBuffer;
vars.path = fileNameBuffer.toString('utf8');
vars.isUnicode = (vars.flags & 0x800) != 0;
return records.pull(vars.extraFieldLength);
})
.then(function(extraField) {
vars.extra = parseExtraField(extraField, vars);
return records.pull(vars.fileCommentLength);
})
.then(function(comment) {
vars.comment = comment;
vars.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(vars.path)) ? 'Directory' : 'File';
var padding = options && options.padding || 1000;
vars.stream = function(_password) {
var totalSize = 30
return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
vars.pathBuffer = fileNameBuffer;
vars.path = fileNameBuffer.toString('utf8');
vars.isUnicode = (vars.flags & 0x800) != 0;
return records.pull(vars.extraFieldLength);
})
.then(function(extraField) {
vars.extra = parseExtraField(extraField, vars);
return records.pull(vars.fileCommentLength);
})
.then(function(comment) {
vars.comment = comment;
vars.type = (vars.uncompressedSize === 0 && /[/\\]$/.test(vars.path)) ? 'Directory' : 'File';
const padding = options && options.padding || 1000;
vars.stream = function(_password) {
const totalSize = 30
+ padding // add an extra buffer
+ (vars.extraFieldLength || 0)
+ (vars.extraFieldLength || 0)
+ (vars.fileNameLength || 0)
+ vars.compressedSize;
return unzip(source, vars.offsetToLocalFileHeader,_password, vars, totalSize);
};
vars.buffer = function(_password) {
return BufferStream(vars.stream(_password));
};
return vars;
return unzip(source, vars.offsetToLocalFileHeader, _password, vars, totalSize);
};
vars.buffer = function(_password) {
return BufferStream(vars.stream(_password));
};
return vars;
});
});
});
return Bluebird.props(vars);
});
return Promise.props(vars);
});
};
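
lib/Open/index.js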

@@ -1,12 +1,11 @@

var fs = require('graceful-fs');
var Promise = require('bluebird');
var directory = require('./directory');
var Stream = require('stream');
const fs = require('graceful-fs');
const directory = require('./directory');
const Stream = require('stream');
module.exports = {
buffer: function(buffer, options) {
var source = {
const source = {
stream: function(offset, length) {
var stream = Stream.PassThrough();
var end = length ? offset + length : undefined;
const stream = Stream.PassThrough();
const end = length ? offset + length : undefined;
stream.end(buffer.slice(offset, end));

@@ -22,10 +21,10 @@ return stream;

file: function(filename, options) {
var source = {
stream: function(start,length) {
var end = length ? start + length : undefined;
return fs.createReadStream(filename,{start, end});
const source = {
stream: function(start, length) {
const end = length ? start + length : undefined;
return fs.createReadStream(filename, {start, end});
},
size: function() {
return new Promise(function(resolve,reject) {
fs.stat(filename,function(err,d) {
return new Promise(function(resolve, reject) {
fs.stat(filename, function(err, d) {
if (err)

@@ -49,6 +48,6 @@ reject(err);

var source = {
stream : function(offset,length) {
var options = Object.create(params);
var end = length ? offset + length : '';
const source = {
stream : function(offset, length) {
const options = Object.create(params);
const end = length ? offset + length : '';
options.headers = Object.create(params.headers);

@@ -59,5 +58,5 @@ options.headers.range = 'bytes='+offset+'-' + end;

size: function() {
return new Promise(function(resolve,reject) {
var req = request(params);
req.on('response',function(d) {
return new Promise(function(resolve, reject) {
const req = request(params);
req.on('response', function(d) {
req.abort();

@@ -68,3 +67,3 @@ if (!d.headers['content-length'])

resolve(d.headers['content-length']);
}).on('error',reject);
}).on('error', reject);
});

@@ -77,7 +76,7 @@ }

s3 : function(client,params, options) {
var source = {
s3 : function(client, params, options) {
const source = {
size: function() {
return new Promise(function(resolve,reject) {
client.headObject(params, function(err,d) {
return new Promise(function(resolve, reject) {
client.headObject(params, function(err, d) {
if (err)

@@ -90,7 +89,7 @@ reject(err);

},
stream: function(offset,length) {
var d = {};
for (var key in params)
stream: function(offset, length) {
const d = {};
for (const key in params)
d[key] = params[key];
var end = length ? offset + length : '';
const end = length ? offset + length : '';
d.Range = 'bytes='+offset+'-' + end;

@@ -97,0 +96,0 @@ return client.getObject(d).createReadStream();
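lib/Open/unzip.js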

@@ -1,15 +1,14 @@

var Promise = require('bluebird');
var Decrypt = require('../Decrypt');
var PullStream = require('../PullStream');
var Stream = require('stream');
var zlib = require('zlib');
var parseExtraField = require('../parseExtraField');
var parseDateTime = require('../parseDateTime');
var parseBuffer = require('../parseBuffer');
const Decrypt = require('../Decrypt');
const PullStream = require('../PullStream');
const Stream = require('stream');
const zlib = require('zlib');
const parseExtraField = require('../parseExtraField');
const parseDateTime = require('../parseDateTime');
const parseBuffer = require('../parseBuffer');
module.exports = function unzip(source, offset, _password, directoryVars, length) {
var file = PullStream(),
entry = Stream.PassThrough();
const file = PullStream(),
entry = Stream.PassThrough();
var req = source.stream(offset, length);
const req = source.stream(offset, length);
req.pipe(file).on('error', function(e) {

@@ -21,3 +20,3 @@ entry.emit('error', e);

.then(function(data) {
var vars = parseBuffer.parse(data, [
let vars = parseBuffer.parse(data, [
['signature', 4],

@@ -44,3 +43,3 @@ ['versionsNeededToExtract', 2],

.then(function(extraField) {
var checkEncryption;
let checkEncryption;
vars.extra = parseExtraField(extraField, vars);

@@ -55,3 +54,3 @@ // Ignore local file header vars if the directory vars are available

var decrypt = Decrypt();
const decrypt = Decrypt();

@@ -62,3 +61,3 @@ String(_password).split('').forEach(function(d) {

for (var i=0; i < header.length; i++)
for (let i=0; i < header.length; i++)
header[i] = decrypt.decryptByte(header[i]);

@@ -69,3 +68,3 @@

var check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff;
const check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff;
if (header[11] !== check)

@@ -79,3 +78,3 @@ throw new Error('BAD_PASSWORD');

.then(function() {
entry.emit('vars',vars);
entry.emit('vars', vars);
return vars;

@@ -86,40 +85,40 @@ });

entry.vars.then(function(vars) {
var fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0,
eof;
entry.vars.then(function(vars) {
const fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0;
let eof;
var inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream.PassThrough();
const inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream.PassThrough();
if (fileSizeKnown) {
entry.size = vars.uncompressedSize;
eof = vars.compressedSize;
} else {
eof = Buffer.alloc(4);
eof.writeUInt32LE(0x08074b50, 0);
}
if (fileSizeKnown) {
entry.size = vars.uncompressedSize;
eof = vars.compressedSize;
} else {
eof = Buffer.alloc(4);
eof.writeUInt32LE(0x08074b50, 0);
}
var stream = file.stream(eof);
let stream = file.stream(eof);
if (vars.decrypt)
stream = stream.pipe(vars.decrypt.stream());
if (vars.decrypt)
stream = stream.pipe(vars.decrypt.stream());
stream
.pipe(inflater)
.on('error',function(err) { entry.emit('error',err);})
.pipe(entry)
.on('finish', function() {
if(req.destroy)
req.destroy()
else if (req.abort)
req.abort();
else if (req.close)
req.close();
else if (req.push)
req.push();
else
console.log('warning - unable to close stream');
});
})
stream
.pipe(inflater)
.on('error', function(err) { entry.emit('error', err);})
.pipe(entry)
.on('finish', function() {
if(req.destroy)
req.destroy();
else if (req.abort)
req.abort();
else if (req.close)
req.close();
else if (req.push)
req.push();
else
console.log('warning - unable to close stream');
});
})
.catch(function(e) {
entry.emit('error',e);
entry.emit('error', e);
});

@@ -126,0 +125,0 @@
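lib/parse.js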

@@ -1,14 +1,13 @@

var util = require('util');
var zlib = require('zlib');
var Stream = require('stream');
var Promise = require('bluebird');
var PullStream = require('./PullStream');
var NoopStream = require('./NoopStream');
var BufferStream = require('./BufferStream');
var parseExtraField = require('./parseExtraField');
var parseDateTime = require('./parseDateTime');
var pipeline = Stream.pipeline;
var parseBuffer = require('./parseBuffer');
const util = require('util');
const zlib = require('zlib');
const Stream = require('stream');
const PullStream = require('./PullStream');
const NoopStream = require('./NoopStream');
const BufferStream = require('./BufferStream');
const parseExtraField = require('./parseExtraField');
const parseDateTime = require('./parseDateTime');
const pipeline = Stream.pipeline;
const parseBuffer = require('./parseBuffer');
var endDirectorySignature = Buffer.alloc(4);
const endDirectorySignature = Buffer.alloc(4);
endDirectorySignature.writeUInt32LE(0x06054b50, 0);

@@ -20,7 +19,7 @@

}
var self = this;
const self = this;
self._opts = opts || { verbose: false };
PullStream.call(self, self._opts);
self.on('finish',function() {
self.on('finish', function() {
self.emit('end');

@@ -31,3 +30,3 @@ self.emit('close');

if (!self.__emittedError || self.__emittedError !== e)
self.emit('error',e);
self.emit('error', e);
});

@@ -39,3 +38,3 @@ }

Parse.prototype._readRecord = function () {
var self = this;
const self = this;

@@ -46,3 +45,3 @@ return self.pull(4).then(function(data) {

var signature = data.readUInt32LE(0);
const signature = data.readUInt32LE(0);

@@ -65,3 +64,3 @@ if (signature === 0x34327243) {

// signature to be consumed so set includeEof=true
var includeEof = true;
const includeEof = true;
return self.pull(endDirectorySignature, includeEof).then(function() {

@@ -81,3 +80,3 @@ return self._readEndOfCentralDirectoryRecord();

Parse.prototype._readCrxHeader = function() {
var self = this;
const self = this;
return self.pull(12).then(function(data) {

@@ -91,5 +90,5 @@ self.crxHeader = parseBuffer.parse(data, [

}).then(function(data) {
self.crxHeader.publicKey = data.slice(0,self.crxHeader.pubKeyLength);
self.crxHeader.publicKey = data.slice(0, self.crxHeader.pubKeyLength);
self.crxHeader.signature = data.slice(self.crxHeader.pubKeyLength);
self.emit('crx-header',self.crxHeader);
self.emit('crx-header', self.crxHeader);
return true;

@@ -100,5 +99,5 @@ });

Parse.prototype._readFile = function () {
var self = this;
const self = this;
return self.pull(26).then(function(data) {
var vars = parseBuffer.parse(data, [
const vars = parseBuffer.parse(data, [
['versionsNeededToExtract', 2],

@@ -121,13 +120,13 @@ ['flags', 2],

return self.pull(vars.fileNameLength).then(function(fileNameBuffer) {
var fileName = fileNameBuffer.toString('utf8');
var entry = Stream.PassThrough();
var __autodraining = false;
const fileName = fileNameBuffer.toString('utf8');
const entry = Stream.PassThrough();
let __autodraining = false;
entry.autodrain = function() {
__autodraining = true;
var draining = entry.pipe(NoopStream());
const draining = entry.pipe(NoopStream());
draining.promise = function() {
return new Promise(function(resolve, reject) {
draining.on('finish',resolve);
draining.on('error',reject);
draining.on('finish', resolve);
draining.on('error', reject);
});

@@ -149,3 +148,3 @@ };

};
entry.type = (vars.uncompressedSize === 0 && /[\/\\]$/.test(fileName)) ? 'Directory' : 'File';
entry.type = (vars.uncompressedSize === 0 && /[/\\]$/.test(fileName)) ? 'Directory' : 'File';

@@ -165,3 +164,3 @@ if (self._opts.verbose) {

return self.pull(vars.extraFieldLength).then(function(extraField) {
var extra = parseExtraField(extraField, vars);
const extra = parseExtraField(extraField, vars);

@@ -187,7 +186,7 @@ entry.vars = vars;

var fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0,
eof;
const fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0;
let eof;
entry.__autodraining = __autodraining; // expose __autodraining for test purposes
var inflater = (vars.compressionMethod && !__autodraining) ? zlib.createInflateRaw() : Stream.PassThrough();
entry.__autodraining = __autodraining; // expose __autodraining for test purposes
const inflater = (vars.compressionMethod && !__autodraining) ? zlib.createInflateRaw() : Stream.PassThrough();

@@ -214,3 +213,3 @@ if (fileSizeKnown) {

}
)
);
});

@@ -223,5 +222,5 @@ });

Parse.prototype._processDataDescriptor = function (entry) {
var self = this;
const self = this;
return self.pull(16).then(function(data) {
var vars = parseBuffer.parse(data, [
const vars = parseBuffer.parse(data, [
['dataDescriptorSignature', 4],

@@ -239,5 +238,5 @@ ['crc32', 4],

Parse.prototype._readCentralDirectoryFileHeader = function () {
var self = this;
const self = this;
return self.pull(42).then(function(data) {
var vars = parseBuffer.parse(data, [
const vars = parseBuffer.parse(data, [
['versionMadeBy', 2],

@@ -265,8 +264,8 @@ ['versionsNeededToExtract', 2],

})
.then(function(extraField) {
return self.pull(vars.fileCommentLength);
})
.then(function(fileComment) {
return true;
});
.then(function() {
return self.pull(vars.fileCommentLength);
})
.then(function() {
return true;
});
});

@@ -276,6 +275,6 @@ };

Parse.prototype._readEndOfCentralDirectoryRecord = function() {
var self = this;
const self = this;
return self.pull(18).then(function(data) {
var vars = parseBuffer.parse(data, [
const vars = parseBuffer.parse(data, [
['diskNumber', 2],

@@ -290,4 +289,3 @@ ['diskStart', 2],

return self.pull(vars.commentLength).then(function(comment) {
comment = comment.toString('utf8');
return self.pull(vars.commentLength).then(function() {
self.end();

@@ -301,6 +299,6 @@ self.push(null);

Parse.prototype.promise = function() {
var self = this;
return new Promise(function(resolve,reject) {
self.on('finish',resolve);
self.on('error',reject);
const self = this;
return new Promise(function(resolve, reject) {
self.on('finish', resolve);
self.on('error', reject);
});

@@ -307,0 +305,0 @@ };
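lib/parseBuffer.js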

const parseUIntLE = function(buffer, offset, size) {
var result;
let result;
switch(size) {

@@ -17,6 +17,6 @@ case 1:

default:
throw new Error('Unsupported UInt LE size!');
throw new Error('Unsupported UInt LE size!');
}
return result;
}
};

@@ -40,4 +40,4 @@ /**

const parse = function(buffer, format) {
var result = {}
var offset = 0;
const result = {};
let offset = 0;
for(const [key, size] of format) {

@@ -53,6 +53,6 @@ if(buffer.length >= offset + size) {

return result;
}
};
module.exports = {
parse
}
};
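
lib/parseExtraField.js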

@@ -1,8 +1,8 @@

var parseBuffer = require('./parseBuffer');
const parseBuffer = require('./parseBuffer');
module.exports = function(extraField, vars) {
var extra;
let extra;
// Find the ZIP64 header, if present.
while(!extra && extraField && extraField.length) {
var candidateExtra = parseBuffer.parse(extraField, [
const candidateExtra = parseBuffer.parse(extraField, [
['signature', 2],

@@ -30,3 +30,3 @@ ['partsize', 2],

if (vars.uncompressedSize === 0xffffffff)
if (vars.uncompressedSize === 0xffffffff)
vars.uncompressedSize= extra.uncompressedSize;

@@ -33,0 +33,0 @@
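lib/parseOne.js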

@@ -1,14 +1,14 @@

var Stream = require('stream');
var Parse = require('./parse');
var duplexer2 = require('duplexer2');
var BufferStream = require('./BufferStream');
const Stream = require('stream');
const Parse = require('./parse');
const duplexer2 = require('duplexer2');
const BufferStream = require('./BufferStream');
function parseOne(match,opts) {
var inStream = Stream.PassThrough({objectMode:true});
var outStream = Stream.PassThrough();
var transform = Stream.Transform({objectMode:true});
var re = match instanceof RegExp ? match : (match && new RegExp(match));
var found;
function parseOne(match, opts) {
const inStream = Stream.PassThrough({objectMode:true});
const outStream = Stream.PassThrough();
const transform = Stream.Transform({objectMode:true});
const re = match instanceof RegExp ? match : (match && new RegExp(match));
let found;
transform._transform = function(entry,e,cb) {
transform._transform = function(entry, e, cb) {
if (found || (re && !re.exec(entry.path))) {

@@ -19,12 +19,12 @@ entry.autodrain();

found = true;
out.emit('entry',entry);
entry.on('error',function(e) {
outStream.emit('error',e);
out.emit('entry', entry);
entry.on('error', function(e) {
outStream.emit('error', e);
});
entry.pipe(outStream)
.on('error',function(err) {
.on('error', function(err) {
cb(err);
})
.on('finish',function(d) {
cb(null,d);
.on('finish', function(d) {
cb(null, d);
});

@@ -35,10 +35,10 @@ }

inStream.pipe(Parse(opts))
.on('error',function(err) {
outStream.emit('error',err);
.on('error', function(err) {
outStream.emit('error', err);
})
.pipe(transform)
.on('error',Object) // Silence error as its already addressed in transform
.on('finish',function() {
.on('error', Object) // Silence error as its already addressed in transform
.on('finish', function() {
if (!found)
outStream.emit('error',new Error('PATTERN_NOT_FOUND'));
outStream.emit('error', new Error('PATTERN_NOT_FOUND'));
else

@@ -48,3 +48,3 @@ outStream.end();

var out = duplexer2(inStream,outStream);
const out = duplexer2(inStream, outStream);
out.buffer = function() {

@@ -51,0 +51,0 @@ return BufferStream(outStream);
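lib/PullStream.js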

@@ -1,5 +1,4 @@

var Stream = require('stream');
var Promise = require('bluebird');
var util = require('util');
var strFunction = 'function';
const Stream = require('stream');
const util = require('util');
const strFunction = 'function';

@@ -10,15 +9,15 @@ function PullStream() {

Stream.Duplex.call(this,{decodeStrings:false, objectMode:true});
Stream.Duplex.call(this, {decodeStrings:false, objectMode:true});
this.buffer = Buffer.from('');
var self = this;
self.on('finish',function() {
const self = this;
self.on('finish', function() {
self.finished = true;
self.emit('chunk',false);
self.emit('chunk', false);
});
}
util.inherits(PullStream,Stream.Duplex);
util.inherits(PullStream, Stream.Duplex);
PullStream.prototype._write = function(chunk,e,cb) {
this.buffer = Buffer.concat([this.buffer,chunk]);
PullStream.prototype._write = function(chunk, e, cb) {
this.buffer = Buffer.concat([this.buffer, chunk]);
this.cb = cb;

@@ -31,9 +30,10 @@ this.emit('chunk');

// otherwise (i.e. buffer) it is interpreted as a pattern signaling end of stream
PullStream.prototype.stream = function(eof,includeEof) {
var p = Stream.PassThrough();
var done,self= this;
PullStream.prototype.stream = function(eof, includeEof) {
const p = Stream.PassThrough();
let done;
const self= this;
function cb() {
if (typeof self.cb === strFunction) {
var callback = self.cb;
const callback = self.cb;
self.cb = undefined;

@@ -45,6 +45,6 @@ return callback();

function pull() {
var packet;
let packet;
if (self.buffer && self.buffer.length) {
if (typeof eof === 'number') {
packet = self.buffer.slice(0,eof);
packet = self.buffer.slice(0, eof);
self.buffer = self.buffer.slice(eof);

@@ -54,17 +54,17 @@ eof -= packet.length;

} else {
var match = self.buffer.indexOf(eof);
let match = self.buffer.indexOf(eof);
if (match !== -1) {
// store signature match byte offset to allow us to reference
// this for zip64 offset
self.match = match
self.match = match;
if (includeEof) match = match + eof.length;
packet = self.buffer.slice(0,match);
packet = self.buffer.slice(0, match);
self.buffer = self.buffer.slice(match);
done = true;
} else {
var len = self.buffer.length - eof.length;
const len = self.buffer.length - eof.length;
if (len <= 0) {
cb();
} else {
packet = self.buffer.slice(0,len);
packet = self.buffer.slice(0, len);
self.buffer = self.buffer.slice(len);

@@ -74,16 +74,16 @@ }

}
if (packet) p.write(packet,function() {
if (packet) p.write(packet, function() {
if (self.buffer.length === 0 || (eof.length && self.buffer.length <= eof.length)) cb();
});
}
if (!done) {
if (self.finished) {
self.removeListener('chunk',pull);
self.removeListener('chunk', pull);
self.emit('error', new Error('FILE_ENDED'));
return;
}
} else {
self.removeListener('chunk',pull);
self.removeListener('chunk', pull);
p.end();

@@ -93,3 +93,3 @@ }

self.on('chunk',pull);
self.on('chunk', pull);
pull();

@@ -99,3 +99,3 @@ return p;

PullStream.prototype.pull = function(eof,includeEof) {
PullStream.prototype.pull = function(eof, includeEof) {
if (eof === 0) return Promise.resolve('');

@@ -106,3 +106,3 @@

if (!isNaN(eof) && this.buffer.length > eof) {
var data = this.buffer.slice(0,eof);
const data = this.buffer.slice(0, eof);
this.buffer = this.buffer.slice(eof);

@@ -113,14 +113,14 @@ return Promise.resolve(data);

// Otherwise we stream until we have it
var buffer = Buffer.from(''),
self = this;
let buffer = Buffer.from('');
const self = this;
var concatStream = Stream.Transform();
concatStream._transform = function(d,e,cb) {
buffer = Buffer.concat([buffer,d]);
const concatStream = new Stream.Transform();
concatStream._transform = function(d, e, cb) {
buffer = Buffer.concat([buffer, d]);
cb();
};
var rejectHandler;
var pullStreamRejectHandler;
return new Promise(function(resolve,reject) {
let rejectHandler;
let pullStreamRejectHandler;
return new Promise(function(resolve, reject) {
rejectHandler = reject;

@@ -130,16 +130,16 @@ pullStreamRejectHandler = function(e) {

reject(e);
}
};
if (self.finished)
return reject(new Error('FILE_ENDED'));
self.once('error',pullStreamRejectHandler); // reject any errors from pullstream itself
self.stream(eof,includeEof)
.on('error',reject)
self.once('error', pullStreamRejectHandler); // reject any errors from pullstream itself
self.stream(eof, includeEof)
.on('error', reject)
.pipe(concatStream)
.on('finish',function() {resolve(buffer);})
.on('error',reject);
.on('finish', function() {resolve(buffer);})
.on('error', reject);
})
.finally(function() {
self.removeListener('error',rejectHandler);
self.removeListener('error',pullStreamRejectHandler);
});
.finally(function() {
self.removeListener('error', rejectHandler);
self.removeListener('error', pullStreamRejectHandler);
});
};

@@ -146,0 +146,0 @@
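package.json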

{
"name": "unzipper",
"version": "0.11.6",
"version": "0.12.1",
"description": "Unzip cross-platform streaming API ",

@@ -26,11 +26,14 @@ "author": "Evan Oxfeld <eoxfeld@gmail.com>",

"dependencies": {
"big-integer": "^1.6.17",
"bluebird": "~3.4.1",
"duplexer2": "~0.1.4",
"fstream": "^1.0.12",
"graceful-fs": "^4.2.2"
"fs-extra": "^11.2.0",
"graceful-fs": "^4.2.2",
"node-int64": "^0.4.0"
},
"devDependencies": {
"aws-sdk": "^2.77.0",
"@eslint/js": "^9.2.0",
"aws-sdk": "^2.1636.0",
"dirdiff": ">= 0.0.1 < 1",
"eslint": "^9.2.0",
"globals": "^15.2.0",
"iconv-lite": "^0.4.24",

@@ -57,4 +60,4 @@ "request": "^2.88.0",

"scripts": {
"test": "npx tap test/*.js --coverage-report=html"
"test": "npx tap test/*.js --coverage-report=html --reporter=dot"
}
}
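
README.md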

@@ -16,6 +16,208 @@ [![NPM Version][npm-image]][npm-url]

This is an active fork and drop-in replacement of the [node-unzip](https://github.com/EvanOxfeld/node-unzip) and addresses the following issues:
## Installation
```bash
$ npm install unzipper
```
## Open methods
The open methods allow random access to the underlying files of a zip archive, whether from disk, the web, S3, or a custom source.
The open methods return a promise on the contents of the central directory of a zip file, with individual `files` listed in an array.
Each file record has the following methods, providing random access to the underlying files:
* `stream([password])` - returns a stream of the unzipped content which can be piped to any destination
* `buffer([password])` - returns a promise on the buffered content of the file.
If the file is encrypted you will have to supply a password to decrypt it; otherwise you can leave it blank.
Unlike `adm-zip`, the Open methods will never read the entire zipfile into a buffer.
The last argument to the `Open` methods is an optional `options` object where you can specify `tailSize` (default 80 bytes), i.e. how many bytes to read at the end of the zipfile to locate the endOfCentralDirectory record. This location can vary depending on the zip64 extensible data sector size. Additionally, you can supply `crx: true`, which will check for a crx header and parse the file accordingly by shifting all file offsets by the length of the crx header.
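For instance, here is a minimal sketch combining these options with the `buffer` method; the archive path, entry name, and password below are placeholders:
```js
const unzipper = require('unzipper');

async function main() {
  // tailSize/crx are optional and shown purely to illustrate the options above
  const directory = await unzipper.Open.file('path/to/archive.zip', {tailSize: 1024, crx: true});
  const file = directory.files.find(d => d.path === 'secret.txt');
  // supply a password only if the entry is encrypted
  const content = await file.buffer('my-password');
  console.log(content.toString());
}
main();
```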
### Open.file([path], [options])
Returns a Promise to the central directory information with methods to extract individual files. `start` and `end` options are used to avoid reading the whole file.
Here is a simple example of opening up a zip file, printing out the directory information and then extracting the first file inside the zipfile to disk:
```js
async function main() {
const directory = await unzipper.Open.file('path/to/archive.zip');
console.log('directory', directory);
return new Promise( (resolve, reject) => {
directory.files[0]
.stream()
.pipe(fs.createWriteStream('firstFile'))
.on('error',reject)
.on('finish',resolve)
});
}
main();
```
If you want to extract all files from the zip file, the directory object supplies an extract method. Here is a quick example:
```js
async function main() {
const directory = await unzipper.Open.file('path/to/archive.zip');
await directory.extract({ path: '/path/to/destination' })
}
```
### Open.url([requestLibrary], [url | params], [options])
This function will return a Promise to the central directory information from a URL pointing to a zipfile. Range headers are used to avoid reading the whole file. Unzipper does not ship with a request library, so you will have to provide one as the first argument.
Live Example: (extracts a tiny xml file from the middle of a 500MB zipfile)
```js
const request = require('request');
const unzipper = require('unzipper');
async function main() {
const directory = await unzipper.Open.url(request,'http://www2.census.gov/geo/tiger/TIGER2015/ZCTA5/tl_2015_us_zcta510.zip');
const file = directory.files.find(d => d.path === 'tl_2015_us_zcta510.shp.iso.xml');
const content = await file.buffer();
console.log(content.toString());
}
main();
```
This function takes a second parameter which can either be a string containing the `url` to request, or an `options` object to invoke the supplied `request` library with. This can be used when other request options are required, such as custom headers or authentication to a third party service.
```js
const request = require('google-oauth-jwt').requestWithJWT();
const googleStorageOptions = {
url: `https://www.googleapis.com/storage/v1/b/m-bucket-name/o/my-object-name`,
qs: { alt: 'media' },
jwt: {
email: google.storage.credentials.client_email,
key: google.storage.credentials.private_key,
scopes: ['https://www.googleapis.com/auth/devstorage.read_only']
}
};
async function getFile(req, res, next) {
const directory = await unzipper.Open.url(request, googleStorageOptions);
const file = directory.files.find((file) => file.path === 'my-filename');
return file.stream().pipe(res);
}
```
### Open.s3([aws-sdk], [params], [options])
This function will return a Promise to the central directory information from a zipfile on S3. Range headers are used to avoid reading the whole file. Unzipper does not ship with the aws-sdk, so you have to provide an instantiated client as the first argument. The params object requires `Bucket` and `Key` to fetch the correct file.
Example:
```js
const unzipper = require('unzipper');
const AWS = require('aws-sdk');
const s3Client = new AWS.S3(config);
async function main() {
const directory = await unzipper.Open.s3(s3Client,{Bucket: 'unzipper', Key: 'archive.zip'});
return new Promise( (resolve, reject) => {
directory.files[0]
.stream()
.pipe(fs.createWriteStream('firstFile'))
.on('error',reject)
.on('finish',resolve)
});
}
main();
```
### Open.buffer(buffer, [options])
If you already have the zip file in-memory as a buffer, you can open the contents directly.
Example:
```js
// never use readFileSync - only used here to simplify the example
const buffer = fs.readFileSync('path/to/archive.zip');
async function main() {
const directory = await unzipper.Open.buffer(buffer);
console.log('directory',directory);
// ...
}
main();
```
### Open.custom(source, [options])
This function can be used to provide a custom source implementation. The source parameter expects a `stream` and a `size` function to be implemented. The size function should return a `Promise` that resolves to the total size of the file. The stream function should return a `Readable` stream for the supplied offset and length parameters.
Example:
```js
// Custom source implementation for reading a zip file from Google Cloud Storage
const { Storage } = require('@google-cloud/storage');
async function main() {
const storage = new Storage();
const bucket = storage.bucket('my-bucket');
const zipFile = bucket.file('my-zip-file.zip');
const customSource = {
stream: function(offset, length) {
return zipFile.createReadStream({
start: offset,
end: length && offset + length
})
},
size: async function() {
const objMetadata = (await zipFile.getMetadata())[0];
return objMetadata.size;
}
};
const directory = await unzipper.Open.custom(customSource);
console.log('directory', directory);
// ...
}
main();
```
### Open.[method].extract()
The directory object returned from `Open.[method]` provides an `extract` method which extracts all the files to a specified `path`, with an optional `concurrency` (default: 1).
Example (with concurrency of 5):
```js
unzipper.Open.file('path/to/archive.zip')
.then(d => d.extract({path: '/extraction/path', concurrency: 5}));
```
Please note: Methods that use the Central Directory instead of parsing the entire file can be found under [`Open`](#open).
Chrome extension files (.crx) are zipfiles with an [extra header](http://www.adambarth.com/experimental/crx/docs/crx.html) at the start of the file. Unzipper will parse .crx files with the streaming methods (`Parse` and `ParseOne`).
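As a minimal sketch (the .crx path below is a placeholder), streaming a Chrome extension through `Parse` works just like a regular zipfile:
```js
const fs = require('fs');
const unzipper = require('unzipper');

fs.createReadStream('path/to/extension.crx') // placeholder path
  .pipe(unzipper.Parse())
  .on('entry', function(entry) {
    console.log(entry.path, entry.type);
    entry.autodrain(); // discard the content; we only list entries here
  });
```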
## Streaming an entire zip file (legacy)
This library began as an active fork and drop-in replacement of the [node-unzip](https://github.com/EvanOxfeld/node-unzip) to address the following issues:
* finish/close events are not always triggered, particularly when the input stream is slower than the receivers
* Any files are buffered into memory before being passed on to entry
Originally the only way to use the library was to stream the entire zip file. This method is inefficient if you are only interested in selected files from the zip file. Additionally, this method can be error prone since it relies on the local file headers, which could be wrong.
The structure of this fork is similar to the original, but it uses Promises and the inherent guarantees provided by node streams to ensure a low memory footprint and to emit finish/close events at the end of processing. The new `Parser` will push any parsed `entries` downstream if you pipe from it, while still supporting the legacy `entry` event as well.
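A minimal sketch of the streaming approach (file names are placeholders), piping parsed entries into an object-mode transform while draining everything else:
```js
const fs = require('fs');
const stream = require('stream');
const unzipper = require('unzipper');

fs.createReadStream('path/to/archive.zip') // placeholder path
  .pipe(unzipper.Parse())
  .pipe(new stream.Transform({
    objectMode: true,
    transform: function(entry, encoding, cb) {
      if (entry.path === 'wanted.txt') { // placeholder entry name
        entry.pipe(fs.createWriteStream('wanted.txt'))
          .on('finish', cb);
      } else {
        entry.autodrain(); // drain unused entries so parsing can continue
        cb();
      }
    }
  }));
```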

@@ -28,14 +230,2 @@

Please note: Methods that use the Central Directory instead of parsing entire file can be found under [`Open`](#open)
Chrome extension files (.crx) are zipfiles with an [extra header](http://www.adambarth.com/experimental/crx/docs/crx.html) at the start of the file. Unzipper will parse .crx file with the streaming methods (`Parse` and `ParseOne`). The `Open` methods will check for `crx` headers and parse crx files, but only if you provide `crx: true` in options.
## Installation
```bash
$ npm install unzipper
```
## Quick Examples
### Extract to a directory

@@ -208,162 +398,3 @@ ```js

## Licenses
See LICENCE