Socket
Socket
Sign inDemoInstall

unzipper

Package Overview
Dependencies
Maintainers
1
Versions
76
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

unzipper - npm Package Compare versions

Comparing version 0.9.13 to 0.9.14

31

lib/PullStream.js

@@ -37,4 +37,12 @@ var Stream = require('stream');

var p = Stream.PassThrough();
var count = 0,done,needmore,packet,self= this;
var done,packet,self= this;
function cb() {
if (typeof self.cb === strFunction) {
var callback = self.cb;
self.cb = undefined;
return callback();
}
}
function pull() {

@@ -56,19 +64,12 @@ if (self.buffer && self.buffer.length) {

var len = self.buffer.length - eof.length;
if (len < 0) {
len = self.buffer.length;
needmore = true;
if (len <= 0) {
cb();
} else {
packet = self.buffer.slice(0,len);
self.buffer = self.buffer.slice(len);
}
packet = self.buffer.slice(0,len);
self.buffer = self.buffer.slice(len);
}
}
p.write(packet,function() {
if ((self.buffer.length === (eof.length || 0) || needmore) &&
typeof self.cb === strFunction &&
packet.length !== 0
) {
var cb = self.cb;
delete self.cb;
cb();
}
if (packet) p.write(packet,function() {
if (self.buffer.length === 0 || (eof.length && self.buffer.length <= eof.length)) cb();
});

@@ -75,0 +76,0 @@ }

{
"name": "unzipper",
"version": "0.9.13",
"version": "0.9.14",
"description": "Unzip cross-platform streaming API ",

@@ -5,0 +5,0 @@ "author": "Evan Oxfeld <eoxfeld@gmail.com>",

@@ -21,3 +21,3 @@ [![NPM Version][npm-image]][npm-url]

The stucture of this fork is similar to the original, but uses Promises and inherit guarantees provided by node streams to ensure low memory footprint and guarantee finish/close events at the end of processing. The new `Parser` will push any parsed `entries` downstream if you pipe from it, while still supporting the legacy `entry` event as well.
The structure of this fork is similar to the original, but uses Promises and inherit guarantees provided by node streams to ensure low memory footprint and emits finish/close events at the end of processing. The new `Parser` will push any parsed `entries` downstream if you pipe from it, while still supporting the legacy `entry` event as well.

@@ -27,3 +27,3 @@ Breaking changes: The new `Parser` will not automatically drain entries if there are no listeners or pipes in place.

Unzipper provides simple APIs similar to [node-tar](https://github.com/isaacs/node-tar) for parsing and extracting zip files.
There are no added compiled dependencies - inflation is handled by node.js's built in zlib support.
There are no added compiled dependencies - inflation is handled by node.js's built in zlib support.

@@ -46,3 +46,3 @@ Please note: Methods that use the Central Directory instead of parsing entire file can be found under [`Open`](#open)

Extract emits the 'close' event once the zip's contents have been fully extracted to disk. Extract uses [fstream.Writer](https://www.npmjs.com/package/fstream) and therefore needs need an absolute path to the destination directory. This directory will be automatically created if it doesn't already exits.
Extract emits the 'close' event once the zip's contents have been fully extracted to disk. `Extract` uses [fstream.Writer](https://www.npmjs.com/package/fstream) and therefore needs an absolute path to the destination directory. This directory will be automatically created if it doesn't already exist.

@@ -57,3 +57,3 @@ ### Parse zip file contents

```
```js
// If you want to handle autodrain errors you can either:

@@ -72,5 +72,5 @@ entry.autodrain().catch(e => handleError);

.on('entry', function (entry) {
var fileName = entry.path;
var type = entry.type; // 'Directory' or 'File'
var size = entry.vars.uncompressedSize; // There is also compressedSize;
const fileName = entry.path;
const type = entry.type; // 'Directory' or 'File'
const size = entry.vars.uncompressedSize; // There is also compressedSize;
if (fileName === "this IS the file I'm looking for") {

@@ -95,5 +95,5 @@ entry.pipe(fs.createWriteStream('output/path'));

transform: function(entry,e,cb) {
var fileName = entry.path;
var type = entry.type; // 'Directory' or 'File'
var size = entry.vars.uncompressedSize; // There is also compressedSize;
const fileName = entry.path;
const type = entry.type; // 'Directory' or 'File'
const size = entry.vars.uncompressedSize; // There is also compressedSize;
if (fileName === "this IS the file I'm looking for") {

@@ -124,3 +124,3 @@ entry.pipe(fs.createWriteStream('output/path'))

}))
```

@@ -142,3 +142,3 @@

While the recommended strategy of consuming the unzipped contents is using streams, it is sometimes convenient to be able to get the full buffered contents of each file . Each `entry` provides a `.buffer` function that consumes the entry by buffering the contents into memory and returning a promise to the complete buffer.
While the recommended strategy of consuming the unzipped contents is using streams, it is sometimes convenient to be able to get the full buffered contents of each file. Each `entry` provides a `.buffer` function that consumes the entry by buffering the contents into memory and returning a promise to the complete buffer.

@@ -161,3 +161,3 @@ ```js

The parser emits `finish` and `error` events like any other stream. The parser additionally provides a promise wrapper around those two events to allow easy folding into existing Promise based structures.
The parser emits `finish` and `error` events like any other stream. The parser additionally provides a promise wrapper around those two events to allow easy folding into existing Promise-based structures.

@@ -180,3 +180,3 @@ Example:

```js
var il = require('iconv-lite');
const il = require('iconv-lite');
fs.createReadStream('path/to/archive.zip')

@@ -186,7 +186,7 @@ .pipe(unzipper.Parse())

// if some legacy zip tool follow ZIP spec then this flag will be set
var isUnicode = entry.props.flags.isUnicode;
const isUnicode = entry.props.flags.isUnicode;
// decode "non-unicode" filename from OEM Cyrillic character set
var fileName = isUnicode ? entry.path : il.decode(entry.props.pathBuffer, 'cp866');
var type = entry.type; // 'Directory' or 'File'
var size = entry.vars.uncompressedSize; // There is also compressedSize;
const fileName = isUnicode ? entry.path : il.decode(entry.props.pathBuffer, 'cp866');
const type = entry.type; // 'Directory' or 'File'
const size = entry.vars.uncompressedSize; // There is also compressedSize;
if (fileName === "Текстовый файл.txt") {

@@ -204,4 +204,4 @@ entry.pipe(fs.createWriteStream(fileName));

* `buffer([password])` - returns a promise on the buffered content of the file)
If the file is encrypted you will have to supply a password to decrypt, otherwise you can leave blank.
Unlike adm-zip the Open methods will never read the entire zipfile into buffer.
If the file is encrypted you will have to supply a password to decrypt, otherwise you can leave blank.
Unlike `adm-zip` the Open methods will never read the entire zipfile into a buffer.

@@ -234,4 +234,4 @@ ### Open.file([path])

```js
var request = require('request');
var unzipper = require('./unzip');
const request = require('request');
const unzipper = require('./unzip');

@@ -272,3 +272,3 @@ async function main() {

### Open.s3([aws-sdk], [params])
This function will return a Promise to the central directory information from a zipfile on S3. Range-headers are used to avoid reading the whole file. Unzipper does not ship with with the aws-sdk so you have to provide an instanciated client as first arguments. The params object requires `Bucket` and `Key` to fetch the correct file.
This function will return a Promise to the central directory information from a zipfile on S3. Range-headers are used to avoid reading the whole file. Unzipper does not ship with the aws-sdk so you have to provide an instantiated client as first arguments. The params object requires `Bucket` and `Key` to fetch the correct file.

@@ -278,5 +278,5 @@ Example:

```js
var unzipper = require('./unzip');
var AWS = require('aws-sdk');
var s3Client = AWS.S3(config);
const unzipper = require('./unzip');
const AWS = require('aws-sdk');
const s3Client = AWS.S3(config);

@@ -304,3 +304,3 @@ async function main() {

// never use readFileSync - only used here to simplify the example
var buffer = fs.readFileSync('path/to/arhive.zip');
const buffer = fs.readFileSync('path/to/arhive.zip');

@@ -307,0 +307,0 @@ async function main() {

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc