node-stream-zip
Comparing version 0.1.0 to 0.2.0
@@ -13,3 +13,3 @@ /**
events = require('events'),
zlib = require('zlib')
zlib = require('zlib'),
stream = require('stream');
@@ -100,2 +100,3 @@
FLG_MSK : 4096, // mask header values
FLG_ENTRY_ENC : 1,
@@ -215,2 +216,4 @@ /* 4.5 Extensible data fields */
op.chunkSize *= 2;
if (pos <= minPos)
return that.emit('error', 'Bad archive');
var expandLength = Math.min(op.chunkSize, pos - minPos);
@@ -304,8 +307,12 @@ op.win.expandLeft(expandLength, readCentralDirectoryCallback);
var entryStream = new EntryDataReaderStream(fd, offset, entry.compressedSize);
if (entry.method === consts.STORED) {
callback(null, entryStream);
} else if (entry.method === consts.DEFLATED) {
callback(null, entryStream.pipe(zlib.createInflateRaw()));
if (entry.encrypted) {
return callback('Entry encrypted');
} else {
callback('Unknown compression method: ' + entry.method);
if (entry.method === consts.STORED) {
callback(null, entryStream);
} else if (entry.method === consts.DEFLATED || entry.method === consts.ENHANCED_DEFLATED) {
callback(null, entryStream.pipe(zlib.createInflateRaw()));
} else {
callback('Unknown compression method: ' + entry.method);
}
}
@@ -323,6 +330,29 @@ } catch (ex) {
} else {
stm.on('end', function () {
that.emit('extract', entry, outPath);
callback();
}).pipe(fs.createWriteStream(outPath));
var fsStm, errThrown;
stm.on('error', function(err) {
errThrown = err;
if (fsStm) {
stm.unpipe(fsStm);
fsStm.close(function () {
callback(err);
});
}
});
fs.open(outPath, 'w', function(err, fdFile) {
if (err)
return callback(err || errThrown);
if (errThrown) {
fs.close(fd, function() {
callback(errThrown);
});
return;
}
fsStm = fs.createWriteStream(outPath, { fd: fdFile });
fsStm.on('finish', function() {
that.emit('extract', entry, outPath);
if (!errThrown)
callback();
});
stm.pipe(fsStm);
});
}
@@ -349,3 +379,5 @@ });
var targetPath = path.join(baseDir, file.name.replace(baseRelPath, ''));
extract(file, targetPath, function () {
extract(file, targetPath, function (err) {
if (err)
return callback(err, extractedCount);
extractFiles(baseDir, baseRelPath, files, callback, extractedCount + 1);
@@ -356,19 +388,27 @@ });
this.extract = function(entry, outPath, callback) {
var entryName = entry || '';
if (typeof entry === 'string') {
entry = this.entry(entry);
if (!entry)
return callback('Entry not found');
if (entry) {
entryName = entry.name;
} else {
if (entryName.length && entryName[entryName.length - 1] !== '/')
entryName += '/';
}
}
if (entry.isDirectory) {
var files = [], dirs = [];
if (!entry || entry.isDirectory) {
var files = [], dirs = [], allDirs = {};
for (var e in entries) {
if (Object.prototype.hasOwnProperty.call(entries, e) && e.lastIndexOf(entry.name, 0) === 0) {
var relPath = e.replace(entry.name, '');
if (Object.prototype.hasOwnProperty.call(entries, e) && e.lastIndexOf(entryName, 0) === 0) {
var relPath = e.replace(entryName, '');
var childEntry = entries[e];
if (childEntry.isDirectory) {
var parts = relPath.split('/').filter(function(f) { return f; });
if (childEntry.isFile) {
files.push(childEntry);
relPath = path.dirname(relPath);
}
if (relPath && !allDirs[relPath] && relPath[0] !== '.') {
allDirs[relPath] = true;
var parts = relPath.split('/').filter(function (f) { return f; });
if (parts.length)
dirs.push(parts);
} else {
files.push(childEntry);
}
@@ -383,6 +423,6 @@ }
else
extractFiles(outPath, entry.name, files, callback, 0);
extractFiles(outPath, entryName, files, callback, 0);
});
} else {
extractFiles(outPath, entry.name, files, callback, 0);
extractFiles(outPath, entryName, files, callback, 0);
}
@@ -490,7 +530,7 @@ } else {
// uncompressed file crc-32 value
this.crc = data.readUInt32LE(consts.LOCCRC);
this.crc = data.readUInt32LE(consts.LOCCRC) || this.crc;
// compressed size
this.compressedSize = data.readUInt32LE(consts.LOCSIZ);
this.compressedSize = data.readUInt32LE(consts.LOCSIZ) || this.compressedSize;
// uncompressed size
this.size = data.readUInt32LE(consts.LOCLEN);
this.size = data.readUInt32LE(consts.LOCLEN) || this.size;
// filename length
@@ -506,3 +546,2 @@ this.fnameLen = data.readUInt16LE(consts.LOCNAM);
this.isDirectory = (lastChar == 47) || (lastChar == 92);
this.isFile = !this.isDirectory;
@@ -544,2 +583,10 @@ if (this.extraLen) {
Object.defineProperty(ZipEntry.prototype, 'encrypted', {
get: function() { return (this.flags & consts.FLG_ENTRY_ENC) == consts.FLG_ENTRY_ENC; }
});
Object.defineProperty(ZipEntry.prototype, 'isFile', {
get: function() { return !this.isDirectory; }
});
// endregion
@@ -546,0 +593,0 @@
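
As an editorial aside on the changes above: in this version `extract()` propagates errors to its callback, and passing `null` as the entry extracts the whole archive (see the README changes further below). A minimal consumer-side sketch, assuming the constructor options from the README example (`file`, `storeEntries`) and placeholder paths:

```javascript
var StreamZip = require('node-stream-zip');

// 'archive.zip' and './out/' are placeholders, not paths from the diff.
var zip = new StreamZip({ file: 'archive.zip', storeEntries: true });
zip.on('error', function (err) { console.error('ERROR: ' + err); });
zip.on('ready', function () {
    // extract(null, ...) extracts the whole archive; the callback now
    // receives (err, count) as added in this version
    zip.extract(null, './out/', function (err, count) {
        if (err) {
            // e.g. 'Entry encrypted' or 'Unknown compression method: <n>'
            return console.error('Extraction failed: ' + err);
        }
        console.log('Extracted ' + count + ' entries');
    });
});
```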
{
"name": "node-stream-zip",
"version": "0.1.0",
"description": "node.js library for reading and extraction of ZIP archives",
"keywords": [
"zip",
"archive",
"unzip",
"stream"
],
"homepage": "https://github.com/antelle/node-stream-zip",
"author": "Antelle <antelle.net@gmail.com> (https://github.com/antelle)",
"bugs": {
"email": "antelle.net@gmail.com",
"url": "https://github.com/antelle/node-stream-zip/issues"
},
"licenses": [
{
"type": "MIT",
"url": "https://github.com/antelle/node-stream-zip/blob/master/MIT-LICENSE.txt"
}
],
"files": [
"node-stream-zip.js"
],
"main": "node-stream-zip.js",
"repository": {
"type": "git",
"url": "https://github.com/antelle/node-stream-zip.git"
},
"engines": {
"node": ">=0.10.0"
"name": "node-stream-zip",
"version": "0.2.0",
"description": "node.js library for reading and extraction of ZIP archives",
"keywords": [
"zip",
"archive",
"unzip",
"stream"
],
"homepage": "https://github.com/antelle/node-stream-zip",
"author": "Antelle <antelle.net@gmail.com> (https://github.com/antelle)",
"bugs": {
"email": "antelle.net@gmail.com",
"url": "https://github.com/antelle/node-stream-zip/issues"
},
"licenses": [
{
"type": "MIT",
"url": "https://github.com/antelle/node-stream-zip/blob/master/MIT-LICENSE.txt"
}
],
"files": [
"node-stream-zip.js"
],
"scripts": {
"test": "nodeunit test/tests.js"
},
"main": "node-stream-zip.js",
"repository": {
"type": "git",
"url": "https://github.com/antelle/node-stream-zip.git"
},
"engines": {
"node": ">=0.10.0"
},
"devDependencies": {}
}
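
The new `scripts.test` entry wires `npm test` to nodeunit. Purely as a hedged illustration (the package's actual test/tests.js is not shown in this diff), a nodeunit test for this library might look roughly like:

```javascript
// Hypothetical test shape only; the archive path is a placeholder and does
// not come from the package's real test suite.
var StreamZip = require('node-stream-zip');

exports.readsEntries = function (test) {
    var zip = new StreamZip({ file: 'test.zip', storeEntries: true });
    zip.on('error', function (err) {
        test.ok(false, 'unexpected error: ' + err);
        test.done();
    });
    zip.on('ready', function () {
        test.ok(zip.entriesCount > 0, 'archive should contain entries');
        test.done();
    });
};
```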
@@ -8,11 +8,8 @@ # node-stream-zip
- large archives support
- all operations are non-blocking
- all operations are non-blocking, no sync i/o
- fast initialization
- no dependencies or binary addons
- decompression with built-in zlib streams
- no dependencies, no binary addons
- decompression with built-in zlib module
- deflate, deflate64, sfx, macosx/windows built-in archives
# Warning: alpha version
The project is in active development for now, approx stable release with tests and bugfixes in March'15.
# Installation
@@ -25,3 +22,3 @@
```javascript
var StreamZip = require('./node-stream-zip.js');
var StreamZip = require('node-stream-zip');
var zip = new StreamZip({
@@ -31,5 +28,5 @@ file: 'archive.zip',
});
zip.on('error', function(err) { console.error('ERROR: ' + err); });
zip.on('error', function(err) { /*handle*/ });
zip.on('ready', function() {
// console.log('Loaded. Entries read: ' + zip.entriesCount);
console.log('Entries read: ' + zip.entriesCount);
// stream to stdout
@@ -39,18 +36,37 @@ zip.stream('node/benchmark/net/tcp-raw-c2s.js', function(err, stm) {
});
// stream to file
zip.extract('node/benchmark/net/tcp-raw-c2s.js', 'd:/temp/', function(err) {
// extract file
zip.extract('node/benchmark/net/tcp-raw-c2s.js', './temp/', function(err) {
console.log('Entry extracted');
});
// stream folder
zip.extract('node/benchmark/', 'd:/temp/ext', function(err, count) {
// extract folder
zip.extract('node/benchmark/', './temp/', function(err, count) {
console.log('Extracted ' + count + ' entries');
});
// extract all
zip.extract(null, './temp/', function(err, count) {
console.log('Extracted ' + count + ' entries');
});
});
zip.on('extract', function(entry, file) {
console.log('extract', entry.name, file);
console.log('Extracted ' + entry.name + ' to ' + file);
});
zip.on('entry', function(entry) {
// called on load, when entry description has been read
// you can already stream this entry, without waiting until all entry descriptions are read (suitable for very large archives)
console.log('Read entry ', entry.name);
});
```
# Building
The project doesn't require building. To run unit tests with [nodeunit](https://github.com/caolan/nodeunit):
`$ npm test`
# Known issues
- [utf8](https://github.com/rubyzip/rubyzip/wiki/Files-with-non-ascii-filenames) file names
- AES encrypted files
# Contributors
ZIP parsing code has been forked from [adm-zip](https://github.com/cthackers/adm-zip)
ZIP parsing code has been partially forked from [cthackers/adm-zip](https://github.com/cthackers/adm-zip) (MIT license).
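
On the "Known issues" item about AES encrypted files: since such entries cannot be streamed or extracted, they can be flagged up front via the `encrypted` getter introduced in this release. A minimal sketch (editorial, not from the README; the archive path is a placeholder):

```javascript
var StreamZip = require('node-stream-zip');

// 'archive.zip' is a placeholder path.
var zip = new StreamZip({ file: 'archive.zip', storeEntries: true });
zip.on('entry', function (entry) {
    // fires as each entry description is read; encrypted entries would fail
    // with 'Entry encrypted', so report and skip them here
    if (entry.encrypted) {
        console.log('Skipping encrypted entry: ' + entry.name);
    }
});
zip.on('ready', function () {
    console.log('Read ' + zip.entriesCount + ' entries');
});
zip.on('error', function (err) { console.error('ERROR: ' + err); });
```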