Comparing version 3.0.0 to 3.1.0
index.js
@@ -16,5 +16,8 @@ var fs = require("fs");
 exports.dosDateTimeToDate = dosDateTimeToDate;
+exports.getFileNameLowLevel = getFileNameLowLevel;
 exports.validateFileName = validateFileName;
+exports.parseExtraFields = parseExtraFields;
 exports.ZipFile = ZipFile;
 exports.Entry = Entry;
+exports.LocalFileHeader = LocalFileHeader;
 exports.RandomAccessReader = RandomAccessReader;
@@ -115,3 +118,3 @@
 // found eocdr
-var eocdrBuffer = buffer.slice(i);
+var eocdrBuffer = buffer.subarray(i);
@@ -139,4 +142,4 @@ // 0 - End of central directory signature = 0x06054b50
 // the encoding is always cp437.
-var comment = decodeStrings ? decodeBuffer(eocdrBuffer, 22, eocdrBuffer.length, false)
-                            : eocdrBuffer.slice(22);
+var comment = decodeStrings ? decodeBuffer(eocdrBuffer.subarray(22), false)
+                            : eocdrBuffer.subarray(22);
@@ -303,30 +306,28 @@ if (!(entryCount === 0xffff || centralDirectoryOffset === 0xffffffff)) {
 // 46 - File name
-var isUtf8 = (entry.generalPurposeBitFlag & 0x800) !== 0;
-entry.fileName = self.decodeStrings ? decodeBuffer(buffer, 0, entry.fileNameLength, isUtf8)
-                                    : buffer.slice(0, entry.fileNameLength);
+entry.fileNameRaw = buffer.subarray(0, entry.fileNameLength);
 // 46+n - Extra field
 var fileCommentStart = entry.fileNameLength + entry.extraFieldLength;
-var extraFieldBuffer = buffer.slice(entry.fileNameLength, fileCommentStart);
-entry.extraFields = [];
-var i = 0;
-while (i < extraFieldBuffer.length - 3) {
-  var headerId = extraFieldBuffer.readUInt16LE(i + 0);
-  var dataSize = extraFieldBuffer.readUInt16LE(i + 2);
-  var dataStart = i + 4;
-  var dataEnd = dataStart + dataSize;
-  if (dataEnd > extraFieldBuffer.length) return emitErrorAndAutoClose(self, new Error("extra field length exceeds extra field buffer size"));
-  var dataBuffer = newBuffer(dataSize);
-  extraFieldBuffer.copy(dataBuffer, 0, dataStart, dataEnd);
-  entry.extraFields.push({
-    id: headerId,
-    data: dataBuffer,
-  });
-  i = dataEnd;
-}
+entry.extraFieldRaw = buffer.subarray(entry.fileNameLength, fileCommentStart);
+// 46+n+m - File comment
+entry.fileCommentRaw = buffer.subarray(fileCommentStart, fileCommentStart + entry.fileCommentLength);
+// Parse the extra fields, which we need for processing other fields.
+try {
+  entry.extraFields = parseExtraFields(entry.extraFieldRaw);
+} catch (err) {
+  return emitErrorAndAutoClose(self, err);
+}
-// 46+n+m - File comment
-entry.fileComment = self.decodeStrings ? decodeBuffer(buffer, fileCommentStart, fileCommentStart + entry.fileCommentLength, isUtf8)
-                                       : buffer.slice(fileCommentStart, fileCommentStart + entry.fileCommentLength);
-// compatibility hack for https://github.com/thejoshwolfe/yauzl/issues/47
+// Interpret strings according to bit flags, extra fields, and options.
+if (self.decodeStrings) {
+  var isUtf8 = (entry.generalPurposeBitFlag & 0x800) !== 0;
+  entry.fileComment = decodeBuffer(entry.fileCommentRaw, isUtf8);
+  entry.fileName = getFileNameLowLevel(entry.generalPurposeBitFlag, entry.fileNameRaw, entry.extraFields, self.strictFileNames);
+  var errorMessage = validateFileName(entry.fileName);
+  if (errorMessage != null) return emitErrorAndAutoClose(self, new Error(errorMessage));
+} else {
+  entry.fileComment = entry.fileCommentRaw;
+  entry.fileName = entry.fileNameRaw;
+}
+// Maintain API compatibility. See https://github.com/thejoshwolfe/yauzl/issues/47
 entry.comment = entry.fileComment;
@@ -381,32 +382,2 @@
-// check for Info-ZIP Unicode Path Extra Field (0x7075)
-// see https://github.com/thejoshwolfe/yauzl/issues/33
-if (self.decodeStrings) {
-  for (var i = 0; i < entry.extraFields.length; i++) {
-    var extraField = entry.extraFields[i];
-    if (extraField.id === 0x7075) {
-      if (extraField.data.length < 6) {
-        // too short to be meaningful
-        continue;
-      }
-      // Version       1 byte      version of this extra field, currently 1
-      if (extraField.data.readUInt8(0) !== 1) {
-        // > Changes may not be backward compatible so this extra
-        // > field should not be used if the version is not recognized.
-        continue;
-      }
-      // NameCRC32     4 bytes     File Name Field CRC32 Checksum
-      var oldNameCrc32 = extraField.data.readUInt32LE(1);
-      if (crc32.unsigned(buffer.slice(0, entry.fileNameLength)) !== oldNameCrc32) {
-        // > If the CRC check fails, this UTF-8 Path Extra Field should be
-        // > ignored and the File Name field in the header should be used instead.
-        continue;
-      }
-      // UnicodeName   Variable    UTF-8 version of the entry File Name
-      entry.fileName = decodeBuffer(extraField.data, 5, extraField.data.length, true);
-      break;
-    }
-  }
-}
 // validate file size
@@ -425,10 +396,2 @@ if (self.validateEntrySizes && entry.compressionMethod === 0) {
-if (self.decodeStrings) {
-  if (!self.strictFileNames) {
-    // allow backslash
-    entry.fileName = entry.fileName.replace(/\\/g, "/");
-  }
-  var errorMessage = validateFileName(entry.fileName, self.validateFileNameOptions);
-  if (errorMessage != null) return emitErrorAndAutoClose(self, new Error(errorMessage));
-}
 self.emit("entry", entry);
@@ -448,2 +411,5 @@
   callback = options;
   options = null;
 }
+if (options == null) {
+  options = {};
+}
@@ -496,3 +462,76 @@ } else {
 }
 // make sure we don't lose the fd before we open the actual read stream
+var decompress;
+if (entry.compressionMethod === 0) {
+  // 0 - The file is stored (no compression)
+  decompress = false;
+} else if (entry.compressionMethod === 8) {
+  // 8 - The file is Deflated
+  decompress = options.decompress != null ? options.decompress : true;
+} else {
+  return callback(new Error("unsupported compression method: " + entry.compressionMethod));
+}
+self.readLocalFileHeader(entry, {minimal: true}, function(err, localFileHeader) {
+  if (err) return callback(err);
+  self.openReadStreamLowLevel(
+      localFileHeader.fileDataStart, entry.compressedSize,
+      relativeStart, relativeEnd,
+      decompress, entry.uncompressedSize,
+      callback);
+});
+};
+ZipFile.prototype.openReadStreamLowLevel = function(fileDataStart, compressedSize, relativeStart, relativeEnd, decompress, uncompressedSize, callback) {
+  var self = this;
+  var fileDataEnd = fileDataStart + compressedSize;
+  var readStream = self.reader.createReadStream({
+    start: fileDataStart + relativeStart,
+    end: fileDataStart + relativeEnd,
+  });
+  var endpointStream = readStream;
+  if (decompress) {
+    var destroyed = false;
+    var inflateFilter = zlib.createInflateRaw();
+    readStream.on("error", function(err) {
+      // setImmediate here because errors can be emitted during the first call to pipe()
+      setImmediate(function() {
+        if (!destroyed) inflateFilter.emit("error", err);
+      });
+    });
+    readStream.pipe(inflateFilter);
+    if (self.validateEntrySizes) {
+      endpointStream = new AssertByteCountStream(uncompressedSize);
+      inflateFilter.on("error", function(err) {
+        // forward zlib errors to the client-visible stream
+        setImmediate(function() {
+          if (!destroyed) endpointStream.emit("error", err);
+        });
+      });
+      inflateFilter.pipe(endpointStream);
+    } else {
+      // the zlib filter is the client-visible stream
+      endpointStream = inflateFilter;
+    }
+    // this is part of yauzl's API, so implement this function on the client-visible stream
+    installDestroyFn(endpointStream, function() {
+      destroyed = true;
+      if (inflateFilter !== endpointStream) inflateFilter.unpipe(endpointStream);
+      readStream.unpipe(inflateFilter);
+      // TODO: the inflateFilter may cause a memory leak. see Issue #27.
+      readStream.destroy();
+    });
+  }
+  callback(null, endpointStream);
+};
+ZipFile.prototype.readLocalFileHeader = function(entry, options, callback) {
+  var self = this;
+  if (callback == null) {
+    callback = options;
+    options = null;
+  }
+  if (options == null) options = {};
+  self.reader.ref();
@@ -508,78 +547,54 @@ var buffer = newBuffer(30);
 }
 // all this should be redundant
+var fileNameLength = buffer.readUInt16LE(26);
+var extraFieldLength = buffer.readUInt16LE(28);
+var fileDataStart = entry.relativeOffsetOfLocalHeader + 30 + fileNameLength + extraFieldLength;
+// We now have enough information to do this bounds check.
+if (fileDataStart + entry.compressedSize > self.fileSize) {
+  return callback(new Error("file data overflows file bounds: " +
+      fileDataStart + " + " + entry.compressedSize + " > " + self.fileSize));
+}
+if (options.minimal) {
+  return callback(null, {fileDataStart: fileDataStart});
+}
+var localFileHeader = new LocalFileHeader();
+localFileHeader.fileDataStart = fileDataStart;
 // 4 - Version needed to extract (minimum)
+localFileHeader.versionNeededToExtract = buffer.readUInt16LE(4);
 // 6 - General purpose bit flag
+localFileHeader.generalPurposeBitFlag = buffer.readUInt16LE(6);
 // 8 - Compression method
+localFileHeader.compressionMethod = buffer.readUInt16LE(8);
 // 10 - File last modification time
+localFileHeader.lastModFileTime = buffer.readUInt16LE(10);
 // 12 - File last modification date
+localFileHeader.lastModFileDate = buffer.readUInt16LE(12);
 // 14 - CRC-32
+localFileHeader.crc32 = buffer.readUInt32LE(14);
 // 18 - Compressed size
+localFileHeader.compressedSize = buffer.readUInt32LE(18);
 // 22 - Uncompressed size
+localFileHeader.uncompressedSize = buffer.readUInt32LE(22);
 // 26 - File name length (n)
-var fileNameLength = buffer.readUInt16LE(26);
+localFileHeader.fileNameLength = fileNameLength;
 // 28 - Extra field length (m)
-var extraFieldLength = buffer.readUInt16LE(28);
+localFileHeader.extraFieldLength = extraFieldLength;
 // 30 - File name
 // 30+n - Extra field
-var localFileHeaderEnd = entry.relativeOffsetOfLocalHeader + buffer.length + fileNameLength + extraFieldLength;
-var decompress;
-if (entry.compressionMethod === 0) {
-  // 0 - The file is stored (no compression)
-  decompress = false;
-} else if (entry.compressionMethod === 8) {
-  // 8 - The file is Deflated
-  decompress = options.decompress != null ? options.decompress : true;
-} else {
-  return callback(new Error("unsupported compression method: " + entry.compressionMethod));
-}
-var fileDataStart = localFileHeaderEnd;
-var fileDataEnd = fileDataStart + entry.compressedSize;
-if (entry.compressedSize !== 0) {
-  // bounds check now, because the read streams will probably not complain loud enough.
-  // since we're dealing with an unsigned offset plus an unsigned size,
-  // we only have 1 thing to check for.
-  if (fileDataEnd > self.fileSize) {
-    return callback(new Error("file data overflows file bounds: " +
-        fileDataStart + " + " + entry.compressedSize + " > " + self.fileSize));
+buffer = newBuffer(fileNameLength + extraFieldLength);
+self.reader.ref();
+readAndAssertNoEof(self.reader, buffer, 0, buffer.length, entry.relativeOffsetOfLocalHeader + 30, function(err) {
+  try {
+    if (err) return callback(err);
+    localFileHeader.fileName = buffer.subarray(0, fileNameLength);
+    localFileHeader.extraField = buffer.subarray(fileNameLength);
+    return callback(null, localFileHeader);
+  } finally {
+    self.reader.unref();
+  }
+});
-  }
-}
-var readStream = self.reader.createReadStream({
-  start: fileDataStart + relativeStart,
-  end: fileDataStart + relativeEnd,
-});
-var endpointStream = readStream;
-if (decompress) {
-  var destroyed = false;
-  var inflateFilter = zlib.createInflateRaw();
-  readStream.on("error", function(err) {
-    // setImmediate here because errors can be emitted during the first call to pipe()
-    setImmediate(function() {
-      if (!destroyed) inflateFilter.emit("error", err);
-    });
-  });
-  readStream.pipe(inflateFilter);
-  if (self.validateEntrySizes) {
-    endpointStream = new AssertByteCountStream(entry.uncompressedSize);
-    inflateFilter.on("error", function(err) {
-      // forward zlib errors to the client-visible stream
-      setImmediate(function() {
-        if (!destroyed) endpointStream.emit("error", err);
-      });
-    });
-    inflateFilter.pipe(endpointStream);
-  } else {
-    // the zlib filter is the client-visible stream
-    endpointStream = inflateFilter;
-  }
-  // this is part of yauzl's API, so implement this function on the client-visible stream
-  installDestroyFn(endpointStream, function() {
-    destroyed = true;
-    if (inflateFilter !== endpointStream) inflateFilter.unpipe(endpointStream);
-    readStream.unpipe(inflateFilter);
-    // TODO: the inflateFilter may cause a memory leak. see Issue #27.
-    readStream.destroy();
-  });
-}
-callback(null, endpointStream);
 } finally {
@@ -603,2 +618,5 @@ self.reader.unref();
+function LocalFileHeader() {
+}
 function dosDateTimeToDate(date, time) {
@@ -617,2 +635,46 @@ var day = date & 0x1f; // 1-31
+function getFileNameLowLevel(generalPurposeBitFlag, fileNameBuffer, extraFields, strictFileNames) {
+  var fileName = null;
+  // check for Info-ZIP Unicode Path Extra Field (0x7075)
+  // see https://github.com/thejoshwolfe/yauzl/issues/33
+  for (var i = 0; i < extraFields.length; i++) {
+    var extraField = extraFields[i];
+    if (extraField.id === 0x7075) {
+      if (extraField.data.length < 6) {
+        // too short to be meaningful
+        continue;
+      }
+      // Version       1 byte      version of this extra field, currently 1
+      if (extraField.data.readUInt8(0) !== 1) {
+        // > Changes may not be backward compatible so this extra
+        // > field should not be used if the version is not recognized.
+        continue;
+      }
+      // NameCRC32     4 bytes     File Name Field CRC32 Checksum
+      var oldNameCrc32 = extraField.data.readUInt32LE(1);
+      if (crc32.unsigned(fileNameBuffer) !== oldNameCrc32) {
+        // > If the CRC check fails, this UTF-8 Path Extra Field should be
+        // > ignored and the File Name field in the header should be used instead.
+        continue;
+      }
+      // UnicodeName   Variable    UTF-8 version of the entry File Name
+      fileName = decodeBuffer(extraField.data.subarray(5), true);
+      break;
+    }
+  }
+  if (fileName == null) {
+    // The typical case.
+    var isUtf8 = (generalPurposeBitFlag & 0x800) !== 0;
+    fileName = decodeBuffer(fileNameBuffer, isUtf8);
+  }
+  if (!strictFileNames) {
+    // Allow backslash.
+    fileName = fileName.replace(/\\/g, "/");
+  }
+  return fileName;
+}
 function validateFileName(fileName) {
@@ -632,2 +694,21 @@ if (fileName.indexOf("\\") !== -1) {
+function parseExtraFields(extraFieldBuffer) {
+  var extraFields = [];
+  var i = 0;
+  while (i < extraFieldBuffer.length - 3) {
+    var headerId = extraFieldBuffer.readUInt16LE(i + 0);
+    var dataSize = extraFieldBuffer.readUInt16LE(i + 2);
+    var dataStart = i + 4;
+    var dataEnd = dataStart + dataSize;
+    if (dataEnd > extraFieldBuffer.length) throw new Error("extra field length exceeds extra field buffer size");
+    var dataBuffer = extraFieldBuffer.subarray(dataStart, dataEnd);
+    extraFields.push({
+      id: headerId,
+      data: dataBuffer,
+    });
+    i = dataEnd;
+  }
+  return extraFields;
+}
 function readAndAssertNoEof(reader, buffer, offset, length, position, callback) {
@@ -692,2 +773,3 @@ if (length === 0) {
 RandomAccessReader.prototype.createReadStream = function(options) {
+  if (options == null) options = {};
   var start = options.start;
@@ -771,8 +853,8 @@ var end = options.end;
 var cp437 = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ ';
-function decodeBuffer(buffer, start, end, isUtf8) {
+function decodeBuffer(buffer, isUtf8) {
   if (isUtf8) {
-    return buffer.toString("utf8", start, end);
+    return buffer.toString("utf8");
   } else {
     var result = "";
-    for (var i = start; i < end; i++) {
+    for (var i = 0; i < buffer.length; i++) {
       result += cp437[buffer[i]];
@@ -779,0 +861,0 @@ }
package.json
@@ -1,5 +1,8 @@
 {
   "name": "yauzl",
-  "version": "3.0.0",
+  "version": "3.1.0",
   "description": "yet another unzip library for node",
+  "engines": {
+    "node": ">=12"
+  },
   "main": "index.js",
@@ -6,0 +9,0 @@ "scripts": {
README.md
@@ -152,5 +152,43 @@ # yauzl
 Each parameter is a `Number` treated as an unsigned 16-bit integer.
-Note that this format does not support timezones,
-so the returned object will use the local timezone.
+Note that this format does not support timezones.
+The returned `Date` object will be constructed using the local timezone.
+In order to interpret the parameters in UTC time instead of local time, you can convert with the following snippet:
+
+```js
+var timestampInterpretedAsLocal = yauzl.dosDateTimeToDate(date, time); // or entry.getLastModDate()
+var timestampInterpretedAsUTCInstead = new Date(
+    timestampInterpretedAsLocal.getTime() -
+    timestampInterpretedAsLocal.getTimezoneOffset() * 60 * 1000
+);
+```
+
+Note that there is an ECMAScript proposal to add better timezone support to JavaScript called the `Temporal` API.
+Last I checked, it is at stage 3. https://github.com/tc39/proposal-temporal
+Once that new API is available and stable, better timezone handling should be possible here somehow.
+Feel free to open a feature request against this library when the time comes.
+
+### getFileNameLowLevel(generalPurposeBitFlag, fileNameBuffer, extraFields, strictFileNames)
+
+If you are setting `decodeStrings` to `false`, then this function can be used to decode the file name yourself.
+This function is effectively used internally by yauzl to populate the `entry.fileName` field when `decodeStrings` is `true`.
+
+WARNING: This method of getting the file name bypasses the security checks in [`validateFileName()`](#validatefilename-filename).
+You should call that function yourself to be sure to guard against malicious file paths.
+
+`generalPurposeBitFlag` can be found on an [`Entry`](#class-entry) or [`LocalFileHeader`](#class-localfileheader).
+Only General Purpose Bit 11 is used, and only when an Info-ZIP Unicode Path Extra Field cannot be found in `extraFields`.
+
+`fileNameBuffer` is a `Buffer` representing the file name field of the entry.
+This is `entry.fileNameRaw` or `localFileHeader.fileName`.
+
+`extraFields` is the parsed extra fields array from `entry.extraFields` or `parseExtraFields()`.
+
+`strictFileNames` is a boolean, the same as the option of the same name in `open()`.
+When `false`, backslash characters (`\`) will be replaced with forward slash characters (`/`).
+
+This function always returns a string, although it may not be a valid file name.
+See `validateFileName()`.
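
For illustration, here is a minimal sketch of how this function combines with `validateFileName()` when you have opted out of automatic decoding. The `zipfile` variable is assumed to come from your own `open()` callback; it is not defined by this snippet:

```js
var yauzl = require("yauzl");

// A sketch, assuming `zipfile` was opened with
// {decodeStrings: false, lazyEntries: true}.
zipfile.on("entry", function(entry) {
  // Decode the name the same way yauzl would with decodeStrings: true.
  var fileName = yauzl.getFileNameLowLevel(
      entry.generalPurposeBitFlag,
      entry.fileNameRaw,
      entry.extraFields,
      false); // strictFileNames
  // getFileNameLowLevel() bypasses the security checks, so run them yourself.
  var errorMessage = yauzl.validateFileName(fileName);
  if (errorMessage != null) throw new Error(errorMessage);
  console.log("entry:", fileName);
  zipfile.readEntry();
});
```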
 ### validateFileName(fileName)
@@ -170,2 +208,14 @@
+### parseExtraFields(extraFieldBuffer)
+
+This function is used internally by yauzl to compute [`entry.extraFields`](#extrafields).
+It is exported in case you want to call it on [`localFileHeader.extraField`](#class-localfileheader).
+
+`extraFieldBuffer` is a `Buffer`, such as `localFileHeader.extraField`.
+
+Returns an `Array` with each item in the form `{id: id, data: data}`,
+where `id` is a `Number` and `data` is a `Buffer`.
+Throws an `Error` if the data encodes an item with a size that exceeds the bounds of the buffer.
+You may want to surround calls to this function with `try { ... } catch (err) { ... }` to handle the error.
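
For example, a guarded call might look like this sketch, where `localFileHeader` is assumed to come from a `readLocalFileHeader()` callback and the helper name is illustrative:

```js
var yauzl = require("yauzl");

// A sketch: returns the parsed extra fields, or null for malformed data.
function getExtraFieldsOrNull(localFileHeader) {
  try {
    return yauzl.parseExtraFields(localFileHeader.extraField);
  } catch (err) {
    // A declared item size ran past the end of the buffer.
    return null;
  }
}
```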
 ### Class: ZipFile
@@ -293,2 +343,51 @@
+#### readLocalFileHeader(entry, [options], callback)
+
+This is a low-level function you probably don't need to call.
+The intended use case is either preparing to call `openReadStreamLowLevel()`
+or simply examining the content of the local file header out of curiosity or for debugging zip file structure issues.
+
+`entry` is an entry obtained from `Event: "entry"`.
+An `entry` in this library is a file's metadata from a Central Directory Header,
+and this function gives the corresponding redundant data in a Local File Header.
+
+`options` may be omitted or `null`, and has the following defaults:
+
+```js
+{
+  minimal: false,
+}
+```
+
+If `minimal` is `false` (or omitted or `null`), the callback receives a full `LocalFileHeader`.
+If `minimal` is `true`, the callback receives an object with a single property and no prototype: `{fileDataStart: fileDataStart}`.
+For typical zipfile reading use cases, this field is the only one you need,
+and yauzl internally effectively uses the `{minimal: true}` option as part of `openReadStream()`.
+
+The `callback` receives `(err, localFileHeaderOrAnObjectWithJustOneFieldDependingOnTheMinimalOption)`,
+where the type of the second parameter is described in the above discussion of the `minimal` option.
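
As a sketch of typical usage (assuming `zipfile` came from `yauzl.open(path, {lazyEntries: true}, ...)`), you might inspect each entry's local file header while walking the central directory:

```js
// A sketch: dump local file headers for debugging.
zipfile.readEntry();
zipfile.on("entry", function(entry) {
  zipfile.readLocalFileHeader(entry, function(err, localFileHeader) {
    if (err) throw err;
    // The local header repeats central directory metadata; compare the two.
    console.log(entry.fileName, localFileHeader.crc32 === entry.crc32);
    zipfile.readEntry();
  });
});
```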
+#### openReadStreamLowLevel(fileDataStart, compressedSize, relativeStart, relativeEnd, decompress, uncompressedSize, callback)
+
+This is a low-level function available for advanced use cases. You probably want `openReadStream()` instead.
+The intended use case for this function is calling `readEntry()` and `readLocalFileHeader()` with `{minimal: true}` first,
+and then opening the read stream at a later time, possibly after closing and reopening the entire zipfile,
+possibly even in a different process.
+The parameters are all integers and booleans, which are friendly to serialization.
+
+* `fileDataStart` - from `localFileHeader.fileDataStart`
+* `relativeStart` - the resolved value of `options.start` from `openReadStream()`. Must be a non-negative integer, not `null`. Typically `0` to start at the beginning of the data.
+* `relativeEnd` - the resolved value of `options.end` from `openReadStream()`. Must be a non-negative integer, not `null`. Typically `entry.compressedSize` to include all the data.
+* `decompress` - boolean indicating whether the data should be piped through a zlib inflate stream.
+* `uncompressedSize` - from `entry.uncompressedSize`. Only used when `validateEntrySizes` is `true`. If `validateEntrySizes` is `false`, this value is ignored, but it must still be present in the arguments; you have to give it some value, even if it's `null`.
+* `callback` - receives `(err, readStream)`, the same as for `openReadStream()`.
+
+This low-level function does not read any metadata from the underlying storage before opening the read stream.
+This is both a performance feature and a safety hazard.
+None of the integer parameters are bounds checked.
+None of the validation from `openReadStream()` with respect to compression and encryption is done here either.
+Only the bounds checks from `validateEntrySizes` are done, because that is part of processing the stream data.
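
A sketch of the two-phase flow described above (the `savedParams` name is illustrative, not part of the API):

```js
// Phase 1: while the zipfile is open, record plain numbers and booleans.
zipfile.readLocalFileHeader(entry, {minimal: true}, function(err, minimalHeader) {
  if (err) throw err;
  var savedParams = { // could be serialized to JSON and stored anywhere
    fileDataStart: minimalHeader.fileDataStart,
    compressedSize: entry.compressedSize,
    decompress: entry.compressionMethod === 8,
    uncompressedSize: entry.uncompressedSize,
  };
  // Phase 2: possibly much later, open the stream without re-reading metadata.
  zipfile.openReadStreamLowLevel(
      savedParams.fileDataStart, savedParams.compressedSize,
      0, savedParams.compressedSize, // relativeStart, relativeEnd: all the data
      savedParams.decompress, savedParams.uncompressedSize,
      function(err, readStream) {
        if (err) throw err;
        readStream.pipe(process.stdout);
      });
});
```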
 #### close()
@@ -344,10 +443,10 @@
 * `compressionMethod`
-* `lastModFileTime` (MS-DOS format, see `getLastModDateTime`)
-* `lastModFileDate` (MS-DOS format, see `getLastModDateTime`)
+* `lastModFileTime` (MS-DOS format, see [`getLastModDate()`](#getlastmoddate))
+* `lastModFileDate` (MS-DOS format, see [`getLastModDate()`](#getlastmoddate))
 * `crc32`
 * `compressedSize`
 * `uncompressedSize`
-* `fileNameLength` (bytes)
-* `extraFieldLength` (bytes)
-* `fileCommentLength` (bytes)
+* `fileNameLength` (in bytes)
+* `extraFieldLength` (in bytes)
+* `fileCommentLength` (in bytes)
 * `internalFileAttributes`
@@ -357,2 +456,15 @@ * `externalFileAttributes`
+These fields are of type `Buffer`, and represent variable-length bytes before being processed:
+
+* `fileNameRaw`
+* `extraFieldRaw`
+* `fileCommentRaw`
+
+There are additional fields described below: `fileName`, `extraFields`, `fileComment`.
+These are the processed versions of the `*Raw` fields listed above. See their own sections below.
+(Note the inconsistency in pluralization of "field" vs "fields" in `extraField`, `extraFields`, and `extraFieldRaw`.
+Sorry about that.)
+
+The `new Entry()` constructor is available for clients to call, but it's usually not useful.
+The constructor takes no parameters and does nothing; no fields will exist.
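
To make the relationship concrete, here is a sketch (not the literal implementation) of how the processed fields are derived from the raw ones when `decodeStrings` is `true`; `strictFileNames` stands for the value of the option of the same name:

```js
// A sketch of what yauzl effectively does internally:
var extraFields = yauzl.parseExtraFields(entry.extraFieldRaw);
var fileName = yauzl.getFileNameLowLevel(
    entry.generalPurposeBitFlag, entry.fileNameRaw, extraFields, strictFileNames);
// entry.fileComment is entry.fileCommentRaw decoded as UTF-8 or CP437,
// depending on General Purpose Bit 11.
```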
 #### fileName
@@ -375,3 +487,3 @@
-`Array` with each entry in the form `{id: id, data: data}`,
+`Array` with each item in the form `{id: id, data: data}`,
 where `id` is a `Number` and `data` is a `Buffer`.
@@ -386,3 +498,3 @@
 When the field is identified and verified to be reliable (see the zipfile spec),
-the the file name in this field is stored in the `fileName` property,
+the file name in this field is stored in the `fileName` property,
 and the file name in the central directory record for this entry is ignored.
@@ -392,3 +504,3 @@ Note that when `decodeStrings` is false, all Info-ZIP Unicode Path Extra Fields are ignored.
 None of the other fields are considered significant by this library.
-Fields that this library reads are left unalterned in the `extraFields` array.
+Fields that this library reads are left unaltered in the `extraFields` array.
@@ -408,3 +520,3 @@ #### fileComment
-Effectively implemented as:
+Effectively implemented as the following. See [`dosDateTimeToDate()`](#dosdatetimetodatedate-time).
@@ -438,2 +550,31 @@ ```js
+### Class: LocalFileHeader
+
+This is a trivial class that has no methods and only the following properties.
+The constructor is available to call, but it doesn't do anything.
+See `readLocalFileHeader()`.
+
+See the zipfile spec for what these fields mean.
+
+* `fileDataStart` - `Number`: inferred from `fileNameLength`, `extraFieldLength`, and this struct's position in the zipfile.
+* `versionNeededToExtract` - `Number`
+* `generalPurposeBitFlag` - `Number`
+* `compressionMethod` - `Number`
+* `lastModFileTime` - `Number`
+* `lastModFileDate` - `Number`
+* `crc32` - `Number`
+* `compressedSize` - `Number`
+* `uncompressedSize` - `Number`
+* `fileNameLength` - `Number`
+* `extraFieldLength` - `Number`
+* `fileName` - `Buffer`
+* `extraField` - `Buffer`
+
+Note that unlike `Class: Entry`, the `fileName` and `extraField` are completely unprocessed.
+This notably lacks Unicode and ZIP64 handling as well as any kind of safety validation on the file name.
+See also [`parseExtraFields()`](#parseextrafields-extrafieldbuffer).
+
+Also note that if your object is missing some of these fields,
+make sure to read the docs on the `minimal` option in `readLocalFileHeader()`.
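
For example, since a full `LocalFileHeader` is completely unprocessed, decoding its file name takes the same low-level helpers described earlier (a sketch; `localFileHeader` is assumed to come from `readLocalFileHeader()` without the `minimal` option):

```js
var extraFields = yauzl.parseExtraFields(localFileHeader.extraField); // may throw
var fileName = yauzl.getFileNameLowLevel(
    localFileHeader.generalPurposeBitFlag,
    localFileHeader.fileName, // a raw Buffer here, unlike entry.fileName
    extraFields,
    false); // strictFileNames
var errorMessage = yauzl.validateFileName(fileName);
if (errorMessage != null) throw new Error(errorMessage);
```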
 ### Class: RandomAccessReader
@@ -549,3 +690,3 @@
-### Limitted ZIP64 Support
+### Limited ZIP64 Support
@@ -629,2 +770,10 @@ For ZIP64, only zip files smaller than `8PiB` are supported,
+* 3.1.0
+  * Added `readLocalFileHeader()` and `Class: LocalFileHeader`.
+  * Added `openReadStreamLowLevel()`.
+  * Added `getFileNameLowLevel()` and `parseExtraFields()`.
+    Added fields to `Class: Entry`: `fileNameRaw`, `extraFieldRaw`, `fileCommentRaw`.
+  * Added `examples/compareCentralAndLocalHeaders.js` that demonstrates many of these low-level APIs.
+  * Noted dropped support of node versions before 12 in the `"engines"` field of `package.json`.
+  * Fixed a crash when calling `openReadStream()` with an explicitly `null` options parameter (as opposed to omitted).
 * 3.0.0
@@ -631,0 +780,0 @@ * BREAKING CHANGE: implementations of [RandomAccessReader](#class-randomaccessreader) that implement a `destroy` method must instead implement `_destroy` in accordance with the node standard https://nodejs.org/api/stream.html#writable_destroyerr-callback (note the error and callback parameters). If you continue to override `destroy` instead, some error handling may be subtly broken. Additionally, this is required for async iterators to work correctly in some versions of node. [issue #110](https://github.com/thejoshwolfe/yauzl/issues/110)