Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

adm-zip

Package Overview
Dependencies
Maintainers
1
Versions
40
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

adm-zip - npm Package Compare versions

Comparing version 0.2.1 to 0.4.3

test/assets/attributes_test.zip

78

adm-zip.js
var fs = require("fs"),
buffer = require("buffer"),
pth = require("path");

@@ -11,17 +10,17 @@

module.exports = function(/*String*/inPath) {
module.exports = function(/*String*/input) {
var _zip = undefined,
_filename = "";
if (inPath && typeof inPath === "string") { // load zip file
if (fs.existsSync(inPath)) {
_filename = inPath;
_zip = new ZipFile(fs.readFileSync(inPath));
if (input && typeof input === "string") { // load zip file
if (fs.existsSync(input)) {
_filename = input;
_zip = new ZipFile(input, Utils.Constants.FILE);
} else {
throw Utils.Errors.INVALID_FILENAME;
}
} else if(inPath && Buffer.isBuffer(inPath)) { // load buffer
_zip = new ZipFile(inPath);
} else if(input && Buffer.isBuffer(input)) { // load buffer
_zip = new ZipFile(input, Utils.Constants.BUFFER);
} else { // create new zip file
_zip = new ZipFile();
_zip = new ZipFile(null, Utils.Constants.NONE);
}

@@ -46,6 +45,2 @@

//process.on('uncaughtException', function (err) {
// console.log('Caught exception: ' + err);
//});
return {

@@ -62,2 +57,3 @@ /**

},
/**

@@ -78,2 +74,3 @@ * Asynchronous readFile

},
/**

@@ -96,2 +93,3 @@ * Extracts the given entry from the archive and returns the content as plain text in the given encoding

},
/**

@@ -198,11 +196,4 @@ * Asynchronous readAsText

if (fs.existsSync(localPath)) {
var entry = new ZipEntry();
entry.entryName = localPath.split("\\").join("/"); //windows fix
var stats = fs.statSync(localPath);
entry.setData(fs.readFileSync(localPath));
entry.header.inAttr = stats.mode;
entry.header.attr = stats.mode;
entry.attr = stats.mode;
entry.header.time = stats.mtime;
_zip.setEntry(entry);
var p = localPath.split("\\").join("/").split("/").pop();
this.addFile(p, fs.readFileSync(localPath), "", 0)
} else {

@@ -224,20 +215,14 @@ throw Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath);

if (fs.existsSync(localPath)) {
var items = Utils.findFiles(localPath);
var items = Utils.findFiles(localPath),
self = this;
if (items.length) {
items.forEach(function(path) {
var entry = new ZipEntry();
entry.entryName = path.split("\\").join("/").replace(localPath, ""); //windows fix
var stats = fs.statSync(path);
if (stats.isDirectory()) {
entry.setData("");
entry.header.inAttr = stats.mode;
entry.header.attr = stats.mode
var p = path.split("\\").join("/").replace(localPath, ""); //windows fix
if (p.charAt(p.length - 1) !== "/") {
self.addFile(p, fs.readFileSync(path), "", 0)
} else {
entry.setData(fs.readFileSync(path));
entry.header.inAttr = stats.mode;
entry.header.attr = stats.mode
self.addFile(p, new Buffer(0), "", 0)
}
entry.attr = stats.mode;
entry.header.time = stats.mtime;
_zip.setEntry(entry);
});

@@ -266,6 +251,5 @@ }

if (entry.isDirectory && content.length) {
throw Utils.Errors.DIRECTORY_CONTENT_ERROR;
// throw Utils.Errors.DIRECTORY_CONTENT_ERROR;
}
entry.setData(content);
entry.header.time = new Date();
_zip.setEntry(entry);

@@ -360,3 +344,6 @@ },

_zip.entries.forEach(function(entry) {
if (entry.isDirectory) return;
if (entry.isDirectory) {
Utils.makeDir(pth.resolve(targetPath, entry.entryName.toString()));
return;
}
var content = entry.getData();

@@ -366,3 +353,3 @@ if (!content) {

}
Utils.writeFileTo(pth.resolve(targetPath, entry.entryName), content, overwrite);
Utils.writeFileTo(pth.resolve(targetPath, entry.entryName.toString()), content, overwrite);
})

@@ -375,2 +362,3 @@ },

* @param targetFileName
* @param callback
*/

@@ -390,3 +378,3 @@ writeZip : function(/*String*/targetFileName, /*Function*/callback) {

var zipData = _zip.toBuffer();
var zipData = _zip.compressToBuffer();
if (zipData) {

@@ -408,11 +396,5 @@ Utils.writeFileTo(targetFileName, zipData, true);

}
return _zip.toBuffer()
return _zip.compressToBuffer()
}
/*get lastError () {
var x = function() { console.log("2", arguments); };
x.prototype = 2
return x; //
} */
}
};

@@ -7,3 +7,3 @@ var Utils = require("../util"),

var _verMade = 0x0A,
_version = 10,
_version = 0x0A,
_flags = 0,

@@ -17,8 +17,25 @@ _method = 0,

_extraLen = 0,
_comLen = 0,
_diskStart = 0,
_inattr = 438,
_attr = 438,
_inattr = 0,
_attr = 0,
_offset = 0;
var _dataHeader = {};
// Converts a JS Date (or any date-coercible value) to the packed 4-byte
// MS-DOS date/time format used in zip headers and stores it in _time.
// Fix: the day-of-month field must come from getDate(); the original used
// getDay(), which returns the weekday (0-6) and produced wrong timestamps.
function setTime(val) {
    var d = new Date(val);
    _time = (d.getFullYear() - 1980 & 0x7f) << 25 // b25-31: years since 1980
        | (d.getMonth() + 1) << 21                // b21-24: month (1-12)
        | d.getDate() << 16                       // b16-20: day of month (1-31)
        // lower 2 bytes hold the time
        | d.getHours() << 11                      // b11-15: hours
        | d.getMinutes() << 5                     // b05-10: minutes
        | d.getSeconds() >> 1;                    // b00-04: seconds divided by 2
}
setTime(+new Date());
return {

@@ -46,9 +63,4 @@ get made () { return _verMade; },

},
set time (val) { val = new Date(val);
_time = (val.getFullYear() - 1980 & 0x7f) << 25
| (val.getMonth() + 1) << 21
| val.getDay() << 16
| val.getHours() << 11
| val.getMinutes() << 5
| val.getSeconds() >> 1;
set time (val) {
setTime(val);
},

@@ -92,2 +104,38 @@

get realDataOffset () {
return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;
},
get dataHeader () {
return _dataHeader;
},
// Parses the local file header (LOC record) located at _offset inside the
// raw archive buffer and caches the decoded fields in _dataHeader.
// `input` is the whole zip file as a Buffer. Throws Utils.Errors.INVALID_LOC
// when the 4-byte signature "PK\003\004" is not found at that position.
loadDataHeaderFromBinary : function(/*Buffer*/input) {
var data = input.slice(_offset, _offset + Constants.LOCHDR);
// 30 bytes and should start with "PK\003\004"
if (data.readUInt32LE(0) != Constants.LOCSIG) {
throw Utils.Errors.INVALID_LOC;
}
_dataHeader = {
// version needed to extract
version : data.readUInt16LE(Constants.LOCVER),
// general purpose bit flag
flags : data.readUInt16LE(Constants.LOCFLG),
// compression method
method : data.readUInt16LE(Constants.LOCHOW),
// modification time (2 bytes time, 2 bytes date)
time : data.readUInt32LE(Constants.LOCTIM),
// uncompressed file crc-32 value
crc : data.readUInt32LE(Constants.LOCCRC),
// compressed size
compressedSize : data.readUInt32LE(Constants.LOCSIZ),
// uncompressed size
size : data.readUInt32LE(Constants.LOCLEN),
// filename length
fnameLen : data.readUInt16LE(Constants.LOCNAM),
// extra field length
extraLen : data.readUInt16LE(Constants.LOCEXT)
}
},
loadFromBinary : function(/*Buffer*/data) {

@@ -130,3 +178,29 @@ // data should be 46 bytes and start with "PK 01 02"

toBinary : function() {
// Serializes the header fields into a fresh 30-byte local-file-header (LOC)
// buffer. Only the fixed-size portion is produced here; the filename and
// extra field bytes are appended by the caller.
dataHeaderToBinary : function() {
// LOC header size (30 bytes)
var data = new Buffer(Constants.LOCHDR);
// "PK\003\004"
data.writeUInt32LE(Constants.LOCSIG, 0);
// version needed to extract
data.writeUInt16LE(_version, Constants.LOCVER);
// general purpose bit flag
data.writeUInt16LE(_flags, Constants.LOCFLG);
// compression method
data.writeUInt16LE(_method, Constants.LOCHOW);
// modification time (2 bytes time, 2 bytes date)
data.writeUInt32LE(_time, Constants.LOCTIM);
// uncompressed file crc-32 value
data.writeUInt32LE(_crc, Constants.LOCCRC);
// compressed size
data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);
// uncompressed size
data.writeUInt32LE(_size, Constants.LOCLEN);
// filename length
data.writeUInt16LE(_fnameLen, Constants.LOCNAM);
// extra field length
data.writeUInt16LE(_extraLen, Constants.LOCEXT);
return data;
},
entryHeaderToBinary : function() {
// CEN header size (46 bytes)

@@ -192,2 +266,2 @@ var data = new Buffer(Constants.CENHDR + _fnameLen + _extraLen + _comLen);

}
};
};
exports.EntryHeader = require("./entryHeader");
exports.DataHeader = require("./dataHeader");
exports.MainHeader = require("./mainHeader");
exports.MainHeader = require("./mainHeader");

@@ -0,0 +0,0 @@ var Utils = require("../util"),

@@ -0,292 +1,396 @@

/*
* $Id: rawdeflate.js,v 0.5 2013/04/09 14:25:38 dankogai Exp dankogai $
*
* GNU General Public License, version 2 (GPL-2.0)
* http://opensource.org/licenses/GPL-2.0
* Original:
* http://www.onicos.com/staff/iz/amuse/javascript/expert/deflate.txt
*/
function JSDeflater(/*inbuff*/inbuf) {
var WSIZE = 0x8000, // Sliding Window size
WINDOW_SIZE = 0x10000,
/* Copyright (C) 1999 Masanao Izumo <iz@onicos.co.jp>
* Version: 1.0.1
* LastModified: Dec 25 1999
*/
/* for deflate */
MIN_MATCH = 0x03,
MAX_MATCH = 0x102,
var WSIZE = 32768, // Sliding Window size
zip_STORED_BLOCK = 0,
zip_STATIC_TREES = 1,
zip_DYN_TREES = 2,
zip_DEFAULT_LEVEL = 6,
zip_FULL_SEARCH = true,
zip_INBUFSIZ = 32768, // Input buffer size
zip_INBUF_EXTRA = 64, // Extra buffer
zip_OUTBUFSIZ = 1024 * 8,
zip_window_size = 2 * WSIZE,
MIN_MATCH = 3,
MAX_MATCH = 258,
zip_BITS = 16,
LIT_BUFSIZE = 0x2000,
MAX_DIST = 0x7EFA,
MAX_BITS = 0x0F,
MAX_BL_BITS = 0x07,
L_CODES = 0x11E,
D_CODES = 0x1E,
BL_CODES = 0x13,
REP_3_6 = 0x10,
REPZ_3_10 = 0x11,
REPZ_11_138 = 0x12,
HEAP_SIZE = 2 * L_CODES + 1,
H_SHIFT = parseInt((0x10 + MIN_MATCH - 1) / MIN_MATCH),
zip_HASH_BITS = 13,
zip_DIST_BUFSIZE = LIT_BUFSIZE,
zip_HASH_SIZE = 1 << zip_HASH_BITS,
zip_HASH_MASK = zip_HASH_SIZE - 1,
zip_WMASK = WSIZE - 1,
zip_NIL = 0, // Tail of hash chains
zip_TOO_FAR = 4096,
zip_MIN_LOOKAHEAD = MAX_MATCH + MIN_MATCH + 1,
zip_MAX_DIST = WSIZE - zip_MIN_LOOKAHEAD,
zip_SMALLEST = 1,
zip_MAX_BITS = 15,
zip_MAX_BL_BITS = 7,
zip_LENGTH_CODES = 29,
zip_LITERALS = 256,
zip_END_BLOCK = 256,
zip_L_CODES = zip_LITERALS + 1 + zip_LENGTH_CODES,
zip_D_CODES = 30,
zip_BL_CODES = 19,
zip_REP_3_6 = 16,
zip_REPZ_3_10 = 17,
zip_REPZ_11_138 = 18,
zip_HEAP_SIZE = 2 * zip_L_CODES + 1,
zip_H_SHIFT = parseInt((zip_HASH_BITS + MIN_MATCH - 1) / MIN_MATCH);
/* variables */
freeQueue,
qHead, qTail,
initFlag,
outbuf = null,
outcnt, outoff,
complete,
window,
dBuf,
lBuf,
prev,
biBuf,
biValid,
blockStart,
zip_ins_h,
hashHead,
prevMatch,
matchAvailable,
matchLength,
matchStart,
prevLength,
dataStart,
eofile,
lookahead,
maxChainLength,
maxLazyMatch,
compression_level,
goodMatch,
dynLTree = [],
dynDTree = [],
staticLTree = [],
staticDTree = [],
blTree = [],
lDesc,
dDesc,
blDesc,
blCount,
zip_heap,
heapLen,
heapMax,
depth,
lengthCode,
distCode,
baseLength,
baseDist,
flagBuf,
lastLit,
lastDist,
lastFlags,
flags,
flagBit,
optLen,
staticLen,
deflateData,
deflatePos,
var zip_free_queue, zip_qhead, zip_qtail, zip_initflag, zip_outbuf = null, zip_outcnt, zip_outoff, zip_complete,
zip_window, zip_d_buf, zip_l_buf, zip_prev, zip_bi_buf, zip_bi_valid, zip_block_start, zip_ins_h, zip_hash_head,
zip_prev_match, zip_match_available, zip_match_length, zip_prev_length, zip_strstart, zip_match_start, zip_eofile,
zip_lookahead, zip_max_chain_length, zip_max_lazy_match, zip_compr_level, zip_good_match, zip_nice_match,
zip_dyn_ltree, zip_dyn_dtree, zip_static_ltree, zip_static_dtree, zip_bl_tree, zip_l_desc, zip_d_desc, zip_bl_desc,
zip_bl_count, zip_heap, zip_heap_len, zip_heap_max, zip_depth, zip_length_code, zip_dist_code, zip_base_length,
zip_base_dist, zip_flag_buf, zip_last_lit, zip_last_dist, zip_last_flags, zip_flags, zip_flag_bit, zip_opt_len,
zip_static_len, zip_deflate_data, zip_deflate_pos;
elbits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0],
edbits = [0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13],
eblbits = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7],
blorder = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15];
var zip_DeflateCT = function () {
    // Huffman tree node. Before code generation, fc is a frequency count and
    // dl the father node index; afterwards fc holds the bit string and dl its
    // length.
    this.fc = this.dl = 0;
};
function deflateTreeDesc() {
    // Builds a fresh descriptor for one Huffman tree (literal/length,
    // distance, or bit-length tree).
    var desc = {};
    desc.dyn_tree = null;    // the dynamic tree
    desc.static_tree = null; // corresponding static tree or NULL
    desc.extra_bits = null;  // extra bits for each code or NULL
    desc.extra_base = 0;     // base index for extra_bits
    desc.elems = 0;          // max number of elements in the tree
    desc.max_length = 0;     // max bit length for the codes
    desc.max_code = 0;
    return desc;
}
var zip_DeflateTreeDesc = function () {
    // Descriptor for one Huffman tree and its static counterpart.
    this.dyn_tree = null;    // the dynamic tree
    this.static_tree = null; // corresponding static tree or NULL
    this.extra_bits = null;  // extra bits for each code or NULL
    this.extra_base = 0;     // base index for extra_bits
    this.elems = 0;          // max number of elements in the tree
    this.max_length = 0;     // max bit length for the codes
    this.max_code = 0;       // largest code with non zero frequency
};
function deflateStart(level) {
var i;
compression_level = !level && 9 || level > 9 && 9 || level;
initFlag = false;
eofile = false;
/* Values for max_lazy_match, good_match and max_chain_length, depending on
* the desired pack level (0..9). The values given below have been tuned to
* exclude worst case performance for pathological files. Better values may be
* found for specific files.
*/
var zip_DeflateConfiguration = function (goodLength, maxLazy, niceLength, maxChain) {
    // Tuning parameters for one compression level (0..9); see the
    // configuration table for the concrete values.
    this.good_length = goodLength; // reduce lazy search above this match length
    this.max_lazy = maxLazy;       // do not perform lazy search above this match length
    this.nice_length = niceLength; // quit search above this match length
    this.max_chain = maxChain;     // max hash-chain links to follow
};
if(outbuf != null)
return;
// One node in the linked list of pending compressed-output buffers
// (queued by the flush path, recycled via the free queue).
var zip_DeflateBuffer = function () {
    this.next = null; // next buffer in the queue, or null
    this.len = 0; // number of bytes used in ptr
    this.ptr = new Array(zip_OUTBUFSIZ); // backing byte storage
    this.off = 0; // read offset into ptr
};
freeQueue = qHead = qTail = null;
outbuf = new Buffer(LIT_BUFSIZE);
window = new Buffer(WINDOW_SIZE);
/* constant tables */
var zip_extra_lbits = new Array(
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0);
var zip_extra_dbits = new Array(
0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, 12, 12, 13, 13);
var zip_extra_blbits = new Array(
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 7);
var zip_bl_order = new Array(
16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15);
var zip_configuration_table = new Array(
new zip_DeflateConfiguration(0, 0, 0, 0),
new zip_DeflateConfiguration(4, 4, 8, 4),
new zip_DeflateConfiguration(4, 5, 16, 8),
new zip_DeflateConfiguration(4, 6, 32, 32),
new zip_DeflateConfiguration(4, 4, 16, 16),
new zip_DeflateConfiguration(8, 16, 32, 32),
new zip_DeflateConfiguration(8, 16, 128, 128),
new zip_DeflateConfiguration(8, 32, 128, 256),
new zip_DeflateConfiguration(32, 128, 258, 1024),
new zip_DeflateConfiguration(32, 258, 258, 4096));
dBuf = new Array(LIT_BUFSIZE);
lBuf = new Array(inbuf.length + 0x64); // 0x64 extra buffer length
prev = new Array(0x10000);
for(i = 0; i < HEAP_SIZE; i++) dynLTree[i] = {fc:0, dl:0};
for(i = 0; i < 2 * D_CODES + 1; i++) dynDTree[i] = {fc:0, dl:0};
for(i = 0; i < L_CODES + 2; i++) staticLTree[i] = {fc:0, dl:0};
for(i = 0; i < D_CODES; i++) staticDTree[i] = {fc:0, dl:0};
for(i = 0; i < 2 * BL_CODES + 1; i++) blTree[i] = {fc:0, dl:0};
/* routines (deflate) */
lDesc = deflateTreeDesc();
dDesc = deflateTreeDesc();
blDesc = deflateTreeDesc();
var zip_deflate_start = function (level) {
var i;
blCount = new Buffer(MAX_BITS + 1);
zip_heap = new Array(2 * L_CODES + 1);
depth = new Buffer(2 * L_CODES + 1);
lengthCode = new Buffer(MAX_MATCH - MIN_MATCH + 1);
distCode = new Buffer(0x200);
baseLength = new Buffer(0x1D);
baseDist = new Buffer(D_CODES);
flagBuf = new Buffer(parseInt(LIT_BUFSIZE / 8));
}
if (!level)
level = zip_DEFAULT_LEVEL;
else if (level < 1)
level = 1;
else if (level > 9)
level = 9;
function cleanup() {
freeQueue = qHead = qTail = null;
outbuf = null;
window = null;
dBuf = null;
lBuf = null;
prev = null;
dynLTree = null;
dynDTree = null;
staticLTree = null;
staticDTree = null;
blTree = null;
lDesc = null;
dDesc = null;
blDesc = null;
blCount = null;
zip_compr_level = level;
zip_initflag = false;
zip_eofile = false;
if (zip_outbuf != null)
return;
zip_free_queue = zip_qhead = zip_qtail = null;
zip_outbuf = new Array(zip_OUTBUFSIZ);
zip_window = new Array(zip_window_size);
zip_d_buf = new Array(zip_DIST_BUFSIZE);
zip_l_buf = new Array(zip_INBUFSIZ + zip_INBUF_EXTRA);
zip_prev = new Array(1 << zip_BITS);
zip_dyn_ltree = new Array(zip_HEAP_SIZE);
for (i = 0; i < zip_HEAP_SIZE; i++) zip_dyn_ltree[i] = new zip_DeflateCT();
zip_dyn_dtree = new Array(2 * zip_D_CODES + 1);
for (i = 0; i < 2 * zip_D_CODES + 1; i++) zip_dyn_dtree[i] = new zip_DeflateCT();
zip_static_ltree = new Array(zip_L_CODES + 2);
for (i = 0; i < zip_L_CODES + 2; i++) zip_static_ltree[i] = new zip_DeflateCT();
zip_static_dtree = new Array(zip_D_CODES);
for (i = 0; i < zip_D_CODES; i++) zip_static_dtree[i] = new zip_DeflateCT();
zip_bl_tree = new Array(2 * zip_BL_CODES + 1);
for (i = 0; i < 2 * zip_BL_CODES + 1; i++) zip_bl_tree[i] = new zip_DeflateCT();
zip_l_desc = new zip_DeflateTreeDesc();
zip_d_desc = new zip_DeflateTreeDesc();
zip_bl_desc = new zip_DeflateTreeDesc();
zip_bl_count = new Array(zip_MAX_BITS + 1);
zip_heap = new Array(2 * zip_L_CODES + 1);
zip_depth = new Array(2 * zip_L_CODES + 1);
zip_length_code = new Array(MAX_MATCH - MIN_MATCH + 1);
zip_dist_code = new Array(512);
zip_base_length = new Array(zip_LENGTH_CODES);
zip_base_dist = new Array(zip_D_CODES);
zip_flag_buf = new Array(parseInt(LIT_BUFSIZE / 8));
};
var zip_deflate_end = function () {
zip_free_queue = zip_qhead = zip_qtail = null;
zip_outbuf = null;
zip_window = null;
zip_d_buf = null;
zip_l_buf = null;
zip_prev = null;
zip_dyn_ltree = null;
zip_dyn_dtree = null;
zip_static_ltree = null;
zip_static_dtree = null;
zip_bl_tree = null;
zip_l_desc = null;
zip_d_desc = null;
zip_bl_desc = null;
zip_bl_count = null;
zip_heap = null;
depth = null;
lengthCode = null;
distCode = null;
baseLength = null;
baseDist = null;
flagBuf = null;
}
zip_depth = null;
zip_length_code = null;
zip_dist_code = null;
zip_base_length = null;
zip_base_dist = null;
zip_flag_buf = null;
};
function writeByte(c) {
outbuf[outoff + outcnt++] = c;
if(outoff + outcnt == LIT_BUFSIZE) {
if(outcnt != 0) {
var q, i;
if (freeQueue != null) {
q = freeQueue;
freeQueue = freeQueue.next;
} else {
q = {
"next" : null,
"len" : 0,
"ptr" : new Buffer(LIT_BUFSIZE),
"off" : 0
}
}
q.next = null;
q.len = q.off = 0;
var zip_reuse_queue = function (buf) {
    // Return a drained output buffer to the free list so zip_new_queue can
    // hand it out again instead of allocating.
    buf.next = zip_free_queue;
    zip_free_queue = buf;
};
if(qHead == null)
qHead = qTail = q;
else
qTail = qTail.next = q;
var zip_new_queue = function () {
var p;
q.len = outcnt - outoff;
for(i = 0; i < q.len; i++)
q.ptr[i] = outbuf[outoff + i];
outcnt = outoff = 0;
}
if (zip_free_queue != null) {
p = zip_free_queue;
zip_free_queue = zip_free_queue.next;
}
}
else
p = new zip_DeflateBuffer();
p.next = null;
p.len = p.off = 0;
function writeShort(w) {
return p;
};
var zip_head1 = function (idx) {
    // Read the head of hash chain `idx`; chain heads live in the upper half
    // of the zip_prev array (offset by WSIZE).
    return zip_prev[idx + WSIZE];
};
var zip_head2 = function (idx, val) {
    // Write `val` as the new head of hash chain `idx` (stored in the upper
    // half of zip_prev) and return it.
    return zip_prev[idx + WSIZE] = val;
};
/* put_byte is used for the compressed output, put_ubyte for the
* uncompressed output. However unlzw() uses window for its
* suffix table instead of its output buffer, so it does not use put_ubyte
* (to be cleaned up).
*/
// Append one byte of compressed output; when the output buffer fills,
// hand it off to the buffer queue via zip_qoutbuf.
var zip_put_byte = function (c) {
    zip_outbuf[zip_outoff + zip_outcnt++] = c;
    if (zip_outoff + zip_outcnt == zip_OUTBUFSIZ)
        zip_qoutbuf();
};
/* Output a 16 bit value, lsb first */
var zip_put_short = function (w) {
w &= 0xffff;
if(outoff + outcnt < LIT_BUFSIZE - 2) {
outbuf[outoff + outcnt++] = (w & 0xff);
outbuf[outoff + outcnt++] = (w >>> 8);
if (zip_outoff + zip_outcnt < zip_OUTBUFSIZ - 2) {
zip_outbuf[zip_outoff + zip_outcnt++] = (w & 0xff);
zip_outbuf[zip_outoff + zip_outcnt++] = (w >>> 8);
} else {
writeByte(w & 0xff);
writeByte(w >>> 8);
zip_put_byte(w & 0xff);
zip_put_byte(w >>> 8);
}
return true;
}
};
// Rolls the next input byte into the hash (zip_ins_h), records the previous
// chain head in hashHead, and makes dataStart the new head of that chain.
// 0x1FFF is the 13-bit hash mask; 0x7FFF masks to the window size (WSIZE-1).
function insertString() {
    zip_ins_h = ((zip_ins_h << H_SHIFT) ^ (window[dataStart + MIN_MATCH - 1] & 0xff)) & 0x1FFF;
    hashHead = prev[WSIZE + zip_ins_h];
    prev[dataStart & 0x7FFF] = hashHead;
    prev[WSIZE + zip_ins_h] = dataStart;
}
/* ==========================================================================
* Insert string s in the dictionary and set match_head to the previous head
* of the hash chain (the most recent string with same hash key). Return
* the previous length of the hash chain.
* IN assertion: all calls to to INSERT_STRING are made with consecutive
* input characters and the first MIN_MATCH bytes of s are valid
* (except for the last MIN_MATCH-1 bytes of the input file).
*/
// Rolls the byte at strstart+MIN_MATCH-1 into the running hash, links the
// current position into that hash chain, and leaves the previous chain head
// in zip_hash_head for the match search.
var zip_INSERT_STRING = function () {
    zip_ins_h = ((zip_ins_h << zip_H_SHIFT)
        ^ (zip_window[zip_strstart + MIN_MATCH - 1] & 0xff))
        & zip_HASH_MASK;
    zip_hash_head = zip_head1(zip_ins_h);
    zip_prev[zip_strstart & zip_WMASK] = zip_hash_head;
    zip_head2(zip_ins_h, zip_strstart);
};
function sendCode(code, tree) {
    // Emit the Huffman encoding of `code`: fc holds the bit string, dl its
    // length.
    var node = tree[code];
    sendBits(node.fc, node.dl);
}
/* Send a code of the given tree. c and tree must not have side effects */
var zip_SEND_CODE = function (code, tree) {
    // Emit the Huffman encoding of `code` (tree entries must be free of side
    // effects): fc is the bit string, dl its length.
    var node = tree[code];
    zip_send_bits(node.fc, node.dl);
};
function zip_D_CODE(dist) {
    // Map a distance-1 value to its distance code. Distances >= 256 index
    // the second half of the table via their top bits.
    var idx = dist < 256 ? dist : 256 + (dist >> 7);
    return distCode[idx] & 0xff;
}
/* Mapping from a distance to a distance code. dist is the distance - 1 and
* must not have side effects. dist_code[256] and dist_code[257] are never
* used.
*/
var zip_D_CODE = function (dist) {
    // Map a distance-1 value to its distance code; dist must be free of side
    // effects. Entries 256/257 of the table are never used.
    var idx = dist < 256 ? dist : 256 + (dist >> 7);
    return zip_dist_code[idx] & 0xff;
};
function smaller(tree, n, m) {
/* ==========================================================================
* Compares to subtrees, using the tree depth as tie breaker when
* the subtrees have equal frequency. This minimizes the worst case length.
*/
var zip_SMALLER = function (tree, n, m) {
return tree[n].fc < tree[m].fc ||
(tree[n].fc == tree[m].fc && depth[n] <= depth[m]);
}
(tree[n].fc == tree[m].fc && zip_depth[n] <= zip_depth[m]);
};
function readBuff(buff, offset, n) {
var i, len = deflateData.length;
for(i = 0; i < n && deflatePos < len; i++) {
buff[offset + i] = deflateData[deflatePos++];
}
/* ==========================================================================
* read string data
*/
var zip_read_buff = function (buff, offset, n) {
var i;
for (i = 0; i < n && zip_deflate_pos < zip_deflate_data.length; i++)
buff[offset + i] =
zip_deflate_data[zip_deflate_pos++] & 0xff;
return i;
}
};
function lmInit() {
/* ==========================================================================
* Initialize the "longest match" routines for a new file
*/
var zip_lm_init = function () {
var j;
for(j = 0; j < 0x2000; j++) prev[WSIZE + j] = 0;
/* Initialize the hash table. */
for (j = 0; j < zip_HASH_SIZE; j++)
zip_prev[WSIZE + j] = 0;
zip_max_lazy_match = zip_configuration_table[zip_compr_level].max_lazy;
zip_good_match = zip_configuration_table[zip_compr_level].good_length;
if (!zip_FULL_SEARCH)
zip_nice_match = zip_configuration_table[zip_compr_level].nice_length;
zip_max_chain_length = zip_configuration_table[zip_compr_level].max_chain;
goodMatch = [0x0, 0x4, 0x4, 0x4, 0x4, 0x8, 0x8, 0x8, 0x20, 0x20][compression_level];
maxLazyMatch = [0x0, 0x4, 0x5, 0x6, 0x4, 0x10, 0x10, 0x20, 0x80, 0x102][compression_level];
maxChainLength = [0x0, 0x4, 0x8, 0x20, 0x10, 0x20, 0x80, 0x100, 0x400, 0x1000][compression_level];
zip_strstart = 0;
zip_block_start = 0;
dataStart = 0;
blockStart = 0;
lookahead = readBuff(window, 0, 2 * WSIZE);
if(lookahead <= 0) {
eofile = true;
lookahead = 0;
zip_lookahead = zip_read_buff(zip_window, 0, 2 * WSIZE);
if (zip_lookahead <= 0) {
zip_eofile = true;
zip_lookahead = 0;
return;
}
eofile = false;
zip_eofile = false;
/* Make sure that we always have enough lookahead. This is important
* if input comes from a device such as a tty.
*/
while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
zip_fill_window();
while(lookahead < 0x106 && !eofile)
fillWindow();
/* If lookahead < MIN_MATCH, ins_h is garbage, but this is
* not important since only literal bytes will be emitted.
*/
zip_ins_h = 0;
for(j = 0; j < MIN_MATCH - 1; j++) {
zip_ins_h = ((zip_ins_h << H_SHIFT) ^ (window[j] & 0xFF)) & 0x1FFF;
for (j = 0; j < MIN_MATCH - 1; j++) {
zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[j] & 0xff)) & zip_HASH_MASK;
}
}
};
function longestMatch(cur_match) {
var chain_length = maxChainLength, // max hash chain length
scanp = dataStart, // current string
matchp, // matched string
len, // length of current match
best_len = prevLength, // best match length so far
limit = (dataStart > MAX_DIST ? dataStart - MAX_DIST : 0),
strendp = dataStart + MAX_MATCH,
scan_end1 = window[scanp + best_len - 1],
scan_end = window[scanp + best_len];
/* ==========================================================================
* Set match_start to the longest match starting at the given string and
* return its length. Matches shorter or equal to prev_length are discarded,
* in which case the result is equal to prev_length and match_start is
* garbage.
* IN assertions: cur_match is the head of the hash chain for the current
* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
*/
var zip_longest_match = function (cur_match) {
var chain_length = zip_max_chain_length; // max hash chain length
var scanp = zip_strstart; // current string
var matchp; // matched string
var len; // length of current match
var best_len = zip_prev_length; // best match length so far
prevLength >= goodMatch && (chain_length >>= 2);
/* Stop when cur_match becomes <= limit. To simplify the code,
* we prevent matches with the string of window index 0.
*/
var limit = (zip_strstart > zip_MAX_DIST ? zip_strstart - zip_MAX_DIST : zip_NIL);
var strendp = zip_strstart + MAX_MATCH;
var scan_end1 = zip_window[scanp + best_len - 1];
var scan_end = zip_window[scanp + best_len];
/* Do not waste too much time if we already have a good match: */
if (zip_prev_length >= zip_good_match)
chain_length >>= 2;
do {
matchp = cur_match;
if(window[matchp + best_len] != scan_end ||
window[matchp + best_len - 1] != scan_end1 ||
window[matchp] != window[scanp] ||
window[++matchp] != window[scanp + 1]) {
/* Skip to next match if the match length cannot increase
* or if the match length is less than 2:
*/
if (zip_window[matchp + best_len] != scan_end ||
zip_window[matchp + best_len - 1] != scan_end1 ||
zip_window[matchp] != zip_window[scanp] ||
zip_window[++matchp] != zip_window[scanp + 1]) {
continue;
}
/* The check at best_len-1 can be removed because it will be made
* again later. (This heuristic is not always a win.)
* It is not necessary to compare scan[2] and match[2] since they
* are always equal when the other bytes match, given that
* the hash keys are equal and that HASH_BITS >= 8.
*/
scanp += 2;
matchp++;
do {} while(window[++scanp] == window[++matchp] &&
window[++scanp] == window[++matchp] &&
window[++scanp] == window[++matchp] &&
window[++scanp] == window[++matchp] &&
window[++scanp] == window[++matchp] &&
window[++scanp] == window[++matchp] &&
window[++scanp] == window[++matchp] &&
window[++scanp] == window[++matchp] &&
scanp < strendp);
/* We check for insufficient lookahead only every 8th comparison;
* the 256th check will be made at strstart+258.
*/
do {
} while (zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
zip_window[++scanp] == zip_window[++matchp] &&
scanp < strendp);

@@ -296,334 +400,536 @@ len = MAX_MATCH - (strendp - scanp);

if(len > best_len) {
matchStart = cur_match;
if (len > best_len) {
zip_match_start = cur_match;
best_len = len;
if(len >= MAX_MATCH) break;
if (zip_FULL_SEARCH) {
if (len >= MAX_MATCH) break;
} else {
if (len >= zip_nice_match) break;
}
scan_end1 = window[scanp + best_len-1];
scan_end = window[scanp + best_len];
scan_end1 = zip_window[scanp + best_len - 1];
scan_end = zip_window[scanp + best_len];
}
} while((cur_match = prev[cur_match & 0x7FFF]) > limit && --chain_length != 0);
} while ((cur_match = zip_prev[cur_match & zip_WMASK]) > limit
&& --chain_length != 0);
return best_len;
}
};
function fillWindow() {
var n, m,
more = WINDOW_SIZE - lookahead - dataStart;
/* ==========================================================================
* Fill the window when the lookahead becomes insufficient.
* Updates strstart and lookahead, and sets eofile if end of input file.
* IN assertion: lookahead < MIN_LOOKAHEAD && strstart + lookahead > 0
* OUT assertions: at least one byte has been read, or eofile is set;
* file reads are performed for at least two bytes (required for the
* translate_eol option).
*/
var zip_fill_window = function () {
var n, m;
if(more == -1) {
// Amount of free space at the end of the window.
var more = zip_window_size - zip_lookahead - zip_strstart;
/* If the window is almost full and there is insufficient lookahead,
* move the upper half to the lower one to make room in the upper half.
*/
if (more == -1) {
/* Very unlikely, but possible on 16 bit machine if strstart == 0
* and lookahead == 1 (input done one byte at time)
*/
more--;
} else if(dataStart >= WSIZE + MAX_DIST) {
} else if (zip_strstart >= WSIZE + zip_MAX_DIST) {
/* By the IN assertion, the window is not empty so we can't confuse
* more == 0 with more == 64K on a 16 bit machine.
*/
for (n = 0; n < WSIZE; n++)
zip_window[n] = zip_window[n + WSIZE];
for(n = 0; n < WSIZE; n++)
window[n] = window[n + WSIZE];
zip_match_start -= WSIZE;
zip_strstart -= WSIZE;
/* we now have strstart >= MAX_DIST: */
zip_block_start -= WSIZE;
matchStart -= WSIZE;
dataStart -= WSIZE;
blockStart -= WSIZE;
for(n = 0; n < 0x2000; n++) {
m = prev[WSIZE + n];
prev[WSIZE + n] = m >= WSIZE ? m - WSIZE : 0;
for (n = 0; n < zip_HASH_SIZE; n++) {
m = zip_head1(n);
zip_head2(n, m >= WSIZE ? m - WSIZE : zip_NIL);
}
for(n = 0; n < WSIZE; n++) {
m = prev[n];
prev[n] = (m >= WSIZE ? m - WSIZE : 0);
for (n = 0; n < WSIZE; n++) {
/* If n is not on any hash chain, prev[n] is garbage but
* its value will never be used.
*/
m = zip_prev[n];
zip_prev[n] = (m >= WSIZE ? m - WSIZE : zip_NIL);
}
more += WSIZE;
}
if(!eofile) {
n = readBuff(window, dataStart + lookahead, more);
n <= 0 && (eofile = true) || (lookahead += n);
// At this point, more >= 2
if (!zip_eofile) {
n = zip_read_buff(zip_window, zip_strstart + zip_lookahead, more);
if (n <= 0)
zip_eofile = true;
else
zip_lookahead += n;
}
}
};
function deflateFast() {
while(lookahead != 0 && qHead == null) {
/* ==========================================================================
* Processes a new input file and return its compressed length. This
* function does not perform lazy evaluationof matches and inserts
* new strings in the dictionary only for unmatched strings or for short
* matches. It is used only for the fast compression options.
*/
var zip_deflate_fast = function () {
while (zip_lookahead != 0 && zip_qhead == null) {
var flush; // set if current block must be flushed
insertString();
/* Insert the string window[strstart .. strstart+2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
zip_INSERT_STRING();
if(hashHead != 0 && dataStart - hashHead <= MAX_DIST) {
matchLength = longestMatch(hashHead);
matchLength > lookahead && (matchLength = lookahead);
/* Find the longest match, discarding those <= prev_length.
* At this point we have always match_length < MIN_MATCH
*/
if (zip_hash_head != zip_NIL &&
zip_strstart - zip_hash_head <= zip_MAX_DIST) {
/* To simplify the code, we prevent matches with the string
* of window index 0 (in particular we have to avoid a match
* of the string with itself at the start of the input file).
*/
zip_match_length = zip_longest_match(zip_hash_head);
/* longest_match() sets match_start */
if (zip_match_length > zip_lookahead)
zip_match_length = zip_lookahead;
}
if(matchLength >= MIN_MATCH) {
flush = ctTally(dataStart - matchStart, matchLength - MIN_MATCH);
lookahead -= matchLength;
if (zip_match_length >= MIN_MATCH) {
flush = zip_ct_tally(zip_strstart - zip_match_start,
zip_match_length - MIN_MATCH);
zip_lookahead -= zip_match_length;
if(matchLength <= maxLazyMatch) {
matchLength--;
/* Insert new strings in the hash table only if the match length
* is not too large. This saves time but degrades compression.
*/
if (zip_match_length <= zip_max_lazy_match) {
zip_match_length--; // string at strstart already in hash table
do {
dataStart++;
insertString();
} while(--matchLength != 0);
dataStart++;
zip_strstart++;
zip_INSERT_STRING();
/* strstart never exceeds WSIZE-MAX_MATCH, so there are
* always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
* these bytes are garbage, but it does not matter since
* the next lookahead bytes will be emitted as literals.
*/
} while (--zip_match_length != 0);
zip_strstart++;
} else {
dataStart += matchLength;
matchLength = 0;
zip_ins_h = (((window[dataStart] & 0xFF) << H_SHIFT) ^ (window[dataStart + 1] & 0xFF)) & 0x1FFF;
zip_strstart += zip_match_length;
zip_match_length = 0;
zip_ins_h = zip_window[zip_strstart] & 0xff;
zip_ins_h = ((zip_ins_h << zip_H_SHIFT) ^ (zip_window[zip_strstart + 1] & 0xff)) & zip_HASH_MASK;
}
} else {
flush = ctTally(0, window[dataStart] & 0xFF);
lookahead--;
dataStart++;
/* No match, output a literal byte */
flush = zip_ct_tally(0, zip_window[zip_strstart] & 0xff);
zip_lookahead--;
zip_strstart++;
}
if(flush) {
flushBlock(0);
blockStart = dataStart;
if (flush) {
zip_flush_block(0);
zip_block_start = zip_strstart;
}
while(lookahead < 0x106 && !eofile)
fillWindow();
/* Make sure that we always have enough lookahead, except
* at the end of the input file. We need MAX_MATCH bytes
* for the next match, plus MIN_MATCH bytes to insert the
* string following the next match.
*/
while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
zip_fill_window();
}
}
};
function deflateBetter() {
while(lookahead != 0 && qHead == null) {
insertString();
prevLength = matchLength;
prevMatch = matchStart;
matchLength = MIN_MATCH - 1;
var zip_deflate_better = function () {
/* Process the input block. */
while (zip_lookahead != 0 && zip_qhead == null) {
/* Insert the string window[strstart .. strstart+2] in the
* dictionary, and set hash_head to the head of the hash chain:
*/
zip_INSERT_STRING();
if(hashHead != 0 && prevLength < maxLazyMatch && dataStart - hashHead <= MAX_DIST) {
matchLength = longestMatch(hashHead);
matchLength > lookahead && (matchLength = lookahead);
(matchLength == MIN_MATCH && dataStart - matchStart > 0x1000) && matchLength--;
/* Find the longest match, discarding those <= prev_length.
*/
zip_prev_length = zip_match_length;
zip_prev_match = zip_match_start;
zip_match_length = MIN_MATCH - 1;
if (zip_hash_head != zip_NIL &&
zip_prev_length < zip_max_lazy_match &&
zip_strstart - zip_hash_head <= zip_MAX_DIST) {
/* To simplify the code, we prevent matches with the string
* of window index 0 (in particular we have to avoid a match
* of the string with itself at the start of the input file).
*/
zip_match_length = zip_longest_match(zip_hash_head);
/* longest_match() sets match_start */
if (zip_match_length > zip_lookahead)
zip_match_length = zip_lookahead;
/* Ignore a length 3 match if it is too distant: */
if (zip_match_length == MIN_MATCH &&
zip_strstart - zip_match_start > zip_TOO_FAR) {
/* If prev_match is also MIN_MATCH, match_start is garbage
* but we will ignore the current match anyway.
*/
zip_match_length--;
}
}
if(prevLength >= MIN_MATCH && matchLength <= prevLength) {
/* If there was a match at the previous step and the current
* match is not better, output the previous match:
*/
if (zip_prev_length >= MIN_MATCH &&
zip_match_length <= zip_prev_length) {
var flush; // set if current block must be flushed
flush = ctTally(dataStart - 1 - prevMatch, prevLength - MIN_MATCH);
lookahead -= prevLength - 1;
prevLength -= 2;
flush = zip_ct_tally(zip_strstart - 1 - zip_prev_match,
zip_prev_length - MIN_MATCH);
/* Insert in hash table all strings up to the end of the match.
* strstart-1 and strstart are already inserted.
*/
zip_lookahead -= zip_prev_length - 1;
zip_prev_length -= 2;
do {
dataStart++;
insertString();
} while(--prevLength != 0);
matchAvailable = 0;
matchLength = MIN_MATCH - 1;
dataStart++;
if(flush) {
flushBlock(0);
blockStart = dataStart;
zip_strstart++;
zip_INSERT_STRING();
/* strstart never exceeds WSIZE-MAX_MATCH, so there are
* always MIN_MATCH bytes ahead. If lookahead < MIN_MATCH
* these bytes are garbage, but it does not matter since the
* next lookahead bytes will always be emitted as literals.
*/
} while (--zip_prev_length != 0);
zip_match_available = 0;
zip_match_length = MIN_MATCH - 1;
zip_strstart++;
if (flush) {
zip_flush_block(0);
zip_block_start = zip_strstart;
}
} else if( matchAvailable != 0) {
if(ctTally(0, window[dataStart - 1] & 0xff)) {
flushBlock(0);
blockStart = dataStart;
} else if (zip_match_available != 0) {
/* If there was no match at the previous position, output a
* single literal. If there was a match but the current match
* is longer, truncate the previous match to a single literal.
*/
if (zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff)) {
zip_flush_block(0);
zip_block_start = zip_strstart;
}
dataStart++;
lookahead--;
zip_strstart++;
zip_lookahead--;
} else {
matchAvailable = 1;
dataStart++;
lookahead--;
/* There is no previous match to compare with, wait for
* the next step to decide.
*/
zip_match_available = 1;
zip_strstart++;
zip_lookahead--;
}
while(lookahead < 0x106 && !eofile)
fillWindow();
/* Make sure that we always have enough lookahead, except
* at the end of the input file. We need MAX_MATCH bytes
* for the next match, plus MIN_MATCH bytes to insert the
* string following the next match.
*/
while (zip_lookahead < zip_MIN_LOOKAHEAD && !zip_eofile)
zip_fill_window();
}
}
};
function initDeflate() {
if(eofile) return;
var zip_init_deflate = function () {
if (zip_eofile)
return;
zip_bi_buf = 0;
zip_bi_valid = 0;
zip_ct_init();
zip_lm_init();
biBuf = 0;
biValid = 0;
ctInit();
lmInit();
zip_qhead = null;
zip_outcnt = 0;
zip_outoff = 0;
zip_match_available = 0;
qHead = null;
outcnt = 0;
outoff = 0;
if(compression_level <= 3) {
prevLength = MIN_MATCH - 1;
matchLength = 0;
} else {
matchLength = MIN_MATCH - 1;
matchAvailable = 0;
if (zip_compr_level <= 3) {
zip_prev_length = MIN_MATCH - 1;
zip_match_length = 0;
}
else {
zip_match_length = MIN_MATCH - 1;
zip_match_available = 0;
zip_match_available = 0;
}
complete = false;
}
zip_complete = false;
};
function internalDeflate(buff, off, buff_size) {
/* ==========================================================================
* Same as above, but achieves better compression. We use a lazy
* evaluation for matches: a match is finally adopted only if there is
* no better match at the next window position.
*/
var zip_deflate_internal = function (buff, off, buff_size) {
var n;
if(!initFlag) {
initDeflate();
initFlag = true;
if(lookahead == 0) { // empty
complete = true;
if (!zip_initflag) {
zip_init_deflate();
zip_initflag = true;
if (zip_lookahead == 0) { // empty
zip_complete = true;
return 0;
}
}
if((n = qCopy(buff, off, buff_size)) == buff_size) return buff_size;
if(complete) return n;
if(compression_level <= 3) // optimized for speed
deflateFast();
if ((n = zip_qcopy(buff, off, buff_size)) == buff_size)
return buff_size;
if (zip_complete)
return n;
if (zip_compr_level <= 3) // optimized for speed
zip_deflate_fast();
else
deflateBetter();
if(lookahead == 0) {
matchAvailable != 0 && ctTally(0, window[dataStart - 1] & 0xff);
flushBlock(1);
complete = true;
zip_deflate_better();
if (zip_lookahead == 0) {
if (zip_match_available != 0)
zip_ct_tally(0, zip_window[zip_strstart - 1] & 0xff);
zip_flush_block(1);
zip_complete = true;
}
return n + qCopy(buff, n + off, buff_size - n);
}
return n + zip_qcopy(buff, n + off, buff_size - n);
};
function qCopy(buff, off, buff_size) {
var n = 0, i, j;
var zip_qcopy = function (buff, off, buff_size) {
var n, i, j;
while(qHead != null && n < buff_size) {
n = 0;
while (zip_qhead != null && n < buff_size) {
i = buff_size - n;
i > qHead.len && (i = qHead.len);
for(j = 0; j < i; j++) buff[off + n + j] = qHead.ptr[qHead.off + j];
qHead.off += i;
qHead.len -= i;
if (i > zip_qhead.len)
i = zip_qhead.len;
for (j = 0; j < i; j++)
buff[off + n + j] = zip_qhead.ptr[zip_qhead.off + j];
zip_qhead.off += i;
zip_qhead.len -= i;
n += i;
if(qHead.len == 0) {
if (zip_qhead.len == 0) {
var p;
p = qHead;
qHead = qHead.next;
p.next = freeQueue;
freeQueue = p;
p = zip_qhead;
zip_qhead = zip_qhead.next;
zip_reuse_queue(p);
}
}
if(n == buff_size) return n;
if (n == buff_size)
return n;
if(outoff < outcnt) {
if (zip_outoff < zip_outcnt) {
i = buff_size - n;
if(i > outcnt - outoff)
i = outcnt - outoff;
for(j = 0; j < i; j++)
buff[off + n + j] = outbuf[outoff + j];
outoff += i;
if (i > zip_outcnt - zip_outoff)
i = zip_outcnt - zip_outoff;
// System.arraycopy(outbuf, outoff, buff, off + n, i);
for (j = 0; j < i; j++)
buff[off + n + j] = zip_outbuf[zip_outoff + j];
zip_outoff += i;
n += i;
if(outcnt == outoff)
outcnt = outoff = 0;
if (zip_outcnt == zip_outoff)
zip_outcnt = zip_outoff = 0;
}
return n;
}
};
function ctInit() {
var n, // iterates over tree elements
bits, // bit counter
length, // length value
code, // code value
dist; // distance index
/* ==========================================================================
* Allocate the match buffer, initialize the various tables and save the
* location of the internal file attribute (ascii/binary) and method
* (DEFLATE/STORE).
*/
var zip_ct_init = function () {
var n; // iterates over tree elements
var bits; // bit counter
var length; // length value
var code; // code value
var dist; // distance index
if(staticDTree[0].dl != 0) return; // ct_init already called
if (zip_static_dtree[0].dl != 0) return; // ct_init already called
lDesc.dyn_tree = dynLTree;
lDesc.static_tree = staticLTree;
lDesc.extra_bits = elbits;
lDesc.extra_base = 0x101;
lDesc.elems = L_CODES;
lDesc.max_length = MAX_BITS;
lDesc.max_code = 0;
zip_l_desc.dyn_tree = zip_dyn_ltree;
zip_l_desc.static_tree = zip_static_ltree;
zip_l_desc.extra_bits = zip_extra_lbits;
zip_l_desc.extra_base = zip_LITERALS + 1;
zip_l_desc.elems = zip_L_CODES;
zip_l_desc.max_length = zip_MAX_BITS;
zip_l_desc.max_code = 0;
dDesc.dyn_tree = dynDTree;
dDesc.static_tree = staticDTree;
dDesc.extra_bits = edbits;
dDesc.extra_base = 0;
dDesc.elems = D_CODES;
dDesc.max_length = MAX_BITS;
dDesc.max_code = 0;
zip_d_desc.dyn_tree = zip_dyn_dtree;
zip_d_desc.static_tree = zip_static_dtree;
zip_d_desc.extra_bits = zip_extra_dbits;
zip_d_desc.extra_base = 0;
zip_d_desc.elems = zip_D_CODES;
zip_d_desc.max_length = zip_MAX_BITS;
zip_d_desc.max_code = 0;
blDesc.dyn_tree = blTree;
blDesc.static_tree = null;
blDesc.extra_bits = eblbits;
blDesc.extra_base = 0;
blDesc.elems = BL_CODES;
blDesc.max_length = MAX_BL_BITS;
blDesc.max_code = 0;
zip_bl_desc.dyn_tree = zip_bl_tree;
zip_bl_desc.static_tree = null;
zip_bl_desc.extra_bits = zip_extra_blbits;
zip_bl_desc.extra_base = 0;
zip_bl_desc.elems = zip_BL_CODES;
zip_bl_desc.max_length = zip_MAX_BL_BITS;
zip_bl_desc.max_code = 0;
// Initialize the mapping length (0..255) -> length code (0..28)
length = 0;
for(code = 0; code < 0x1E; code++) {
baseLength[code] = length;
for(n = 0; n < (1 << elbits[code]); n++)
lengthCode[length++] = code;
for (code = 0; code < zip_LENGTH_CODES - 1; code++) {
zip_base_length[code] = length;
for (n = 0; n < (1 << zip_extra_lbits[code]); n++)
zip_length_code[length++] = code;
}
lengthCode[length - 1] = code;
/* Note that the length 255 (match length 258) can be represented
* in two different ways: code 284 + 5 bits or code 285, so we
* overwrite length_code[255] to use the best encoding:
*/
zip_length_code[length - 1] = code;
/* Initialize the mapping dist (0..32K) -> dist code (0..29) */
dist = 0;
for(code = 0 ; code < 16; code++) {
baseDist[code] = dist;
for(n = 0; n < (1 << edbits[code]); n++)
distCode[dist++] = code;
for (code = 0; code < 16; code++) {
zip_base_dist[code] = dist;
for (n = 0; n < (1 << zip_extra_dbits[code]); n++) {
zip_dist_code[dist++] = code;
}
}
dist >>= 7; // from now on, all distances are divided by 128
for( ; code < D_CODES; code++) {
baseDist[code] = dist << 7;
for(n = 0; n < (1<<(edbits[code]-7)); n++)
distCode[256 + dist++] = code;
for (; code < zip_D_CODES; code++) {
zip_base_dist[code] = dist << 7;
for (n = 0; n < (1 << (zip_extra_dbits[code] - 7)); n++)
zip_dist_code[256 + dist++] = code;
}
for(bits = 0; bits <= MAX_BITS; bits++) blCount[bits] = 0;
// Construct the codes of the static literal tree
for (bits = 0; bits <= zip_MAX_BITS; bits++)
zip_bl_count[bits] = 0;
n = 0;
while(n <= 143) { staticLTree[n++].dl = 8; blCount[8]++; }
while(n <= 255) { staticLTree[n++].dl = 9; blCount[9]++; }
while(n <= 279) { staticLTree[n++].dl = 7; blCount[7]++; }
while(n <= 287) { staticLTree[n++].dl = 8; blCount[8]++; }
while (n <= 143) {
zip_static_ltree[n++].dl = 8;
zip_bl_count[8]++;
}
while (n <= 255) {
zip_static_ltree[n++].dl = 9;
zip_bl_count[9]++;
}
while (n <= 279) {
zip_static_ltree[n++].dl = 7;
zip_bl_count[7]++;
}
while (n <= 287) {
zip_static_ltree[n++].dl = 8;
zip_bl_count[8]++;
}
/* Codes 286 and 287 do not exist, but we must include them in the
* tree construction to get a canonical Huffman tree (longest code
* all ones)
*/
zip_gen_codes(zip_static_ltree, zip_L_CODES + 1);
genCodes(staticLTree, L_CODES + 1);
for(n = 0; n < D_CODES; n++) {
staticDTree[n].dl = 5;
staticDTree[n].fc = reverse(n, 5);
/* The static distance tree is trivial: */
for (n = 0; n < zip_D_CODES; n++) {
zip_static_dtree[n].dl = 5;
zip_static_dtree[n].fc = zip_bi_reverse(n, 5);
}
initBlock();
}
function initBlock() {
var n;
// Initialize the first block of the first file:
zip_init_block();
};
for(n = 0; n < L_CODES; n++) dynLTree[n].fc = 0;
for(n = 0; n < D_CODES; n++) dynDTree[n].fc = 0;
for(n = 0; n < BL_CODES; n++) blTree[n].fc = 0;
/* ==========================================================================
* Initialize a new block.
*/
var zip_init_block = function () {
var n; // iterates over tree elements
dynLTree[0x100].fc = flagBit = 1; // end block
flags = optLen = staticLen = lastLit = lastDist = lastFlags = 0;
}
// Initialize the trees.
for (n = 0; n < zip_L_CODES; n++) zip_dyn_ltree[n].fc = 0;
for (n = 0; n < zip_D_CODES; n++) zip_dyn_dtree[n].fc = 0;
for (n = 0; n < zip_BL_CODES; n++) zip_bl_tree[n].fc = 0;
function pqDownHeap(tree, k) {
var v = zip_heap[k],
j = k << 1;
zip_dyn_ltree[zip_END_BLOCK].fc = 1;
zip_opt_len = zip_static_len = 0;
zip_last_lit = zip_last_dist = zip_last_flags = 0;
zip_flags = 0;
zip_flag_bit = 1;
};
while(j <= heapLen) {
(j < heapLen && smaller(tree, zip_heap[j + 1], zip_heap[j])) && j++;
if(smaller(tree, v, zip_heap[j])) break;
/* ==========================================================================
* Restore the heap property by moving down the tree starting at node k,
* exchanging a node with the smallest of its two sons if necessary, stopping
* when the heap property is re-established (each father smaller than its
* two sons).
*/
var zip_pqdownheap = function (tree, // the tree to restore
k) { // node to move down
var v = zip_heap[k];
var j = k << 1; // left son of k
while (j <= zip_heap_len) {
// Set j to the smallest of the two sons:
if (j < zip_heap_len &&
zip_SMALLER(tree, zip_heap[j + 1], zip_heap[j]))
j++;
// Exit if v is smaller than both sons
if (zip_SMALLER(tree, v, zip_heap[j]))
break;
// Exchange v with the smallest son
zip_heap[k] = zip_heap[j];
k = j;
// And continue down the tree, setting j to the left son of k
j <<= 1;
}
zip_heap[k] = v;
}
};
/* ==========================================================================
* Compute the optimal bit lengths for a tree and update the total bit length
* for the current block.
* IN assertion: the fields freq and dad are set, heap[heap_max] and
* above are the tree nodes sorted by increasing frequency.
* OUT assertions: the field len is set to the optimal bit length, the
* array bl_count contains the frequencies for each bit length.
* The length opt_len is updated; static_len is also updated if stree is
* not null.
*/
var zip_gen_bitlen = function (desc) { // the tree descriptor
var tree = desc.dyn_tree;
var extra = desc.extra_bits;
var base = desc.extra_base;
var max_code = desc.max_code;
var max_length = desc.max_length;
var stree = desc.static_tree;
var h; // heap index
var n, m; // iterate over the tree elements
var bits; // bit length
var xbits; // extra bits
var f; // frequency
var overflow = 0; // number of elements with bit length too large
function genBitLen(desc) {
var tree = desc.dyn_tree,
extra = desc.extra_bits,
base = desc.extra_base,
max_code = desc.max_code,
max_length = desc.max_length,
stree = desc.static_tree,
h, // heap index
n, m, // iterate over the tree elements
bits, // bit length
xbits, // extra bits
f, // frequency
overflow = 0; // number of elements with bit length too large
for (bits = 0; bits <= zip_MAX_BITS; bits++)
zip_bl_count[bits] = 0;
for(bits = 0; bits <= MAX_BITS; bits++)
blCount[bits] = 0;
/* In a first pass, compute the optimal bit lengths (which may
* overflow in the case of the bit length tree).
*/
tree[zip_heap[zip_heap_max]].dl = 0; // root of the heap
tree[zip_heap[heapMax]].dl = 0; // root of the heap
for(h = heapMax + 1; h < HEAP_SIZE; h++) {
for (h = zip_heap_max + 1; h < zip_HEAP_SIZE; h++) {
n = zip_heap[h];
bits = tree[tree[n].dl].dl + 1;
if(bits > max_length) {
if (bits > max_length) {
bits = max_length;

@@ -633,29 +939,48 @@ overflow++;

tree[n].dl = bits;
// We overwrite tree[n].dl which is no longer needed
if(n > max_code) continue; // not a leaf node
if (n > max_code)
continue; // not a leaf node
blCount[bits]++;
zip_bl_count[bits]++;
xbits = 0;
n >= base && (xbits = extra[n - base]);
if (n >= base)
xbits = extra[n - base];
f = tree[n].fc;
optLen += f * (bits + xbits);
stree != null && (staticLen += f * (stree[n].dl + xbits));
zip_opt_len += f * (bits + xbits);
if (stree != null)
zip_static_len += f * (stree[n].dl + xbits);
}
if (!overflow) return;
if (overflow == 0)
return;
// This happens for example on obj2 and pic of the Calgary corpus
// Find the first bit length which could increase:
do {
bits = max_length - 1;
while(blCount[bits] == 0) bits--;
blCount[bits]--; // move one leaf down the tree
blCount[bits + 1] += 2; // move one overflow item as its brother
blCount[max_length]--;
while (zip_bl_count[bits] == 0)
bits--;
zip_bl_count[bits]--; // move one leaf down the tree
zip_bl_count[bits + 1] += 2; // move one overflow item as its brother
zip_bl_count[max_length]--;
/* The brother of the overflow item also moves one step up,
* but this does not affect bl_count[max_length]
*/
overflow -= 2;
} while(overflow > 0);
} while (overflow > 0);
for(bits = max_length; bits != 0; bits--) {
n = blCount[bits];
while(n != 0) {
/* Now recompute all bit lengths, scanning in increasing frequency.
* h is still equal to HEAP_SIZE. (It is simpler to reconstruct all
* lengths instead of fixing only the wrong ones. This idea is taken
* from 'ar' written by Haruhiko Okumura.)
*/
for (bits = max_length; bits != 0; bits--) {
n = zip_bl_count[bits];
while (n != 0) {
m = zip_heap[--h];
if(m > max_code) continue;
if(tree[m].dl != bits) {
optLen += (bits - tree[m].dl) * tree[m].fc;
if (m > max_code)
continue;
if (tree[m].dl != bits) {
zip_opt_len += (bits - tree[m].dl) * tree[m].fc;
tree[m].fc = bits;

@@ -666,37 +991,66 @@ }

}
}
};
function genCodes(tree, max_code) {
var next_code = new Array(MAX_BITS + 1), // next code value for each bit length
code = 0, // running code value
bits, // bit index
n; // code index
/* ==========================================================================
* Generate the codes for a given tree and bit counts (which need not be
* optimal).
* IN assertion: the array bl_count contains the bit length statistics for
* the given tree and the field len is set for all tree elements.
* OUT assertion: the field code is set for all tree elements of non
* zero code length.
*/
var zip_gen_codes = function (tree, // the tree to decorate
max_code) { // largest code with non zero frequency
var next_code = new Array(zip_MAX_BITS + 1); // next code value for each bit length
var code = 0; // running code value
var bits; // bit index
var n; // code index
for(bits = 1; bits <= MAX_BITS; bits++) {
code = ((code + blCount[bits-1]) << 1);
/* The distribution counts are first used to generate the code values
* without bit reversal.
*/
for (bits = 1; bits <= zip_MAX_BITS; bits++) {
code = ((code + zip_bl_count[bits - 1]) << 1);
next_code[bits] = code;
}
for(n = 0; n <= max_code; n++) {
/* Check that the bit counts in bl_count are consistent. The last code
* must be all ones.
*/
for (n = 0; n <= max_code; n++) {
var len = tree[n].dl;
if (len == 0)
continue;
tree[n].fc = reverse(next_code[len]++, len);
// Now reverse the bits
tree[n].fc = zip_bi_reverse(next_code[len]++, len);
}
}
};
function buildTree(desc) { // the tree descriptor
var tree = desc.dyn_tree,
stree = desc.static_tree,
elems = desc.elems,
n, m, // iterate over heap elements
max_code = -1, // largest code with non zero frequency
node = elems; // next internal node of the tree
heapLen = 0;
heapMax = HEAP_SIZE;
/* ==========================================================================
* Construct one Huffman tree and assigns the code bit strings and lengths.
* Update the total bit length for the current block.
* IN assertion: the field freq is set for all tree elements.
* OUT assertions: the fields len and code are set to the optimal bit length
* and corresponding code. The length opt_len is updated; static_len is
* also updated if stree is not null. The field max_code is set.
*/
var zip_build_tree = function (desc) { // the tree descriptor
var tree = desc.dyn_tree;
var stree = desc.static_tree;
var elems = desc.elems;
var n, m; // iterate over heap elements
var max_code = -1; // largest code with non zero frequency
var node = elems; // next internal node of the tree
for(n = 0; n < elems; n++) {
if(tree[n].fc != 0) {
zip_heap[++heapLen] = max_code = n;
depth[n] = 0;
/* Construct the initial heap, with least frequent element in
* heap[SMALLEST]. The sons of heap[n] are heap[2*n] and heap[2*n+1].
* heap[0] is not used.
*/
zip_heap_len = 0;
zip_heap_max = zip_HEAP_SIZE;
for (n = 0; n < elems; n++) {
if (tree[n].fc != 0) {
zip_heap[++zip_heap_len] = max_code = n;
zip_depth[n] = 0;
} else

@@ -706,56 +1060,80 @@ tree[n].dl = 0;

while(heapLen < 2) {
var xnew = zip_heap[++heapLen] = (max_code < 2 ? ++max_code : 0);
/* The pkzip format requires that at least one distance code exists,
* and that at least one bit should be sent even if there is only one
* possible code. So to avoid special checks later on we force at least
* two codes of non zero frequency.
*/
while (zip_heap_len < 2) {
var xnew = zip_heap[++zip_heap_len] = (max_code < 2 ? ++max_code : 0);
tree[xnew].fc = 1;
depth[xnew] = 0;
optLen--;
stree != null && (staticLen -= stree[xnew].dl);
zip_depth[xnew] = 0;
zip_opt_len--;
if (stree != null)
zip_static_len -= stree[xnew].dl;
// new is 0 or 1 so it does not have extra bits
}
desc.max_code = max_code;
for(n = heapLen >> 1; n >= 1; n--) pqDownHeap(tree, n);
/* The elements heap[heap_len/2+1 .. heap_len] are leaves of the tree,
* establish sub-heaps of increasing lengths:
*/
for (n = zip_heap_len >> 1; n >= 1; n--)
zip_pqdownheap(tree, n);
/* Construct the Huffman tree by repeatedly combining the least two
* frequent nodes.
*/
do {
n = zip_heap[1];
zip_heap[1] = zip_heap[heapLen--];
pqDownHeap(tree, 1);
n = zip_heap[zip_SMALLEST];
zip_heap[zip_SMALLEST] = zip_heap[zip_heap_len--];
zip_pqdownheap(tree, zip_SMALLEST);
m = zip_heap[1]; // m = node of next least frequency
m = zip_heap[zip_SMALLEST]; // m = node of next least frequency
// keep the nodes sorted by frequency
zip_heap[--heapMax] = n;
zip_heap[--heapMax] = m;
zip_heap[--zip_heap_max] = n;
zip_heap[--zip_heap_max] = m;
// Create a new node father of n and m
tree[node].fc = tree[n].fc + tree[m].fc;
if(depth[n] > depth[m] + 1)
depth[node] = depth[n];
if (zip_depth[n] > zip_depth[m] + 1)
zip_depth[node] = zip_depth[n];
else
depth[node] = depth[m] + 1;
zip_depth[node] = zip_depth[m] + 1;
tree[n].dl = tree[m].dl = node;
// and insert the new node in the heap
zip_heap[1] = node++;
pqDownHeap(tree, 1);
zip_heap[zip_SMALLEST] = node++;
zip_pqdownheap(tree, zip_SMALLEST);
} while(heapLen >= 2);
} while (zip_heap_len >= 2);
zip_heap[--heapMax] = zip_heap[1];
zip_heap[--zip_heap_max] = zip_heap[zip_SMALLEST];
genBitLen(desc);
genCodes(tree, max_code);
}
/* At this point, the fields freq and dad are set. We can now
* generate the bit lengths.
*/
zip_gen_bitlen(desc);
function scanTree(tree, max_code) {
var n, // iterates over all tree elements
prevlen = -1, // last emitted length
curlen, // length of current code
nextlen = tree[0].dl, // length of next code
count = 0, // repeat count of the current code
max_count = 7, // max repeat count
min_count = 4; // min repeat count
// The field len is now set, we can generate the bit codes
zip_gen_codes(tree, max_code);
};
if(nextlen == 0) {
/* ==========================================================================
* Scan a literal or distance tree to determine the frequencies of the codes
* in the bit length tree. Updates opt_len to take into account the repeat
* counts. (The contribution of the bit length codes will be added later
* during the construction of bl_tree.)
*/
var zip_scan_tree = function (tree,// the tree to be scanned
max_code) { // and its largest code of non zero frequency
var n; // iterates over all tree elements
var prevlen = -1; // last emitted length
var curlen; // length of current code
var nextlen = tree[0].dl; // length of next code
var count = 0; // repeat count of the current code
var max_count = 7; // max repeat count
var min_count = 4; // min repeat count
if (nextlen == 0) {
max_count = 138;

@@ -766,22 +1144,23 @@ min_count = 3;

for(n = 0; n <= max_code; n++) {
for (n = 0; n <= max_code; n++) {
curlen = nextlen;
nextlen = tree[n + 1].dl;
if(++count < max_count && curlen == nextlen)
if (++count < max_count && curlen == nextlen)
continue;
else if(count < min_count)
blTree[curlen].fc += count;
else if(curlen != 0) {
if(curlen != prevlen)
blTree[curlen].fc++;
blTree[REP_3_6].fc++;
} else if(count <= 10)
blTree[REPZ_3_10].fc++;
else if (count < min_count)
zip_bl_tree[curlen].fc += count;
else if (curlen != 0) {
if (curlen != prevlen)
zip_bl_tree[curlen].fc++;
zip_bl_tree[zip_REP_3_6].fc++;
} else if (count <= 10)
zip_bl_tree[zip_REPZ_3_10].fc++;
else
blTree[REPZ_11_138].fc++;
count = 0; prevlen = curlen;
if(nextlen == 0) {
zip_bl_tree[zip_REPZ_11_138].fc++;
count = 0;
prevlen = curlen;
if (nextlen == 0) {
max_count = 138;
min_count = 3;
} else if(curlen == nextlen) {
} else if (curlen == nextlen) {
max_count = 6;

@@ -794,14 +1173,21 @@ min_count = 3;

}
}
};
function sendTree(tree, max_code) {
var n, // iterates over all tree elements
prevlen = -1, // last emitted length
curlen, // length of current code
nextlen = tree[0].dl, // length of next code
count = 0, // repeat count of the current code
max_count = 7, // max repeat count
min_count = 4; // min repeat count
/* ==========================================================================
* Send a literal or distance tree in compressed form, using the codes in
* bl_tree.
*/
var zip_send_tree = function (tree, // the tree to be scanned
max_code) { // and its largest code of non zero frequency
var n; // iterates over all tree elements
var prevlen = -1; // last emitted length
var curlen; // length of current code
var nextlen = tree[0].dl; // length of next code
var count = 0; // repeat count of the current code
var max_count = 7; // max repeat count
var min_count = 4; // min repeat count
if(nextlen == 0) {
/* tree[max_code+1].dl = -1; */
/* guard already set */
if (nextlen == 0) {
max_count = 138;

@@ -811,29 +1197,32 @@ min_count = 3;

for(n = 0; n <= max_code; n++) {
for (n = 0; n <= max_code; n++) {
curlen = nextlen;
nextlen = tree[n+1].dl;
if(++count < max_count && curlen == nextlen) {
nextlen = tree[n + 1].dl;
if (++count < max_count && curlen == nextlen) {
continue;
} else if(count < min_count) {
do { sendCode(curlen, blTree); } while(--count != 0);
} else if(curlen != 0) {
if(curlen != prevlen) {
sendCode(curlen, blTree);
} else if (count < min_count) {
do {
zip_SEND_CODE(curlen, zip_bl_tree);
} while (--count != 0);
} else if (curlen != 0) {
if (curlen != prevlen) {
zip_SEND_CODE(curlen, zip_bl_tree);
count--;
}
sendCode(REP_3_6, blTree);
sendBits(count - 3, 2);
} else if(count <= 10) {
sendCode(REPZ_3_10, blTree);
sendBits(count-3, 3);
// Assert(count >= 3 && count <= 6, " 3_6?");
zip_SEND_CODE(zip_REP_3_6, zip_bl_tree);
zip_send_bits(count - 3, 2);
} else if (count <= 10) {
zip_SEND_CODE(zip_REPZ_3_10, zip_bl_tree);
zip_send_bits(count - 3, 3);
} else {
sendCode(REPZ_11_138, blTree);
sendBits(count-11, 7);
zip_SEND_CODE(zip_REPZ_11_138, zip_bl_tree);
zip_send_bits(count - 11, 7);
}
count = 0;
prevlen = curlen;
if(nextlen == 0) {
if (nextlen == 0) {
max_count = 138;
min_count = 3;
} else if(curlen == nextlen) {
} else if (curlen == nextlen) {
max_count = 6;

@@ -846,153 +1235,246 @@ min_count = 3;

}
}
};
function buildBLTree() {
/* ==========================================================================
* Construct the Huffman tree for the bit lengths and return the index in
* bl_order of the last bit length code to send.
*/
var zip_build_bl_tree = function () {
var max_blindex; // index of last bit length code of non zero freq
scanTree(dynLTree, lDesc.max_code);
scanTree(dynDTree, dDesc.max_code);
buildTree(blDesc);
for(max_blindex = BL_CODES-1; max_blindex >= 3; max_blindex--) {
if(blTree[blorder[max_blindex]].dl != 0) break;
// Determine the bit length frequencies for literal and distance trees
zip_scan_tree(zip_dyn_ltree, zip_l_desc.max_code);
zip_scan_tree(zip_dyn_dtree, zip_d_desc.max_code);
// Build the bit length tree:
zip_build_tree(zip_bl_desc);
/* opt_len now includes the length of the tree representations, except
* the lengths of the bit lengths codes and the 5+5+4 bits for the counts.
*/
/* Determine the number of bit length codes to send. The pkzip format
* requires that at least 4 bit length codes be sent. (appnote.txt says
* 3 but the actual value used is 4.)
*/
for (max_blindex = zip_BL_CODES - 1; max_blindex >= 3; max_blindex--) {
if (zip_bl_tree[zip_bl_order[max_blindex]].dl != 0) break;
}
/* Update opt_len to include the bit length tree and counts */
optLen += 3 * (max_blindex + 1) + 0xE;
zip_opt_len += 3 * (max_blindex + 1) + 5 + 5 + 4;
return max_blindex;
}
};
function sendTrees(lcodes, dcodes, blcodes) {
/* ==========================================================================
* Send the header for a block using dynamic Huffman trees: the counts, the
* lengths of the bit length codes, the literal tree and the distance tree.
* IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
*/
var zip_send_all_trees = function (lcodes, dcodes, blcodes) { // number of codes for each tree
var rank; // index in bl_order
sendBits(lcodes - 0x101, 5);
sendBits(dcodes - 1, 5);
sendBits(blcodes - 4, 4);
for(rank = 0; rank < blcodes; rank++)
sendBits(blTree[blorder[rank]].dl, 3);
zip_send_bits(lcodes - 257, 5); // not +255 as stated in appnote.txt
zip_send_bits(dcodes - 1, 5);
zip_send_bits(blcodes - 4, 4); // not -3 as stated in appnote.txt
for (rank = 0; rank < blcodes; rank++) {
zip_send_bits(zip_bl_tree[zip_bl_order[rank]].dl, 3);
}
sendTree(dynLTree, lcodes - 1);
sendTree(dynDTree, dcodes - 1);
}
// send the literal tree
zip_send_tree(zip_dyn_ltree, lcodes - 1);
function flushBlock(eof) { // true if this is the last block for a file
var opt_lenb, static_lenb, // opt_len and static_len in bytes
max_blindex, // index of last bit length code of non zero freq
stored_len = dataStart - blockStart; // length of input block
// send the distance tree
zip_send_tree(zip_dyn_dtree, dcodes - 1);
};
flagBuf[lastFlags] = flags; // Save the flags for the last 8 items
/* ==========================================================================
* Determine the best encoding for the current block: dynamic trees, static
* trees or store, and output the encoded block to the zip file.
*/
var zip_flush_block = function (eof) { // true if this is the last block for a file
var opt_lenb, static_lenb; // opt_len and static_len in bytes
var max_blindex; // index of last bit length code of non zero freq
var stored_len; // length of input block
buildTree(lDesc);
buildTree(dDesc);
stored_len = zip_strstart - zip_block_start;
zip_flag_buf[zip_last_flags] = zip_flags; // Save the flags for the last 8 items
max_blindex = buildBLTree();
// Construct the literal and distance trees
zip_build_tree(zip_l_desc);
zip_build_tree(zip_d_desc);
/* At this point, opt_len and static_len are the total bit lengths of
* the compressed block data, excluding the tree representations.
*/
/* Build the bit length tree for the above two trees, and get the index
* in bl_order of the last bit length code to send.
*/
max_blindex = zip_build_bl_tree();
// Determine the best encoding. Compute first the block length in bytes
opt_lenb = (optLen + 3 + 7) >> 3;
static_lenb = (staticLen + 3 + 7) >> 3;
opt_lenb = (zip_opt_len + 3 + 7) >> 3;
static_lenb = (zip_static_len + 3 + 7) >> 3;
if (static_lenb <= opt_lenb)
opt_lenb = static_lenb;
if (stored_len + 4 <= opt_lenb // 4: two words for the lengths
&& zip_block_start >= 0) {
var i;
static_lenb <= opt_lenb && (opt_lenb = static_lenb);
/* The test buf != NULL is only necessary if LIT_BUFSIZE > WSIZE.
* Otherwise we can't have processed more than WSIZE input bytes since
* the last block flush, because compression would have been
* successful. If LIT_BUFSIZE <= WSIZE, it is never too late to
* transform a block into a stored block.
*/
zip_send_bits((zip_STORED_BLOCK << 1) + eof, 3);
/* send block type */
zip_bi_windup();
/* align on byte boundary */
zip_put_short(stored_len);
zip_put_short(~stored_len);
if(stored_len + 4 <= opt_lenb && blockStart >= 0) {
var i;
sendBits(eof, 3); /* send block type */
biValid && writeShort(biBuf) && (biBuf = biValid = 0); /* align on byte boundary */
writeShort(stored_len);
writeShort(~stored_len);
for(i = 0; i < stored_len; i++) writeByte(window[blockStart + i]);
// copy block
for (i = 0; i < stored_len; i++)
zip_put_byte(zip_window[zip_block_start + i]);
} else if(static_lenb == opt_lenb) {
sendBits(eof + 2, 3);
compress(staticLTree, staticDTree);
} else if (static_lenb == opt_lenb) {
zip_send_bits((zip_STATIC_TREES << 1) + eof, 3);
zip_compress_block(zip_static_ltree, zip_static_dtree);
} else {
sendBits(eof + 4, 3);
sendTrees(lDesc.max_code + 1, dDesc.max_code + 1, max_blindex + 1);
compress(dynLTree, dynDTree);
zip_send_bits((zip_DYN_TREES << 1) + eof, 3);
zip_send_all_trees(zip_l_desc.max_code + 1,
zip_d_desc.max_code + 1,
max_blindex + 1);
zip_compress_block(zip_dyn_ltree, zip_dyn_dtree);
}
initBlock();
zip_init_block();
(eof != 0) && (biValid && writeShort(biBuf) && (biBuf = biValid = 0));
}
if (eof != 0)
zip_bi_windup();
};
function ctTally(dist, lc) {
lBuf[lastLit++] = lc;
if(dist == 0) {
dynLTree[lc].fc++;
/* ==========================================================================
* Save the match info and tally the frequency counts. Return true if
* the current block must be flushed.
*/
var zip_ct_tally = function (dist, // distance of matched string
lc) { // match length-MIN_MATCH or unmatched char (if dist==0)
zip_l_buf[zip_last_lit++] = lc;
if (dist == 0) {
// lc is the unmatched char
zip_dyn_ltree[lc].fc++;
} else {
dist--;
dynLTree[lengthCode[lc] + 0x101].fc++;
dynDTree[zip_D_CODE(dist)].fc++;
dBuf[lastDist++] = dist;
flags |= flagBit;
// Here, lc is the match length - MIN_MATCH
dist--; // dist = match distance - 1
zip_dyn_ltree[zip_length_code[lc] + zip_LITERALS + 1].fc++;
zip_dyn_dtree[zip_D_CODE(dist)].fc++;
zip_d_buf[zip_last_dist++] = dist;
zip_flags |= zip_flag_bit;
}
flagBit <<= 1;
if((lastLit & 7) == 0) {
flagBuf[lastFlags++] = flags;
flags = 0;
flagBit = 1;
zip_flag_bit <<= 1;
// Output the flags if they fill a byte
if ((zip_last_lit & 7) == 0) {
zip_flag_buf[zip_last_flags++] = zip_flags;
zip_flags = 0;
zip_flag_bit = 1;
}
if(compression_level > 2 && (lastLit & 0xfff) == 0) {
var out_length = lastLit * 8,
in_length = dataStart - blockStart,
dcode;
// Try to guess if it is profitable to stop the current block here
if (zip_compr_level > 2 && (zip_last_lit & 0xfff) == 0) {
// Compute an upper bound for the compressed length
var out_length = zip_last_lit * 8;
var in_length = zip_strstart - zip_block_start;
var dcode;
for(dcode = 0; dcode < D_CODES; dcode++) {
out_length += dynDTree[dcode].fc * (5 + edbits[dcode]);
for (dcode = 0; dcode < zip_D_CODES; dcode++) {
out_length += zip_dyn_dtree[dcode].fc * (5 + zip_extra_dbits[dcode]);
}
out_length >>= 3;
if(lastDist < parseInt(lastLit / 2) && out_length < parseInt(in_length / 2))
if (zip_last_dist < parseInt(zip_last_lit / 2) &&
out_length < parseInt(in_length / 2))
return true;
}
return (lastLit == LIT_BUFSIZE - 1 || lastDist == LIT_BUFSIZE);
}
return (zip_last_lit == LIT_BUFSIZE - 1 ||
zip_last_dist == zip_DIST_BUFSIZE);
/* We avoid equality with LIT_BUFSIZE because of wraparound at 64K
* on 16 bit machines and because stored blocks are restricted to
* 64K-1 bytes.
*/
};
function compress(ltree, dtree) {
var dist, // distance of matched string
lc, // match length or unmatched char (if dist == 0)
lx = 0, // running index in l_buf
dx = 0, // running index in d_buf
fx = 0, // running index in flag_buf
flag = 0, // current flags
code, // the code to send
extra; // number of extra bits to send
/* ==========================================================================
* Send the block data compressed using the given Huffman trees
*/
var zip_compress_block = function (ltree, // literal tree
dtree) { // distance tree
var dist; // distance of matched string
var lc; // match length or unmatched char (if dist == 0)
var lx = 0; // running index in l_buf
var dx = 0; // running index in d_buf
var fx = 0; // running index in flag_buf
var flag = 0; // current flags
var code; // the code to send
var extra; // number of extra bits to send
if (lastLit != 0) do {
(lx & 7) == 0 && (flag = flagBuf[fx++]);
lc = lBuf[lx++] & 0xff;
if (zip_last_lit != 0) do {
if ((lx & 7) == 0)
flag = zip_flag_buf[fx++];
lc = zip_l_buf[lx++] & 0xff;
if ((flag & 1) == 0) {
sendCode(lc, ltree); /* send a literal byte */
zip_SEND_CODE(lc, ltree);
/* send a literal byte */
} else {
code = lengthCode[lc];
sendCode(code + 0x101, ltree); // send the length code
extra = elbits[code];
if(extra != 0) {
lc -= baseLength[code];
sendBits(lc, extra); // send the extra length bits
// Here, lc is the match length - MIN_MATCH
code = zip_length_code[lc];
zip_SEND_CODE(code + zip_LITERALS + 1, ltree); // send the length code
extra = zip_extra_lbits[code];
if (extra != 0) {
lc -= zip_base_length[code];
zip_send_bits(lc, extra); // send the extra length bits
}
dist = dBuf[dx++];
dist = zip_d_buf[dx++];
// Here, dist is the match distance - 1
code = zip_D_CODE(dist);
sendCode(code, dtree); // send the distance code
extra = edbits[code];
if(extra != 0) {
dist -= baseDist[code];
sendBits(dist, extra); // send the extra distance bits
zip_SEND_CODE(code, dtree); // send the distance code
extra = zip_extra_dbits[code];
if (extra != 0) {
dist -= zip_base_dist[code];
zip_send_bits(dist, extra); // send the extra distance bits
}
} // literal or match pair ?
flag >>= 1;
} while(lx < lastLit);
} while (lx < zip_last_lit);
sendCode(0x100, ltree); // end block
}
zip_SEND_CODE(zip_END_BLOCK, ltree);
};
function sendBits(value, length) {
if(biValid > 0x10 - length) {
biBuf |= (value << biValid);
writeShort(biBuf);
biBuf = (value >> (0x10 - biValid));
biValid += length - 0x10;
/* ==========================================================================
* Send a value on a given number of bits.
* IN assertion: length <= 16 and value fits in length bits.
*/
var zip_Buf_size = 16; // bit size of bi_buf
var zip_send_bits = function (value, // value to send
length) { // number of bits
/* If not enough room in bi_buf, use (valid) bits from bi_buf and
* (16 - bi_valid) bits from value, leaving (width - (16-bi_valid))
* unused bits in value.
*/
if (zip_bi_valid > zip_Buf_size - length) {
zip_bi_buf |= (value << zip_bi_valid);
zip_put_short(zip_bi_buf);
zip_bi_buf = (value >> (zip_Buf_size - zip_bi_valid));
zip_bi_valid += length - zip_Buf_size;
} else {
biBuf |= value << biValid;
biValid += length;
zip_bi_buf |= value << zip_bi_valid;
zip_bi_valid += length;
}
}
};
function reverse(code, len) {
/* ==========================================================================
* Reverse the first len bits of a code, using straightforward code (a faster
* method would use a table)
* IN assertion: 1 <= len <= 15
*/
var zip_bi_reverse = function (code, // the value to invert
len) { // its bit length
var res = 0;

@@ -1003,10 +1485,38 @@ do {

res <<= 1;
} while(--len > 0);
} while (--len > 0);
return res >> 1;
}
};
/* ==========================================================================
* Write out any remaining bits in an incomplete byte.
*/
var zip_bi_windup = function () {
if (zip_bi_valid > 8) {
zip_put_short(zip_bi_buf);
} else if (zip_bi_valid > 0) {
zip_put_byte(zip_bi_buf);
}
zip_bi_buf = 0;
zip_bi_valid = 0;
};
var zip_qoutbuf = function () {
if (zip_outcnt != 0) {
var q, i;
q = zip_new_queue();
if (zip_qhead == null)
zip_qhead = zip_qtail = q;
else
zip_qtail = zip_qtail.next = q;
q.len = zip_outcnt - zip_outoff;
for (i = 0; i < q.len; i++)
q.ptr[i] = zip_outbuf[zip_outoff + i];
zip_outcnt = zip_outoff = 0;
}
};
function deflate(buffData, level) {
deflateData = buffData;
deflatePos = 0;
deflateStart(level);
zip_deflate_data = buffData;
zip_deflate_pos = 0;
zip_deflate_start(level);

@@ -1019,4 +1529,3 @@ var buff = new Array(1024),

for (i = 0; i < 1024; i++) buff[i] = 0;
while((i = internalDeflate(buff, 0, buff.length)) > 0) {
while ((i = zip_deflate_internal(buff, 0, buff.length)) > 0) {
var buf = new Buffer(buff.slice(0, i));

@@ -1026,2 +1535,7 @@ pages.push(buf);

}
if (pages.length == 1) {
return pages[0];
}
var result = new Buffer(totalSize),

@@ -1039,3 +1553,3 @@ index = 0;

return {
deflate : function() {
deflate: function () {
return deflate(inbuf, 8);

@@ -1046,3 +1560,3 @@ }

module.exports = function(/*Buffer*/inbuf) {
module.exports = function (/*Buffer*/inbuf) {

@@ -1052,9 +1566,9 @@ var zlib = require("zlib");

return {
deflate : function() {
deflate: function () {
return new JSDeflater(inbuf).deflate();
},
deflateAsync : function(/*Function*/callback) {
deflateAsync: function (/*Function*/callback) {
var tmp = zlib.createDeflateRaw();
tmp.on('data', function(data) {
tmp.on('data', function (data) {
callback(data);

@@ -1061,0 +1575,0 @@ });

@@ -382,3 +382,3 @@ var Buffer = require("buffer").Buffer;

windowPos &= WSIZE - 1;
outputBuffer[offset++] = slide[windowPos++] = slide[copyDist++];
outputBuffer[offset++] = (slide[windowPos++] = slide[copyDist++]);
}

@@ -389,3 +389,3 @@ } else {

windowPos &= WSIZE - 1;
outputBuffer[offset++] = slide[windowPos++] = maskBits(8);
outputBuffer[offset++] = (slide[windowPos++] = maskBits(8));
}

@@ -448,2 +448,2 @@ copyLen == 0 && (method = -1); // done

}
};
};
{
"name": "adm-zip",
"version": "0.2.1",
"version": "0.4.3",
"description": "A Javascript implementation of zip for nodejs. Allows user to create or extract zip files both in memory or to/from disk",

@@ -26,3 +26,3 @@ "keywords": [

"type": "git",
"url": "git://github.com/git@github.com:cthackers/adm-zip.git"
"url": "https://github.com/cthackers/adm-zip.git"
},

@@ -32,2 +32,2 @@ "engines": {

}
}
}
module.exports = {
/* The local file header */
LOCHDR : 30, // LOC header size
LOCSIG : 0x04034b50, // "PK\003\004"
LOCVER : 4, // version needed to extract
LOCFLG : 6, // general purpose bit flag
LOCHOW : 8, // compression method
LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
LOCCRC : 14, // uncompressed file crc-32 value
LOCSIZ : 18, // compressed size
LOCLEN : 22, // uncompressed size
LOCNAM : 26, // filename length
LOCEXT : 28, // extra field length
LOCHDR : 30, // LOC header size
LOCSIG : 0x04034b50, // "PK\003\004"
LOCVER : 4, // version needed to extract
LOCFLG : 6, // general purpose bit flag
LOCHOW : 8, // compression method
LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
LOCCRC : 14, // uncompressed file crc-32 value
LOCSIZ : 18, // compressed size
LOCLEN : 22, // uncompressed size
LOCNAM : 26, // filename length
LOCEXT : 28, // extra field length
/* The Data descriptor */
EXTSIG : 0x08074b50, // "PK\007\008"
EXTHDR : 16, // EXT header size
EXTCRC : 4, // uncompressed file crc-32 value
EXTSIZ : 8, // compressed size
EXTLEN : 12, // uncompressed size
EXTSIG : 0x08074b50, // "PK\007\008"
EXTHDR : 16, // EXT header size
EXTCRC : 4, // uncompressed file crc-32 value
EXTSIZ : 8, // compressed size
EXTLEN : 12, // uncompressed size
/* The central directory file header */
CENHDR : 46, // CEN header size
CENSIG : 0x02014b50, // "PK\001\002"
CENVEM : 4, // version made by
CENVER : 6, // version needed to extract
CENFLG : 8, // encrypt, decrypt flags
CENHOW : 10, // compression method
CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
CENCRC : 16, // uncompressed file crc-32 value
CENSIZ : 20, // compressed size
CENLEN : 24, // uncompressed size
CENNAM : 28, // filename length
CENEXT : 30, // extra field length
CENCOM : 32, // file comment length
CENDSK : 34, // volume number start
CENATT : 36, // internal file attributes
CENATX : 38, // external file attributes
CENOFF : 42, // LOC header offset
CENHDR : 46, // CEN header size
CENSIG : 0x02014b50, // "PK\001\002"
CENVEM : 4, // version made by
CENVER : 6, // version needed to extract
CENFLG : 8, // encrypt, decrypt flags
CENHOW : 10, // compression method
CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
CENCRC : 16, // uncompressed file crc-32 value
CENSIZ : 20, // compressed size
CENLEN : 24, // uncompressed size
CENNAM : 28, // filename length
CENEXT : 30, // extra field length
CENCOM : 32, // file comment length
CENDSK : 34, // volume number start
CENATT : 36, // internal file attributes
CENATX : 38, // external file attributes (host system dependent)
CENOFF : 42, // LOC header offset
/* The entries in the end of central directory */
ENDHDR : 22, // END header size
ENDSIG : 0x06054b50, // "PK\005\006"
ENDSUB : 8, // number of entries on this disk
ENDTOT : 10, // total number of entries
ENDSIZ : 12, // central directory size in bytes
ENDOFF : 16, // offset of first CEN header
ENDCOM : 20, // zip file comment length
ENDHDR : 22, // END header size
ENDSIG : 0x06054b50, // "PK\005\006"
ENDSUB : 8, // number of entries on this disk
ENDTOT : 10, // total number of entries
ENDSIZ : 12, // central directory size in bytes
ENDOFF : 16, // offset of first CEN header
ENDCOM : 20, // zip file comment length
/* Compression methods */
STORED : 0,
DEFLATED : 8
};
STORED : 0, // no compression
SHRUNK : 1, // shrunk
REDUCED1 : 2, // reduced with compression factor 1
REDUCED2 : 3, // reduced with compression factor 2
REDUCED3 : 4, // reduced with compression factor 3
REDUCED4 : 5, // reduced with compression factor 4
IMPLODED : 6, // imploded
// 7 reserved
DEFLATED : 8, // deflated
ENHANCED_DEFLATED: 9, // enhanced deflated
PKWARE : 10,// PKWare DCL imploded
// 11 reserved
BZIP2 : 12, // compressed using BZIP2
// 13 reserved
LZMA : 14, // LZMA
// 15-17 reserved
IBM_TERSE : 18, // compressed using IBM TERSE
IBM_LZ77 : 19, //IBM LZ77 z
/* General purpose bit flag */
FLG_ENC : 0, // encripted file
FLG_COMP1 : 1, // compression option
FLG_COMP2 : 2, // compression option
FLG_DESC : 4, // data descriptor
FLG_ENH : 8, // enhanced deflation
FLG_STR : 16, // strong encryption
FLG_LNG : 1024, // language encoding
FLG_MSK : 4096, // mask header values
/* Load type */
FILE : 0,
BUFFER : 1,
NONE : 2
};

@@ -131,2 +131,13 @@ var fs = require("fs"),

toBuffer : function(input) {
if (Buffer.isBuffer(input)) {
return input;
} else {
if (input.length == 0) {
return new Buffer(0)
}
return new Buffer(input, 'utf8');
}
},
Constants : Constants,

@@ -133,0 +144,0 @@ Errors : Errors

var Utils = require("./util"),
Headers = require("./headers"),
Constants = Utils.Constants,
Methods = require("./methods");
module.exports = function () {
module.exports = function (/*Buffer*/input) {
var _entryHeader = new Headers.EntryHeader(),
_dataHeader = new Headers.DataHeader(),
_entryName = "",
_entryName = new Buffer(0),
_comment = new Buffer(0),
_isDirectory = false,
_extra = null,
_compressedData = null,
_data = null,
_comment = "",
_needDeflate = false;
uncompressedData = null,
_extra = new Buffer(0);
function getCompressedDataFromZip() {
if (!input || !Buffer.isBuffer(input)) {
return new Buffer(0);
}
_entryHeader.loadDataHeaderFromBinary(input);
return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize)
}
function crc32OK(data) {
// if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
if (_entryHeader.flags & 0x8 != 0x8) {
if (Utils.crc32(data) != _entryHeader.crc) {
return false;
}
} else {
// @TODO: load and check data descriptor header
// The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure
// (optionally preceded by a 4-byte signature) immediately after the compressed data:
}
return true;
}
function decompress(/*Boolean*/async, /*Function*/callback) {
// if (_data == null) {
if (true) {
if (_compressedData == null) {
if (_isDirectory) {
if (async && callback) {
callback(new Buffer(), "directory"); //si added error.
}
return;
}
//throw 'Noting to decompress';
callback(new Buffer(), "Nothing to decompress");//si added error.
}
switch (_dataHeader.method) {
case Utils.Constants.STORED:
_data = new Buffer(_dataHeader.size);
_compressedData.copy(_data, 0, _dataHeader.fileHeaderSize);
if (Utils.crc32(_data) != _dataHeader.crc) {
//throw Utils.Errors.BAD_CRC
callback(_data, Utils.Errors.BAD_CRC);//si added error
return Utils.Errors.BAD_CRC;
} else {//si added otherwise did not seem to return data.
if (callback) callback(_data);
return 'ok';
}
break;
case Utils.Constants.DEFLATED:
var inflater = new Methods.Inflater(_compressedData.slice(_dataHeader.fileHeaderSize));
if (!async) {
_data = new Buffer(_entryHeader.size);
_data.fill(0);
inflater.inflate(_data);
if (Utils.crc32(_data) != _dataHeader.crc) {
console.warn( Utils.Errors.BAD_CRC + " " + _entryName)
}
} else {
inflater.inflateAsync(function(data) {
_data = new Buffer(_entryHeader.size);
_data.fill(0);
data.copy(_data, 0);
if (Utils.crc32(_data) != _dataHeader.crc) {
//throw Utils.Errors.BAD_CRC
callback(_data,Utils.Errors.BAD_CRC); //avoid throw it would bring down node.
return Utils.Errors.BAD_CRC
} else {
callback(_data);
return 'ok';
}
})
}
break;
default:
// throw Utils.Errors.UNKNOWN_METHOD;
callback(new Buffer(),Utils.Errors.BAD_CRC); //avoid throw it would bring down node.
return Utils.Errors.UNKNOWN_METHOD;
}
} else {
if (_isDirectory) {
if (async && callback) {
callback(_data);
callback(new Buffer(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.
}
return new Buffer(0);
}
var compressedData = getCompressedDataFromZip();
if (compressedData.length == 0) {
if (async && callback) callback(compressedData, Utils.Errors.NO_DATA);//si added error.
return compressedData;
}
var data = new Buffer(_entryHeader.size);
data.fill(0);
switch (_entryHeader.method) {
case Utils.Constants.STORED:
compressedData.copy(data);
if (!crc32OK(data)) {
if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error
return Utils.Errors.BAD_CRC;
} else {//si added otherwise did not seem to return data.
if (async && callback) callback(data);
return data;
}
break;
case Utils.Constants.DEFLATED:
var inflater = new Methods.Inflater(compressedData);
if (!async) {
inflater.inflate(data);
if (!crc32OK(data)) {
console.warn(Utils.Errors.BAD_CRC + " " + _entryName.toString())
}
return data;
} else {
inflater.inflateAsync(function(result) {
result.copy(data, 0);
if (crc32OK(data)) {
if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error
} else { //si added otherwise did not seem to return data.
if (callback) callback(data);
}
})
}
break;
default:
if (async && callback) callback(new Buffer(0), Utils.Errors.UNKNOWN_METHOD);
return Utils.Errors.UNKNOWN_METHOD;
}
}
function compress(/*Boolean*/async, /*Function*/callback) {
if ( _needDeflate) {
_compressedData = null;
if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
// no data set or the data wasn't changed to require recompression
if (async && callback) callback(getCompressedDataFromZip());
return getCompressedDataFromZip();
}
if (_compressedData == null) {
if (_isDirectory || !_data) {
_data = new Buffer(0);
_compressedData = new Buffer(0);
return;
}
if (uncompressedData.length && !_isDirectory) {
var compressedData;
// Local file header
_dataHeader.version = 10;
_dataHeader.flags = 0;
_dataHeader.time = _entryHeader.time;
_dataHeader.compressedSize = _data.length;
_dataHeader.fileNameLength = _entryName.length;
_dataHeader.method = 8;
switch (_dataHeader.method) {
switch (_entryHeader.method) {
case Utils.Constants.STORED:
_dataHeader.method = Utils.Constants.STORED;
_compressedData = new Buffer(Utils.Constants.LOCHDR + _entryName.length + _data.length);
_dataHeader.toBinary().copy(_compressedData);
_compressedData.write(_entryName, Utils.Constants.LOCHDR);
_data.copy(_compressedData, Utils.Constants.LOCHDR + _entryName.length);
_entryHeader.compressedSize = _entryHeader.size;
compressedData = new Buffer(uncompressedData.length);
uncompressedData.copy(compressedData);
if (async && callback) callback(compressedData);
return compressedData;
break;
default:
case Utils.Constants.DEFLATED:
_dataHeader.method = Utils.Constants.DEFLATED;
_entryHeader.method = Utils.Constants.DEFLATED;
var deflater = new Methods.Deflater(_data);
var deflater = new Methods.Deflater(uncompressedData);
if (!async) {
console.log(_entryName.toString());
var deflated = deflater.deflate();
_compressedData = new Buffer(deflated.length + Utils.Constants.LOCHDR + _entryName.length);
_compressedData.fill(0);
_dataHeader.toBinary().copy(_compressedData);
_compressedData.write(_entryName, Utils.Constants.LOCHDR);
deflated.copy(_compressedData, Utils.Constants.LOCHDR + _entryName.length);
deflated = null;
_entryHeader.compressedSize = deflated.length;
return deflated;
} else {
deflater.deflateAsync(function(data) {
_compressedData = new Buffer(data.length + Utils.Constants.LOCHDR + _entryName.length);
_dataHeader.toBinary().copy(_compressedData);
_compressedData.write(_entryName, Utils.Constants.LOCHDR);
data.copy(_compressedData, Utils.Constants.LOCHDR + _entryName.length);
callback(_compressedData);
compressedData = new Buffer(data.length);
_entryHeader.compressedSize = data.length;
data.copy(compressedData);
callback && callback(compressedData);
})

@@ -134,6 +130,7 @@ }

}
_needDeflate = false;
} else {
if (async && callback) {
callback(_compressedData);
callback(new Buffer(0));
} else {
return new Buffer(0);
}

@@ -144,9 +141,9 @@ }

return {
get entryName () { return _entryName; },
get entryName () { return _entryName.toString(); },
get rawEntryName() { return _entryName; },
set entryName (val) {
_compressedData && (_needDeflate = true);
_entryName = val;
_isDirectory = val.charAt(_entryName.length - 1) == "/";
_entryHeader.fileNameLength = val.length;
_dataHeader.fileNameLenght = val.length;
_entryName = Utils.toBuffer(val);
var lastChar = _entryName[_entryName.length - 1];
_isDirectory = (lastChar == 47) || (lastChar == 92);
_entryHeader.fileNameLength = _entryName.length;
},

@@ -160,22 +157,15 @@

get comment () { return _comment; },
get comment () { return _comment.toString(); },
set comment (val) {
_comment = val;
_entryHeader.commentLength = val.length;
_comment = Utils.toBuffer(val);
_entryHeader.commentLength = _comment.length;
},
get name () { return _entryName.split("/").pop(); },
get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split("/").pop() : n.split("/").pop(); },
get isDirectory () { return _isDirectory },
setCompressedData : function(value) {
_compressedData = value;
_dataHeader.loadFromBinary(_compressedData.slice(0, Utils.Constants.LOCHDR));
_data = null;
_needDeflate = false;
getCompressedData : function() {
return compress(false, null)
},
getCompressedData : function() {
compress(false, null);
return _compressedData
},
getCompressedDataAsync : function(/*Function*/callback) {

@@ -186,26 +176,14 @@ compress(true, callback)

setData : function(value) {
if (typeof value == "string") {
value = new Buffer(value);
uncompressedData = Utils.toBuffer(value);
if (!_isDirectory && uncompressedData.length) {
_entryHeader.size = uncompressedData.length;
_entryHeader.method = Utils.Constants.DEFLATED;
_entryHeader.crc = Utils.crc32(value);
} else { // folders and blank files should be stored
_entryHeader.method = Utils.Constants.STORED;
}
_needDeflate = true;
_compressedData = null;
_dataHeader.time = +new Date();
_entryHeader.size = _dataHeader.size;
if (value && value.length) {
_dataHeader.compressedSize = value.length;
_entryHeader.compressedSize = _dataHeader.compressedSize;
_dataHeader.size = value.length;
_entryHeader.size = value.length;
_dataHeader.crc = Utils.crc32(value);
_entryHeader.crc = _dataHeader.crc;
}
//_entryHeader.method = _dataHeader.method;
_data = value;
},
getData : function() {
decompress(false, null);
return _data
return decompress(false, null);
},

@@ -226,4 +204,5 @@

packHeader : function() {
var header = _entryHeader.toBinary();
header.write(_entryName, Utils.Constants.CENHDR);
var header = _entryHeader.entryHeaderToBinary();
// add
_entryName.copy(header, Utils.Constants.CENHDR);
if (_entryHeader.extraLength) {

@@ -233,3 +212,3 @@ _extra.copy(header, Utils.Constants.CENHDR + _entryName.length)

if (_entryHeader.commentLength) {
header.write(_comment, Utils.Constants.CENHDR + _entryName.length + _entryHeader.extraLength, _comment.length, 'utf8');
_comment.copy(header, Utils.Constants.CENHDR + _entryName.length + _entryHeader.extraLength, _comment.length);
}

@@ -241,12 +220,12 @@ return header;

return '{\n' +
'\t"entryName" : "' + _entryName + "\",\n" +
'\t"name" : "' + _entryName.split("/").pop() + "\",\n" +
'\t"comment" : "' + _comment + "\",\n" +
'\t"entryName" : "' + _entryName.toString() + "\",\n" +
'\t"name" : "' + _entryName.toString().split("/").pop() + "\",\n" +
'\t"comment" : "' + _comment.toString() + "\",\n" +
'\t"isDirectory" : ' + _isDirectory + ",\n" +
'\t"header" : ' + _entryHeader.toString().replace(/\t/mg, "\t\t") + ",\n" +
'\t"compressedData" : <' + (_compressedData && _compressedData.length + " bytes buffer" || "null") + ">\n" +
'\t"data" : <' + (_data && _data.length + " bytes buffer" || "null") + ">\n" +
'\t"compressedData" : <' + (input && input.length + " bytes buffer" || "null") + ">\n" +
'\t"data" : <' + (uncompressedData && uncompressedData.length + " bytes buffer" || "null") + ">\n" +
'}';
}
}
};
};

@@ -5,10 +5,22 @@ var ZipEntry = require("./zipEntry"),

module.exports = function(/*Buffer*/buf) {
module.exports = function(/*String|Buffer*/input, /*Number*/inputType) {
var entryList = [],
entryTable = {},
_comment = '',
endHeader = new Headers.MainHeader();
_comment = new Buffer(0),
filename = "",
fs = require("fs"),
inBuffer = null,
mainHeader = new Headers.MainHeader();
if (buf) {
if (inputType == Utils.Constants.FILE) {
// is a filename
filename = input;
inBuffer = fs.readFileSync(filename);
readMainHeader();
} else if (inputType == Utils.Constants.BUFFER) {
// is a memory buffer
inBuffer = input;
readMainHeader();
} else {
// none. is a new file
}

@@ -18,26 +30,21 @@

entryTable = {};
entryList = new Array(endHeader.diskEntries); // total number of entries
var index = endHeader.offset; // offset of first CEN header
entryList = new Array(mainHeader.diskEntries); // total number of entries
var index = mainHeader.offset; // offset of first CEN header
for(var i = 0; i < entryList.length; i++) {
var tmp = index,
entry = new ZipEntry();
entry = new ZipEntry(inBuffer);
entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);
entry.header = buf.slice(tmp, tmp += Utils.Constants.CENHDR);
entry.entryName = buf.toString('utf8', tmp, tmp += entry.header.fileNameLength);
entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);
if (entry.header.extraLength)
entry.extra = buf.slice(tmp, tmp += entry.header.extraLength);
if (entry.header.extraLength) {
entry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength);
}
if (entry.header.commentLength)
entry.comment = buf.toString('utf8', tmp, tmp + entry.header.commentLength);
entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
index += entry.header.entryHeaderSize;
if (!entry.isDirectory) {
// read data
//entry.setCompressedData(buf.slice(entry.header.offset, entry.header.offset + Utils.Constants.LOCHDR + entry.header.compressedSize + entry.entryName.length));
entry.setCompressedData(buf.slice(entry.header.offset, entry.header.offset + Utils.Constants.LOCHDR + entry.header.compressedSize + entry.entryName.length + buf.readUInt16LE(entry.header.offset + Utils.Constants.LOCEXT)));
}
entryList[i] = entry;

@@ -49,3 +56,3 @@ entryTable[entry.entryName] = entry;

function readMainHeader() {
var i = buf.length - Utils.Constants.ENDHDR, // END header size
var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
n = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length

@@ -55,4 +62,4 @@ endOffset = 0; // Start offset of the END header

for (i; i >= n; i--) {
if (buf[i] != 0x50) continue; // quick check that the byte is 'P'
if (buf.readUInt32LE(i) == Utils.Constants.ENDSIG) { // "PK\005\006"
if (inBuffer[i] != 0x50) continue; // quick check that the byte is 'P'
if (inBuffer.readUInt32LE(i) == Utils.Constants.ENDSIG) { // "PK\005\006"
endOffset = i;

@@ -65,5 +72,5 @@ break;

endHeader.loadFromBinary(buf.slice(endOffset, endOffset + Utils.Constants.ENDHDR));
if (endHeader.commentLength) {
_comment = buf.toString('utf8', endOffset + Utils.Constants.ENDHDR);
mainHeader.loadFromBinary(inBuffer.slice(endOffset, endOffset + Utils.Constants.ENDHDR));
if (mainHeader.commentLength) {
_comment = inBuffer.slice(endOffset + Utils.Constants.ENDHDR);
}

@@ -86,5 +93,5 @@ readEntries();

*/
get comment () { return _comment; },
get comment () { return _comment.toString(); },
set comment(val) {
endHeader.commentLength = val.length;
mainHeader.commentLength = val.length;
_comment = val;

@@ -111,3 +118,3 @@ },

entryTable[entry.entryName] = entry;
endHeader.totalEntries = entryList.length;
mainHeader.totalEntries = entryList.length;
},

@@ -133,3 +140,3 @@

delete(entryTable[entryName]);
endHeader.totalEntries = entryList.length;
mainHeader.totalEntries = entryList.length;
},

@@ -164,52 +171,64 @@

*/
toBuffer : function() {
entryList.sort(function(a, b) {
var nameA = a.entryName.toLowerCase( );
var nameB = b.entryName.toLowerCase( );
if (nameA < nameB) {return -1}
if (nameA > nameB) {return 1}
return 0;
});
compressToBuffer : function() {
if (entryList.length > 1) {
entryList.sort(function(a, b) {
var nameA = a.entryName.toLowerCase();
var nameB = b.entryName.toLowerCase();
if (nameA < nameB) {return -1}
if (nameA > nameB) {return 1}
return 0;
});
}
var totalSize = 0,
data = [],
header = [],
dataBlock = [],
entryHeaders = [],
dindex = 0;
endHeader.size = 0;
endHeader.offset = 0;
mainHeader.size = 0;
mainHeader.offset = 0;
entryList.forEach(function(entry) {
entry.header.offset = dindex;
// compress data and set local and entry header accordingly. Reason why is called first
var compressedData = entry.getCompressedData();
dindex += compressedData.length;
data.push(compressedData);
// data header
var dataHeader = entry.header.dataHeaderToBinary();
var postHeader = new Buffer(entry.entryName + entry.extra.toString());
var dataLength = dataHeader.length + postHeader.length + compressedData.length;
var headerData = entry.packHeader();
header.push(headerData);
endHeader.size += headerData.length;
totalSize += compressedData.length + headerData.length;
dindex += dataLength;
dataBlock.push(dataHeader);
dataBlock.push(postHeader);
dataBlock.push(compressedData);
var entryHeader = entry.packHeader();
entryHeaders.push(entryHeader);
mainHeader.size += entryHeader.length;
totalSize += (dataLength + entryHeader.length);
});
totalSize += endHeader.mainHeaderSize;
totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
// point to end of data and begining of central directory first record
endHeader.offset = dindex;
mainHeader.offset = dindex;
dindex = 0;
var outBuffer = new Buffer(totalSize);
data.forEach(function(content) {
content.copy(outBuffer, dindex); // write data
dataBlock.forEach(function(content) {
content.copy(outBuffer, dindex); // write data blocks
dindex += content.length;
});
header.forEach(function(content) {
content.copy(outBuffer, dindex); // write data
entryHeaders.forEach(function(content) {
content.copy(outBuffer, dindex); // write central directory entries
dindex += content.length;
});
var mainHeader = endHeader.toBinary();
var mh = mainHeader.toBinary();
if (_comment) {
mainHeader.write(_comment, Utils.Constants.ENDHDR);
_comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
}
mainHeader.copy(outBuffer, dindex);
mh.copy(outBuffer, dindex); // write main header

@@ -216,0 +235,0 @@ return outBuffer

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc