@loaders.gl/zip - npm Package Compare versions

Comparing version 4.1.0-alpha.10 to 4.1.0-alpha.11

dist/parse-zip/zip-composition.d.ts


dist/hash-file-utility.d.ts
import { FileProvider } from '@loaders.gl/loader-utils';
import { ZipCDFileHeader } from './parse-zip/cd-file-header';
/**

@@ -15,7 +16,19 @@ * Reads hash file from buffer and returns it in ready-to-use form

/**
* creates hash table from file offset iterator
* @param zipCDIterator iterator to use
* @returns hash table
*/
export declare function getHashTable(zipCDIterator: AsyncIterable<ZipCDFileHeader>): Promise<Record<string, bigint>>;
/** item of the file offset list */
type FileListItem = {
fileName: string;
localHeaderOffset: bigint;
};
/**
* creates hash file that later can be added to the SLPK archive
* @param fileProvider SLPK archive where we need to add hash file
* @param zipCDIterator iterator to use
* @returns ArrayBuffer containing hash file
*/
export declare function composeHashFile(fileProvider: FileProvider): Promise<ArrayBuffer>;
export declare function composeHashFile(zipCDIterator: AsyncIterable<FileListItem> | Iterable<FileListItem>): Promise<ArrayBuffer>;
export {};
//# sourceMappingURL=hash-file-utility.d.ts.map


dist/hash-file-utility.js

@@ -19,2 +19,5 @@ import { MD5Hash } from '@loaders.gl/crypto';

const zipCDIterator = makeZipCDHeaderIterator(fileProvider);
return getHashTable(zipCDIterator);
}
export async function getHashTable(zipCDIterator) {
const md5Hash = new MD5Hash();

@@ -31,8 +34,13 @@ const textEncoder = new TextEncoder();

}
export async function composeHashFile(fileProvider) {
const hashArray = await makeHashTableFromZipHeaders(fileProvider);
const bufferArray = Object.entries(hashArray).map(_ref => {
let [key, value] = _ref;
return concatenateArrayBuffers(hexStringToBuffer(key), bigintToBuffer(value));
}).sort(compareHashes);
export async function composeHashFile(zipCDIterator) {
const md5Hash = new MD5Hash();
const textEncoder = new TextEncoder();
const hashArray = [];
for await (const cdHeader of zipCDIterator) {
const filename = cdHeader.fileName.split('\\').join('/').toLocaleLowerCase();
const arrayBuffer = textEncoder.encode(filename).buffer;
const md5 = await md5Hash.hash(arrayBuffer, 'hex');
hashArray.push(concatenateArrayBuffers(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset)));
}
const bufferArray = hashArray.sort(compareHashes);
return concatenateArrayBuffersFromArray(bufferArray);

@@ -39,0 +47,0 @@ }
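
The compiled output above mirrors the signature change in the declarations: composeHashFile now consumes an iterator of {fileName, localHeaderOffset} entries instead of a FileProvider. A minimal migration sketch for callers, assuming makeZipCDHeaderIterator (used internally above) is importable from the package:

// Sketch only; the diff does not show whether makeZipCDHeaderIterator is re-exported
// from '@loaders.gl/zip', so that part of the first import is an assumption.
import {composeHashFile, makeZipCDHeaderIterator} from '@loaders.gl/zip';
import type {FileProvider} from '@loaders.gl/loader-utils';

async function buildHashFile(fileProvider: FileProvider): Promise<ArrayBuffer> {
  // Before (4.1.0-alpha.10): return composeHashFile(fileProvider);
  // After (4.1.0-alpha.11): build the central-directory iterator and pass it in.
  const zipCDIterator = makeZipCDHeaderIterator(fileProvider);
  return composeHashFile(zipCDIterator);
}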

dist/index.d.ts

@@ -8,5 +8,5 @@ export { ZipLoader } from './zip-loader';

export { searchFromTheEnd } from './parse-zip/search-from-the-end';
export { addOneFile } from './parse-zip/zip-compozition';
export { addOneFile, createZip } from './parse-zip/zip-composition';
export { parseHashTable, makeHashTableFromZipHeaders, composeHashFile } from './hash-file-utility';
export { ZipFileSystem } from './filesystems/zip-filesystem';
//# sourceMappingURL=index.d.ts.map

@@ -8,5 +8,5 @@ export { ZipLoader } from "./zip-loader.js";

export { searchFromTheEnd } from "./parse-zip/search-from-the-end.js";
export { addOneFile } from "./parse-zip/zip-compozition.js";
export { addOneFile, createZip } from "./parse-zip/zip-composition.js";
export { parseHashTable, makeHashTableFromZipHeaders, composeHashFile } from "./hash-file-utility.js";
export { ZipFileSystem } from "./filesystems/zip-filesystem.js";
//# sourceMappingURL=index.js.map

dist/parse-zip/end-of-central-directory.d.ts

@@ -23,2 +23,12 @@ import { FileProvider } from '@loaders.gl/loader-utils';

/**
* Data to generate End of central directory record
* according to https://en.wikipedia.org/wiki/ZIP_(file_format)
*/
export type ZipEoCDGenerationOptions = {
recordsNumber: number;
cdSize: number;
cdOffset: bigint;
eoCDStart: bigint;
};
/**
* Parses end of central directory record of zip file

@@ -37,3 +47,9 @@ * @param file - FileProvider instance

*/
export declare function updateEoCD(eocdBody: ArrayBuffer, oldEoCDOffsets: ZipEoCDRecordOffsets, newCDStartOffset: bigint, eocdStartOffset: bigint, newCDRecordsNumber: bigint): Promise<Uint8Array>;
export declare function updateEoCD(eocdBody: ArrayBuffer, oldEoCDOffsets: ZipEoCDRecordOffsets, newCDStartOffset: bigint, eocdStartOffset: bigint, newCDRecordsNumber: bigint): Uint8Array;
/**
* generates EoCD record
* @param options data to generate EoCD record
* @returns ArrayBuffer with EoCD record
*/
export declare function generateEoCD(options: ZipEoCDGenerationOptions): ArrayBuffer;
//# sourceMappingURL=end-of-central-directory.d.ts.map

dist/parse-zip/end-of-central-directory.js

@@ -1,2 +0,2 @@

import { compareArrayBuffers } from '@loaders.gl/loader-utils';
import { compareArrayBuffers, concatenateArrayBuffers } from '@loaders.gl/loader-utils';
import { searchFromTheEnd } from "./search-from-the-end.js";

@@ -11,2 +11,3 @@ import { setFieldToNumber } from "./zip64-info-generation.js";

const CD_START_OFFSET_OFFSET = 16n;
const CD_COMMENT_OFFSET = 22n;
const ZIP64_EOCD_START_OFFSET_OFFSET = 8n;

@@ -17,2 +18,3 @@ const ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;

const ZIP64_CD_START_OFFSET_OFFSET = 48n;
const ZIP64_COMMENT_OFFSET = 56n;
export const parseEoCDRecord = async file => {

@@ -46,3 +48,3 @@ const zipEoCDOffset = await searchFromTheEnd(file, eoCDSignature);

};
export async function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset, newCDRecordsNumber) {
export function updateEoCD(eocdBody, oldEoCDOffsets, newCDStartOffset, eocdStartOffset, newCDRecordsNumber) {
const eocd = new DataView(eocdBody);

@@ -70,2 +72,119 @@ const classicEoCDOffset = oldEoCDOffsets.zip64EoCDOffset ? oldEoCDOffsets.zipEoCDOffset - oldEoCDOffsets.zip64EoCDOffset : 0n;

}
export function generateEoCD(options) {
const header = new DataView(new ArrayBuffer(Number(CD_COMMENT_OFFSET)));
for (const field of EOCD_FIELDS) {
var _ref, _options, _field$name;
setFieldToNumber(header, field.size, field.offset, (_ref = (_options = options[(_field$name = field.name) !== null && _field$name !== void 0 ? _field$name : '']) !== null && _options !== void 0 ? _options : field.default) !== null && _ref !== void 0 ? _ref : 0);
}
const locator = generateZip64InfoLocator(options);
const zip64Record = generateZip64Info(options);
return concatenateArrayBuffers(zip64Record, locator, header.buffer);
}
const EOCD_FIELDS = [{
offset: 0,
size: 4,
default: new DataView(eoCDSignature.buffer).getUint32(0, true)
}, {
offset: 4,
size: 2,
default: 0
}, {
offset: 6,
size: 2,
default: 0
}, {
offset: 8,
size: 2,
name: 'recordsNumber'
}, {
offset: 10,
size: 2,
name: 'recordsNumber'
}, {
offset: 12,
size: 4,
name: 'cdSize'
}, {
offset: 16,
size: 4,
name: 'cdOffset'
}, {
offset: 20,
size: 2,
default: 0
}];
function generateZip64Info(options) {
const record = new DataView(new ArrayBuffer(Number(ZIP64_COMMENT_OFFSET)));
for (const field of ZIP64_EOCD_FIELDS) {
var _ref2, _options2, _field$name2;
setFieldToNumber(record, field.size, field.offset, (_ref2 = (_options2 = options[(_field$name2 = field.name) !== null && _field$name2 !== void 0 ? _field$name2 : '']) !== null && _options2 !== void 0 ? _options2 : field.default) !== null && _ref2 !== void 0 ? _ref2 : 0);
}
return record.buffer;
}
function generateZip64InfoLocator(options) {
const locator = new DataView(new ArrayBuffer(Number(20)));
for (const field of ZIP64_EOCD_LOCATOR_FIELDS) {
var _ref3, _options3, _field$name3;
setFieldToNumber(locator, field.size, field.offset, (_ref3 = (_options3 = options[(_field$name3 = field.name) !== null && _field$name3 !== void 0 ? _field$name3 : '']) !== null && _options3 !== void 0 ? _options3 : field.default) !== null && _ref3 !== void 0 ? _ref3 : 0);
}
return locator.buffer;
}
const ZIP64_EOCD_LOCATOR_FIELDS = [{
offset: 0,
size: 4,
default: new DataView(zip64EoCDLocatorSignature.buffer).getUint32(0, true)
}, {
offset: 4,
size: 4,
default: 0
}, {
offset: 8,
size: 8,
name: 'eoCDStart'
}, {
offset: 16,
size: 4,
default: 1
}];
const ZIP64_EOCD_FIELDS = [{
offset: 0,
size: 4,
default: new DataView(zip64EoCDSignature.buffer).getUint32(0, true)
}, {
offset: 4,
size: 8,
default: 44
}, {
offset: 12,
size: 2,
default: 45
}, {
offset: 14,
size: 2,
default: 45
}, {
offset: 16,
size: 4,
default: 0
}, {
offset: 20,
size: 4,
default: 0
}, {
offset: 24,
size: 8,
name: 'recordsNumber'
}, {
offset: 32,
size: 8,
name: 'recordsNumber'
}, {
offset: 40,
size: 8,
name: 'cdSize'
}, {
offset: 48,
size: 8,
name: 'cdOffset'
}];
//# sourceMappingURL=end-of-central-directory.js.map
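
A minimal sketch of calling the new generateEoCD, based only on the implementation above: the returned buffer is the 56-byte zip64 EoCD record, the 20-byte zip64 locator and the 22-byte classic EoCD record concatenated in that order (98 bytes). The import path and the option values are assumptions for illustration.

import {generateEoCD} from '@loaders.gl/zip'; // assumption: may only be exported from the internal module

const eocd = generateEoCD({
  recordsNumber: 1, // placeholder values, not taken from a real archive
  cdSize: 80,
  cdOffset: 1024n,
  eoCDStart: 1104n // offset where the zip64 EoCD record itself starts
});
console.log(eocd.byteLength); // 98 = 56 (zip64 EoCD) + 20 (locator) + 22 (classic EoCD)
// The classic record occupies the trailing 22 bytes and starts with the 0x06054b50 signature.
const classic = new DataView(eocd, eocd.byteLength - 22);
console.log(classic.getUint32(0, true).toString(16)); // "6054b50"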

dist/parse-zip/zip64-info-generation.js

@@ -25,6 +25,6 @@ import { concatenateArrayBuffers } from '@loaders.gl/loader-utils';

2: (header, offset, value) => {
header.setUint16(offset, Number(value), true);
header.setUint16(offset, Number(value > 0xffff ? 0xffff : value), true);
},
4: (header, offset, value) => {
header.setUint32(offset, Number(value), true);
header.setUint32(offset, Number(value > 0xffffffff ? 0xffffffff : value), true);
},

@@ -31,0 +31,0 @@ 8: (header, offset, value) => {

dist/zip-loader.js

import JSZip from 'jszip';
const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
const VERSION = typeof "4.1.0-alpha.11" !== 'undefined' ? "4.1.0-alpha.11" : 'latest';
export const ZipLoader = {

@@ -4,0 +4,0 @@ id: 'zip',

dist/zip-writer.js

import JSZip from 'jszip';
const VERSION = typeof "4.1.0-alpha.10" !== 'undefined' ? "4.1.0-alpha.10" : 'latest';
const VERSION = typeof "4.1.0-alpha.11" !== 'undefined' ? "4.1.0-alpha.11" : 'latest';
export const ZipWriter = {

@@ -4,0 +4,0 @@ name: 'Zip Archive',

package.json

{
"name": "@loaders.gl/zip",
"version": "4.1.0-alpha.10",
"version": "4.1.0-alpha.11",
"description": "Zip Archive Loader",

@@ -41,9 +41,9 @@ "license": "MIT",

"dependencies": {
"@loaders.gl/compression": "4.1.0-alpha.10",
"@loaders.gl/crypto": "4.1.0-alpha.10",
"@loaders.gl/loader-utils": "4.1.0-alpha.10",
"@loaders.gl/compression": "4.1.0-alpha.11",
"@loaders.gl/crypto": "4.1.0-alpha.11",
"@loaders.gl/loader-utils": "4.1.0-alpha.11",
"jszip": "^3.1.5",
"md5": "^2.3.0"
},
"gitHead": "19f43c2d90d8b50860c3f8e487429779a386287d"
"gitHead": "5d3e23bf93762b48c8c1d6d926ede7a97fe43ab0"
}

src/hash-file-utility.ts

@@ -11,3 +11,3 @@ // loaders.gl

} from '@loaders.gl/loader-utils';
import {makeZipCDHeaderIterator} from './parse-zip/cd-file-header';
import {ZipCDFileHeader, makeZipCDHeaderIterator} from './parse-zip/cd-file-header';

@@ -49,2 +49,13 @@ /**

const zipCDIterator = makeZipCDHeaderIterator(fileProvider);
return getHashTable(zipCDIterator);
}
/**
* creates hash table from file offset iterator
* @param zipCDIterator iterator to use
* @returns hash table
*/
export async function getHashTable(
zipCDIterator: AsyncIterable<ZipCDFileHeader>
): Promise<Record<string, bigint>> {
const md5Hash = new MD5Hash();

@@ -65,12 +76,32 @@ const textEncoder = new TextEncoder();

/** item of the file offset list */
type FileListItem = {
fileName: string;
localHeaderOffset: bigint;
};
/**
* creates hash file that later can be added to the SLPK archive
* @param fileProvider SLPK archive where we need to add hash file
* @param zipCDIterator iterator to use
* @returns ArrayBuffer containing hash file
*/
export async function composeHashFile(fileProvider: FileProvider): Promise<ArrayBuffer> {
const hashArray = await makeHashTableFromZipHeaders(fileProvider);
const bufferArray = Object.entries(hashArray)
.map(([key, value]) => concatenateArrayBuffers(hexStringToBuffer(key), bigintToBuffer(value)))
.sort(compareHashes);
export async function composeHashFile(
zipCDIterator: AsyncIterable<FileListItem> | Iterable<FileListItem>
): Promise<ArrayBuffer> {
const md5Hash = new MD5Hash();
const textEncoder = new TextEncoder();
const hashArray: ArrayBuffer[] = [];
for await (const cdHeader of zipCDIterator) {
const filename = cdHeader.fileName.split('\\').join('/').toLocaleLowerCase();
const arrayBuffer = textEncoder.encode(filename).buffer;
const md5 = await md5Hash.hash(arrayBuffer, 'hex');
hashArray.push(
concatenateArrayBuffers(hexStringToBuffer(md5), bigintToBuffer(cdHeader.localHeaderOffset))
);
}
const bufferArray = hashArray.sort(compareHashes);
return concatenateArrayBuffersFromArray(bufferArray);

@@ -77,0 +108,0 @@ }

src/index.ts

@@ -22,3 +22,3 @@ // loaders.gl

export {searchFromTheEnd} from './parse-zip/search-from-the-end';
export {addOneFile} from './parse-zip/zip-compozition';
export {addOneFile, createZip} from './parse-zip/zip-composition';

@@ -25,0 +25,0 @@ // export type {HashElement} from './hash-file-utility';

src/parse-zip/end-of-central-directory.ts

@@ -5,3 +5,3 @@ // loaders.gl

import {FileProvider, compareArrayBuffers} from '@loaders.gl/loader-utils';
import {FileProvider, compareArrayBuffers, concatenateArrayBuffers} from '@loaders.gl/loader-utils';
import {ZipSignature, searchFromTheEnd} from './search-from-the-end';

@@ -33,2 +33,13 @@ import {setFieldToNumber} from './zip64-info-generation';

/**
* Data to generate End of central directory record
* according to https://en.wikipedia.org/wiki/ZIP_(file_format)
*/
export type ZipEoCDGenerationOptions = {
recordsNumber: number;
cdSize: number;
cdOffset: bigint;
eoCDStart: bigint;
};
const eoCDSignature: ZipSignature = new Uint8Array([0x50, 0x4b, 0x05, 0x06]);

@@ -43,2 +54,3 @@ const zip64EoCDLocatorSignature = new Uint8Array([0x50, 0x4b, 0x06, 0x07]);

const CD_START_OFFSET_OFFSET = 16n;
const CD_COMMENT_OFFSET = 22n;
const ZIP64_EOCD_START_OFFSET_OFFSET = 8n;

@@ -49,2 +61,3 @@ const ZIP64_CD_RECORDS_NUMBER_OFFSET = 24n;

const ZIP64_CD_START_OFFSET_OFFSET = 48n;
const ZIP64_COMMENT_OFFSET = 56n;

@@ -101,3 +114,3 @@ /**

*/
export async function updateEoCD(
export function updateEoCD(
eocdBody: ArrayBuffer,

@@ -108,3 +121,3 @@ oldEoCDOffsets: ZipEoCDRecordOffsets,

newCDRecordsNumber: bigint
): Promise<Uint8Array> {
): Uint8Array {
const eocd = new DataView(eocdBody);

@@ -161,1 +174,226 @@

}
/**
* generates EoCD record
* @param options data to generate EoCD record
* @returns ArrayBuffer with EoCD record
*/
export function generateEoCD(options: ZipEoCDGenerationOptions): ArrayBuffer {
const header = new DataView(new ArrayBuffer(Number(CD_COMMENT_OFFSET)));
for (const field of EOCD_FIELDS) {
setFieldToNumber(
header,
field.size,
field.offset,
options[field.name ?? ''] ?? field.default ?? 0
);
}
const locator = generateZip64InfoLocator(options);
const zip64Record = generateZip64Info(options);
return concatenateArrayBuffers(zip64Record, locator, header.buffer);
}
/** standart EoCD fields */
const EOCD_FIELDS = [
// End of central directory signature = 0x06054b50
{
offset: 0,
size: 4,
default: new DataView(eoCDSignature.buffer).getUint32(0, true)
},
// Number of this disk (or 0xffff for ZIP64)
{
offset: 4,
size: 2,
default: 0
},
// Disk where central directory starts (or 0xffff for ZIP64)
{
offset: 6,
size: 2,
default: 0
},
// Number of central directory records on this disk (or 0xffff for ZIP64)
{
offset: 8,
size: 2,
name: 'recordsNumber'
},
// Total number of central directory records (or 0xffff for ZIP64)
{
offset: 10,
size: 2,
name: 'recordsNumber'
},
// Size of central directory (bytes) (or 0xffffffff for ZIP64)
{
offset: 12,
size: 4,
name: 'cdSize'
},
// Offset of start of central directory, relative to start of archive (or 0xffffffff for ZIP64)
{
offset: 16,
size: 4,
name: 'cdOffset'
},
// Comment length (n)
{
offset: 20,
size: 2,
default: 0
}
];
/**
* generates eocd zip64 record
* @param options data to generate eocd zip64 record
* @returns buffer with eocd zip64 record
*/
function generateZip64Info(options: ZipEoCDGenerationOptions): ArrayBuffer {
const record = new DataView(new ArrayBuffer(Number(ZIP64_COMMENT_OFFSET)));
for (const field of ZIP64_EOCD_FIELDS) {
setFieldToNumber(
record,
field.size,
field.offset,
options[field.name ?? ''] ?? field.default ?? 0
);
}
return record.buffer;
}
/**
* generates eocd zip64 record locator
* @param options data to generate eocd zip64 record
* @returns buffer with eocd zip64 record
*/
function generateZip64InfoLocator(options: ZipEoCDGenerationOptions): ArrayBuffer {
const locator = new DataView(new ArrayBuffer(Number(20)));
for (const field of ZIP64_EOCD_LOCATOR_FIELDS) {
setFieldToNumber(
locator,
field.size,
field.offset,
options[field.name ?? ''] ?? field.default ?? 0
);
}
return locator.buffer;
}
/** zip64 EoCD record locater fields */
const ZIP64_EOCD_LOCATOR_FIELDS = [
// zip64 end of central dir locator signature
{
offset: 0,
size: 4,
default: new DataView(zip64EoCDLocatorSignature.buffer).getUint32(0, true)
},
// number of the disk with the start of the zip64 end of
{
offset: 4,
size: 4,
default: 0
},
// start of the zip64 end of central directory
{
offset: 8,
size: 8,
name: 'eoCDStart'
},
// total number of disks
{
offset: 16,
size: 4,
default: 1
}
];
/** zip64 EoCD recodrd fields */
const ZIP64_EOCD_FIELDS = [
// End of central directory signature = 0x06064b50
{
offset: 0,
size: 4,
default: new DataView(zip64EoCDSignature.buffer).getUint32(0, true)
},
// Size of the EOCD64 minus 12
{
offset: 4,
size: 8,
default: 44
},
// Version made by
{
offset: 12,
size: 2,
default: 45
},
// Version needed to extract (minimum)
{
offset: 14,
size: 2,
default: 45
},
// Number of this disk
{
offset: 16,
size: 4,
default: 0
},
// Disk where central directory starts
{
offset: 20,
size: 4,
default: 0
},
// Number of central directory records on this disk
{
offset: 24,
size: 8,
name: 'recordsNumber'
},
// Total number of central directory records
{
offset: 32,
size: 8,
name: 'recordsNumber'
},
// Size of central directory (bytes)
{
offset: 40,
size: 8,
name: 'cdSize'
},
// Offset of start of central directory, relative to start of archive
{
offset: 48,
size: 8,
name: 'cdOffset'
}
];

src/parse-zip/zip64-info-generation.ts

@@ -28,3 +28,3 @@ import {concatenateArrayBuffers} from '@loaders.gl/loader-utils';

if (!optionsToUse[field.name ?? ''] && !field.default) {
continue;
continue; // eslint-disable-line no-continue
}

@@ -66,6 +66,6 @@ const newValue = new DataView(new ArrayBuffer(field.size));

2: (header, offset, value) => {
header.setUint16(offset, Number(value), true);
header.setUint16(offset, Number(value > 0xffff ? 0xffff : value), true);
},
4: (header, offset, value) => {
header.setUint32(offset, Number(value), true);
header.setUint32(offset, Number(value > 0xffffffff ? 0xffffffff : value), true);
},

@@ -72,0 +72,0 @@ 8: (header, offset, value) => {
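
The change to the 2-byte and 4-byte setters above clamps values that overflow the field to 0xffff / 0xffffffff, the classic-record sentinels that tell readers to consult the zip64 record, rather than letting DataView wrap the value. A standalone illustration of the same guard (names are local to this sketch, not the module's API):

const header = new DataView(new ArrayBuffer(4));
const cdOffset = 0x1_0000_0000n; // needs more than 32 bits
header.setUint32(0, Number(cdOffset > 0xffffffffn ? 0xffffffffn : cdOffset), true);
console.log(header.getUint32(0, true).toString(16)); // "ffffffff": the real value lives in the zip64 EoCD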
