@vltpkg/tar
Advanced tools
/**
 * Determine the root directory prefix of a tarball entry path (usually
 * `package/`, but github tarballs use `${user}-${project}-${committish}`).
 * An explicitly provided `tarDir` is returned as-is; returns `undefined`
 * when no usable root directory can be derived from `path`.
 */
export declare const findTarDir: (path: string | undefined, tarDir?: string) => string | undefined;
| //# sourceMappingURL=find-tar-dir.d.ts.map |
| {"version":3,"file":"find-tar-dir.d.ts","sourceRoot":"","sources":["../src/find-tar-dir.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,UAAU,SACf,MAAM,GAAG,SAAS,WACf,MAAM,uBAkBhB,CAAA"} |
// Derive the archive's root directory from an entry path.
// Usually this will be 'package/', but it can be anything — eg, github
// tarballs use ${user}-${project}-${committish}. If it starts with `./`
// then all entries must as well.
export const findTarDir = (path, tarDir) => {
    // an explicitly provided tarDir always wins
    if (tarDir !== undefined)
        return tarDir;
    if (!path)
        return undefined;
    // skip a leading './' when looking for the first separator
    const searchFrom = path.startsWith('./') ? 2 : 0;
    const slash = path.indexOf('/', searchFrom);
    if (slash === -1)
        return undefined;
    const first = path.substring(0, slash);
    // dot/empty segments cannot name a real root directory
    const unusable = new Set(['.', '..', '', './.', './..', './']);
    return unusable.has(first) ? undefined : first + '/';
};
| //# sourceMappingURL=find-tar-dir.js.map |
| {"version":3,"file":"find-tar-dir.js","sourceRoot":"","sources":["../src/find-tar-dir.ts"],"names":[],"mappings":"AAAA,8DAA8D;AAC9D,2DAA2D;AAC3D,wDAAwD;AACxD,MAAM,CAAC,MAAM,UAAU,GAAG,CACxB,IAAwB,EACxB,MAAe,EACf,EAAE;IACF,IAAI,MAAM,KAAK,SAAS;QAAE,OAAO,MAAM,CAAA;IACvC,IAAI,CAAC,IAAI;QAAE,OAAO,SAAS,CAAA;IAC3B,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,IAAI,CAAC,KAAK,CAAC,CAAC;QAAE,OAAO,SAAS,CAAA;IAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAClC,IACE,KAAK,KAAK,GAAG;QACb,KAAK,KAAK,IAAI;QACd,KAAK,KAAK,EAAE;QACZ,KAAK,KAAK,KAAK;QACf,KAAK,KAAK,MAAM;QAChB,KAAK,KAAK,IAAI,EACd,CAAC;QACD,OAAO,SAAS,CAAA;IAClB,CAAC;IACD,OAAO,KAAK,GAAG,GAAG,CAAA;AACpB,CAAC,CAAA","sourcesContent":["// usually this will be 'package/', but could also be anything\n// eg, github tarballs are ${user}-${project}-${committish}\n// if it starts with `./` then all entries must as well.\nexport const findTarDir = (\n path: string | undefined,\n tarDir?: string,\n) => {\n if (tarDir !== undefined) return tarDir\n if (!path) return undefined\n const i = path.indexOf('/', path.startsWith('./') ? 2 : 0)\n if (i === -1) return undefined\n const chomp = path.substring(0, i)\n if (\n chomp === '.' ||\n chomp === '..' ||\n chomp === '' ||\n chomp === './.' ||\n chomp === './..' ||\n chomp === './'\n ) {\n return undefined\n }\n return chomp + '/'\n}\n"]} |
| export * from './unpack.ts'; | ||
| export * from './pool.ts'; | ||
| export * from './unpack-request.ts'; | ||
| //# sourceMappingURL=index.d.ts.map |
| {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,WAAW,CAAA;AACzB,cAAc,qBAAqB,CAAA"} |
| export * from "./unpack.js"; | ||
| export * from "./pool.js"; | ||
| export * from "./unpack-request.js"; | ||
| //# sourceMappingURL=index.js.map |
| {"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,WAAW,CAAA;AACzB,cAAc,qBAAqB,CAAA","sourcesContent":["export * from './unpack.ts'\nexport * from './pool.ts'\nexport * from './unpack-request.ts'\n"]} |
| import { UnpackRequest } from './unpack-request.ts'; | ||
| import { Worker } from './worker.ts'; | ||
| export * from './worker.ts'; | ||
/**
 * Automatically expanding/contracting set of workers to maximize parallelism
 * of unpack operations up to 1 less than the number of CPUs (or 1).
 *
 * `pool.unpack(tarData, target)` will perform the unpack operation
 * synchronously, in one of these workers, and returns a promise when the
 * worker has confirmed completion of the task.
 */
export declare class Pool {
    #private;
    /**
     * Number of workers to employ. Defaults to 1 less than the number of
     * CPUs, or 1. (NOTE(review): the implementation actually computes
     * `8 * (max(availableParallelism, 2) - 1)` — 8 jobs per CPU; confirm
     * which description is intended.)
     */
    jobs: number;
    /**
     * Set of currently active worker threads
     */
    workers: Set<Worker>;
    /**
     * Queue of requests awaiting an available worker
     */
    queue: UnpackRequest[];
    /**
     * Requests that have been assigned to a worker, but have not yet
     * been confirmed completed.
     */
    pending: Map<number, UnpackRequest>;
    /**
     * Provide the tardata to be unpacked, and the location where it's to be
     * placed. Will create a new worker up to the `jobs` value, and then start
     * pushing in the queue for workers to pick up as they become available.
     *
     * Returned promise resolves when the provided tarball has been extracted.
     */
    unpack(tarData: Buffer, target: string): Promise<void>;
}
| //# sourceMappingURL=pool.d.ts.map |
| {"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../src/pool.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAgB,MAAM,EAAE,MAAM,aAAa,CAAA;AAGlD,cAAc,aAAa,CAAA;AAE3B;;;;;;;GAOG;AACH,qBAAa,IAAI;;IACf;;;OAGG;IAEH,IAAI,EAAE,MAAM,CAAmD;IAC/D;;OAEG;IACH,OAAO,cAAoB;IAC3B;;OAEG;IACH,KAAK,EAAE,aAAa,EAAE,CAAK;IAC3B;;;OAGG;IACH,OAAO,6BAAmC;IA0C1C;;;;;;OAMG;IACG,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;CAU7C"} |
+88
| import { error } from '@vltpkg/error-cause'; | ||
| import { asError } from '@vltpkg/types'; | ||
| import os from 'node:os'; | ||
| import { UnpackRequest } from "./unpack-request.js"; | ||
| import { isResponseOK, Worker } from "./worker.js"; | ||
| export * from "./worker.js"; | ||
/**
 * Automatically expanding/contracting set of workers to maximize parallelism
 * of unpack operations.
 *
 * `pool.unpack(tarData, target)` will perform the unpack operation
 * synchronously, in one of these workers, and returns a promise when the
 * worker has confirmed completion of the task.
 */
export class Pool {
    /**
     * Number of workers to employ: 8 jobs per available CPU beyond the
     * first, with a floor of 8 (i.e. `8 * (max(cpus, 2) - 1)`).
     */
    /* c8 ignore next */
    jobs = 8 * (Math.max(os.availableParallelism(), 2) - 1);
    /**
     * Set of currently active worker threads
     */
    workers = new Set();
    /**
     * Queue of requests awaiting an available worker
     */
    queue = [];
    /**
     * Requests that have been assigned to a worker, but have not yet
     * been confirmed completed.
     */
    pending = new Map();
    // handle a message from the worker
    #onMessage(w, m) {
        const { id } = m;
        // a request has been met or failed, report and either
        // pick up the next item in the queue, or terminate worker
        const ur = this.pending.get(id);
        /* c8 ignore next */
        if (!ur)
            return;
        // drop the settled request from the map so it (and the tarball
        // Buffer it holds) can be garbage-collected; previously entries
        // were never removed and the map grew without bound.
        this.pending.delete(id);
        if (isResponseOK(m)) {
            ur.resolve();
            /* c8 ignore start - nearly impossible in normal circumstances */
        }
        else {
            // surface the worker-reported failure, falling back to a
            // generic message when none was provided
            ur.reject(error(asError(m.error, 'failed without error message').message, {
                found: m,
                cause: m.error,
            }));
        }
        /* c8 ignore stop */
        // reuse the now-idle worker for the next queued request, or
        // retire it when the queue is empty
        const next = this.queue.shift();
        if (!next) {
            this.workers.delete(w);
        }
        else {
            void w.process(next);
        }
    }
    // create a new worker and immediately assign it this request
    #createWorker(req) {
        const w = new Worker((m) => this.#onMessage(w, m));
        this.workers.add(w);
        void w.process(req);
    }
    /**
     * Provide the tardata to be unpacked, and the location where it's to be
     * placed. Will create a new worker up to the `jobs` value, and then start
     * pushing in the queue for workers to pick up as they become available.
     *
     * Returned promise resolves when the provided tarball has been extracted.
     */
    async unpack(tarData, target) {
        const ur = new UnpackRequest(tarData, target);
        this.pending.set(ur.id, ur);
        if (this.workers.size < this.jobs) {
            this.#createWorker(ur);
        }
        else {
            this.queue.push(ur);
        }
        return ur.promise;
    }
}
| //# sourceMappingURL=pool.js.map |
| {"version":3,"file":"pool.js","sourceRoot":"","sources":["../src/pool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAA;AAC3C,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAA;AACvC,OAAO,EAAE,MAAM,SAAS,CAAA;AACxB,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAGlD,cAAc,aAAa,CAAA;AAE3B;;;;;;;GAOG;AACH,MAAM,OAAO,IAAI;IACf;;;OAGG;IACH,oBAAoB;IACpB,IAAI,GAAW,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;IAC/D;;OAEG;IACH,OAAO,GAAG,IAAI,GAAG,EAAU,CAAA;IAC3B;;OAEG;IACH,KAAK,GAAoB,EAAE,CAAA;IAC3B;;;OAGG;IACH,OAAO,GAAG,IAAI,GAAG,EAAyB,CAAA;IAE1C,mCAAmC;IACnC,UAAU,CAAC,CAAS,EAAE,CAA6B;QACjD,MAAM,EAAE,EAAE,EAAE,GAAG,CAAC,CAAA;QAChB,sDAAsD;QACtD,0DAA0D;QAC1D,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QAC/B,oBAAoB;QACpB,IAAI,CAAC,EAAE;YAAE,OAAM;QACf,IAAI,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC;YACpB,EAAE,CAAC,OAAO,EAAE,CAAA;YACZ,iEAAiE;QACnE,CAAC;aAAM,CAAC;YACN,EAAE,CAAC,MAAM,CACP,KAAK,CACH,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,8BAA8B,CAAC,CAAC,OAAO,EACxD;gBACE,KAAK,EAAE,CAAC;gBACR,KAAK,EAAE,CAAC,CAAC,KAAK;aACf,CACF,CACF,CAAA;QACH,CAAC;QACD,oBAAoB;QACpB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAA;QAC/B,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACxB,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAA;QACtB,CAAC;IACH,CAAC;IAED,sBAAsB;IACtB,aAAa,CAAC,GAAkB;QAC9B,MAAM,CAAC,GAAW,IAAI,MAAM,CAAC,CAAC,CAA6B,EAAE,EAAE,CAC7D,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CACtB,CAAA;QACD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACnB,KAAK,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;IACrB,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,MAAM,CAAC,OAAe,EAAE,MAAc;QAC1C,MAAM,EAAE,GAAG,IAAI,aAAa,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;QAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;QAC3B,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;YAClC,IAAI,CAAC,aAAa,CAAC,EAAE,CAAC,CAAA;QACxB,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACrB,CAAC;
QACD,OAAO,EAAE,CAAC,OAAO,CAAA;IACnB,CAAC;CACF","sourcesContent":["import { error } from '@vltpkg/error-cause'\nimport { asError } from '@vltpkg/types'\nimport os from 'node:os'\nimport { UnpackRequest } from './unpack-request.ts'\nimport { isResponseOK, Worker } from './worker.ts'\nimport type { ResponseError, ResponseOK } from './worker.ts'\n\nexport * from './worker.ts'\n\n/**\n * Automatically expanding/contracting set of workers to maximize parallelism\n * of unpack operations up to 1 less than the number of CPUs (or 1).\n *\n * `pool.unpack(tarData, target)` will perform the unpack operation\n * synchronously, in one of these workers, and returns a promise when the\n * worker has confirmed completion of the task.\n */\nexport class Pool {\n /**\n * Number of workers to emplly. Defaults to 1 less than the number of\n * CPUs, or 1.\n */\n /* c8 ignore next */\n jobs: number = 8 * (Math.max(os.availableParallelism(), 2) - 1)\n /**\n * Set of currently active worker threads\n */\n workers = new Set<Worker>()\n /**\n * Queue of requests awaiting an available worker\n */\n queue: UnpackRequest[] = []\n /**\n * Requests that have been assigned to a worker, but have not yet\n * been confirmed completed.\n */\n pending = new Map<number, UnpackRequest>()\n\n // handle a message from the worker\n #onMessage(w: Worker, m: ResponseError | ResponseOK) {\n const { id } = m\n // a request has been met or failed, report and either\n // pick up the next item in the queue, or terminate worker\n const ur = this.pending.get(id)\n /* c8 ignore next */\n if (!ur) return\n if (isResponseOK(m)) {\n ur.resolve()\n /* c8 ignore start - nearly impossible in normal circumstances */\n } else {\n ur.reject(\n error(\n asError(m.error, 'failed without error message').message,\n {\n found: m,\n cause: m.error,\n },\n ),\n )\n }\n /* c8 ignore stop */\n const next = this.queue.shift()\n if (!next) {\n this.workers.delete(w)\n } else {\n void w.process(next)\n }\n }\n\n // create a new worker\n 
#createWorker(req: UnpackRequest) {\n const w: Worker = new Worker((m: ResponseError | ResponseOK) =>\n this.#onMessage(w, m),\n )\n this.workers.add(w)\n void w.process(req)\n }\n\n /**\n * Provide the tardata to be unpacked, and the location where it's to be\n * placed. Will create a new worker up to the `jobs` value, and then start\n * pushing in the queue for workers to pick up as they become available.\n *\n * Returned promise resolves when the provided tarball has been extracted.\n */\n async unpack(tarData: Buffer, target: string) {\n const ur = new UnpackRequest(tarData, target)\n this.pending.set(ur.id, ur)\n if (this.workers.size < this.jobs) {\n this.#createWorker(ur)\n } else {\n this.queue.push(ur)\n }\n return ur.promise\n }\n}\n"]} |
/**
 * A single queued unpack job: the tarball bytes, the destination
 * directory, and a promise that settles when a worker finishes it.
 */
export declare class UnpackRequest {
    /** unique, monotonically increasing request identifier */
    id: number;
    /** raw tarball bytes to extract */
    tarData: Buffer;
    /** directory the tarball contents will be extracted into */
    target: string;
    /** settles `promise` successfully; assigned by the promise executor */
    resolve: () => void;
    /** rejects `promise`; assigned by the promise executor */
    reject: (reason?: any) => void;
    /** resolves once a worker confirms this request is complete */
    promise: Promise<void>;
    constructor(tarData: Buffer, target: string);
}
| //# sourceMappingURL=unpack-request.d.ts.map |
| {"version":3,"file":"unpack-request.d.ts","sourceRoot":"","sources":["../src/unpack-request.ts"],"names":[],"mappings":"AACA,qBAAa,aAAa;IACxB,EAAE,EAAE,MAAM,CAAO;IACjB,OAAO,EAAE,MAAM,CAAA;IACf,MAAM,EAAE,MAAM,CAAA;IACd,OAAO,EAAG,MAAM,IAAI,CAAA;IACpB,MAAM,EAAG,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,IAAI,CAAA;IAC/B,OAAO,gBAGL;gBACU,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;CAI5C"} |
// monotonically increasing id shared by all requests in this process
let nextId = 1;
/**
 * A single queued unpack job: the tarball bytes, the destination
 * directory, and a promise that settles when a worker finishes it.
 */
export class UnpackRequest {
    // unique identifier used to correlate worker responses
    id = nextId++;
    tarData;
    target;
    // settle/reject handles, captured from the promise executor
    resolve;
    reject;
    // resolves once a worker reports this request complete
    promise;
    constructor(tarData, target) {
        this.tarData = tarData;
        this.target = target;
        this.promise = new Promise((res, rej) => {
            this.resolve = res;
            this.reject = rej;
        });
    }
}
| //# sourceMappingURL=unpack-request.js.map |
| {"version":3,"file":"unpack-request.js","sourceRoot":"","sources":["../src/unpack-request.ts"],"names":[],"mappings":"AAAA,IAAI,EAAE,GAAG,CAAC,CAAA;AACV,MAAM,OAAO,aAAa;IACxB,EAAE,GAAW,EAAE,EAAE,CAAA;IACjB,OAAO,CAAQ;IACf,MAAM,CAAQ;IACd,OAAO,CAAa;IACpB,MAAM,CAAyB;IAC/B,OAAO,GAAG,IAAI,OAAO,CAAO,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;QACvC,IAAI,CAAC,OAAO,GAAG,GAAG,CAAA;QAClB,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;IACnB,CAAC,CAAC,CAAA;IACF,YAAY,OAAe,EAAE,MAAc;QACzC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;CACF","sourcesContent":["let ID = 1\nexport class UnpackRequest {\n id: number = ID++\n tarData: Buffer\n target: string\n resolve!: () => void\n reject!: (reason?: any) => void\n promise = new Promise<void>((res, rej) => {\n this.resolve = res\n this.reject = rej\n })\n constructor(tarData: Buffer, target: string) {\n this.tarData = tarData\n this.target = target\n }\n}\n"]} |
/**
 * Unpack a tarball (gzipped or raw) into the target directory.
 * Resolves once extraction has completed and the target is in place.
 */
export declare const unpack: (tarData: Buffer, target: string) => Promise<void>;
| //# sourceMappingURL=unpack.d.ts.map |
| {"version":3,"file":"unpack.d.ts","sourceRoot":"","sources":["../src/unpack.ts"],"names":[],"mappings":"AAwFA,eAAO,MAAM,MAAM,YACR,MAAM,UACP,MAAM,KACb,OAAO,CAAC,IAAI,CAMd,CAAA"} |
+176
| import { error } from '@vltpkg/error-cause'; | ||
| import { randomBytes } from 'node:crypto'; | ||
| import { lstat, mkdir, rename, writeFile } from 'node:fs/promises'; | ||
| import { basename, dirname, resolve, sep } from 'node:path'; | ||
| import { rimraf } from 'rimraf'; | ||
| import { Header } from 'tar/header'; | ||
| import { Pax } from 'tar/pax'; | ||
| import { unzip as unzipCB } from 'node:zlib'; | ||
| import { findTarDir } from "./find-tar-dir.js"; | ||
// promise wrapper around zlib's callback-style unzip
const unzip = async (input) => {
    return new Promise((res, rej) => {
        /* c8 ignore start */
        unzipCB(input, (er, result) => {
            if (er)
                rej(er);
            else
                res(result);
        });
        /* c8 ignore stop */
    });
};
// true if any filesystem entry (file, dir, symlink, ...) exists at path
const exists = async (path) =>
    lstat(path).then(
        () => true,
        () => false,
    );
// per-process random prefix plus a counter, so temp dir names from
// repeated unpacks never collide with each other
let id = 1;
const tmp = `${randomBytes(6).toString('hex')}.`;
const tmpSuffix = () => `${tmp}${id++}`;
/**
 * Validate a tar entry header before writing anything to disk.
 *
 * Returns true only when the entry has a path, sits under the expected
 * `tarDir` prefix inside the archive, and resolves to a location at or
 * strictly below `target` on the real filesystem. Also normalizes the
 * header's path separators to `/` as a side effect.
 */
const checkFs = (h, tarDir, target) => {
    /* c8 ignore start - impossible */
    if (!h.path)
        return false;
    if (!tarDir)
        return false;
    /* c8 ignore stop */
    // normalize windows-style and duplicated separators
    h.path = h.path.replace(/[\\/]+/g, '/');
    // packages should always be in a 'package' tarDir in the archive
    if (!h.path.startsWith(tarDir))
        return false;
    // Package root. Resolve the base too, so the containment check
    // compares absolute paths even when target is given relative.
    const absoluteBasePath = resolve(target);
    const itemAbsolutePath = resolve(absoluteBasePath, h.path.slice(tarDir.length));
    // A bare startsWith() prefix test would let an entry like
    // `package/../t-evil/x` escape into a *sibling* of `/tmp/t`
    // (`/tmp/t-evil` starts with `/tmp/t`); require the resolved path
    // to be the base itself or to cross a separator boundary.
    return (itemAbsolutePath === absoluteBasePath ||
        itemAbsolutePath.startsWith(absoluteBasePath + sep));
};
// write one extracted file, creating its parent directory first
const write = async (path, body, executable = false) => {
    await mkdirp(dirname(path));
    // if the mode is world-executable, then make it executable
    // this is needed for some packages that have a file that is
    // not a declared bin, but still used as a cli executable.
    const mode = executable ? 0o777 : 0o666;
    await writeFile(path, body, { mode });
};
// directories already created (or confirmed) in this process
const made = new Set();
// in-flight mkdir promises, deduplicated by path
const making = new Map();
const mkdirp = async (d) => {
    if (made.has(d))
        return;
    let pending = making.get(d);
    if (pending === undefined) {
        pending = mkdir(d, { recursive: true, mode: 0o777 }).then(() => making.delete(d));
        making.set(d, pending);
    }
    await pending;
    made.add(d);
};
// unpack a tarball (gzipped or raw) into the target directory
export const unpack = async (tarData, target) => {
    // gzip magic bytes: 0x1f 0x8b
    const gzipped = tarData[0] === 0x1f && tarData[1] === 0x8b;
    const raw = gzipped ? await unzip(tarData) : tarData;
    await unpackUnzipped(raw, target);
};
/**
 * Core extraction: walk raw (already-gunzipped) tar data in 512-byte
 * blocks, writing files/dirs into a hidden temp dir, then atomically
 * swap the temp dir into place at `target`, keeping any previous target
 * aside for rollback until the swap succeeds.
 */
const unpackUnzipped = async (buffer, target) => {
    /* c8 ignore start */
    const isGzip = buffer[0] === 0x1f && buffer[1] === 0x8b;
    if (isGzip) {
        throw error('still gzipped after unzipping', {
            found: isGzip,
            wanted: false,
        });
    }
    /* c8 ignore stop */
    // another real quick gutcheck before we get started:
    // tar archives are always a whole number of 512-byte blocks
    if (buffer.length % 512 !== 0) {
        throw error('Invalid tarball: length not divisible by 512', {
            found: buffer.length,
        });
    }
    // a valid archive ends with two 512-byte null blocks (1024 bytes)
    if (buffer.length < 1024) {
        throw error('Invalid tarball: not terminated by 1024 null bytes', { found: buffer.length });
    }
    // make sure the last kb is all zeros
    for (let i = buffer.length - 1024; i < buffer.length; i++) {
        if (buffer[i] !== 0) {
            throw error('Invalid tarball: not terminated by 1024 null bytes', { found: buffer.subarray(i, i + 10) });
        }
    }
    // hidden temp sibling of target; `og` holds the previous target (if
    // any) so it can be restored if extraction fails partway through
    const tmp = dirname(target) + sep + '.' + basename(target) + '.' + tmpSuffix();
    const og = tmp + '.ORIGINAL';
    await Promise.all([rimraf(tmp), rimraf(og)]);
    let succeeded = false;
    try {
        let tarDir = undefined;
        let offset = 0;
        let h;
        // pax extended / global-extended header data, consumed by the
        // Header constructed on the following iteration
        let ex = undefined;
        let gex = undefined;
        while (offset < buffer.length &&
            !(h = new Header(buffer, offset, ex, gex)).nullBlock) {
            offset += 512;
            // NOTE(review): gex is cleared on every iteration here, so a
            // GlobalExtendedHeader only affects the immediately following
            // entry rather than all subsequent ones — confirm intended.
            ex = undefined;
            gex = undefined;
            const size = h.size ?? 0;
            const body = buffer.subarray(offset, offset + size);
            // skip invalid headers
            if (!h.cksumValid)
                continue;
            // entry bodies are zero-padded out to full 512-byte blocks
            offset += 512 * Math.ceil(size / 512);
            // TODO: tarDir might not be named "package/"
            // find the first tarDir in the first entry, and use that.
            switch (h.type) {
                case 'File':
                    if (!tarDir)
                        tarDir = findTarDir(h.path, tarDir);
                    /* c8 ignore next */
                    if (!tarDir)
                        continue;
                    if (!checkFs(h, tarDir, tmp))
                        continue;
                    await write(resolve(tmp, h.path.substring(tarDir.length)), body,
                    // if it's world-executable, it's an executable
                    // otherwise, make it read-only.
                    // NOTE(review): `0x666` looks like it was meant to be
                    // `0o666`; both are even so the fallback is non-executable
                    // either way, but confirm the intended literal.
                    1 === ((h.mode ?? 0x666) & 1));
                    break;
                case 'Directory':
                    /* c8 ignore next 2 */
                    if (!tarDir)
                        tarDir = findTarDir(h.path, tarDir);
                    if (!tarDir)
                        continue;
                    if (!checkFs(h, tarDir, tmp))
                        continue;
                    await mkdirp(resolve(tmp, h.path.substring(tarDir.length)));
                    break;
                case 'GlobalExtendedHeader':
                    gex = Pax.parse(body.toString(), gex, true);
                    break;
                case 'ExtendedHeader':
                case 'OldExtendedHeader':
                    // pax data overriding fields of the next entry's header
                    ex = Pax.parse(body.toString(), ex, false);
                    break;
                case 'NextFileHasLongPath':
                case 'OldGnuLongPath':
                    // body carries the NUL-terminated path of the next entry
                    ex ??= Object.create(null);
                    ex.path = body.toString().replace(/\0.*/, '');
                    break;
            }
        }
        // swap the fully-extracted tmp dir into place, parking any
        // existing target at `og` until the swap has succeeded
        const targetExists = await exists(target);
        if (targetExists)
            await rename(target, og);
        await rename(tmp, target);
        if (targetExists)
            await rimraf(og);
        succeeded = true;
    }
    finally {
        // do not handle error or obscure throw site, just do the cleanup
        // if it didn't complete successfully.
        if (!succeeded) {
            /* c8 ignore start */
            if (await exists(og)) {
                // roll the original target back into place
                await rimraf(target);
                await rename(og, target);
            }
            /* c8 ignore stop */
            await rimraf(tmp);
        }
    }
};
| //# sourceMappingURL=unpack.js.map |
| {"version":3,"file":"unpack.js","sourceRoot":"","sources":["../src/unpack.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAA;AAC3C,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AACzC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAA;AAClE,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,MAAM,WAAW,CAAA;AAC3D,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAEnC,OAAO,EAAE,GAAG,EAAE,MAAM,SAAS,CAAA;AAC7B,OAAO,EAAE,KAAK,IAAI,OAAO,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAA;AAE9C,MAAM,KAAK,GAAG,KAAK,EAAE,KAAa,EAAE,EAAE,CACpC,IAAI,OAAO,CACT,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;AACX,qBAAqB;AACrB,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAE/D,CAAA;AAEH,MAAM,MAAM,GAAG,KAAK,EAAE,IAAY,EAAoB,EAAE;IACtD,IAAI,CAAC;QACH,MAAM,KAAK,CAAC,IAAI,CAAC,CAAA;QACjB,OAAO,IAAI,CAAA;IACb,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,IAAI,EAAE,GAAG,CAAC,CAAA;AACV,MAAM,GAAG,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,GAAG,CAAA;AAChD,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,GAAG,GAAG,MAAM,CAAC,EAAE,EAAE,CAAC,CAAA;AAE1C,MAAM,OAAO,GAAG,CACd,CAAS,EACT,MAA0B,EAC1B,MAAc,EACkB,EAAE;IAClC,kCAAkC;IAClC,IAAI,CAAC,CAAC,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACzB,IAAI,CAAC,MAAM;QAAE,OAAO,KAAK,CAAA;IACzB,oBAAoB;IACpB,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;IAEvC,iEAAiE;IACjE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QAAE,OAAO,KAAK,CAAA;IAE5C,eAAe;IACf,MAAM,gBAAgB,GAAG,MAAM,CAAA;IAC/B,MAAM,gBAAgB,GAAG,OAAO,CAC9B,MAAM,EACN,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAC5B,CAAA;IAED,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE,CAAC;QACnD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,KAAK,EACjB,IAAY,EACZ,IAAY,EACZ,UAAU,GAAG,KAAK,EAClB,EAAE;IACF,MAAM,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3B,2DAA2D;IAC3D,4DAA4D;IAC5D,0DAA0D;IAC1D,MAAM,SAAS,
CAAC,IAAI,EAAE,IAAI,EAAE;QAC1B,IAAI,EAAE,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK;KACjC,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,IAAI,GAAG,EAAU,CAAA;AAC9B,MAAM,MAAM,GAAG,IAAI,GAAG,EAA4B,CAAA;AAClD,MAAM,MAAM,GAAG,KAAK,EAAE,CAAS,EAAE,EAAE;IACjC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACjB,MAAM,CAAC,GACL,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YACb,KAAK,CAAC,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CACnD,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CACjB,CAAA;QACH,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAChB,MAAM,CAAC,CAAA;QACP,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,KAAK,EACzB,OAAe,EACf,MAAc,EACC,EAAE;IACjB,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACzD,MAAM,cAAc,CAClB,MAAM,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,EACvC,MAAM,CACP,CAAA;AACH,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,MAAc,EACd,MAAc,EACC,EAAE;IACjB,qBAAqB;IACrB,MAAM,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACvD,IAAI,MAAM,EAAE,CAAC;QACX,MAAM,KAAK,CAAC,+BAA+B,EAAE;YAC3C,KAAK,EAAE,MAAM;YACb,MAAM,EAAE,KAAK;SACd,CAAC,CAAA;IACJ,CAAC;IACD,oBAAoB;IAEpB,oDAAoD;IACpD,IAAI,MAAM,CAAC,MAAM,GAAG,GAAG,KAAK,CAAC,EAAE,CAAC;QAC9B,MAAM,KAAK,CAAC,8CAA8C,EAAE;YAC1D,KAAK,EAAE,MAAM,CAAC,MAAM;SACrB,CAAC,CAAA;IACJ,CAAC;IACD,IAAI,MAAM,CAAC,MAAM,GAAG,IAAI,EAAE,CAAC;QACzB,MAAM,KAAK,CACT,oDAAoD,EACpD,EAAE,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,CACzB,CAAA;IACH,CAAC;IACD,qCAAqC;IACrC,KAAK,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,IAAI,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;YACpB,MAAM,KAAK,CACT,oDAAoD,EACpD,EAAE,KAAK,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,EAAE,CACtC,CAAA;QACH,CAAC;IACH,CAAC;IAED,MAAM,GAAG,GACP,OAAO,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,SAAS,EAAE,CAAA;IACpE,MAAM,EAAE,GAAG,GAAG,GAAG,WAAW,CAAA;IAC5B,MAAM,OAAO,CAAC,GAAG,CAAC,CAAC
,MAAM,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;IAE5C,IAAI,SAAS,GAAG,KAAK,CAAA;IACrB,IAAI,CAAC;QACH,IAAI,MAAM,GAAuB,SAAS,CAAA;QAC1C,IAAI,MAAM,GAAG,CAAC,CAAA;QACd,IAAI,CAAS,CAAA;QACb,IAAI,EAAE,GAA2B,SAAS,CAAA;QAC1C,IAAI,GAAG,GAA2B,SAAS,CAAA;QAC3C,OACE,MAAM,GAAG,MAAM,CAAC,MAAM;YACtB,CAAC,CAAC,CAAC,GAAG,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,SAAS,EACpD,CAAC;YACD,MAAM,IAAI,GAAG,CAAA;YACb,EAAE,GAAG,SAAS,CAAA;YACd,GAAG,GAAG,SAAS,CAAA;YACf,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAA;YACxB,MAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,CAAA;YACnD,uBAAuB;YACvB,IAAI,CAAC,CAAC,CAAC,UAAU;gBAAE,SAAQ;YAC3B,MAAM,IAAI,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC,CAAA;YAErC,6CAA6C;YAC7C,0DAA0D;YAC1D,QAAQ,CAAC,CAAC,IAAI,EAAE,CAAC;gBACf,KAAK,MAAM;oBACT,IAAI,CAAC,MAAM;wBAAE,MAAM,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;oBAChD,oBAAoB;oBACpB,IAAI,CAAC,MAAM;wBAAE,SAAQ;oBACrB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC;wBAAE,SAAQ;oBACtC,MAAM,KAAK,CACT,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAC7C,IAAI;oBACJ,+CAA+C;oBAC/C,gCAAgC;oBAChC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAC9B,CAAA;oBACD,MAAK;gBAEP,KAAK,WAAW;oBACd,sBAAsB;oBACtB,IAAI,CAAC,MAAM;wBAAE,MAAM,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;oBAChD,IAAI,CAAC,MAAM;wBAAE,SAAQ;oBACrB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC;wBAAE,SAAQ;oBACtC,MAAM,MAAM,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;oBAC3D,MAAK;gBAEP,KAAK,sBAAsB;oBACzB,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,GAAG,EAAE,IAAI,CAAC,CAAA;oBAC3C,MAAK;gBAEP,KAAK,gBAAgB,CAAC;gBACtB,KAAK,mBAAmB;oBACtB,EAAE,GAAG,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,EAAE,KAAK,CAAC,CAAA;oBAC1C,MAAK;gBAEP,KAAK,qBAAqB,CAAC;gBAC3B,KAAK,gBAAgB;oBACnB,EAAE,KAAK,MAAM,CAAC,MAAM,CAAC,IAAI,CAAe,CAAA;oBACxC,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAA;oB
AC7C,MAAK;YACT,CAAC;QACH,CAAC;QAED,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,CAAA;QACzC,IAAI,YAAY;YAAE,MAAM,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAA;QAC1C,MAAM,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;QACzB,IAAI,YAAY;YAAE,MAAM,MAAM,CAAC,EAAE,CAAC,CAAA;QAClC,SAAS,GAAG,IAAI,CAAA;IAClB,CAAC;YAAS,CAAC;QACT,iEAAiE;QACjE,sCAAsC;QACtC,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,qBAAqB;YACrB,IAAI,MAAM,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;gBACrB,MAAM,MAAM,CAAC,MAAM,CAAC,CAAA;gBACpB,MAAM,MAAM,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;YAC1B,CAAC;YACD,oBAAoB;YACpB,MAAM,MAAM,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC;IACH,CAAC;AACH,CAAC,CAAA","sourcesContent":["import { error } from '@vltpkg/error-cause'\nimport { randomBytes } from 'node:crypto'\nimport { lstat, mkdir, rename, writeFile } from 'node:fs/promises'\nimport { basename, dirname, resolve, sep } from 'node:path'\nimport { rimraf } from 'rimraf'\nimport { Header } from 'tar/header'\nimport type { HeaderData } from 'tar/header'\nimport { Pax } from 'tar/pax'\nimport { unzip as unzipCB } from 'node:zlib'\nimport { findTarDir } from './find-tar-dir.ts'\n\nconst unzip = async (input: Buffer) =>\n new Promise<Buffer>(\n (res, rej) =>\n /* c8 ignore start */\n unzipCB(input, (er, result) => (er ? 
rej(er) : res(result))),\n /* c8 ignore stop */\n )\n\nconst exists = async (path: string): Promise<boolean> => {\n try {\n await lstat(path)\n return true\n } catch {\n return false\n }\n}\n\nlet id = 1\nconst tmp = randomBytes(6).toString('hex') + '.'\nconst tmpSuffix = () => tmp + String(id++)\n\nconst checkFs = (\n h: Header,\n tarDir: string | undefined,\n target: string,\n): h is Header & { path: string } => {\n /* c8 ignore start - impossible */\n if (!h.path) return false\n if (!tarDir) return false\n /* c8 ignore stop */\n h.path = h.path.replace(/[\\\\/]+/g, '/')\n\n // packages should always be in a 'package' tarDir in the archive\n if (!h.path.startsWith(tarDir)) return false\n\n // Package root\n const absoluteBasePath = target\n const itemAbsolutePath = resolve(\n target,\n h.path.slice(tarDir.length),\n )\n\n if (!itemAbsolutePath.startsWith(absoluteBasePath)) {\n return false\n }\n return true\n}\n\nconst write = async (\n path: string,\n body: Buffer,\n executable = false,\n) => {\n await mkdirp(dirname(path))\n // if the mode is world-executable, then make it executable\n // this is needed for some packages that have a file that is\n // not a declared bin, but still used as a cli executable.\n await writeFile(path, body, {\n mode: executable ? 0o777 : 0o666,\n })\n}\n\nconst made = new Set<string>()\nconst making = new Map<string, Promise<boolean>>()\nconst mkdirp = async (d: string) => {\n if (!made.has(d)) {\n const m =\n making.get(d) ??\n mkdir(d, { recursive: true, mode: 0o777 }).then(() =>\n making.delete(d),\n )\n making.set(d, m)\n await m\n made.add(d)\n }\n}\n\nexport const unpack = async (\n tarData: Buffer,\n target: string,\n): Promise<void> => {\n const isGzip = tarData[0] === 0x1f && tarData[1] === 0x8b\n await unpackUnzipped(\n isGzip ? 
await unzip(tarData) : tarData,\n target,\n )\n}\n\nconst unpackUnzipped = async (\n buffer: Buffer,\n target: string,\n): Promise<void> => {\n /* c8 ignore start */\n const isGzip = buffer[0] === 0x1f && buffer[1] === 0x8b\n if (isGzip) {\n throw error('still gzipped after unzipping', {\n found: isGzip,\n wanted: false,\n })\n }\n /* c8 ignore stop */\n\n // another real quick gutcheck before we get started\n if (buffer.length % 512 !== 0) {\n throw error('Invalid tarball: length not divisible by 512', {\n found: buffer.length,\n })\n }\n if (buffer.length < 1024) {\n throw error(\n 'Invalid tarball: not terminated by 1024 null bytes',\n { found: buffer.length },\n )\n }\n // make sure the last kb is all zeros\n for (let i = buffer.length - 1024; i < buffer.length; i++) {\n if (buffer[i] !== 0) {\n throw error(\n 'Invalid tarball: not terminated by 1024 null bytes',\n { found: buffer.subarray(i, i + 10) },\n )\n }\n }\n\n const tmp =\n dirname(target) + sep + '.' + basename(target) + '.' + tmpSuffix()\n const og = tmp + '.ORIGINAL'\n await Promise.all([rimraf(tmp), rimraf(og)])\n\n let succeeded = false\n try {\n let tarDir: string | undefined = undefined\n let offset = 0\n let h: Header\n let ex: HeaderData | undefined = undefined\n let gex: HeaderData | undefined = undefined\n while (\n offset < buffer.length &&\n !(h = new Header(buffer, offset, ex, gex)).nullBlock\n ) {\n offset += 512\n ex = undefined\n gex = undefined\n const size = h.size ?? 
0\n const body = buffer.subarray(offset, offset + size)\n // skip invalid headers\n if (!h.cksumValid) continue\n offset += 512 * Math.ceil(size / 512)\n\n // TODO: tarDir might not be named \"package/\"\n // find the first tarDir in the first entry, and use that.\n switch (h.type) {\n case 'File':\n if (!tarDir) tarDir = findTarDir(h.path, tarDir)\n /* c8 ignore next */\n if (!tarDir) continue\n if (!checkFs(h, tarDir, tmp)) continue\n await write(\n resolve(tmp, h.path.substring(tarDir.length)),\n body,\n // if it's world-executable, it's an executable\n // otherwise, make it read-only.\n 1 === ((h.mode ?? 0x666) & 1),\n )\n break\n\n case 'Directory':\n /* c8 ignore next 2 */\n if (!tarDir) tarDir = findTarDir(h.path, tarDir)\n if (!tarDir) continue\n if (!checkFs(h, tarDir, tmp)) continue\n await mkdirp(resolve(tmp, h.path.substring(tarDir.length)))\n break\n\n case 'GlobalExtendedHeader':\n gex = Pax.parse(body.toString(), gex, true)\n break\n\n case 'ExtendedHeader':\n case 'OldExtendedHeader':\n ex = Pax.parse(body.toString(), ex, false)\n break\n\n case 'NextFileHasLongPath':\n case 'OldGnuLongPath':\n ex ??= Object.create(null) as HeaderData\n ex.path = body.toString().replace(/\\0.*/, '')\n break\n }\n }\n\n const targetExists = await exists(target)\n if (targetExists) await rename(target, og)\n await rename(tmp, target)\n if (targetExists) await rimraf(og)\n succeeded = true\n } finally {\n // do not handle error or obscure throw site, just do the cleanup\n // if it didn't complete successfully.\n if (!succeeded) {\n /* c8 ignore start */\n if (await exists(og)) {\n await rimraf(target)\n await rename(og, target)\n }\n /* c8 ignore stop */\n await rimraf(tmp)\n }\n }\n}\n"]} |
| import type { UnpackRequest } from './unpack-request.ts'; | ||
| export type ResponseError = { | ||
| id: number; | ||
| error: unknown; | ||
| }; | ||
| export type ResponseOK = { | ||
| id: number; | ||
| ok: true; | ||
| }; | ||
| export declare const isResponseOK: (o: unknown) => o is ResponseOK; | ||
| /** | ||
| * Basically just a queue of unpack requests, | ||
| * to keep them throttled to a reasonable amount of parallelism | ||
| */ | ||
| export declare class Worker { | ||
| onMessage: (m: ResponseError | ResponseOK) => void; | ||
| constructor(onMessage: (m: ResponseError | ResponseOK) => void); | ||
| process(req: UnpackRequest): Promise<void>; | ||
| } | ||
| //# sourceMappingURL=worker.d.ts.map |
| {"version":3,"file":"worker.d.ts","sourceRoot":"","sources":["../src/worker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AAGxD,MAAM,MAAM,aAAa,GAAG;IAAE,EAAE,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,OAAO,CAAA;CAAE,CAAA;AAC1D,MAAM,MAAM,UAAU,GAAG;IAAE,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,IAAI,CAAA;CAAE,CAAA;AAKjD,eAAO,MAAM,YAAY,MAAO,OAAO,KAAG,CAAC,IAAI,UACQ,CAAA;AAEvD;;;GAGG;AACH,qBAAa,MAAM;IACjB,SAAS,EAAE,CAAC,CAAC,EAAE,aAAa,GAAG,UAAU,KAAK,IAAI,CAAA;gBAEtC,SAAS,EAAE,CAAC,CAAC,EAAE,aAAa,GAAG,UAAU,KAAK,IAAI;IAIxD,OAAO,CAAC,GAAG,EAAE,aAAa;CAWjC"} |
| import { unpack } from "./unpack.js"; | ||
| const isObj = (o) => !!o && typeof o === 'object'; | ||
| export const isResponseOK = (o) => isObj(o) && typeof o.id === 'number' && o.ok === true; | ||
| /** | ||
| * Basically just a queue of unpack requests, | ||
| * to keep them throttled to a reasonable amount of parallelism | ||
| */ | ||
| export class Worker { | ||
| onMessage; | ||
| constructor(onMessage) { | ||
| this.onMessage = onMessage; | ||
| } | ||
| async process(req) { | ||
| const { target, tarData, id } = req; | ||
| try { | ||
| await unpack(tarData, target); | ||
| const m = { id, ok: true }; | ||
| this.onMessage(m); | ||
| } | ||
| catch (error) { | ||
| const m = { id, error }; | ||
| this.onMessage(m); | ||
| } | ||
| } | ||
| } | ||
| //# sourceMappingURL=worker.js.map |
| {"version":3,"file":"worker.js","sourceRoot":"","sources":["../src/worker.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAKpC,MAAM,KAAK,GAAG,CAAC,CAAU,EAAgC,EAAE,CACzD,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,KAAK,QAAQ,CAAA;AAE9B,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,CAAU,EAAmB,EAAE,CAC1D,KAAK,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,EAAE,KAAK,QAAQ,IAAI,CAAC,CAAC,EAAE,KAAK,IAAI,CAAA;AAEvD;;;GAGG;AACH,MAAM,OAAO,MAAM;IACjB,SAAS,CAAyC;IAElD,YAAY,SAAkD;QAC5D,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;IAC5B,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,GAAkB;QAC9B,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,EAAE,EAAE,GAAG,GAAG,CAAA;QACnC,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;YAC7B,MAAM,CAAC,GAAe,EAAE,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,CAAA;YACtC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,CAAC,GAAkB,EAAE,EAAE,EAAE,KAAK,EAAE,CAAA;YACtC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;IACH,CAAC;CACF","sourcesContent":["import type { UnpackRequest } from './unpack-request.ts'\nimport { unpack } from './unpack.ts'\n\nexport type ResponseError = { id: number; error: unknown }\nexport type ResponseOK = { id: number; ok: true }\n\nconst isObj = (o: unknown): o is Record<string, unknown> =>\n !!o && typeof o === 'object'\n\nexport const isResponseOK = (o: unknown): o is ResponseOK =>\n isObj(o) && typeof o.id === 'number' && o.ok === true\n\n/**\n * Basically just a queue of unpack requests,\n * to keep them throttled to a reasonable amount of parallelism\n */\nexport class Worker {\n onMessage: (m: ResponseError | ResponseOK) => void\n\n constructor(onMessage: (m: ResponseError | ResponseOK) => void) {\n this.onMessage = onMessage\n }\n\n async process(req: UnpackRequest) {\n const { target, tarData, id } = req\n try {\n await unpack(tarData, target)\n const m: ResponseOK = { id, ok: true }\n this.onMessage(m)\n } catch (error) {\n const m: ResponseError = { id, error }\n this.onMessage(m)\n }\n }\n}\n"]} |
+8
-28
| { | ||
| "name": "@vltpkg/tar", | ||
| "description": "An extremely limited and very fast tar extractor", | ||
| "version": "1.0.0-rc.10", | ||
| "version": "1.0.0-rc.11", | ||
| "repository": { | ||
@@ -11,21 +11,7 @@ "type": "git", | ||
| "author": "vlt technology inc. <support@vlt.sh> (http://vlt.sh)", | ||
| "tshy": { | ||
| "selfLink": false, | ||
| "liveDev": true, | ||
| "dialects": [ | ||
| "esm" | ||
| ], | ||
| "exports": { | ||
| "./package.json": "./package.json", | ||
| ".": "./src/index.ts", | ||
| "./pool": "./src/pool.ts", | ||
| "./unpack": "./src/unpack.ts", | ||
| "./unpack-request": "./src/unpack-request.ts" | ||
| } | ||
| }, | ||
| "dependencies": { | ||
| "rimraf": "^6.1.2", | ||
| "tar": "^7.5.2", | ||
| "@vltpkg/error-cause": "1.0.0-rc.10", | ||
| "@vltpkg/types": "1.0.0-rc.10" | ||
| "@vltpkg/error-cause": "1.0.0-rc.11", | ||
| "@vltpkg/types": "1.0.0-rc.11" | ||
| }, | ||
@@ -40,3 +26,2 @@ "devDependencies": { | ||
| "tap": "^21.5.0", | ||
| "tshy": "^3.1.0", | ||
| "typedoc": "~0.27.9", | ||
@@ -55,3 +40,3 @@ "typescript": "5.7.3", | ||
| "prettier": "../../.prettierrc.js", | ||
| "module": "./dist/esm/index.js", | ||
| "module": "./dist/index.js", | ||
| "type": "module", | ||
@@ -62,4 +47,3 @@ "exports": { | ||
| "import": { | ||
| "types": "./dist/esm/index.d.ts", | ||
| "default": "./dist/esm/index.js" | ||
| "default": "./dist/index.js" | ||
| } | ||
@@ -69,4 +53,3 @@ }, | ||
| "import": { | ||
| "types": "./dist/esm/pool.d.ts", | ||
| "default": "./dist/esm/pool.js" | ||
| "default": "./dist/pool.js" | ||
| } | ||
@@ -76,4 +59,3 @@ }, | ||
| "import": { | ||
| "types": "./dist/esm/unpack.d.ts", | ||
| "default": "./dist/esm/unpack.js" | ||
| "default": "./dist/unpack.js" | ||
| } | ||
@@ -83,4 +65,3 @@ }, | ||
| "import": { | ||
| "types": "./dist/esm/unpack-request.d.ts", | ||
| "default": "./dist/esm/unpack-request.js" | ||
| "default": "./dist/unpack-request.js" | ||
| } | ||
@@ -102,5 +83,4 @@ } | ||
| "posttest": "tsc --noEmit", | ||
| "tshy": "tshy", | ||
| "typecheck": "tsc --noEmit" | ||
| } | ||
| } |
| export declare const findTarDir: (path: string | undefined, tarDir?: string) => string | undefined; | ||
| //# sourceMappingURL=find-tar-dir.d.ts.map |
| {"version":3,"file":"find-tar-dir.d.ts","sourceRoot":"","sources":["../../src/find-tar-dir.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,UAAU,SACf,MAAM,GAAG,SAAS,WACf,MAAM,uBAkBhB,CAAA"} |
| // usually this will be 'package/', but could also be anything | ||
| // eg, github tarballs are ${user}-${project}-${committish} | ||
| // if it starts with `./` then all entries must as well. | ||
| export const findTarDir = (path, tarDir) => { | ||
| if (tarDir !== undefined) | ||
| return tarDir; | ||
| if (!path) | ||
| return undefined; | ||
| const i = path.indexOf('/', path.startsWith('./') ? 2 : 0); | ||
| if (i === -1) | ||
| return undefined; | ||
| const chomp = path.substring(0, i); | ||
| if (chomp === '.' || | ||
| chomp === '..' || | ||
| chomp === '' || | ||
| chomp === './.' || | ||
| chomp === './..' || | ||
| chomp === './') { | ||
| return undefined; | ||
| } | ||
| return chomp + '/'; | ||
| }; | ||
| //# sourceMappingURL=find-tar-dir.js.map |
| {"version":3,"file":"find-tar-dir.js","sourceRoot":"","sources":["../../src/find-tar-dir.ts"],"names":[],"mappings":"AAAA,8DAA8D;AAC9D,2DAA2D;AAC3D,wDAAwD;AACxD,MAAM,CAAC,MAAM,UAAU,GAAG,CACxB,IAAwB,EACxB,MAAe,EACf,EAAE;IACF,IAAI,MAAM,KAAK,SAAS;QAAE,OAAO,MAAM,CAAA;IACvC,IAAI,CAAC,IAAI;QAAE,OAAO,SAAS,CAAA;IAC3B,MAAM,CAAC,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,IAAI,CAAC,KAAK,CAAC,CAAC;QAAE,OAAO,SAAS,CAAA;IAC9B,MAAM,KAAK,GAAG,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAClC,IACE,KAAK,KAAK,GAAG;QACb,KAAK,KAAK,IAAI;QACd,KAAK,KAAK,EAAE;QACZ,KAAK,KAAK,KAAK;QACf,KAAK,KAAK,MAAM;QAChB,KAAK,KAAK,IAAI,EACd,CAAC;QACD,OAAO,SAAS,CAAA;IAClB,CAAC;IACD,OAAO,KAAK,GAAG,GAAG,CAAA;AACpB,CAAC,CAAA","sourcesContent":["// usually this will be 'package/', but could also be anything\n// eg, github tarballs are ${user}-${project}-${committish}\n// if it starts with `./` then all entries must as well.\nexport const findTarDir = (\n path: string | undefined,\n tarDir?: string,\n) => {\n if (tarDir !== undefined) return tarDir\n if (!path) return undefined\n const i = path.indexOf('/', path.startsWith('./') ? 2 : 0)\n if (i === -1) return undefined\n const chomp = path.substring(0, i)\n if (\n chomp === '.' ||\n chomp === '..' ||\n chomp === '' ||\n chomp === './.' ||\n chomp === './..' ||\n chomp === './'\n ) {\n return undefined\n }\n return chomp + '/'\n}\n"]} |
| export * from './unpack.ts'; | ||
| export * from './pool.ts'; | ||
| export * from './unpack-request.ts'; | ||
| //# sourceMappingURL=index.d.ts.map |
| {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,WAAW,CAAA;AACzB,cAAc,qBAAqB,CAAA"} |
| export * from "./unpack.js"; | ||
| export * from "./pool.js"; | ||
| export * from "./unpack-request.js"; | ||
| //# sourceMappingURL=index.js.map |
| {"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,aAAa,CAAA;AAC3B,cAAc,WAAW,CAAA;AACzB,cAAc,qBAAqB,CAAA","sourcesContent":["export * from './unpack.ts'\nexport * from './pool.ts'\nexport * from './unpack-request.ts'\n"]} |
| { | ||
| "type": "module" | ||
| } |
| import { UnpackRequest } from './unpack-request.ts'; | ||
| import { Worker } from './worker.ts'; | ||
| export * from './worker.ts'; | ||
| /** | ||
| * Automatically expanding/contracting set of workers to maximize parallelism | ||
| * of unpack operations up to 1 less than the number of CPUs (or 1). | ||
| * | ||
| * `pool.unpack(tarData, target)` will perform the unpack operation | ||
| * synchronously, in one of these workers, and returns a promise when the | ||
| * worker has confirmed completion of the task. | ||
| */ | ||
| export declare class Pool { | ||
| #private; | ||
| /** | ||
| * Number of workers to emplly. Defaults to 1 less than the number of | ||
| * CPUs, or 1. | ||
| */ | ||
| jobs: number; | ||
| /** | ||
| * Set of currently active worker threads | ||
| */ | ||
| workers: Set<Worker>; | ||
| /** | ||
| * Queue of requests awaiting an available worker | ||
| */ | ||
| queue: UnpackRequest[]; | ||
| /** | ||
| * Requests that have been assigned to a worker, but have not yet | ||
| * been confirmed completed. | ||
| */ | ||
| pending: Map<number, UnpackRequest>; | ||
| /** | ||
| * Provide the tardata to be unpacked, and the location where it's to be | ||
| * placed. Will create a new worker up to the `jobs` value, and then start | ||
| * pushing in the queue for workers to pick up as they become available. | ||
| * | ||
| * Returned promise resolves when the provided tarball has been extracted. | ||
| */ | ||
| unpack(tarData: Buffer, target: string): Promise<void>; | ||
| } | ||
| //# sourceMappingURL=pool.d.ts.map |
| {"version":3,"file":"pool.d.ts","sourceRoot":"","sources":["../../src/pool.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAgB,MAAM,EAAE,MAAM,aAAa,CAAA;AAGlD,cAAc,aAAa,CAAA;AAE3B;;;;;;;GAOG;AACH,qBAAa,IAAI;;IACf;;;OAGG;IAEH,IAAI,EAAE,MAAM,CAAmD;IAC/D;;OAEG;IACH,OAAO,cAAoB;IAC3B;;OAEG;IACH,KAAK,EAAE,aAAa,EAAE,CAAK;IAC3B;;;OAGG;IACH,OAAO,6BAAmC;IA0C1C;;;;;;OAMG;IACG,MAAM,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;CAU7C"} |
| import { error } from '@vltpkg/error-cause'; | ||
| import { asError } from '@vltpkg/types'; | ||
| import os from 'node:os'; | ||
| import { UnpackRequest } from "./unpack-request.js"; | ||
| import { isResponseOK, Worker } from "./worker.js"; | ||
| export * from "./worker.js"; | ||
| /** | ||
| * Automatically expanding/contracting set of workers to maximize parallelism | ||
| * of unpack operations up to 1 less than the number of CPUs (or 1). | ||
| * | ||
| * `pool.unpack(tarData, target)` will perform the unpack operation | ||
| * synchronously, in one of these workers, and returns a promise when the | ||
| * worker has confirmed completion of the task. | ||
| */ | ||
| export class Pool { | ||
| /** | ||
| * Number of workers to emplly. Defaults to 1 less than the number of | ||
| * CPUs, or 1. | ||
| */ | ||
| /* c8 ignore next */ | ||
| jobs = 8 * (Math.max(os.availableParallelism(), 2) - 1); | ||
| /** | ||
| * Set of currently active worker threads | ||
| */ | ||
| workers = new Set(); | ||
| /** | ||
| * Queue of requests awaiting an available worker | ||
| */ | ||
| queue = []; | ||
| /** | ||
| * Requests that have been assigned to a worker, but have not yet | ||
| * been confirmed completed. | ||
| */ | ||
| pending = new Map(); | ||
| // handle a message from the worker | ||
| #onMessage(w, m) { | ||
| const { id } = m; | ||
| // a request has been met or failed, report and either | ||
| // pick up the next item in the queue, or terminate worker | ||
| const ur = this.pending.get(id); | ||
| /* c8 ignore next */ | ||
| if (!ur) | ||
| return; | ||
| if (isResponseOK(m)) { | ||
| ur.resolve(); | ||
| /* c8 ignore start - nearly impossible in normal circumstances */ | ||
| } | ||
| else { | ||
| ur.reject(error(asError(m.error, 'failed without error message').message, { | ||
| found: m, | ||
| cause: m.error, | ||
| })); | ||
| } | ||
| /* c8 ignore stop */ | ||
| const next = this.queue.shift(); | ||
| if (!next) { | ||
| this.workers.delete(w); | ||
| } | ||
| else { | ||
| void w.process(next); | ||
| } | ||
| } | ||
| // create a new worker | ||
| #createWorker(req) { | ||
| const w = new Worker((m) => this.#onMessage(w, m)); | ||
| this.workers.add(w); | ||
| void w.process(req); | ||
| } | ||
| /** | ||
| * Provide the tardata to be unpacked, and the location where it's to be | ||
| * placed. Will create a new worker up to the `jobs` value, and then start | ||
| * pushing in the queue for workers to pick up as they become available. | ||
| * | ||
| * Returned promise resolves when the provided tarball has been extracted. | ||
| */ | ||
| async unpack(tarData, target) { | ||
| const ur = new UnpackRequest(tarData, target); | ||
| this.pending.set(ur.id, ur); | ||
| if (this.workers.size < this.jobs) { | ||
| this.#createWorker(ur); | ||
| } | ||
| else { | ||
| this.queue.push(ur); | ||
| } | ||
| return ur.promise; | ||
| } | ||
| } | ||
| //# sourceMappingURL=pool.js.map |
| {"version":3,"file":"pool.js","sourceRoot":"","sources":["../../src/pool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAA;AAC3C,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAA;AACvC,OAAO,EAAE,MAAM,SAAS,CAAA;AACxB,OAAO,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AACnD,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAGlD,cAAc,aAAa,CAAA;AAE3B;;;;;;;GAOG;AACH,MAAM,OAAO,IAAI;IACf;;;OAGG;IACH,oBAAoB;IACpB,IAAI,GAAW,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,oBAAoB,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;IAC/D;;OAEG;IACH,OAAO,GAAG,IAAI,GAAG,EAAU,CAAA;IAC3B;;OAEG;IACH,KAAK,GAAoB,EAAE,CAAA;IAC3B;;;OAGG;IACH,OAAO,GAAG,IAAI,GAAG,EAAyB,CAAA;IAE1C,mCAAmC;IACnC,UAAU,CAAC,CAAS,EAAE,CAA6B;QACjD,MAAM,EAAE,EAAE,EAAE,GAAG,CAAC,CAAA;QAChB,sDAAsD;QACtD,0DAA0D;QAC1D,MAAM,EAAE,GAAG,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAA;QAC/B,oBAAoB;QACpB,IAAI,CAAC,EAAE;YAAE,OAAM;QACf,IAAI,YAAY,CAAC,CAAC,CAAC,EAAE,CAAC;YACpB,EAAE,CAAC,OAAO,EAAE,CAAA;YACZ,iEAAiE;QACnE,CAAC;aAAM,CAAC;YACN,EAAE,CAAC,MAAM,CACP,KAAK,CACH,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,8BAA8B,CAAC,CAAC,OAAO,EACxD;gBACE,KAAK,EAAE,CAAC;gBACR,KAAK,EAAE,CAAC,CAAC,KAAK;aACf,CACF,CACF,CAAA;QACH,CAAC;QACD,oBAAoB;QACpB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,EAAE,CAAA;QAC/B,IAAI,CAAC,IAAI,EAAE,CAAC;YACV,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACxB,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAA;QACtB,CAAC;IACH,CAAC;IAED,sBAAsB;IACtB,aAAa,CAAC,GAAkB;QAC9B,MAAM,CAAC,GAAW,IAAI,MAAM,CAAC,CAAC,CAA6B,EAAE,EAAE,CAC7D,IAAI,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,CAAC,CACtB,CAAA;QACD,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;QACnB,KAAK,CAAC,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;IACrB,CAAC;IAED;;;;;;OAMG;IACH,KAAK,CAAC,MAAM,CAAC,OAAe,EAAE,MAAc;QAC1C,MAAM,EAAE,GAAG,IAAI,aAAa,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;QAC7C,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;QAC3B,IAAI,IAAI,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,EAAE,CAAC;YAClC,IAAI,CAAC,aAAa,CAAC,EAAE,CAAC,CAAA;QACxB,CAAC;aAAM,CAAC;YACN,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,EAAE,CAAC,CAAA;QACrB,CA
AC;QACD,OAAO,EAAE,CAAC,OAAO,CAAA;IACnB,CAAC;CACF","sourcesContent":["import { error } from '@vltpkg/error-cause'\nimport { asError } from '@vltpkg/types'\nimport os from 'node:os'\nimport { UnpackRequest } from './unpack-request.ts'\nimport { isResponseOK, Worker } from './worker.ts'\nimport type { ResponseError, ResponseOK } from './worker.ts'\n\nexport * from './worker.ts'\n\n/**\n * Automatically expanding/contracting set of workers to maximize parallelism\n * of unpack operations up to 1 less than the number of CPUs (or 1).\n *\n * `pool.unpack(tarData, target)` will perform the unpack operation\n * synchronously, in one of these workers, and returns a promise when the\n * worker has confirmed completion of the task.\n */\nexport class Pool {\n /**\n * Number of workers to emplly. Defaults to 1 less than the number of\n * CPUs, or 1.\n */\n /* c8 ignore next */\n jobs: number = 8 * (Math.max(os.availableParallelism(), 2) - 1)\n /**\n * Set of currently active worker threads\n */\n workers = new Set<Worker>()\n /**\n * Queue of requests awaiting an available worker\n */\n queue: UnpackRequest[] = []\n /**\n * Requests that have been assigned to a worker, but have not yet\n * been confirmed completed.\n */\n pending = new Map<number, UnpackRequest>()\n\n // handle a message from the worker\n #onMessage(w: Worker, m: ResponseError | ResponseOK) {\n const { id } = m\n // a request has been met or failed, report and either\n // pick up the next item in the queue, or terminate worker\n const ur = this.pending.get(id)\n /* c8 ignore next */\n if (!ur) return\n if (isResponseOK(m)) {\n ur.resolve()\n /* c8 ignore start - nearly impossible in normal circumstances */\n } else {\n ur.reject(\n error(\n asError(m.error, 'failed without error message').message,\n {\n found: m,\n cause: m.error,\n },\n ),\n )\n }\n /* c8 ignore stop */\n const next = this.queue.shift()\n if (!next) {\n this.workers.delete(w)\n } else {\n void w.process(next)\n }\n }\n\n // create a new 
worker\n #createWorker(req: UnpackRequest) {\n const w: Worker = new Worker((m: ResponseError | ResponseOK) =>\n this.#onMessage(w, m),\n )\n this.workers.add(w)\n void w.process(req)\n }\n\n /**\n * Provide the tardata to be unpacked, and the location where it's to be\n * placed. Will create a new worker up to the `jobs` value, and then start\n * pushing in the queue for workers to pick up as they become available.\n *\n * Returned promise resolves when the provided tarball has been extracted.\n */\n async unpack(tarData: Buffer, target: string) {\n const ur = new UnpackRequest(tarData, target)\n this.pending.set(ur.id, ur)\n if (this.workers.size < this.jobs) {\n this.#createWorker(ur)\n } else {\n this.queue.push(ur)\n }\n return ur.promise\n }\n}\n"]} |
| export declare class UnpackRequest { | ||
| id: number; | ||
| tarData: Buffer; | ||
| target: string; | ||
| resolve: () => void; | ||
| reject: (reason?: any) => void; | ||
| promise: Promise<void>; | ||
| constructor(tarData: Buffer, target: string); | ||
| } | ||
| //# sourceMappingURL=unpack-request.d.ts.map |
| {"version":3,"file":"unpack-request.d.ts","sourceRoot":"","sources":["../../src/unpack-request.ts"],"names":[],"mappings":"AACA,qBAAa,aAAa;IACxB,EAAE,EAAE,MAAM,CAAO;IACjB,OAAO,EAAE,MAAM,CAAA;IACf,MAAM,EAAE,MAAM,CAAA;IACd,OAAO,EAAG,MAAM,IAAI,CAAA;IACpB,MAAM,EAAG,CAAC,MAAM,CAAC,EAAE,GAAG,KAAK,IAAI,CAAA;IAC/B,OAAO,gBAGL;gBACU,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;CAI5C"} |
| let ID = 1; | ||
| export class UnpackRequest { | ||
| id = ID++; | ||
| tarData; | ||
| target; | ||
| resolve; | ||
| reject; | ||
| promise = new Promise((res, rej) => { | ||
| this.resolve = res; | ||
| this.reject = rej; | ||
| }); | ||
| constructor(tarData, target) { | ||
| this.tarData = tarData; | ||
| this.target = target; | ||
| } | ||
| } | ||
| //# sourceMappingURL=unpack-request.js.map |
| {"version":3,"file":"unpack-request.js","sourceRoot":"","sources":["../../src/unpack-request.ts"],"names":[],"mappings":"AAAA,IAAI,EAAE,GAAG,CAAC,CAAA;AACV,MAAM,OAAO,aAAa;IACxB,EAAE,GAAW,EAAE,EAAE,CAAA;IACjB,OAAO,CAAQ;IACf,MAAM,CAAQ;IACd,OAAO,CAAa;IACpB,MAAM,CAAyB;IAC/B,OAAO,GAAG,IAAI,OAAO,CAAO,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;QACvC,IAAI,CAAC,OAAO,GAAG,GAAG,CAAA;QAClB,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;IACnB,CAAC,CAAC,CAAA;IACF,YAAY,OAAe,EAAE,MAAc;QACzC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAA;QACtB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;CACF","sourcesContent":["let ID = 1\nexport class UnpackRequest {\n id: number = ID++\n tarData: Buffer\n target: string\n resolve!: () => void\n reject!: (reason?: any) => void\n promise = new Promise<void>((res, rej) => {\n this.resolve = res\n this.reject = rej\n })\n constructor(tarData: Buffer, target: string) {\n this.tarData = tarData\n this.target = target\n }\n}\n"]} |
| export declare const unpack: (tarData: Buffer, target: string) => Promise<void>; | ||
| //# sourceMappingURL=unpack.d.ts.map |
| {"version":3,"file":"unpack.d.ts","sourceRoot":"","sources":["../../src/unpack.ts"],"names":[],"mappings":"AAwFA,eAAO,MAAM,MAAM,YACR,MAAM,UACP,MAAM,KACb,OAAO,CAAC,IAAI,CAMd,CAAA"} |
| import { error } from '@vltpkg/error-cause'; | ||
| import { randomBytes } from 'node:crypto'; | ||
| import { lstat, mkdir, rename, writeFile } from 'node:fs/promises'; | ||
| import { basename, dirname, resolve, sep } from 'node:path'; | ||
| import { rimraf } from 'rimraf'; | ||
| import { Header } from 'tar/header'; | ||
| import { Pax } from 'tar/pax'; | ||
| import { unzip as unzipCB } from 'node:zlib'; | ||
| import { findTarDir } from "./find-tar-dir.js"; | ||
| const unzip = async (input) => new Promise((res, rej) => | ||
| /* c8 ignore start */ | ||
| unzipCB(input, (er, result) => (er ? rej(er) : res(result)))); | ||
| const exists = async (path) => { | ||
| try { | ||
| await lstat(path); | ||
| return true; | ||
| } | ||
| catch { | ||
| return false; | ||
| } | ||
| }; | ||
| let id = 1; | ||
| const tmp = randomBytes(6).toString('hex') + '.'; | ||
| const tmpSuffix = () => tmp + String(id++); | ||
| const checkFs = (h, tarDir, target) => { | ||
| /* c8 ignore start - impossible */ | ||
| if (!h.path) | ||
| return false; | ||
| if (!tarDir) | ||
| return false; | ||
| /* c8 ignore stop */ | ||
| h.path = h.path.replace(/[\\/]+/g, '/'); | ||
| // packages should always be in a 'package' tarDir in the archive | ||
| if (!h.path.startsWith(tarDir)) | ||
| return false; | ||
| // Package root | ||
| const absoluteBasePath = target; | ||
| const itemAbsolutePath = resolve(target, h.path.slice(tarDir.length)); | ||
| if (!itemAbsolutePath.startsWith(absoluteBasePath)) { | ||
| return false; | ||
| } | ||
| return true; | ||
| }; | ||
| const write = async (path, body, executable = false) => { | ||
| await mkdirp(dirname(path)); | ||
| // if the mode is world-executable, then make it executable | ||
| // this is needed for some packages that have a file that is | ||
| // not a declared bin, but still used as a cli executable. | ||
| await writeFile(path, body, { | ||
| mode: executable ? 0o777 : 0o666, | ||
| }); | ||
| }; | ||
| const made = new Set(); | ||
| const making = new Map(); | ||
| const mkdirp = async (d) => { | ||
| if (!made.has(d)) { | ||
| const m = making.get(d) ?? | ||
| mkdir(d, { recursive: true, mode: 0o777 }).then(() => making.delete(d)); | ||
| making.set(d, m); | ||
| await m; | ||
| made.add(d); | ||
| } | ||
| }; | ||
| export const unpack = async (tarData, target) => { | ||
| const isGzip = tarData[0] === 0x1f && tarData[1] === 0x8b; | ||
| await unpackUnzipped(isGzip ? await unzip(tarData) : tarData, target); | ||
| }; | ||
| const unpackUnzipped = async (buffer, target) => { | ||
| /* c8 ignore start */ | ||
| const isGzip = buffer[0] === 0x1f && buffer[1] === 0x8b; | ||
| if (isGzip) { | ||
| throw error('still gzipped after unzipping', { | ||
| found: isGzip, | ||
| wanted: false, | ||
| }); | ||
| } | ||
| /* c8 ignore stop */ | ||
| // another real quick gutcheck before we get started | ||
| if (buffer.length % 512 !== 0) { | ||
| throw error('Invalid tarball: length not divisible by 512', { | ||
| found: buffer.length, | ||
| }); | ||
| } | ||
| if (buffer.length < 1024) { | ||
| throw error('Invalid tarball: not terminated by 1024 null bytes', { found: buffer.length }); | ||
| } | ||
| // make sure the last kb is all zeros | ||
| for (let i = buffer.length - 1024; i < buffer.length; i++) { | ||
| if (buffer[i] !== 0) { | ||
| throw error('Invalid tarball: not terminated by 1024 null bytes', { found: buffer.subarray(i, i + 10) }); | ||
| } | ||
| } | ||
| const tmp = dirname(target) + sep + '.' + basename(target) + '.' + tmpSuffix(); | ||
| const og = tmp + '.ORIGINAL'; | ||
| await Promise.all([rimraf(tmp), rimraf(og)]); | ||
| let succeeded = false; | ||
| try { | ||
| let tarDir = undefined; | ||
| let offset = 0; | ||
| let h; | ||
| let ex = undefined; | ||
| let gex = undefined; | ||
| while (offset < buffer.length && | ||
| !(h = new Header(buffer, offset, ex, gex)).nullBlock) { | ||
| offset += 512; | ||
| ex = undefined; | ||
| gex = undefined; | ||
| const size = h.size ?? 0; | ||
| const body = buffer.subarray(offset, offset + size); | ||
| // skip invalid headers | ||
| if (!h.cksumValid) | ||
| continue; | ||
| offset += 512 * Math.ceil(size / 512); | ||
| // TODO: tarDir might not be named "package/" | ||
| // find the first tarDir in the first entry, and use that. | ||
| switch (h.type) { | ||
| case 'File': | ||
| if (!tarDir) | ||
| tarDir = findTarDir(h.path, tarDir); | ||
| /* c8 ignore next */ | ||
| if (!tarDir) | ||
| continue; | ||
| if (!checkFs(h, tarDir, tmp)) | ||
| continue; | ||
| await write(resolve(tmp, h.path.substring(tarDir.length)), body, | ||
| // if it's world-executable, it's an executable | ||
| // otherwise, make it read-only. | ||
| 1 === ((h.mode ?? 0x666) & 1)); | ||
| break; | ||
| case 'Directory': | ||
| /* c8 ignore next 2 */ | ||
| if (!tarDir) | ||
| tarDir = findTarDir(h.path, tarDir); | ||
| if (!tarDir) | ||
| continue; | ||
| if (!checkFs(h, tarDir, tmp)) | ||
| continue; | ||
| await mkdirp(resolve(tmp, h.path.substring(tarDir.length))); | ||
| break; | ||
| case 'GlobalExtendedHeader': | ||
| gex = Pax.parse(body.toString(), gex, true); | ||
| break; | ||
| case 'ExtendedHeader': | ||
| case 'OldExtendedHeader': | ||
| ex = Pax.parse(body.toString(), ex, false); | ||
| break; | ||
| case 'NextFileHasLongPath': | ||
| case 'OldGnuLongPath': | ||
| ex ??= Object.create(null); | ||
| ex.path = body.toString().replace(/\0.*/, ''); | ||
| break; | ||
| } | ||
| } | ||
| const targetExists = await exists(target); | ||
| if (targetExists) | ||
| await rename(target, og); | ||
| await rename(tmp, target); | ||
| if (targetExists) | ||
| await rimraf(og); | ||
| succeeded = true; | ||
| } | ||
| finally { | ||
| // do not handle error or obscure throw site, just do the cleanup | ||
| // if it didn't complete successfully. | ||
| if (!succeeded) { | ||
| /* c8 ignore start */ | ||
| if (await exists(og)) { | ||
| await rimraf(target); | ||
| await rename(og, target); | ||
| } | ||
| /* c8 ignore stop */ | ||
| await rimraf(tmp); | ||
| } | ||
| } | ||
| }; | ||
| //# sourceMappingURL=unpack.js.map |
| {"version":3,"file":"unpack.js","sourceRoot":"","sources":["../../src/unpack.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,qBAAqB,CAAA;AAC3C,OAAO,EAAE,WAAW,EAAE,MAAM,aAAa,CAAA;AACzC,OAAO,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAA;AAClE,OAAO,EAAE,QAAQ,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,EAAE,MAAM,WAAW,CAAA;AAC3D,OAAO,EAAE,MAAM,EAAE,MAAM,QAAQ,CAAA;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAEnC,OAAO,EAAE,GAAG,EAAE,MAAM,SAAS,CAAA;AAC7B,OAAO,EAAE,KAAK,IAAI,OAAO,EAAE,MAAM,WAAW,CAAA;AAC5C,OAAO,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAA;AAE9C,MAAM,KAAK,GAAG,KAAK,EAAE,KAAa,EAAE,EAAE,CACpC,IAAI,OAAO,CACT,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;AACX,qBAAqB;AACrB,OAAO,CAAC,KAAK,EAAE,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAE/D,CAAA;AAEH,MAAM,MAAM,GAAG,KAAK,EAAE,IAAY,EAAoB,EAAE;IACtD,IAAI,CAAC;QACH,MAAM,KAAK,CAAC,IAAI,CAAC,CAAA;QACjB,OAAO,IAAI,CAAA;IACb,CAAC;IAAC,MAAM,CAAC;QACP,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,IAAI,EAAE,GAAG,CAAC,CAAA;AACV,MAAM,GAAG,GAAG,WAAW,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,GAAG,CAAA;AAChD,MAAM,SAAS,GAAG,GAAG,EAAE,CAAC,GAAG,GAAG,MAAM,CAAC,EAAE,EAAE,CAAC,CAAA;AAE1C,MAAM,OAAO,GAAG,CACd,CAAS,EACT,MAA0B,EAC1B,MAAc,EACkB,EAAE;IAClC,kCAAkC;IAClC,IAAI,CAAC,CAAC,CAAC,IAAI;QAAE,OAAO,KAAK,CAAA;IACzB,IAAI,CAAC,MAAM;QAAE,OAAO,KAAK,CAAA;IACzB,oBAAoB;IACpB,CAAC,CAAC,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;IAEvC,iEAAiE;IACjE,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,MAAM,CAAC;QAAE,OAAO,KAAK,CAAA;IAE5C,eAAe;IACf,MAAM,gBAAgB,GAAG,MAAM,CAAA;IAC/B,MAAM,gBAAgB,GAAG,OAAO,CAC9B,MAAM,EACN,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAC5B,CAAA;IAED,IAAI,CAAC,gBAAgB,CAAC,UAAU,CAAC,gBAAgB,CAAC,EAAE,CAAC;QACnD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,KAAK,EACjB,IAAY,EACZ,IAAY,EACZ,UAAU,GAAG,KAAK,EAClB,EAAE;IACF,MAAM,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3B,2DAA2D;IAC3D,4DAA4D;IAC5D,0DAA0D;IAC1D,MAAM,SA
AS,CAAC,IAAI,EAAE,IAAI,EAAE;QAC1B,IAAI,EAAE,UAAU,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK;KACjC,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,IAAI,GAAG,EAAU,CAAA;AAC9B,MAAM,MAAM,GAAG,IAAI,GAAG,EAA4B,CAAA;AAClD,MAAM,MAAM,GAAG,KAAK,EAAE,CAAS,EAAE,EAAE;IACjC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACjB,MAAM,CAAC,GACL,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;YACb,KAAK,CAAC,CAAC,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,CACnD,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CACjB,CAAA;QACH,MAAM,CAAC,GAAG,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QAChB,MAAM,CAAC,CAAA;QACP,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,CAAA;IACb,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,KAAK,EACzB,OAAe,EACf,MAAc,EACC,EAAE;IACjB,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,OAAO,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACzD,MAAM,cAAc,CAClB,MAAM,CAAC,CAAC,CAAC,MAAM,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,EACvC,MAAM,CACP,CAAA;AACH,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,MAAc,EACd,MAAc,EACC,EAAE;IACjB,qBAAqB;IACrB,MAAM,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,IAAI,CAAA;IACvD,IAAI,MAAM,EAAE,CAAC;QACX,MAAM,KAAK,CAAC,+BAA+B,EAAE;YAC3C,KAAK,EAAE,MAAM;YACb,MAAM,EAAE,KAAK;SACd,CAAC,CAAA;IACJ,CAAC;IACD,oBAAoB;IAEpB,oDAAoD;IACpD,IAAI,MAAM,CAAC,MAAM,GAAG,GAAG,KAAK,CAAC,EAAE,CAAC;QAC9B,MAAM,KAAK,CAAC,8CAA8C,EAAE;YAC1D,KAAK,EAAE,MAAM,CAAC,MAAM;SACrB,CAAC,CAAA;IACJ,CAAC;IACD,IAAI,MAAM,CAAC,MAAM,GAAG,IAAI,EAAE,CAAC;QACzB,MAAM,KAAK,CACT,oDAAoD,EACpD,EAAE,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,CACzB,CAAA;IACH,CAAC;IACD,qCAAqC;IACrC,KAAK,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,IAAI,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE,CAAC;QAC1D,IAAI,MAAM,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC;YACpB,MAAM,KAAK,CACT,oDAAoD,EACpD,EAAE,KAAK,EAAE,MAAM,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,EAAE,CACtC,CAAA;QACH,CAAC;IACH,CAAC;IAED,MAAM,GAAG,GACP,OAAO,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,GAAG,GAAG,SAAS,EAAE,CAAA;IACpE,MAAM,EAAE,GAAG,GAAG,GAAG,WAAW,CAAA;IAC5B,MAAM,OAAO,CAAC,GAAG,CAAC,C
AAC,MAAM,CAAC,GAAG,CAAC,EAAE,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;IAE5C,IAAI,SAAS,GAAG,KAAK,CAAA;IACrB,IAAI,CAAC;QACH,IAAI,MAAM,GAAuB,SAAS,CAAA;QAC1C,IAAI,MAAM,GAAG,CAAC,CAAA;QACd,IAAI,CAAS,CAAA;QACb,IAAI,EAAE,GAA2B,SAAS,CAAA;QAC1C,IAAI,GAAG,GAA2B,SAAS,CAAA;QAC3C,OACE,MAAM,GAAG,MAAM,CAAC,MAAM;YACtB,CAAC,CAAC,CAAC,GAAG,IAAI,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,EAAE,EAAE,GAAG,CAAC,CAAC,CAAC,SAAS,EACpD,CAAC;YACD,MAAM,IAAI,GAAG,CAAA;YACb,EAAE,GAAG,SAAS,CAAA;YACd,GAAG,GAAG,SAAS,CAAA;YACf,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,IAAI,CAAC,CAAA;YACxB,MAAM,IAAI,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC,CAAA;YACnD,uBAAuB;YACvB,IAAI,CAAC,CAAC,CAAC,UAAU;gBAAE,SAAQ;YAC3B,MAAM,IAAI,GAAG,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,GAAG,GAAG,CAAC,CAAA;YAErC,6CAA6C;YAC7C,0DAA0D;YAC1D,QAAQ,CAAC,CAAC,IAAI,EAAE,CAAC;gBACf,KAAK,MAAM;oBACT,IAAI,CAAC,MAAM;wBAAE,MAAM,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;oBAChD,oBAAoB;oBACpB,IAAI,CAAC,MAAM;wBAAE,SAAQ;oBACrB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC;wBAAE,SAAQ;oBACtC,MAAM,KAAK,CACT,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,EAC7C,IAAI;oBACJ,+CAA+C;oBAC/C,gCAAgC;oBAChC,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAC9B,CAAA;oBACD,MAAK;gBAEP,KAAK,WAAW;oBACd,sBAAsB;oBACtB,IAAI,CAAC,MAAM;wBAAE,MAAM,GAAG,UAAU,CAAC,CAAC,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;oBAChD,IAAI,CAAC,MAAM;wBAAE,SAAQ;oBACrB,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC;wBAAE,SAAQ;oBACtC,MAAM,MAAM,CAAC,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;oBAC3D,MAAK;gBAEP,KAAK,sBAAsB;oBACzB,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,GAAG,EAAE,IAAI,CAAC,CAAA;oBAC3C,MAAK;gBAEP,KAAK,gBAAgB,CAAC;gBACtB,KAAK,mBAAmB;oBACtB,EAAE,GAAG,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,EAAE,EAAE,KAAK,CAAC,CAAA;oBAC1C,MAAK;gBAEP,KAAK,qBAAqB,CAAC;gBAC3B,KAAK,gBAAgB;oBACnB,EAAE,KAAK,MAAM,CAAC,MAAM,CAAC,IAAI,CAAe,CAAA;oBACxC,EAAE,CAAC,IAAI,GAAG,IAAI,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC,CAAA
;oBAC7C,MAAK;YACT,CAAC;QACH,CAAC;QAED,MAAM,YAAY,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,CAAA;QACzC,IAAI,YAAY;YAAE,MAAM,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAA;QAC1C,MAAM,MAAM,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;QACzB,IAAI,YAAY;YAAE,MAAM,MAAM,CAAC,EAAE,CAAC,CAAA;QAClC,SAAS,GAAG,IAAI,CAAA;IAClB,CAAC;YAAS,CAAC;QACT,iEAAiE;QACjE,sCAAsC;QACtC,IAAI,CAAC,SAAS,EAAE,CAAC;YACf,qBAAqB;YACrB,IAAI,MAAM,MAAM,CAAC,EAAE,CAAC,EAAE,CAAC;gBACrB,MAAM,MAAM,CAAC,MAAM,CAAC,CAAA;gBACpB,MAAM,MAAM,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;YAC1B,CAAC;YACD,oBAAoB;YACpB,MAAM,MAAM,CAAC,GAAG,CAAC,CAAA;QACnB,CAAC;IACH,CAAC;AACH,CAAC,CAAA","sourcesContent":["import { error } from '@vltpkg/error-cause'\nimport { randomBytes } from 'node:crypto'\nimport { lstat, mkdir, rename, writeFile } from 'node:fs/promises'\nimport { basename, dirname, resolve, sep } from 'node:path'\nimport { rimraf } from 'rimraf'\nimport { Header } from 'tar/header'\nimport type { HeaderData } from 'tar/header'\nimport { Pax } from 'tar/pax'\nimport { unzip as unzipCB } from 'node:zlib'\nimport { findTarDir } from './find-tar-dir.ts'\n\nconst unzip = async (input: Buffer) =>\n new Promise<Buffer>(\n (res, rej) =>\n /* c8 ignore start */\n unzipCB(input, (er, result) => (er ? 
rej(er) : res(result))),\n /* c8 ignore stop */\n )\n\nconst exists = async (path: string): Promise<boolean> => {\n try {\n await lstat(path)\n return true\n } catch {\n return false\n }\n}\n\nlet id = 1\nconst tmp = randomBytes(6).toString('hex') + '.'\nconst tmpSuffix = () => tmp + String(id++)\n\nconst checkFs = (\n h: Header,\n tarDir: string | undefined,\n target: string,\n): h is Header & { path: string } => {\n /* c8 ignore start - impossible */\n if (!h.path) return false\n if (!tarDir) return false\n /* c8 ignore stop */\n h.path = h.path.replace(/[\\\\/]+/g, '/')\n\n // packages should always be in a 'package' tarDir in the archive\n if (!h.path.startsWith(tarDir)) return false\n\n // Package root\n const absoluteBasePath = target\n const itemAbsolutePath = resolve(\n target,\n h.path.slice(tarDir.length),\n )\n\n if (!itemAbsolutePath.startsWith(absoluteBasePath)) {\n return false\n }\n return true\n}\n\nconst write = async (\n path: string,\n body: Buffer,\n executable = false,\n) => {\n await mkdirp(dirname(path))\n // if the mode is world-executable, then make it executable\n // this is needed for some packages that have a file that is\n // not a declared bin, but still used as a cli executable.\n await writeFile(path, body, {\n mode: executable ? 0o777 : 0o666,\n })\n}\n\nconst made = new Set<string>()\nconst making = new Map<string, Promise<boolean>>()\nconst mkdirp = async (d: string) => {\n if (!made.has(d)) {\n const m =\n making.get(d) ??\n mkdir(d, { recursive: true, mode: 0o777 }).then(() =>\n making.delete(d),\n )\n making.set(d, m)\n await m\n made.add(d)\n }\n}\n\nexport const unpack = async (\n tarData: Buffer,\n target: string,\n): Promise<void> => {\n const isGzip = tarData[0] === 0x1f && tarData[1] === 0x8b\n await unpackUnzipped(\n isGzip ? 
await unzip(tarData) : tarData,\n target,\n )\n}\n\nconst unpackUnzipped = async (\n buffer: Buffer,\n target: string,\n): Promise<void> => {\n /* c8 ignore start */\n const isGzip = buffer[0] === 0x1f && buffer[1] === 0x8b\n if (isGzip) {\n throw error('still gzipped after unzipping', {\n found: isGzip,\n wanted: false,\n })\n }\n /* c8 ignore stop */\n\n // another real quick gutcheck before we get started\n if (buffer.length % 512 !== 0) {\n throw error('Invalid tarball: length not divisible by 512', {\n found: buffer.length,\n })\n }\n if (buffer.length < 1024) {\n throw error(\n 'Invalid tarball: not terminated by 1024 null bytes',\n { found: buffer.length },\n )\n }\n // make sure the last kb is all zeros\n for (let i = buffer.length - 1024; i < buffer.length; i++) {\n if (buffer[i] !== 0) {\n throw error(\n 'Invalid tarball: not terminated by 1024 null bytes',\n { found: buffer.subarray(i, i + 10) },\n )\n }\n }\n\n const tmp =\n dirname(target) + sep + '.' + basename(target) + '.' + tmpSuffix()\n const og = tmp + '.ORIGINAL'\n await Promise.all([rimraf(tmp), rimraf(og)])\n\n let succeeded = false\n try {\n let tarDir: string | undefined = undefined\n let offset = 0\n let h: Header\n let ex: HeaderData | undefined = undefined\n let gex: HeaderData | undefined = undefined\n while (\n offset < buffer.length &&\n !(h = new Header(buffer, offset, ex, gex)).nullBlock\n ) {\n offset += 512\n ex = undefined\n gex = undefined\n const size = h.size ?? 
0\n const body = buffer.subarray(offset, offset + size)\n // skip invalid headers\n if (!h.cksumValid) continue\n offset += 512 * Math.ceil(size / 512)\n\n // TODO: tarDir might not be named \"package/\"\n // find the first tarDir in the first entry, and use that.\n switch (h.type) {\n case 'File':\n if (!tarDir) tarDir = findTarDir(h.path, tarDir)\n /* c8 ignore next */\n if (!tarDir) continue\n if (!checkFs(h, tarDir, tmp)) continue\n await write(\n resolve(tmp, h.path.substring(tarDir.length)),\n body,\n // if it's world-executable, it's an executable\n // otherwise, make it read-only.\n 1 === ((h.mode ?? 0x666) & 1),\n )\n break\n\n case 'Directory':\n /* c8 ignore next 2 */\n if (!tarDir) tarDir = findTarDir(h.path, tarDir)\n if (!tarDir) continue\n if (!checkFs(h, tarDir, tmp)) continue\n await mkdirp(resolve(tmp, h.path.substring(tarDir.length)))\n break\n\n case 'GlobalExtendedHeader':\n gex = Pax.parse(body.toString(), gex, true)\n break\n\n case 'ExtendedHeader':\n case 'OldExtendedHeader':\n ex = Pax.parse(body.toString(), ex, false)\n break\n\n case 'NextFileHasLongPath':\n case 'OldGnuLongPath':\n ex ??= Object.create(null) as HeaderData\n ex.path = body.toString().replace(/\\0.*/, '')\n break\n }\n }\n\n const targetExists = await exists(target)\n if (targetExists) await rename(target, og)\n await rename(tmp, target)\n if (targetExists) await rimraf(og)\n succeeded = true\n } finally {\n // do not handle error or obscure throw site, just do the cleanup\n // if it didn't complete successfully.\n if (!succeeded) {\n /* c8 ignore start */\n if (await exists(og)) {\n await rimraf(target)\n await rename(og, target)\n }\n /* c8 ignore stop */\n await rimraf(tmp)\n }\n }\n}\n"]} |
| import type { UnpackRequest } from './unpack-request.ts'; | ||
| export type ResponseError = { | ||
| id: number; | ||
| error: unknown; | ||
| }; | ||
| export type ResponseOK = { | ||
| id: number; | ||
| ok: true; | ||
| }; | ||
| export declare const isResponseOK: (o: unknown) => o is ResponseOK; | ||
| /** | ||
| * Basically just a queue of unpack requests, | ||
| * to keep them throttled to a reasonable amount of parallelism | ||
| */ | ||
| export declare class Worker { | ||
| onMessage: (m: ResponseError | ResponseOK) => void; | ||
| constructor(onMessage: (m: ResponseError | ResponseOK) => void); | ||
| process(req: UnpackRequest): Promise<void>; | ||
| } | ||
| //# sourceMappingURL=worker.d.ts.map |
| {"version":3,"file":"worker.d.ts","sourceRoot":"","sources":["../../src/worker.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAA;AAGxD,MAAM,MAAM,aAAa,GAAG;IAAE,EAAE,EAAE,MAAM,CAAC;IAAC,KAAK,EAAE,OAAO,CAAA;CAAE,CAAA;AAC1D,MAAM,MAAM,UAAU,GAAG;IAAE,EAAE,EAAE,MAAM,CAAC;IAAC,EAAE,EAAE,IAAI,CAAA;CAAE,CAAA;AAKjD,eAAO,MAAM,YAAY,MAAO,OAAO,KAAG,CAAC,IAAI,UACQ,CAAA;AAEvD;;;GAGG;AACH,qBAAa,MAAM;IACjB,SAAS,EAAE,CAAC,CAAC,EAAE,aAAa,GAAG,UAAU,KAAK,IAAI,CAAA;gBAEtC,SAAS,EAAE,CAAC,CAAC,EAAE,aAAa,GAAG,UAAU,KAAK,IAAI;IAIxD,OAAO,CAAC,GAAG,EAAE,aAAa;CAWjC"} |
| import { unpack } from "./unpack.js"; | ||
| const isObj = (o) => !!o && typeof o === 'object'; | ||
| export const isResponseOK = (o) => isObj(o) && typeof o.id === 'number' && o.ok === true; | ||
| /** | ||
| * Basically just a queue of unpack requests, | ||
| * to keep them throttled to a reasonable amount of parallelism | ||
| */ | ||
| export class Worker { | ||
| onMessage; | ||
| constructor(onMessage) { | ||
| this.onMessage = onMessage; | ||
| } | ||
| async process(req) { | ||
| const { target, tarData, id } = req; | ||
| try { | ||
| await unpack(tarData, target); | ||
| const m = { id, ok: true }; | ||
| this.onMessage(m); | ||
| } | ||
| catch (error) { | ||
| const m = { id, error }; | ||
| this.onMessage(m); | ||
| } | ||
| } | ||
| } | ||
| //# sourceMappingURL=worker.js.map |
| {"version":3,"file":"worker.js","sourceRoot":"","sources":["../../src/worker.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AAKpC,MAAM,KAAK,GAAG,CAAC,CAAU,EAAgC,EAAE,CACzD,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,KAAK,QAAQ,CAAA;AAE9B,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,CAAU,EAAmB,EAAE,CAC1D,KAAK,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,EAAE,KAAK,QAAQ,IAAI,CAAC,CAAC,EAAE,KAAK,IAAI,CAAA;AAEvD;;;GAGG;AACH,MAAM,OAAO,MAAM;IACjB,SAAS,CAAyC;IAElD,YAAY,SAAkD;QAC5D,IAAI,CAAC,SAAS,GAAG,SAAS,CAAA;IAC5B,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,GAAkB;QAC9B,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,EAAE,EAAE,GAAG,GAAG,CAAA;QACnC,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,OAAO,EAAE,MAAM,CAAC,CAAA;YAC7B,MAAM,CAAC,GAAe,EAAE,EAAE,EAAE,EAAE,EAAE,IAAI,EAAE,CAAA;YACtC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,CAAC,GAAkB,EAAE,EAAE,EAAE,KAAK,EAAE,CAAA;YACtC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,CAAA;QACnB,CAAC;IACH,CAAC;CACF","sourcesContent":["import type { UnpackRequest } from './unpack-request.ts'\nimport { unpack } from './unpack.ts'\n\nexport type ResponseError = { id: number; error: unknown }\nexport type ResponseOK = { id: number; ok: true }\n\nconst isObj = (o: unknown): o is Record<string, unknown> =>\n !!o && typeof o === 'object'\n\nexport const isResponseOK = (o: unknown): o is ResponseOK =>\n isObj(o) && typeof o.id === 'number' && o.ok === true\n\n/**\n * Basically just a queue of unpack requests,\n * to keep them throttled to a reasonable amount of parallelism\n */\nexport class Worker {\n onMessage: (m: ResponseError | ResponseOK) => void\n\n constructor(onMessage: (m: ResponseError | ResponseOK) => void) {\n this.onMessage = onMessage\n }\n\n async process(req: UnpackRequest) {\n const { target, tarData, id } = req\n try {\n await unpack(tarData, target)\n const m: ResponseOK = { id, ok: true }\n this.onMessage(m)\n } catch (error) {\n const m: ResponseError = { id, error }\n this.onMessage(m)\n }\n }\n}\n"]} |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
11
-8.33%45744
-1.32%27
-3.57%1
Infinity%+ Added
+ Added
+ Added
+ Added
+ Added
+ Added
- Removed
- Removed
- Removed
- Removed
- Removed
- Removed
Updated