Socket
Socket
Sign in · Demo · Install

tar-stream

Package Overview
Dependencies
Maintainers
2
Versions
63
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

tar-stream - npm Package Compare versions

Comparing version 3.0.0 to 3.1.0

constants.js

489

extract.js

@@ -1,36 +0,97 @@

const bl = require('bl')
const { Writable, PassThrough } = require('streamx')
const { Writable, Readable, getStreamError } = require('streamx')
const FIFO = require('fast-fifo')
const b4a = require('b4a')
const headers = require('./headers')
const noop = function () {}
const EMPTY = b4a.alloc(0)
// Number of zero-padding bytes needed to round `size` up to the next
// 512-byte tar block boundary (0 when already aligned).
const overflow = function (size) {
  const rem = size & 511
  return rem === 0 ? 0 : 512 - rem
}
class BufferList {
constructor () {
this.buffered = 0
this.shifted = 0
this.queue = new FIFO()
const emptyStream = function (self, offset) {
const s = new Source(self, offset)
s.end()
return s
}
this._offset = 0
}
const mixinPax = function (header, pax) {
if (pax.path) header.name = pax.path
if (pax.linkpath) header.linkname = pax.linkpath
if (pax.size) header.size = parseInt(pax.size, 10)
header.pax = pax
return header
push (buffer) {
this.buffered += buffer.byteLength
this.queue.push(buffer)
}
shiftFirst (size) {
return this._buffered === 0 ? null : this._next(size)
}
shift (size) {
if (size > this.buffered) return null
if (size === 0) return EMPTY
let chunk = this._next(size)
if (size === chunk.byteLength) return chunk // likely case
const chunks = [chunk]
while ((size -= chunk.byteLength) > 0) {
chunk = this._next(size)
chunks.push(chunk)
}
return b4a.concat(chunks)
}
_next (size) {
const buf = this.queue.peek()
const rem = buf.byteLength - this._offset
if (size >= rem) {
const sub = this._offset ? buf.subarray(this._offset, buf.byteLength) : buf
this.queue.shift()
this._offset = 0
this.buffered -= rem
this.shifted += rem
return sub
}
this.buffered -= size
this.shifted += size
return buf.subarray(this._offset, (this._offset += size))
}
}
class Source extends PassThrough {
constructor (self, offset) {
class Source extends Readable {
constructor (self, header, offset) {
super()
this.header = header
this.offset = offset
this._parent = self
this.offset = offset
}
_read (cb) {
if (this._parent._stream === this) {
this._parent._update()
}
cb(null)
}
_predestroy () {
this._parent.destroy()
this._parent.destroy(getStreamError(this))
}
_detach () {
if (this._parent._stream === this) {
this._parent._stream = null
this._parent._missing = overflow(this.header.size)
this._parent._update()
}
}
_destroy (cb) {
this._detach()
cb(null)
}
}

@@ -42,14 +103,13 @@

opts = opts || {}
if (!opts) opts = {}
this._buffer = new BufferList()
this._offset = 0
this._buffer = bl()
this._missing = 0
this._partial = false
this._onparse = noop
this._header = null
this._stream = null
this._overflow = null
this._cb = null
this._missing = 0
this._longHeader = false
this._callback = noop
this._locked = false
this._finished = false
this._pax = null

@@ -59,186 +119,278 @@ this._paxGlobal = null

this._gnuLongLinkPath = null
this._filenameEncoding = opts.filenameEncoding || 'utf-8'
this._allowUnknownFormat = !!opts.allowUnknownFormat
this._unlockBound = this._unlock.bind(this)
}
const self = this
const b = self._buffer
_unlock (err) {
this._locked = false
const oncontinue = function () {
self._continue()
if (err) {
this.destroy(err)
this._continueWrite(err)
return
}
const onunlock = function (err) {
self._locked = false
if (err) return self.destroy(err)
if (!self._stream) oncontinue()
this._update()
}
_consumeHeader () {
if (this._locked) return false
this._offset = this._buffer.shifted
try {
this._header = headers.decode(this._buffer.shift(512), this._filenameEncoding, this._allowUnknownFormat)
} catch (err) {
this._continueWrite(err)
return false
}
const onstreamend = function () {
self._stream = null
const drain = overflow(self._header.size)
if (drain) self._parse(drain, ondrain)
else self._parse(512, onheader)
if (!self._locked) oncontinue()
if (!this._header) return true
switch (this._header.type) {
case 'gnu-long-path':
case 'gnu-long-link-path':
case 'pax-global-header':
case 'pax-header':
this._longHeader = true
this._missing = this._header.size
return true
}
const ondrain = function () {
self._buffer.consume(overflow(self._header.size))
self._parse(512, onheader)
oncontinue()
this._locked = true
this._applyLongHeaders()
if (this._header.size === 0 || this._header.type === 'directory') {
const stream = this._createStream()
stream.push(null)
this.emit('entry', this._header, stream, this._unlockBound)
return true
}
const onpaxglobalheader = function () {
const size = self._header.size
self._paxGlobal = headers.decodePax(b.slice(0, size))
b.consume(size)
onstreamend()
this._stream = this._createStream()
this._missing = this._header.size
this.emit('entry', this._header, this._stream, this._unlockBound)
return true
}
_applyLongHeaders () {
if (this._gnuLongPath) {
this._header.name = this._gnuLongPath
this._gnuLongPath = null
}
const onpaxheader = function () {
const size = self._header.size
self._pax = headers.decodePax(b.slice(0, size))
if (self._paxGlobal) self._pax = Object.assign({}, self._paxGlobal, self._pax)
b.consume(size)
onstreamend()
if (this._gnuLongLinkPath) {
this._header.linkname = this._gnuLongLinkPath
this._gnuLongLinkPath = null
}
const ongnulongpath = function () {
const size = self._header.size
this._gnuLongPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
if (this._pax) {
if (this._pax.path) this._header.name = this._pax.path
if (this._pax.linkpath) this._header.linkname = this._pax.linkpath
if (this._pax.size) this._header.size = parseInt(this._pax.size, 10)
this._header.pax = this._pax
this._pax = null
}
}
const ongnulonglinkpath = function () {
const size = self._header.size
this._gnuLongLinkPath = headers.decodeLongPath(b.slice(0, size), opts.filenameEncoding)
b.consume(size)
onstreamend()
_decodeLongHeader (buf) {
switch (this._header.type) {
case 'gnu-long-path':
this._gnuLongPath = headers.decodeLongPath(buf, this._filenameEncoding)
break
case 'gnu-long-link-path':
this._gnuLongLinkPath = headers.decodeLongPath(buf, this._filenameEncoding)
break
case 'pax-global-header':
this._paxGlobal = headers.decodePax(buf)
break
case 'pax-header':
this._pax = this._paxGlobal === null
? headers.decodePax(buf)
: Object.assign({}, this._paxGlobal, headers.decodePax(buf))
break
}
}
const onheader = function () {
const offset = self._offset
let header
try {
header = self._header = headers.decode(b.slice(0, 512), opts.filenameEncoding, opts.allowUnknownFormat)
} catch (err) {
self.destroy(err)
}
b.consume(512)
_consumeLongHeader () {
this._longHeader = false
this._missing = overflow(this._header.size)
if (!header) {
self._parse(512, onheader)
oncontinue()
return
}
const buf = this._buffer.shift(this._header.size)
if (header.type === 'gnu-long-path') {
self._parse(header.size, ongnulongpath)
oncontinue()
return
}
try {
this._decodeLongHeader(buf)
} catch (err) {
this._continueWrite(err)
return false
}
if (header.type === 'gnu-long-link-path') {
self._parse(header.size, ongnulonglinkpath)
oncontinue()
return
}
return true
}
if (header.type === 'pax-global-header') {
self._parse(header.size, onpaxglobalheader)
oncontinue()
return
}
_consumeStream () {
const buf = this._buffer.shiftFirst(this._missing)
if (buf === null) return false
if (header.type === 'pax-header') {
self._parse(header.size, onpaxheader)
oncontinue()
return
}
this._missing -= buf.byteLength
const drained = this._stream.push(buf)
if (self._gnuLongPath) {
header.name = self._gnuLongPath
self._gnuLongPath = null
}
if (this._missing === 0) {
this._stream.push(null)
if (drained) this._stream._detach()
return drained && this._locked === false
}
if (self._gnuLongLinkPath) {
header.linkname = self._gnuLongLinkPath
self._gnuLongLinkPath = null
}
return drained
}
if (self._pax) {
self._header = header = mixinPax(header, self._pax)
self._pax = null
}
_createStream () {
return new Source(this, this._header, this._offset)
}
self._locked = true
_update () {
while (this._buffer.buffered > 0 && !this.destroying) {
if (this._missing > 0) {
if (this._stream !== null) {
if (this._consumeStream() === false) return
continue
}
if (!header.size || header.type === 'directory') {
self._parse(512, onheader)
self.emit('entry', header, emptyStream(self, offset), onunlock)
return
if (this._longHeader === true) {
if (this._missing > this._buffer.buffered) break
if (this._consumeLongHeader() === false) return false
continue
}
const ignore = this._buffer.shiftFirst(this._missing)
if (ignore !== null) this._missing -= ignore.byteLength
continue
}
self._stream = new Source(self, offset)
self.emit('entry', header, self._stream, onunlock)
self._parse(header.size, onstreamend)
oncontinue()
if (this._buffer.buffered < 512) break
if (this._stream !== null || this._consumeHeader() === false) return
}
this._onheader = onheader
this._parse(512, onheader)
this._continueWrite(null)
}
_parse (size, onparse) {
this._offset += size
this._missing = size
if (onparse === this._onheader) this._partial = false
this._onparse = onparse
_continueWrite (err) {
const cb = this._callback
this._callback = noop
cb(err)
}
_continue () {
const cb = this._cb
this._cb = noop
if (this._overflow) this._write(this._overflow, cb)
else cb()
_write (data, cb) {
this._callback = cb
this._buffer.push(data)
this._update()
}
_write (data, cb) {
const s = this._stream
const b = this._buffer
const missing = this._missing
if (data.byteLength) this._partial = true
_final (cb) {
this._finished = this._missing === 0 && this._buffer.buffered === 0
cb(this._finished ? null : new Error('Unexpected end of data'))
}
// we do not reach end-of-chunk now. just forward it
if (data.byteLength < missing) {
this._missing -= data.byteLength
this._overflow = null
if (s) {
if (s.write(data, cb)) cb()
else s.once('drain', cb)
_predestroy () {
this._continueWrite(null)
}
_destroy (cb) {
if (this._stream) this._stream.destroy(getStreamError(this))
cb(null)
}
[Symbol.asyncIterator] () {
let error = null
let promiseResolve = null
let promiseReject = null
let entryStream = null
let entryCallback = null
const extract = this
this.on('entry', onentry)
this.on('error', (err) => { error = err })
this.on('close', onclose)
return {
[Symbol.asyncIterator] () {
return this
},
next () {
return new Promise(onnext)
},
return () {
return destroy(null)
},
throw (err) {
return destroy(err)
}
}
function consumeCallback (err) {
if (!entryCallback) return
const cb = entryCallback
entryCallback = null
cb(err)
}
function onnext (resolve, reject) {
if (error) {
return reject(error)
}
if (entryStream) {
resolve({ value: entryStream, done: false })
entryStream = null
return
}
b.append(data)
return cb()
promiseResolve = resolve
promiseReject = reject
consumeCallback(null)
if (extract._finished && promiseResolve) {
promiseResolve({ value: undefined, done: true })
promiseResolve = promiseReject = null
}
}
// end-of-chunk. the parser should call cb.
this._cb = cb
this._missing = 0
function onentry (header, stream, callback) {
entryCallback = callback
stream.on('error', noop) // no way around this due to tick sillyness
let overflow = null
if (data.byteLength > missing) {
overflow = data.subarray(missing)
data = data.subarray(0, missing)
if (promiseResolve) {
promiseResolve({ value: stream, done: false })
promiseResolve = promiseReject = null
} else {
entryStream = stream
}
}
if (s) s.end(data)
else b.append(data)
function onclose () {
consumeCallback(error)
if (!promiseResolve) return
if (error) promiseReject(error)
else promiseResolve({ value: undefined, done: true })
promiseResolve = promiseReject = null
}
this._overflow = overflow
this._onparse()
function destroy (err) {
extract.destroy(err)
consumeCallback(err)
return new Promise((resolve, reject) => {
if (extract.destroyed) return resolve({ value: undefined, done: true })
extract.once('close', function () {
if (err) reject(err)
else resolve({ value: undefined, done: true })
})
})
}
}
_final (cb) {
cb(this._partial ? new Error('Unexpected end of data') : null)
}
}

@@ -249,1 +401,8 @@

}
function noop () {}
/**
 * Padding needed to reach the next 512-byte tar block boundary.
 * @param {number} size - byte count of the entry body
 * @returns {number} 0 when aligned, otherwise 512 minus the remainder
 */
function overflow (size) {
  const rem = size & 511
  return rem ? 512 - rem : 0
}

@@ -6,6 +6,6 @@ const b4a = require('b4a')

const ZERO_OFFSET = '0'.charCodeAt(0)
const USTAR_MAGIC = b4a.from('ustar\x00', 'binary')
const USTAR_VER = b4a.from('00', 'binary')
const GNU_MAGIC = b4a.from('ustar\x20', 'binary')
const GNU_VER = b4a.from('\x20\x00', 'binary')
const USTAR_MAGIC = b4a.from([0x75, 0x73, 0x74, 0x61, 0x72, 0x00]) // ustar\x00
const USTAR_VER = b4a.from([ZERO_OFFSET, ZERO_OFFSET])
const GNU_MAGIC = b4a.from([0x75, 0x73, 0x74, 0x61, 0x72, 0x20]) // ustar\x20
const GNU_VER = b4a.from([0x20, 0x00])
const MASK = 0o7777

@@ -15,3 +15,149 @@ const MAGIC_OFFSET = 257

const clamp = function (index, len, defaultValue) {
exports.decodeLongPath = function decodeLongPath (buf, encoding) {
return decodeStr(buf, 0, buf.length, encoding)
}
exports.encodePax = function encodePax (opts) { // TODO: encode more stuff in pax
let result = ''
if (opts.name) result += addLength(' path=' + opts.name + '\n')
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
const pax = opts.pax
if (pax) {
for (const key in pax) {
result += addLength(' ' + key + '=' + pax[key] + '\n')
}
}
return b4a.from(result)
}
exports.decodePax = function decodePax (buf) {
const result = {}
while (buf.length) {
let i = 0
while (i < buf.length && buf[i] !== 32) i++
const len = parseInt(buf.subarray(0, i).toString(), 10)
if (!len) return result
const b = b4a.toString(buf.subarray(i + 1, len - 1))
const keyIndex = b.indexOf('=')
if (keyIndex === -1) return result
result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
buf = buf.subarray(len)
}
return result
}
// Encode a header object into a single 512-byte USTAR header block.
// Returns null when the header cannot be represented (non-ASCII name
// length mismatch, a long name that cannot be split on '/', or an
// overlong prefix/linkname); callers then fall back to pax/gnu entries.
exports.encode = function encode (opts) {
const buf = b4a.alloc(512)
let name = opts.name
let prefix = ''
// Directories (typeflag 5) must carry a trailing slash.
if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
if (b4a.byteLength(name) !== name.length) return null // utf-8
// Move leading path components into the 155-byte ustar prefix field
// until the remaining name fits in the 100-byte name field.
while (b4a.byteLength(name) > 100) {
const i = name.indexOf('/')
if (i === -1) return null
prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
name = name.slice(i + 1)
}
if (b4a.byteLength(name) > 100 || b4a.byteLength(prefix) > 155) return null
if (opts.linkname && b4a.byteLength(opts.linkname) > 100) return null
// Fixed USTAR field offsets follow.
b4a.write(buf, name)
b4a.write(buf, encodeOct(opts.mode & MASK, 6), 100)
b4a.write(buf, encodeOct(opts.uid, 6), 108)
b4a.write(buf, encodeOct(opts.gid, 6), 116)
encodeSize(opts.size, buf, 124)
b4a.write(buf, encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
if (opts.linkname) b4a.write(buf, opts.linkname, 157)
b4a.copy(USTAR_MAGIC, buf, MAGIC_OFFSET)
b4a.copy(USTAR_VER, buf, VERSION_OFFSET)
if (opts.uname) b4a.write(buf, opts.uname, 265)
if (opts.gname) b4a.write(buf, opts.gname, 297)
b4a.write(buf, encodeOct(opts.devmajor || 0, 6), 329)
b4a.write(buf, encodeOct(opts.devminor || 0, 6), 337)
if (prefix) b4a.write(buf, prefix, 345)
// Checksum is written last, over the otherwise fully-populated block.
b4a.write(buf, encodeOct(cksum(buf), 6), 148)
return buf
}
// Decode one 512-byte tar header block into a header object.
// Returns null for an all-zero (end-of-archive) block; throws on a
// checksum mismatch, or on an unknown format unless allowUnknownFormat.
exports.decode = function decode (buf, filenameEncoding, allowUnknownFormat) {
let typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
let name = decodeStr(buf, 0, 100, filenameEncoding)
const mode = decodeOct(buf, 100, 8)
const uid = decodeOct(buf, 108, 8)
const gid = decodeOct(buf, 116, 8)
const size = decodeOct(buf, 124, 12)
const mtime = decodeOct(buf, 136, 12)
const type = toType(typeflag)
const linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
const uname = decodeStr(buf, 265, 32)
const gname = decodeStr(buf, 297, 32)
const devmajor = decodeOct(buf, 329, 8)
const devminor = decodeOct(buf, 337, 8)
const c = cksum(buf)
// checksum is still initial value if header was null.
if (c === 8 * 32) return null
// valid checksum
if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
if (isUSTAR(buf)) {
// ustar (posix) format.
// prepend prefix, if present.
if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
} else if (isGNU(buf)) {
// 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and
// multi-volume tarballs.
} else {
if (!allowUnknownFormat) {
throw new Error('Invalid tar header: unknown format.')
}
}
// to support old tar versions that use trailing / to indicate dirs
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
return {
name,
mode,
uid,
gid,
size,
mtime: new Date(1000 * mtime),
type,
linkname,
uname,
gname,
devmajor,
devminor,
pax: null
}
}
// True when the header block carries the posix/ustar magic bytes.
function isUSTAR (buf) {
  const magic = buf.subarray(MAGIC_OFFSET, MAGIC_OFFSET + 6)
  return b4a.equals(USTAR_MAGIC, magic)
}
// True when the header block carries the GNU magic and version bytes.
function isGNU (buf) {
  if (!b4a.equals(GNU_MAGIC, buf.subarray(MAGIC_OFFSET, MAGIC_OFFSET + 6))) return false
  return b4a.equals(GNU_VER, buf.subarray(VERSION_OFFSET, VERSION_OFFSET + 2))
}
function clamp (index, len, defaultValue) {
if (typeof index !== 'number') return defaultValue

@@ -26,3 +172,3 @@ index = ~~index // Coerce to integer.

const toType = function (flag) {
function toType (flag) {
switch (flag) {

@@ -59,3 +205,3 @@ case 0:

const toTypeflag = function (flag) {
function toTypeflag (flag) {
switch (flag) {

@@ -85,3 +231,3 @@ case 'file':

const indexOf = function (block, num, offset, end) {
function indexOf (block, num, offset, end) {
for (; offset < end; offset++) {

@@ -93,3 +239,3 @@ if (block[offset] === num) return offset

const cksum = function (block) {
function cksum (block) {
let sum = 8 * 32

@@ -101,8 +247,24 @@ for (let i = 0; i < 148; i++) sum += block[i]

const encodeOct = function (val, n) {
function encodeOct (val, n) {
val = val.toString(8)
if (val.length > n) return SEVENS.slice(0, n) + ' '
else return ZEROS.slice(0, n - val.length) + val + ' '
return ZEROS.slice(0, n - val.length) + val + ' '
}
// Write `num` at buf[off..off+11] in GNU base-256 (binary) form:
// a 0x80 marker byte followed by an 11-byte big-endian integer.
function encodeSizeBin (num, buf, off) {
  buf[off] = 0x80
  let rest = num
  for (let i = 11; i >= 1; i--) {
    buf[off + i] = rest & 0xff
    rest = Math.floor(rest / 0x100)
  }
}
// Write an entry size field: plain octal when it fits in 11 digits,
// otherwise fall back to GNU base-256 encoding.
function encodeSize (num, buf, off) {
  const fitsOctal = num.toString(8).length <= 11
  if (fitsOctal) b4a.write(buf, encodeOct(num, 11), off)
  else encodeSizeBin(num, buf, off)
}
/* Copied from the node-tar repo and modified to meet

@@ -139,4 +301,4 @@ * tar-stream coding standard.

const decodeOct = function (val, offset, length) {
val = val.slice(offset, offset + length)
function decodeOct (val, offset, length) {
val = val.subarray(offset, offset + length)
offset = 0

@@ -153,11 +315,11 @@

if (end === offset) return 0
return parseInt(val.slice(offset, end).toString(), 8)
return parseInt(val.subarray(offset, end).toString(), 8)
}
}
const decodeStr = function (val, offset, length, encoding) {
return val.slice(offset, indexOf(val, 0, offset, offset + length)).toString(encoding)
function decodeStr (val, offset, length, encoding) {
return b4a.toString(val.subarray(offset, indexOf(val, 0, offset, offset + length)), encoding)
}
const addLength = function (str) {
function addLength (str) {
const len = b4a.byteLength(str)

@@ -169,138 +331,1 @@ let digits = Math.floor(Math.log(len) / Math.log(10)) + 1

}
exports.decodeLongPath = function (buf, encoding) {
return decodeStr(buf, 0, buf.length, encoding)
}
exports.encodePax = function (opts) { // TODO: encode more stuff in pax
let result = ''
if (opts.name) result += addLength(' path=' + opts.name + '\n')
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
const pax = opts.pax
if (pax) {
for (const key in pax) {
result += addLength(' ' + key + '=' + pax[key] + '\n')
}
}
return b4a.from(result)
}
// Parse a pax extended-header body: "<len> <key>=<value>\n" records,
// where <len> is the decimal byte length of the whole record.
exports.decodePax = function (buf) {
const result = {}
while (buf.length) {
let i = 0
// Scan to the space separating <len> from the key.
while (i < buf.length && buf[i] !== 32) i++
const len = parseInt(buf.slice(0, i).toString(), 10)
if (!len) return result
const b = buf.slice(i + 1, len - 1).toString()
const keyIndex = b.indexOf('=')
// Malformed record (no '='): stop and return what we have so far.
if (keyIndex === -1) return result
result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)
// Advance past this record to the next one.
buf = buf.slice(len)
}
return result
}
// Encode a header object into a single 512-byte USTAR header block.
// Returns null when the header cannot be represented (name length
// mismatch, unsplittable long name, overlong prefix/linkname);
// callers then fall back to pax/gnu entries.
exports.encode = function (opts) {
const buf = b4a.alloc(512)
let name = opts.name
let prefix = ''
// Directories (typeflag 5) must carry a trailing slash.
if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
if (b4a.byteLength(name) !== name.length) return null // utf-8
// Move leading path components into the 155-byte ustar prefix field
// until the remaining name fits in the 100-byte name field.
while (b4a.byteLength(name) > 100) {
const i = name.indexOf('/')
if (i === -1) return null
prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
name = name.slice(i + 1)
}
if (b4a.byteLength(name) > 100 || b4a.byteLength(prefix) > 155) return null
if (opts.linkname && b4a.byteLength(opts.linkname) > 100) return null
// Fixed USTAR field offsets follow.
b4a.write(buf, name)
b4a.write(buf, encodeOct(opts.mode & MASK, 6), 100)
b4a.write(buf, encodeOct(opts.uid, 6), 108)
b4a.write(buf, encodeOct(opts.gid, 6), 116)
b4a.write(buf, encodeOct(opts.size, 11), 124)
b4a.write(buf, encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)
buf[156] = ZERO_OFFSET + toTypeflag(opts.type)
if (opts.linkname) b4a.write(buf, opts.linkname, 157)
b4a.copy(USTAR_MAGIC, buf, MAGIC_OFFSET)
b4a.copy(USTAR_VER, buf, VERSION_OFFSET)
if (opts.uname) b4a.write(buf, opts.uname, 265)
if (opts.gname) b4a.write(buf, opts.gname, 297)
b4a.write(buf, encodeOct(opts.devmajor || 0, 6), 329)
b4a.write(buf, encodeOct(opts.devminor || 0, 6), 337)
if (prefix) b4a.write(buf, prefix, 345)
// Checksum is written last, over the otherwise fully-populated block.
b4a.write(buf, encodeOct(cksum(buf), 6), 148)
return buf
}
// Decode one 512-byte tar header block into a header object.
// Returns null for an all-zero (end-of-archive) block; throws on a
// checksum mismatch, or on an unknown format unless allowUnknownFormat.
exports.decode = function (buf, filenameEncoding, allowUnknownFormat) {
let typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET
let name = decodeStr(buf, 0, 100, filenameEncoding)
const mode = decodeOct(buf, 100, 8)
const uid = decodeOct(buf, 108, 8)
const gid = decodeOct(buf, 116, 8)
const size = decodeOct(buf, 124, 12)
const mtime = decodeOct(buf, 136, 12)
const type = toType(typeflag)
const linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
const uname = decodeStr(buf, 265, 32)
const gname = decodeStr(buf, 297, 32)
const devmajor = decodeOct(buf, 329, 8)
const devminor = decodeOct(buf, 337, 8)
const c = cksum(buf)
// checksum is still initial value if header was null.
if (c === 8 * 32) return null
// valid checksum
if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')
if (USTAR_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0) {
// ustar (posix) format.
// prepend prefix, if present.
if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
} else if (GNU_MAGIC.compare(buf, MAGIC_OFFSET, MAGIC_OFFSET + 6) === 0 &&
GNU_VER.compare(buf, VERSION_OFFSET, VERSION_OFFSET + 2) === 0) {
// 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and
// multi-volume tarballs.
} else {
if (!allowUnknownFormat) {
throw new Error('Invalid tar header: unknown format.')
}
}
// to support old tar versions that use trailing / to indicate dirs
if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5
return {
name,
mode,
uid,
gid,
size,
mtime: new Date(1000 * mtime),
type,
linkname,
uname,
gname,
devmajor,
devminor
}
}

@@ -1,6 +0,5 @@

const { constants } = require('fs')
const { Readable, Writable } = require('streamx')
const { StringDecoder } = require('string_decoder')
const { Readable, Writable, getStreamError } = require('streamx')
const b4a = require('b4a')
const constants = require('./constants')
const headers = require('./headers')

@@ -13,52 +12,94 @@

const noop = function () {}
class Sink extends Writable {
constructor (pack, header, callback) {
super({ mapWritable })
const overflow = function (self, size) {
size &= 511
if (size) self.push(END_OF_TAR.subarray(0, 512 - size))
}
this.written = 0
this.header = header
function modeToType (mode) {
switch (mode & constants.S_IFMT) {
case constants.S_IFBLK: return 'block-device'
case constants.S_IFCHR: return 'character-device'
case constants.S_IFDIR: return 'directory'
case constants.S_IFIFO: return 'fifo'
case constants.S_IFLNK: return 'symlink'
this._callback = callback
this._linkname = null
this._isLinkname = header.type === 'symlink' && !header.linkname
this._isVoid = header.type !== 'file' && header.type !== 'contiguous-file'
this._finished = false
this._pack = pack
this._openCallback = null
if (this._pack._stream === null) this._pack._stream = this
else this._pack._pending.push(this)
}
return 'file'
}
_open (cb) {
this._openCallback = cb
if (this._pack._stream === this) this._continueOpen()
}
class Sink extends Writable {
constructor (to) {
super()
this.written = 0
this._to = to
_continueOpen () {
if (this._pack._stream === null) this._pack._stream = this
const cb = this._openCallback
this._openCallback = null
if (cb === null) return
if (this._pack.destroying) return cb(new Error('pack stream destroyed'))
if (this._pack._finalized) return cb(new Error('pack stream is already finalized'))
this._pack._stream = this
if (!this._isLinkname) {
this._pack._encode(this.header)
}
cb(null)
}
_write (data, cb) {
if (this._isLinkname) {
this._linkname = this._linkname ? b4a.concat([this._linkname, data]) : data
return cb(null)
}
if (this._isVoid) {
return cb(new Error('No body allowed for this entry'))
}
this.written += data.byteLength
if (this._to.push(data)) return cb()
this._to._drain = cb
if (this._pack.push(data)) return cb()
this._pack._drain = cb
}
}
class LinkSink extends Writable {
constructor () {
super()
this.linkname = ''
this._decoder = new StringDecoder('utf-8')
_final (cb) {
if (this._isLinkname) {
this.header.linkname = this._linkname ? b4a.toString(this._linkname, 'utf-8') : ''
this._pack._encode(this.header)
}
overflow(this._pack, this.header.size)
if (this.written !== this.header.size) { // corrupting tar
return cb(new Error('Size mismatch'))
}
this._pack._done(this)
this._finished = true
cb(null)
}
_write (data, cb) {
this.linkname += this._decoder.write(data)
cb()
_getError () {
return getStreamError(this) || new Error('tar entry destroyed')
}
}
class Void extends Writable {
_write (data, cb) {
cb(new Error('No body allowed for this entry'))
_predestroy () {
this._pack.destroy(this._getError())
}
_destroy (cb) {
this._pack._done(this)
if (this._finished) this._callback(null)
else this._callback(this._getError())
cb()
}
}

@@ -72,2 +113,3 @@

this._finalizing = false
this._pending = []
this._stream = null

@@ -77,4 +119,3 @@ }

entry (header, buffer, callback) {
if (this._stream) throw new Error('already piping an entry')
if (this._finalized || this.destroyed) return
if (this._finalized || this.destroying) throw new Error('already finalized or destroyed')

@@ -88,4 +129,2 @@ if (typeof buffer === 'function') {

const self = this
if (!header.size || header.type === 'symlink') header.size = 0

@@ -99,55 +138,17 @@ if (!header.type) header.type = modeToType(header.mode)

if (typeof buffer === 'string') buffer = b4a.from(buffer)
const sink = new Sink(this, header, callback)
if (b4a.isBuffer(buffer)) {
header.size = buffer.byteLength
this._encode(header)
const ok = this.push(buffer)
overflow(self, header.size)
if (ok) process.nextTick(callback)
else this._drain = callback
return new Void()
sink.write(buffer)
sink.end()
return sink
}
if (header.type === 'symlink' && !header.linkname) {
const linkSink = new LinkSink()
linkSink
.on('error', function (err) {
self.destroy()
callback(err)
})
.on('close', function () {
header.linkname = linkSink.linkname
self._encode(header)
callback()
})
return linkSink
if (sink._isVoid) {
sink.end()
return sink
}
this._encode(header)
if (header.type !== 'file' && header.type !== 'contiguous-file') {
process.nextTick(callback)
return new Void()
}
const sink = new Sink(this)
sink
.on('error', function (err) {
self._stream = null
self.destroy()
callback(err)
})
.on('close', function () {
self._stream = null
if (sink.written !== header.size) { // corrupting tar
}
overflow(self, header.size)
if (self._finalizing) { self.finalize() }
callback()
})
this._stream = sink
return sink

@@ -157,3 +158,3 @@ }

finalize () {
if (this._stream) {
if (this._stream || this._pending.length > 0) {
this._finalizing = true

@@ -165,2 +166,3 @@ return

this._finalized = true
this.push(END_OF_TAR)

@@ -170,2 +172,11 @@ this.push(null)

_done (stream) {
if (stream !== this._stream) return
this._stream = null
if (this._finalizing) this.finalize()
if (this._pending.length) this._pending.shift()._continueOpen()
}
_encode (header) {

@@ -213,6 +224,24 @@ if (!header.pax) {

_read (cb) {
_doDrain () {
const drain = this._drain
this._drain = noop
drain()
}
_predestroy () {
const err = getStreamError(this)
if (this._stream) this._stream.destroy(err)
while (this._pending.length) {
const stream = this._pending.shift()
stream.destroy(err)
stream._continueOpen()
}
this._doDrain()
}
_read (cb) {
this._doDrain()
cb()

@@ -225,1 +254,24 @@ }

}
// Map a stat mode's file-type bits to a tar entry type name,
// defaulting to 'file' for anything unrecognized.
function modeToType (mode) {
  const fmt = mode & constants.S_IFMT
  if (fmt === constants.S_IFBLK) return 'block-device'
  if (fmt === constants.S_IFCHR) return 'character-device'
  if (fmt === constants.S_IFDIR) return 'directory'
  if (fmt === constants.S_IFIFO) return 'fifo'
  if (fmt === constants.S_IFLNK) return 'symlink'
  return 'file'
}
function noop () {}
// Push zero padding so the entry body ends on a 512-byte boundary.
function overflow (self, size) {
  const rem = size & 511
  if (rem !== 0) self.push(END_OF_TAR.subarray(0, 512 - rem))
}
// Normalize written chunks to buffers: pass buffers through unchanged,
// convert anything else (e.g. strings) via b4a.from.
function mapWritable (buf) {
  if (b4a.isBuffer(buf)) return buf
  return b4a.from(buf)
}
{
"name": "tar-stream",
"version": "3.0.0",
"version": "3.1.0",
"description": "tar-stream is a streaming tar parser and generator and nothing else. It operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",

@@ -9,2 +9,5 @@ "main": "index.js",

],
"browser": {
"fs": false
},
"scripts": {

@@ -24,11 +27,10 @@ "test": "standard && brittle test/*.js"

"dependencies": {
"b4a": "^1.6.1",
"bl": "^6.0.0",
"streamx": "^2.12.5"
"b4a": "^1.6.4",
"streamx": "^2.15.0"
},
"devDependencies": {
"brittle": "^3.1.1",
"brittle": "^3.3.2",
"concat-stream": "^2.0.0",
"standard": "^17.0.0"
"standard": "^17.0.1"
}
}

@@ -30,4 +30,4 @@ # tar-stream

``` js
var tar = require('tar-stream')
var pack = tar.pack() // pack is a stream
const tar = require('tar-stream')
const pack = tar.pack() // pack is a stream

@@ -38,3 +38,3 @@ // add a file called my-test.txt with the content "Hello World!"

// add a file called my-stream-test.txt from a stream
var entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
const entry = pack.entry({ name: 'my-stream-test.txt', size: 11 }, function(err) {
// the stream was added

@@ -59,5 +59,5 @@ // no more entries

``` js
var extract = tar.extract()
const extract = tar.extract()
extract.on('entry', function(header, stream, next) {
extract.on('entry', function (header, stream, next) {
// header is the tar header

@@ -67,3 +67,3 @@ // stream is the content body (might be an empty stream)

stream.on('end', function() {
stream.on('end', function () {
next() // ready for next entry

@@ -75,3 +75,3 @@ })

extract.on('finish', function() {
extract.on('finish', function () {
// all entries read

@@ -85,2 +85,17 @@ })

## Extracting as an async iterator
The extraction stream, in addition to being a writable stream, is also an async iterator:
``` js
const extract = tar.extract()
someStream.pipe(extract)
for await (const entry of extract) {
entry.header // the tar header
entry.resume() // the entry is the stream also
}
```
## Headers

@@ -115,7 +130,7 @@

``` js
var extract = tar.extract()
var pack = tar.pack()
var path = require('path')
const extract = tar.extract()
const pack = tar.pack()
const path = require('path')
extract.on('entry', function(header, stream, callback) {
extract.on('entry', function (header, stream, callback) {
// let's prefix all names with 'tmp'

@@ -127,3 +142,3 @@ header.name = path.join('tmp', header.name)

extract.on('finish', function() {
extract.on('finish', function () {
// all entries done - lets finalize it

@@ -144,11 +159,11 @@ pack.finalize()

``` js
var fs = require('fs')
var tar = require('tar-stream')
const fs = require('fs')
const tar = require('tar-stream')
var pack = tar.pack() // pack is a stream
var path = 'YourTarBall.tar'
var yourTarball = fs.createWriteStream(path)
const pack = tar.pack() // pack is a stream
const path = 'YourTarBall.tar'
const yourTarball = fs.createWriteStream(path)
// add a file called YourFile.txt with the content "Hello World!"
pack.entry({name: 'YourFile.txt'}, 'Hello World!', function (err) {
pack.entry({ name: 'YourFile.txt' }, 'Hello World!', function (err) {
if (err) throw err

@@ -155,0 +170,0 @@ pack.finalize()

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc