Comparing version 6.1.11 to 6.1.12
@@ -12,10 +12,13 @@ 'use strict' | ||
module.exports = (opt_, files, cb) => { | ||
if (typeof files === 'function') | ||
if (typeof files === 'function') { | ||
cb = files | ||
} | ||
if (Array.isArray(opt_)) | ||
if (Array.isArray(opt_)) { | ||
files = opt_, opt_ = {} | ||
} | ||
if (!files || !Array.isArray(files) || !files.length) | ||
if (!files || !Array.isArray(files) || !files.length) { | ||
throw new TypeError('no files or directories specified') | ||
} | ||
@@ -26,7 +29,9 @@ files = Array.from(files) | ||
if (opt.sync && typeof cb === 'function') | ||
if (opt.sync && typeof cb === 'function') { | ||
throw new TypeError('callback not supported for sync tar functions') | ||
} | ||
if (!opt.file && typeof cb === 'function') | ||
if (!opt.file && typeof cb === 'function') { | ||
throw new TypeError('callback only supported with file option') | ||
} | ||
@@ -70,3 +75,3 @@ return opt.file && opt.sync ? createFileSync(opt, files) | ||
t({ | ||
file: path.resolve(p.cwd, file.substr(1)), | ||
file: path.resolve(p.cwd, file.slice(1)), | ||
sync: true, | ||
@@ -76,4 +81,5 @@ noResume: true, | ||
}) | ||
} else | ||
} else { | ||
p.add(file) | ||
} | ||
}) | ||
@@ -88,8 +94,9 @@ p.end() | ||
return t({ | ||
file: path.resolve(p.cwd, file.substr(1)), | ||
file: path.resolve(p.cwd, file.slice(1)), | ||
noResume: true, | ||
onentry: entry => p.add(entry), | ||
}).then(_ => addFilesAsync(p, files)) | ||
} else | ||
} else { | ||
p.add(file) | ||
} | ||
} | ||
@@ -96,0 +103,0 @@ p.end() |
@@ -12,25 +12,31 @@ 'use strict' | ||
module.exports = (opt_, files, cb) => { | ||
if (typeof opt_ === 'function') | ||
if (typeof opt_ === 'function') { | ||
cb = opt_, files = null, opt_ = {} | ||
else if (Array.isArray(opt_)) | ||
} else if (Array.isArray(opt_)) { | ||
files = opt_, opt_ = {} | ||
} | ||
if (typeof files === 'function') | ||
if (typeof files === 'function') { | ||
cb = files, files = null | ||
} | ||
if (!files) | ||
if (!files) { | ||
files = [] | ||
else | ||
} else { | ||
files = Array.from(files) | ||
} | ||
const opt = hlo(opt_) | ||
if (opt.sync && typeof cb === 'function') | ||
if (opt.sync && typeof cb === 'function') { | ||
throw new TypeError('callback not supported for sync tar functions') | ||
} | ||
if (!opt.file && typeof cb === 'function') | ||
if (!opt.file && typeof cb === 'function') { | ||
throw new TypeError('callback only supported with file option') | ||
} | ||
if (files.length) | ||
if (files.length) { | ||
filesFilter(opt, files) | ||
} | ||
@@ -91,5 +97,5 @@ return opt.file && opt.sync ? extractFileSync(opt) | ||
fs.stat(file, (er, stat) => { | ||
if (er) | ||
if (er) { | ||
reject(er) | ||
else { | ||
} else { | ||
const stream = new fsm.ReadStream(file, { | ||
@@ -96,0 +102,0 @@ readSize: readSize, |
@@ -37,14 +37,17 @@ 'use strict' | ||
if (Buffer.isBuffer(data)) | ||
if (Buffer.isBuffer(data)) { | ||
this.decode(data, off || 0, ex, gex) | ||
else if (data) | ||
} else if (data) { | ||
this.set(data) | ||
} | ||
} | ||
decode (buf, off, ex, gex) { | ||
if (!off) | ||
if (!off) { | ||
off = 0 | ||
} | ||
if (!buf || !(buf.length >= off + 512)) | ||
if (!buf || !(buf.length >= off + 512)) { | ||
throw new Error('need 512 bytes for header') | ||
} | ||
@@ -66,6 +69,8 @@ this.path = decString(buf, off, 100) | ||
this[TYPE] = decString(buf, off + 156, 1) | ||
if (this[TYPE] === '') | ||
if (this[TYPE] === '') { | ||
this[TYPE] = '0' | ||
if (this[TYPE] === '0' && this.path.substr(-1) === '/') | ||
} | ||
if (this[TYPE] === '0' && this.path.slice(-1) === '/') { | ||
this[TYPE] = '5' | ||
} | ||
@@ -77,4 +82,5 @@ // tar implementations sometimes incorrectly put the stat(dir).size | ||
// it anyway, and it'll just be a warning about an invalid header. | ||
if (this[TYPE] === '5') | ||
if (this[TYPE] === '5') { | ||
this.size = 0 | ||
} | ||
@@ -93,4 +99,5 @@ this.linkpath = decString(buf, off + 157, 100) | ||
const prefix = decString(buf, off + 345, 130) | ||
if (prefix) | ||
if (prefix) { | ||
this.path = prefix + '/' + this.path | ||
} | ||
this.atime = decDate(buf, off + 476, 12) | ||
@@ -102,11 +109,14 @@ this.ctime = decDate(buf, off + 488, 12) | ||
let sum = 8 * 0x20 | ||
for (let i = off; i < off + 148; i++) | ||
for (let i = off; i < off + 148; i++) { | ||
sum += buf[i] | ||
} | ||
for (let i = off + 156; i < off + 512; i++) | ||
for (let i = off + 156; i < off + 512; i++) { | ||
sum += buf[i] | ||
} | ||
this.cksumValid = sum === this.cksum | ||
if (this.cksum === null && sum === 8 * 0x20) | ||
if (this.cksum === null && sum === 8 * 0x20) { | ||
this.nullBlock = true | ||
} | ||
} | ||
@@ -119,4 +129,5 @@ | ||
if (ex[k] !== null && ex[k] !== undefined && | ||
!(global && k === 'path')) | ||
!(global && k === 'path')) { | ||
this[k] = ex[k] | ||
} | ||
} | ||
@@ -131,7 +142,9 @@ } | ||
if (!off) | ||
if (!off) { | ||
off = 0 | ||
} | ||
if (!(buf.length >= off + 512)) | ||
if (!(buf.length >= off + 512)) { | ||
throw new Error('need 512 bytes for header') | ||
} | ||
@@ -158,5 +171,5 @@ const prefixSize = this.ctime || this.atime ? 130 : 155 | ||
this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax | ||
if (buf[off + 475] !== 0) | ||
if (buf[off + 475] !== 0) { | ||
this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax | ||
else { | ||
} else { | ||
this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax | ||
@@ -168,7 +181,9 @@ this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax | ||
let sum = 8 * 0x20 | ||
for (let i = off; i < off + 148; i++) | ||
for (let i = off; i < off + 148; i++) { | ||
sum += buf[i] | ||
} | ||
for (let i = off + 156; i < off + 512; i++) | ||
for (let i = off + 156; i < off + 512; i++) { | ||
sum += buf[i] | ||
} | ||
@@ -184,4 +199,5 @@ this.cksum = sum | ||
for (const i in data) { | ||
if (data[i] !== null && data[i] !== undefined) | ||
if (data[i] !== null && data[i] !== undefined) { | ||
this[i] = data[i] | ||
} | ||
} | ||
@@ -199,6 +215,7 @@ } | ||
set type (type) { | ||
if (types.code.has(type)) | ||
if (types.code.has(type)) { | ||
this[TYPE] = types.code.get(type) | ||
else | ||
} else { | ||
this[TYPE] = type | ||
} | ||
} | ||
@@ -214,5 +231,5 @@ } | ||
if (Buffer.byteLength(pp) < pathSize) | ||
if (Buffer.byteLength(pp) < pathSize) { | ||
ret = [pp, prefix, false] | ||
else { | ||
} else { | ||
// first set prefix to the dir, and path to the base | ||
@@ -223,13 +240,11 @@ prefix = pathModule.dirname(pp) | ||
do { | ||
// both fit! | ||
if (Buffer.byteLength(pp) <= pathSize && | ||
Buffer.byteLength(prefix) <= prefixSize) | ||
Buffer.byteLength(prefix) <= prefixSize) { | ||
// both fit! | ||
ret = [pp, prefix, false] | ||
// prefix fits in prefix, but path doesn't fit in path | ||
else if (Buffer.byteLength(pp) > pathSize && | ||
Buffer.byteLength(prefix) <= prefixSize) | ||
ret = [pp.substr(0, pathSize - 1), prefix, true] | ||
else { | ||
} else if (Buffer.byteLength(pp) > pathSize && | ||
Buffer.byteLength(prefix) <= prefixSize) { | ||
// prefix fits in prefix, but path doesn't fit in path | ||
ret = [pp.slice(0, pathSize - 1), prefix, true] | ||
} else { | ||
// make path take a bit from prefix | ||
@@ -242,4 +257,5 @@ pp = pathModule.join(pathModule.basename(prefix), pp) | ||
// at this point, found no resolution, just truncate | ||
if (!ret) | ||
ret = [p.substr(0, pathSize - 1), '', true] | ||
if (!ret) { | ||
ret = [p.slice(0, pathSize - 1), '', true] | ||
} | ||
} | ||
@@ -246,0 +262,0 @@ return ret |
@@ -6,10 +6,11 @@ 'use strict' | ||
const encode = (num, buf) => { | ||
if (!Number.isSafeInteger(num)) | ||
// The number is so large that javascript cannot represent it with integer | ||
// precision. | ||
if (!Number.isSafeInteger(num)) { | ||
// The number is so large that javascript cannot represent it with integer | ||
// precision. | ||
throw Error('cannot encode number outside of javascript safe integer range') | ||
else if (num < 0) | ||
} else if (num < 0) { | ||
encodeNegative(num, buf) | ||
else | ||
} else { | ||
encodePositive(num, buf) | ||
} | ||
return buf | ||
@@ -34,7 +35,7 @@ } | ||
num = Math.floor(num / 0x100) | ||
if (flipped) | ||
if (flipped) { | ||
buf[i - 1] = onesComp(byte) | ||
else if (byte === 0) | ||
} else if (byte === 0) { | ||
buf[i - 1] = 0 | ||
else { | ||
} else { | ||
flipped = true | ||
@@ -51,9 +52,11 @@ buf[i - 1] = twosComp(byte) | ||
: null | ||
if (value === null) | ||
if (value === null) { | ||
throw Error('invalid base256 encoding') | ||
} | ||
if (!Number.isSafeInteger(value)) | ||
// The number is so large that javascript cannot represent it with integer | ||
// precision. | ||
if (!Number.isSafeInteger(value)) { | ||
// The number is so large that javascript cannot represent it with integer | ||
// precision. | ||
throw Error('parsed number outside of javascript safe integer range') | ||
} | ||
@@ -70,12 +73,13 @@ return value | ||
var f | ||
if (flipped) | ||
if (flipped) { | ||
f = onesComp(byte) | ||
else if (byte === 0) | ||
} else if (byte === 0) { | ||
f = byte | ||
else { | ||
} else { | ||
flipped = true | ||
f = twosComp(byte) | ||
} | ||
if (f !== 0) | ||
if (f !== 0) { | ||
sum -= f * Math.pow(256, len - i - 1) | ||
} | ||
} | ||
@@ -90,4 +94,5 @@ return sum | ||
var byte = buf[i] | ||
if (byte !== 0) | ||
if (byte !== 0) { | ||
sum += byte * Math.pow(256, len - i - 1) | ||
} | ||
} | ||
@@ -94,0 +99,0 @@ return sum |
@@ -15,28 +15,35 @@ 'use strict' | ||
module.exports = (opt_, files, cb) => { | ||
if (typeof opt_ === 'function') | ||
if (typeof opt_ === 'function') { | ||
cb = opt_, files = null, opt_ = {} | ||
else if (Array.isArray(opt_)) | ||
} else if (Array.isArray(opt_)) { | ||
files = opt_, opt_ = {} | ||
} | ||
if (typeof files === 'function') | ||
if (typeof files === 'function') { | ||
cb = files, files = null | ||
} | ||
if (!files) | ||
if (!files) { | ||
files = [] | ||
else | ||
} else { | ||
files = Array.from(files) | ||
} | ||
const opt = hlo(opt_) | ||
if (opt.sync && typeof cb === 'function') | ||
if (opt.sync && typeof cb === 'function') { | ||
throw new TypeError('callback not supported for sync tar functions') | ||
} | ||
if (!opt.file && typeof cb === 'function') | ||
if (!opt.file && typeof cb === 'function') { | ||
throw new TypeError('callback only supported with file option') | ||
} | ||
if (files.length) | ||
if (files.length) { | ||
filesFilter(opt, files) | ||
} | ||
if (!opt.noResume) | ||
if (!opt.noResume) { | ||
onentryFunction(opt) | ||
} | ||
@@ -85,5 +92,5 @@ return opt.file && opt.sync ? listFileSync(opt) | ||
const readSize = opt.maxReadSize || 16 * 1024 * 1024 | ||
if (stat.size < readSize) | ||
if (stat.size < readSize) { | ||
p.end(fs.readFileSync(file)) | ||
else { | ||
} else { | ||
let pos = 0 | ||
@@ -119,5 +126,5 @@ const buf = Buffer.allocUnsafe(readSize) | ||
fs.stat(file, (er, stat) => { | ||
if (er) | ||
if (er) { | ||
reject(er) | ||
else { | ||
} else { | ||
const stream = new fsm.ReadStream(file, { | ||
@@ -124,0 +131,0 @@ readSize: readSize, |
@@ -42,4 +42,5 @@ 'use strict' | ||
fs.stat(dir, (er, st) => { | ||
if (er || !st.isDirectory()) | ||
if (er || !st.isDirectory()) { | ||
er = new CwdError(dir, er && er.code || 'ENOTDIR') | ||
} | ||
cb(er) | ||
@@ -70,23 +71,27 @@ }) | ||
const done = (er, created) => { | ||
if (er) | ||
if (er) { | ||
cb(er) | ||
else { | ||
} else { | ||
cSet(cache, dir, true) | ||
if (created && doChown) | ||
if (created && doChown) { | ||
chownr(created, uid, gid, er => done(er)) | ||
else if (needChmod) | ||
} else if (needChmod) { | ||
fs.chmod(dir, mode, cb) | ||
else | ||
} else { | ||
cb() | ||
} | ||
} | ||
} | ||
if (cache && cGet(cache, dir) === true) | ||
if (cache && cGet(cache, dir) === true) { | ||
return done() | ||
} | ||
if (dir === cwd) | ||
if (dir === cwd) { | ||
return checkCwd(dir, done) | ||
} | ||
if (preserve) | ||
return mkdirp(dir, {mode}).then(made => done(null, made), done) | ||
if (preserve) { | ||
return mkdirp(dir, { mode }).then(made => done(null, made), done) | ||
} | ||
@@ -99,8 +104,10 @@ const sub = normPath(path.relative(cwd, dir)) | ||
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { | ||
if (!parts.length) | ||
if (!parts.length) { | ||
return cb(null, created) | ||
} | ||
const p = parts.shift() | ||
const part = normPath(path.resolve(base + '/' + p)) | ||
if (cGet(cache, part)) | ||
if (cGet(cache, part)) { | ||
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) | ||
} | ||
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) | ||
@@ -115,14 +122,16 @@ } | ||
cb(statEr) | ||
} else if (st.isDirectory()) | ||
} else if (st.isDirectory()) { | ||
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) | ||
else if (unlink) { | ||
} else if (unlink) { | ||
fs.unlink(part, er => { | ||
if (er) | ||
if (er) { | ||
return cb(er) | ||
} | ||
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) | ||
}) | ||
} else if (st.isSymbolicLink()) | ||
} else if (st.isSymbolicLink()) { | ||
return cb(new SymlinkError(part, part + '/' + parts.join('/'))) | ||
else | ||
} else { | ||
cb(er) | ||
} | ||
}) | ||
@@ -143,4 +152,5 @@ } else { | ||
} finally { | ||
if (!ok) | ||
if (!ok) { | ||
throw new CwdError(dir, code) | ||
} | ||
} | ||
@@ -170,10 +180,13 @@ } | ||
cSet(cache, dir, true) | ||
if (created && doChown) | ||
if (created && doChown) { | ||
chownr.sync(created, uid, gid) | ||
if (needChmod) | ||
} | ||
if (needChmod) { | ||
fs.chmodSync(dir, mode) | ||
} | ||
} | ||
if (cache && cGet(cache, dir) === true) | ||
if (cache && cGet(cache, dir) === true) { | ||
return done() | ||
} | ||
@@ -185,4 +198,5 @@ if (dir === cwd) { | ||
if (preserve) | ||
if (preserve) { | ||
return done(mkdirp.sync(dir, mode)) | ||
} | ||
@@ -196,4 +210,5 @@ const sub = normPath(path.relative(cwd, dir)) | ||
part = normPath(path.resolve(part)) | ||
if (cGet(cache, part)) | ||
if (cGet(cache, part)) { | ||
continue | ||
} | ||
@@ -215,4 +230,5 @@ try { | ||
continue | ||
} else if (st.isSymbolicLink()) | ||
} else if (st.isSymbolicLink()) { | ||
return new SymlinkError(part, part + '/' + parts.join('/')) | ||
} | ||
} | ||
@@ -219,0 +235,0 @@ } |
@@ -10,15 +10,19 @@ 'use strict' | ||
// a file that is not readable/writable by the owner. | ||
if (portable) | ||
if (portable) { | ||
mode = (mode | 0o600) & ~0o22 | ||
} | ||
// if dirs are readable, then they should be listable | ||
if (isDir) { | ||
if (mode & 0o400) | ||
if (mode & 0o400) { | ||
mode |= 0o100 | ||
if (mode & 0o40) | ||
} | ||
if (mode & 0o40) { | ||
mode |= 0o10 | ||
if (mode & 0o4) | ||
} | ||
if (mode & 0o4) { | ||
mode |= 0o1 | ||
} | ||
} | ||
return mode | ||
} |
@@ -6,7 +6,8 @@ // warning: extremely hot code path. | ||
const normalizeCache = Object.create(null) | ||
const {hasOwnProperty} = Object.prototype | ||
const { hasOwnProperty } = Object.prototype | ||
module.exports = s => { | ||
if (!hasOwnProperty.call(normalizeCache, s)) | ||
if (!hasOwnProperty.call(normalizeCache, s)) { | ||
normalizeCache[s] = s.normalize('NFKD') | ||
} | ||
return normalizeCache[s] | ||
} |
@@ -76,4 +76,5 @@ 'use strict' | ||
this[WRITEENTRYCLASS] = WriteEntry | ||
if (typeof opt.onwarn === 'function') | ||
if (typeof opt.onwarn === 'function') { | ||
this.on('warn', opt.onwarn) | ||
} | ||
@@ -83,6 +84,8 @@ this.portable = !!opt.portable | ||
if (opt.gzip) { | ||
if (typeof opt.gzip !== 'object') | ||
if (typeof opt.gzip !== 'object') { | ||
opt.gzip = {} | ||
if (this.portable) | ||
} | ||
if (this.portable) { | ||
opt.gzip.portable = true | ||
} | ||
this.zip = new zlib.Gzip(opt.gzip) | ||
@@ -93,4 +96,5 @@ this.zip.on('data', chunk => super.write(chunk)) | ||
this.on('resume', _ => this.zip.resume()) | ||
} else | ||
} else { | ||
this.on('drain', this[ONDRAIN]) | ||
} | ||
@@ -121,4 +125,5 @@ this.noDirRecurse = !!opt.noDirRecurse | ||
end (path) { | ||
if (path) | ||
if (path) { | ||
this.write(path) | ||
} | ||
this[ENDED] = true | ||
@@ -130,9 +135,11 @@ this[PROCESS]() | ||
write (path) { | ||
if (this[ENDED]) | ||
if (this[ENDED]) { | ||
throw new Error('write after end') | ||
} | ||
if (path instanceof ReadEntry) | ||
if (path instanceof ReadEntry) { | ||
this[ADDTARENTRY](path) | ||
else | ||
} else { | ||
this[ADDFSENTRY](path) | ||
} | ||
return this.flowing | ||
@@ -144,5 +151,5 @@ } | ||
// in this case, we don't have to wait for the stat | ||
if (!this.filter(p.path, p)) | ||
if (!this.filter(p.path, p)) { | ||
p.resume() | ||
else { | ||
} else { | ||
const job = new PackJob(p.path, absolute, false) | ||
@@ -171,6 +178,7 @@ job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) | ||
this[JOBS] -= 1 | ||
if (er) | ||
if (er) { | ||
this.emit('error', er) | ||
else | ||
} else { | ||
this[ONSTAT](job, stat) | ||
} | ||
}) | ||
@@ -184,4 +192,5 @@ } | ||
// now we have the stat, we can filter it. | ||
if (!this.filter(job.path, stat)) | ||
if (!this.filter(job.path, stat)) { | ||
job.ignore = true | ||
} | ||
@@ -197,4 +206,5 @@ this[PROCESS]() | ||
this[JOBS] -= 1 | ||
if (er) | ||
if (er) { | ||
return this.emit('error', er) | ||
} | ||
this[ONREADDIR](job, entries) | ||
@@ -211,4 +221,5 @@ }) | ||
[PROCESS] () { | ||
if (this[PROCESSING]) | ||
if (this[PROCESSING]) { | ||
return | ||
} | ||
@@ -230,5 +241,5 @@ this[PROCESSING] = true | ||
if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { | ||
if (this.zip) | ||
if (this.zip) { | ||
this.zip.end(EOF) | ||
else { | ||
} else { | ||
super.write(EOF) | ||
@@ -251,8 +262,10 @@ super.end() | ||
[PROCESSJOB] (job) { | ||
if (job.pending) | ||
if (job.pending) { | ||
return | ||
} | ||
if (job.entry) { | ||
if (job === this[CURRENT] && !job.piped) | ||
if (job === this[CURRENT] && !job.piped) { | ||
this[PIPE](job) | ||
} | ||
return | ||
@@ -262,21 +275,26 @@ } | ||
if (!job.stat) { | ||
if (this.statCache.has(job.absolute)) | ||
if (this.statCache.has(job.absolute)) { | ||
this[ONSTAT](job, this.statCache.get(job.absolute)) | ||
else | ||
} else { | ||
this[STAT](job) | ||
} | ||
} | ||
if (!job.stat) | ||
if (!job.stat) { | ||
return | ||
} | ||
// filtered out! | ||
if (job.ignore) | ||
if (job.ignore) { | ||
return | ||
} | ||
if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { | ||
if (this.readdirCache.has(job.absolute)) | ||
if (this.readdirCache.has(job.absolute)) { | ||
this[ONREADDIR](job, this.readdirCache.get(job.absolute)) | ||
else | ||
} else { | ||
this[READDIR](job) | ||
if (!job.readdir) | ||
} | ||
if (!job.readdir) { | ||
return | ||
} | ||
} | ||
@@ -291,4 +309,5 @@ | ||
if (job === this[CURRENT] && !job.piped) | ||
if (job === this[CURRENT] && !job.piped) { | ||
this[PIPE](job) | ||
} | ||
} | ||
@@ -326,4 +345,5 @@ | ||
[ONDRAIN] () { | ||
if (this[CURRENT] && this[CURRENT].entry) | ||
if (this[CURRENT] && this[CURRENT].entry) { | ||
this[CURRENT].entry.resume() | ||
} | ||
} | ||
@@ -348,9 +368,11 @@ | ||
source.on('data', chunk => { | ||
if (!zip.write(chunk)) | ||
if (!zip.write(chunk)) { | ||
source.pause() | ||
} | ||
}) | ||
} else { | ||
source.on('data', chunk => { | ||
if (!super.write(chunk)) | ||
if (!super.write(chunk)) { | ||
source.pause() | ||
} | ||
}) | ||
@@ -361,4 +383,5 @@ } | ||
pause () { | ||
if (this.zip) | ||
if (this.zip) { | ||
this.zip.pause() | ||
} | ||
return super.pause() | ||
@@ -365,0 +388,0 @@ } |
120
lib/parse.js
@@ -31,2 +31,3 @@ 'use strict' | ||
const zlib = require('minizlib') | ||
const { nextTick } = require('process') | ||
@@ -63,2 +64,3 @@ const gzipHeader = Buffer.from([0x1f, 0x8b]) | ||
const SAW_EOF = Symbol('sawEOF') | ||
const CLOSESTREAM = Symbol('closeStream') | ||
@@ -87,5 +89,5 @@ const noop = _ => true | ||
if (opt.ondone) | ||
if (opt.ondone) { | ||
this.on(DONE, opt.ondone) | ||
else { | ||
} else { | ||
this.on(DONE, _ => { | ||
@@ -95,3 +97,2 @@ this.emit('prefinish') | ||
this.emit('end') | ||
this.emit('close') | ||
}) | ||
@@ -121,11 +122,17 @@ } | ||
this[SAW_EOF] = false | ||
if (typeof opt.onwarn === 'function') | ||
this.on('end', () => this[CLOSESTREAM]()) | ||
if (typeof opt.onwarn === 'function') { | ||
this.on('warn', opt.onwarn) | ||
if (typeof opt.onentry === 'function') | ||
} | ||
if (typeof opt.onentry === 'function') { | ||
this.on('entry', opt.onentry) | ||
} | ||
} | ||
[CONSUMEHEADER] (chunk, position) { | ||
if (this[SAW_VALID_ENTRY] === null) | ||
if (this[SAW_VALID_ENTRY] === null) { | ||
this[SAW_VALID_ENTRY] = false | ||
} | ||
let header | ||
@@ -142,4 +149,5 @@ try { | ||
// ending an archive with no entries. pointless, but legal. | ||
if (this[STATE] === 'begin') | ||
if (this[STATE] === 'begin') { | ||
this[STATE] = 'header' | ||
} | ||
this[EMIT]('eof') | ||
@@ -152,13 +160,13 @@ } else { | ||
this[SAW_NULL_BLOCK] = false | ||
if (!header.cksumValid) | ||
this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header}) | ||
else if (!header.path) | ||
this.warn('TAR_ENTRY_INVALID', 'path is required', {header}) | ||
else { | ||
if (!header.cksumValid) { | ||
this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }) | ||
} else if (!header.path) { | ||
this.warn('TAR_ENTRY_INVALID', 'path is required', { header }) | ||
} else { | ||
const type = header.type | ||
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) | ||
this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header}) | ||
else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) | ||
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header}) | ||
else { | ||
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { | ||
this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header }) | ||
} else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) { | ||
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header }) | ||
} else { | ||
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX]) | ||
@@ -172,8 +180,10 @@ | ||
const onend = () => { | ||
if (!entry.invalid) | ||
if (!entry.invalid) { | ||
this[SAW_VALID_ENTRY] = true | ||
} | ||
} | ||
entry.on('end', onend) | ||
} else | ||
} else { | ||
this[SAW_VALID_ENTRY] = true | ||
} | ||
} | ||
@@ -202,5 +212,5 @@ | ||
} else { | ||
if (entry.remain) | ||
if (entry.remain) { | ||
this[STATE] = 'body' | ||
else { | ||
} else { | ||
this[STATE] = 'header' | ||
@@ -213,4 +223,5 @@ entry.end() | ||
this[NEXTENTRY]() | ||
} else | ||
} else { | ||
this[QUEUE].push(entry) | ||
} | ||
} | ||
@@ -223,2 +234,6 @@ } | ||
[CLOSESTREAM] () { | ||
nextTick(() => this.emit('close')) | ||
} | ||
[PROCESSENTRY] (entry) { | ||
@@ -230,5 +245,5 @@ let go = true | ||
go = false | ||
} else if (Array.isArray(entry)) | ||
} else if (Array.isArray(entry)) { | ||
this.emit.apply(this, entry) | ||
else { | ||
} else { | ||
this[READENTRY] = entry | ||
@@ -259,6 +274,8 @@ this.emit('entry', entry) | ||
if (drainNow) { | ||
if (!this[WRITING]) | ||
if (!this[WRITING]) { | ||
this.emit('drain') | ||
} else | ||
} | ||
} else { | ||
re.once('drain', _ => this.emit('drain')) | ||
} | ||
} | ||
@@ -290,4 +307,5 @@ } | ||
// if we finished, then the entry is reset | ||
if (!this[WRITEENTRY]) | ||
if (!this[WRITEENTRY]) { | ||
this[EMITMETA](entry) | ||
} | ||
@@ -298,6 +316,7 @@ return ret | ||
[EMIT] (ev, data, extra) { | ||
if (!this[QUEUE].length && !this[READENTRY]) | ||
if (!this[QUEUE].length && !this[READENTRY]) { | ||
this.emit(ev, data, extra) | ||
else | ||
} else { | ||
this[QUEUE].push([ev, data, extra]) | ||
} | ||
} | ||
@@ -341,4 +360,5 @@ | ||
write (chunk) { | ||
if (this[ABORTED]) | ||
if (this[ABORTED]) { | ||
return | ||
} | ||
@@ -356,4 +376,5 @@ // first write, might be gzipped | ||
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) { | ||
if (chunk[i] !== gzipHeader[i]) | ||
if (chunk[i] !== gzipHeader[i]) { | ||
this[UNZIP] = false | ||
} | ||
} | ||
@@ -378,6 +399,7 @@ if (this[UNZIP] === null) { | ||
this[WRITING] = true | ||
if (this[UNZIP]) | ||
if (this[UNZIP]) { | ||
this[UNZIP].write(chunk) | ||
else | ||
} else { | ||
this[CONSUMECHUNK](chunk) | ||
} | ||
this[WRITING] = false | ||
@@ -392,4 +414,5 @@ | ||
// if we have no queue, then that means a clogged READENTRY | ||
if (!ret && !this[QUEUE].length) | ||
if (!ret && !this[QUEUE].length) { | ||
this[READENTRY].once('drain', _ => this.emit('drain')) | ||
} | ||
@@ -400,4 +423,5 @@ return ret | ||
[BUFFERCONCAT] (c) { | ||
if (c && !this[ABORTED]) | ||
if (c && !this[ABORTED]) { | ||
this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c | ||
} | ||
} | ||
@@ -416,5 +440,6 @@ | ||
this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${ | ||
entry.blockRemain} more bytes, only ${have} available)`, {entry}) | ||
if (this[BUFFER]) | ||
entry.blockRemain} more bytes, only ${have} available)`, { entry }) | ||
if (this[BUFFER]) { | ||
entry.write(this[BUFFER]) | ||
} | ||
entry.end() | ||
@@ -427,7 +452,7 @@ } | ||
[CONSUMECHUNK] (chunk) { | ||
if (this[CONSUMING]) | ||
if (this[CONSUMING]) { | ||
this[BUFFERCONCAT](chunk) | ||
else if (!chunk && !this[BUFFER]) | ||
} else if (!chunk && !this[BUFFER]) { | ||
this[MAYBEEND]() | ||
else { | ||
} else { | ||
this[CONSUMING] = true | ||
@@ -439,4 +464,5 @@ if (this[BUFFER]) { | ||
this[CONSUMECHUNKSUB](c) | ||
} else | ||
} else { | ||
this[CONSUMECHUNKSUB](chunk) | ||
} | ||
@@ -454,4 +480,5 @@ while (this[BUFFER] && | ||
if (!this[BUFFER] || this[ENDED]) | ||
if (!this[BUFFER] || this[ENDED]) { | ||
this[MAYBEEND]() | ||
} | ||
} | ||
@@ -488,6 +515,7 @@ | ||
if (position < length) { | ||
if (this[BUFFER]) | ||
if (this[BUFFER]) { | ||
this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]]) | ||
else | ||
} else { | ||
this[BUFFER] = chunk.slice(position) | ||
} | ||
} | ||
@@ -498,5 +526,5 @@ } | ||
if (!this[ABORTED]) { | ||
if (this[UNZIP]) | ||
if (this[UNZIP]) { | ||
this[UNZIP].end(chunk) | ||
else { | ||
} else { | ||
this[ENDED] = true | ||
@@ -503,0 +531,0 @@ this.write(chunk) |
@@ -30,4 +30,5 @@ // A path exclusive reservation system | ||
const dirs = path.split('/').slice(0, -1).reduce((set, path) => { | ||
if (set.length) | ||
if (set.length) { | ||
path = join(set[set.length - 1], path) | ||
} | ||
set.push(path || '/') | ||
@@ -47,4 +48,5 @@ return set | ||
/* istanbul ignore if - unpossible */ | ||
if (!res) | ||
if (!res) { | ||
throw new Error('function does not have any path reservations') | ||
} | ||
return { | ||
@@ -59,3 +61,3 @@ paths: res.paths.map(path => queues.get(path)), | ||
const check = fn => { | ||
const {paths, dirs} = getQueues(fn) | ||
const { paths, dirs } = getQueues(fn) | ||
return paths.every(q => q[0] === fn) && | ||
@@ -67,4 +69,5 @@ dirs.every(q => q[0] instanceof Set && q[0].has(fn)) | ||
const run = fn => { | ||
if (running.has(fn) || !check(fn)) | ||
if (running.has(fn) || !check(fn)) { | ||
return false | ||
} | ||
running.add(fn) | ||
@@ -76,4 +79,5 @@ fn(() => clear(fn)) | ||
const clear = fn => { | ||
if (!running.has(fn)) | ||
if (!running.has(fn)) { | ||
return false | ||
} | ||
@@ -86,10 +90,11 @@ const { paths, dirs } = reservations.get(fn) | ||
assert.equal(q[0], fn) | ||
if (q.length === 1) | ||
if (q.length === 1) { | ||
queues.delete(path) | ||
else { | ||
} else { | ||
q.shift() | ||
if (typeof q[0] === 'function') | ||
if (typeof q[0] === 'function') { | ||
next.add(q[0]) | ||
else | ||
} else { | ||
q[0].forEach(fn => next.add(fn)) | ||
} | ||
} | ||
@@ -101,5 +106,5 @@ }) | ||
assert(q[0] instanceof Set) | ||
if (q[0].size === 1 && q.length === 1) | ||
if (q[0].size === 1 && q.length === 1) { | ||
queues.delete(dir) | ||
else if (q[0].size === 1) { | ||
} else if (q[0].size === 1) { | ||
q.shift() | ||
@@ -109,4 +114,5 @@ | ||
next.add(q[0]) | ||
} else | ||
} else { | ||
q[0].delete(fn) | ||
} | ||
}) | ||
@@ -134,18 +140,20 @@ running.delete(fn) | ||
) | ||
reservations.set(fn, {dirs, paths}) | ||
reservations.set(fn, { dirs, paths }) | ||
paths.forEach(path => { | ||
const q = queues.get(path) | ||
if (!q) | ||
if (!q) { | ||
queues.set(path, [fn]) | ||
else | ||
} else { | ||
q.push(fn) | ||
} | ||
}) | ||
dirs.forEach(dir => { | ||
const q = queues.get(dir) | ||
if (!q) | ||
if (!q) { | ||
queues.set(dir, [new Set([fn])]) | ||
else if (q[q.length - 1] instanceof Set) | ||
} else if (q[q.length - 1] instanceof Set) { | ||
q[q.length - 1].add(fn) | ||
else | ||
} else { | ||
q.push(new Set([fn])) | ||
} | ||
}) | ||
@@ -152,0 +160,0 @@ |
@@ -27,4 +27,5 @@ 'use strict' | ||
const body = this.encodeBody() | ||
if (body === '') | ||
if (body === '') { | ||
return null | ||
} | ||
@@ -38,4 +39,5 @@ const bodyLen = Buffer.byteLength(body) | ||
// 0-fill the header section, it might not hit every field | ||
for (let i = 0; i < 512; i++) | ||
for (let i = 0; i < 512; i++) { | ||
buf[i] = 0 | ||
} | ||
@@ -65,4 +67,5 @@ new Header({ | ||
// null pad after the body | ||
for (let i = bodyLen + 512; i < buf.length; i++) | ||
for (let i = bodyLen + 512; i < buf.length; i++) { | ||
buf[i] = 0 | ||
} | ||
@@ -93,4 +96,5 @@ return buf | ||
encodeField (field) { | ||
if (this[field] === null || this[field] === undefined) | ||
if (this[field] === null || this[field] === undefined) { | ||
return '' | ||
} | ||
const v = this[field] instanceof Date ? this[field].getTime() / 1000 | ||
@@ -107,4 +111,5 @@ : this[field] | ||
let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1 | ||
if (byteLen + digits >= Math.pow(10, digits)) | ||
if (byteLen + digits >= Math.pow(10, digits)) { | ||
digits += 1 | ||
} | ||
const len = digits + byteLen | ||
@@ -131,10 +136,12 @@ return len + s | ||
// Refactor to not be a naive line-by-line parse. | ||
if (n !== Buffer.byteLength(line) + 1) | ||
if (n !== Buffer.byteLength(line) + 1) { | ||
return set | ||
} | ||
line = line.substr((n + ' ').length) | ||
line = line.slice((n + ' ').length) | ||
const kv = line.split('=') | ||
const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1') | ||
if (!k) | ||
if (!k) { | ||
return set | ||
} | ||
@@ -141,0 +148,0 @@ const v = kv.join('=') |
@@ -52,4 +52,5 @@ 'use strict' | ||
this.mode = header.mode | ||
if (this.mode) | ||
if (this.mode) { | ||
this.mode = this.mode & 0o7777 | ||
} | ||
this.uid = header.uid | ||
@@ -67,6 +68,8 @@ this.gid = header.gid | ||
if (ex) | ||
if (ex) { | ||
this[SLURP](ex) | ||
if (gex) | ||
} | ||
if (gex) { | ||
this[SLURP](gex, true) | ||
} | ||
} | ||
@@ -76,4 +79,5 @@ | ||
const writeLen = data.length | ||
if (writeLen > this.blockRemain) | ||
if (writeLen > this.blockRemain) { | ||
throw new Error('writing more to entry than is appropriate') | ||
} | ||
@@ -84,7 +88,9 @@ const r = this.remain | ||
this.blockRemain = Math.max(0, br - writeLen) | ||
if (this.ignore) | ||
if (this.ignore) { | ||
return true | ||
} | ||
if (r >= writeLen) | ||
if (r >= writeLen) { | ||
return super.write(data) | ||
} | ||
@@ -100,6 +106,7 @@ // r < writeLen | ||
if (ex[k] !== null && ex[k] !== undefined && | ||
!(global && k === 'path')) | ||
!(global && k === 'path')) { | ||
this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k] | ||
} | ||
} | ||
} | ||
} |
@@ -22,10 +22,13 @@ 'use strict' | ||
if (!opt.file) | ||
if (!opt.file) { | ||
throw new TypeError('file is required') | ||
} | ||
if (opt.gzip) | ||
if (opt.gzip) { | ||
throw new TypeError('cannot append to compressed archives') | ||
} | ||
if (!files || !Array.isArray(files) || !files.length) | ||
if (!files || !Array.isArray(files) || !files.length) { | ||
throw new TypeError('no files or directories specified') | ||
} | ||
@@ -49,6 +52,7 @@ files = Array.from(files) | ||
} catch (er) { | ||
if (er.code === 'ENOENT') | ||
if (er.code === 'ENOENT') { | ||
fd = fs.openSync(opt.file, 'w+') | ||
else | ||
} else { | ||
throw er | ||
} | ||
} | ||
@@ -65,20 +69,25 @@ | ||
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) | ||
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { | ||
throw new Error('cannot append to compressed archives') | ||
} | ||
if (!bytes) | ||
if (!bytes) { | ||
break POSITION | ||
} | ||
} | ||
const h = new Header(headBuf) | ||
if (!h.cksumValid) | ||
if (!h.cksumValid) { | ||
break | ||
} | ||
const entryBlockSize = 512 * Math.ceil(h.size / 512) | ||
if (position + entryBlockSize + 512 > st.size) | ||
if (position + entryBlockSize + 512 > st.size) { | ||
break | ||
} | ||
// the 512 for the header we just parsed will be added as well | ||
// also jump ahead all the blocks for the body | ||
position += entryBlockSize | ||
if (opt.mtimeCache) | ||
if (opt.mtimeCache) { | ||
opt.mtimeCache.set(h.path, h.mtime) | ||
} | ||
} | ||
@@ -112,11 +121,13 @@ threw = false | ||
const cb = (er, pos) => { | ||
if (er) | ||
if (er) { | ||
fs.close(fd, _ => cb_(er)) | ||
else | ||
} else { | ||
cb_(null, pos) | ||
} | ||
} | ||
let position = 0 | ||
if (size === 0) | ||
if (size === 0) { | ||
return cb(null, 0) | ||
} | ||
@@ -126,4 +137,5 @@ let bufPos = 0 | ||
const onread = (er, bytes) => { | ||
if (er) | ||
if (er) { | ||
return cb(er) | ||
} | ||
bufPos += bytes | ||
@@ -137,23 +149,29 @@ if (bufPos < 512 && bytes) { | ||
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) | ||
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { | ||
return cb(new Error('cannot append to compressed archives')) | ||
} | ||
// truncated header | ||
if (bufPos < 512) | ||
if (bufPos < 512) { | ||
return cb(null, position) | ||
} | ||
const h = new Header(headBuf) | ||
if (!h.cksumValid) | ||
if (!h.cksumValid) { | ||
return cb(null, position) | ||
} | ||
const entryBlockSize = 512 * Math.ceil(h.size / 512) | ||
if (position + entryBlockSize + 512 > size) | ||
if (position + entryBlockSize + 512 > size) { | ||
return cb(null, position) | ||
} | ||
position += entryBlockSize + 512 | ||
if (position >= size) | ||
if (position >= size) { | ||
return cb(null, position) | ||
} | ||
if (opt.mtimeCache) | ||
if (opt.mtimeCache) { | ||
opt.mtimeCache.set(h.path, h.mtime) | ||
} | ||
bufPos = 0 | ||
@@ -174,12 +192,15 @@ fs.read(fd, headBuf, 0, 512, position, onread) | ||
if (er) | ||
if (er) { | ||
return reject(er) | ||
} | ||
fs.fstat(fd, (er, st) => { | ||
if (er) | ||
if (er) { | ||
return fs.close(fd, () => reject(er)) | ||
} | ||
getPos(fd, st.size, (er, position) => { | ||
if (er) | ||
if (er) { | ||
return reject(er) | ||
} | ||
const stream = new fsm.WriteStream(opt.file, { | ||
@@ -206,3 +227,3 @@ fd: fd, | ||
t({ | ||
file: path.resolve(p.cwd, file.substr(1)), | ||
file: path.resolve(p.cwd, file.slice(1)), | ||
sync: true, | ||
@@ -212,4 +233,5 @@ noResume: true, | ||
}) | ||
} else | ||
} else { | ||
p.add(file) | ||
} | ||
}) | ||
@@ -224,10 +246,11 @@ p.end() | ||
return t({ | ||
file: path.resolve(p.cwd, file.substr(1)), | ||
file: path.resolve(p.cwd, file.slice(1)), | ||
noResume: true, | ||
onentry: entry => p.add(entry), | ||
}).then(_ => addFilesAsync(p, files)) | ||
} else | ||
} else { | ||
p.add(file) | ||
} | ||
} | ||
p.end() | ||
} |
@@ -19,3 +19,3 @@ // unix absolute paths are also absolute on win32, so we use this for both | ||
: parsed.root | ||
path = path.substr(root.length) | ||
path = path.slice(root.length) | ||
r += root | ||
@@ -22,0 +22,0 @@ parsed = parse(path) |
@@ -69,9 +69,11 @@ 'use strict' | ||
const unlinkFile = (path, cb) => { | ||
if (!isWindows) | ||
if (!isWindows) { | ||
return fs.unlink(path, cb) | ||
} | ||
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') | ||
fs.rename(path, name, er => { | ||
if (er) | ||
if (er) { | ||
return cb(er) | ||
} | ||
fs.unlink(name, cb) | ||
@@ -83,4 +85,5 @@ }) | ||
const unlinkFileSync = path => { | ||
if (!isWindows) | ||
if (!isWindows) { | ||
return fs.unlinkSync(path) | ||
} | ||
@@ -114,4 +117,5 @@ const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') | ||
const pnorm = cacheKeyNormalize(path) | ||
if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) | ||
if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { | ||
cache.delete(path) | ||
} | ||
} | ||
@@ -121,4 +125,5 @@ } | ||
const dropCache = cache => { | ||
for (const key of cache.keys()) | ||
for (const key of cache.keys()) { | ||
cache.delete(key) | ||
} | ||
} | ||
@@ -128,4 +133,5 @@ | ||
constructor (opt) { | ||
if (!opt) | ||
if (!opt) { | ||
opt = {} | ||
} | ||
@@ -155,4 +161,5 @@ opt.ondone = _ => { | ||
// need both or neither | ||
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') | ||
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') { | ||
throw new TypeError('cannot set owner without number uid and gid') | ||
} | ||
if (opt.preserveOwner) { | ||
@@ -172,6 +179,7 @@ throw new TypeError( | ||
// default true for root | ||
if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') | ||
if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') { | ||
this.preserveOwner = process.getuid && process.getuid() === 0 | ||
else | ||
} else { | ||
this.preserveOwner = !!opt.preserveOwner | ||
} | ||
@@ -225,4 +233,5 @@ this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ? | ||
warn (code, msg, data = {}) { | ||
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') | ||
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { | ||
data.recoverable = false | ||
} | ||
return super.warn(code, msg, data) | ||
@@ -236,3 +245,2 @@ } | ||
this.emit('end') | ||
this.emit('close') | ||
} | ||
@@ -244,4 +252,5 @@ } | ||
const parts = normPath(entry.path).split('/') | ||
if (parts.length < this.strip) | ||
if (parts.length < this.strip) { | ||
return false | ||
} | ||
entry.path = parts.slice(this.strip).join('/') | ||
@@ -251,6 +260,7 @@ | ||
const linkparts = normPath(entry.linkpath).split('/') | ||
if (linkparts.length >= this.strip) | ||
if (linkparts.length >= this.strip) { | ||
entry.linkpath = linkparts.slice(this.strip).join('/') | ||
else | ||
} else { | ||
return false | ||
} | ||
} | ||
@@ -281,6 +291,7 @@ } | ||
if (path.isAbsolute(entry.path)) | ||
if (path.isAbsolute(entry.path)) { | ||
entry.absolute = normPath(path.resolve(entry.path)) | ||
else | ||
} else { | ||
entry.absolute = normPath(path.resolve(this.cwd, entry.path)) | ||
} | ||
@@ -307,4 +318,5 @@ // if we somehow ended up with a path that escapes the cwd, and we are | ||
entry.type !== 'Directory' && | ||
entry.type !== 'GNUDumpDir') | ||
entry.type !== 'GNUDumpDir') { | ||
return false | ||
} | ||
@@ -314,5 +326,5 @@ // only encode : chars that aren't drive letter indicators | ||
const { root: aRoot } = path.win32.parse(entry.absolute) | ||
entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length)) | ||
entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length)) | ||
const { root: pRoot } = path.win32.parse(entry.path) | ||
entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length)) | ||
entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)) | ||
} | ||
@@ -324,4 +336,5 @@ | ||
[ONENTRY] (entry) { | ||
if (!this[CHECKPATH](entry)) | ||
if (!this[CHECKPATH](entry)) { | ||
return entry.resume() | ||
} | ||
@@ -333,5 +346,7 @@ assert.equal(typeof entry.absolute, 'string') | ||
case 'GNUDumpDir': | ||
if (entry.mode) | ||
if (entry.mode) { | ||
entry.mode = entry.mode | 0o700 | ||
} | ||
// eslint-disable-next-line no-fallthrough | ||
case 'File': | ||
@@ -356,6 +371,6 @@ case 'OldFile': | ||
// mode, but otherwise continue on. | ||
if (er.name === 'CwdError') | ||
if (er.name === 'CwdError') { | ||
this.emit('error', er) | ||
else { | ||
this.warn('TAR_ENTRY_ERROR', er, {entry}) | ||
} else { | ||
this.warn('TAR_ENTRY_ERROR', er, { entry }) | ||
this[UNPEND]() | ||
@@ -410,4 +425,5 @@ entry.resume() | ||
stream.on('error', er => { | ||
if (stream.fd) | ||
if (stream.fd) { | ||
fs.close(stream.fd, () => {}) | ||
} | ||
@@ -426,4 +442,5 @@ // flush all the data out so that we aren't left hanging | ||
/* istanbul ignore else - we should always have a fd by now */ | ||
if (stream.fd) | ||
if (stream.fd) { | ||
fs.close(stream.fd, () => {}) | ||
} | ||
@@ -437,6 +454,7 @@ this[ONERROR](er, entry) | ||
fs.close(stream.fd, er => { | ||
if (er) | ||
if (er) { | ||
this[ONERROR](er, entry) | ||
else | ||
} else { | ||
this[UNPEND]() | ||
} | ||
fullyDone() | ||
@@ -521,3 +539,3 @@ }) | ||
this.warn('TAR_ENTRY_UNSUPPORTED', | ||
`unsupported entry type: ${entry.type}`, {entry}) | ||
`unsupported entry type: ${entry.type}`, { entry }) | ||
entry.resume() | ||
@@ -564,4 +582,5 @@ } | ||
const paths = [entry.path] | ||
if (entry.linkpath) | ||
if (entry.linkpath) { | ||
paths.push(entry.linkpath) | ||
} | ||
this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) | ||
@@ -581,6 +600,7 @@ } | ||
// to arbitrary writes to anywhere on the system. | ||
if (entry.type === 'SymbolicLink') | ||
if (entry.type === 'SymbolicLink') { | ||
dropCache(this.dirCache) | ||
else if (entry.type !== 'Directory') | ||
} else if (entry.type !== 'Directory') { | ||
pruneCache(this.dirCache, entry.absolute) | ||
} | ||
} | ||
@@ -632,4 +652,5 @@ | ||
} | ||
if (lstatEr || this[ISREUSABLE](entry, st)) | ||
if (lstatEr || this[ISREUSABLE](entry, st)) { | ||
return this[MAKEFS](null, entry, done) | ||
} | ||
@@ -642,4 +663,5 @@ if (st.isDirectory()) { | ||
const afterChmod = er => this[MAKEFS](er, entry, done) | ||
if (!needChmod) | ||
if (!needChmod) { | ||
return afterChmod() | ||
} | ||
return fs.chmod(entry.absolute, entry.mode, afterChmod) | ||
@@ -662,4 +684,5 @@ } | ||
// don't remove if the cwd, we want that error | ||
if (entry.absolute === this.cwd) | ||
if (entry.absolute === this.cwd) { | ||
return this[MAKEFS](null, entry, done) | ||
} | ||
@@ -671,6 +694,7 @@ unlinkFile(entry.absolute, er => | ||
if (this[CHECKED_CWD]) | ||
if (this[CHECKED_CWD]) { | ||
start() | ||
else | ||
} else { | ||
checkCwd() | ||
} | ||
} | ||
@@ -706,5 +730,5 @@ | ||
fs[link](linkpath, entry.absolute, er => { | ||
if (er) | ||
if (er) { | ||
this[ONERROR](er, entry) | ||
else { | ||
} else { | ||
this[UNPEND]() | ||
@@ -735,4 +759,5 @@ entry.resume() | ||
const er = this[MKDIR](this.cwd, this.dmode) | ||
if (er) | ||
if (er) { | ||
return this[ONERROR](er, entry) | ||
} | ||
this[CHECKED_CWD] = true | ||
@@ -747,4 +772,5 @@ } | ||
const mkParent = this[MKDIR](parent, this.dmode) | ||
if (mkParent) | ||
if (mkParent) { | ||
return this[ONERROR](mkParent, entry) | ||
} | ||
} | ||
@@ -754,7 +780,9 @@ } | ||
const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) | ||
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) | ||
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { | ||
return this[SKIP](entry) | ||
} | ||
if (lstatEr || this[ISREUSABLE](entry, st)) | ||
if (lstatEr || this[ISREUSABLE](entry, st)) { | ||
return this[MAKEFS](null, entry) | ||
} | ||
@@ -793,4 +821,5 @@ if (st.isDirectory()) { | ||
} | ||
if (er || closeError) | ||
if (er || closeError) { | ||
this[ONERROR](er || closeError, entry) | ||
} | ||
done() | ||
@@ -797,0 +826,0 @@ } |
@@ -12,10 +12,13 @@ 'use strict' | ||
if (!opt.file) | ||
if (!opt.file) { | ||
throw new TypeError('file is required') | ||
} | ||
if (opt.gzip) | ||
if (opt.gzip) { | ||
throw new TypeError('cannot append to compressed archives') | ||
} | ||
if (!files || !Array.isArray(files) || !files.length) | ||
if (!files || !Array.isArray(files) || !files.length) { | ||
throw new TypeError('no files or directories specified') | ||
} | ||
@@ -31,4 +34,5 @@ files = Array.from(files) | ||
if (!opt.mtimeCache) | ||
if (!opt.mtimeCache) { | ||
opt.mtimeCache = new Map() | ||
} | ||
@@ -35,0 +39,0 @@ opt.filter = filter ? (path, stat) => |
'use strict' | ||
module.exports = Base => class extends Base { | ||
warn (code, message, data = {}) { | ||
if (this.file) | ||
if (this.file) { | ||
data.file = this.file | ||
if (this.cwd) | ||
} | ||
if (this.cwd) { | ||
data.cwd = this.cwd | ||
} | ||
data.code = message instanceof Error && message.code || code | ||
@@ -16,7 +18,8 @@ data.tarCode = code | ||
this.emit('warn', data.tarCode, message, data) | ||
} else if (message instanceof Error) | ||
} else if (message instanceof Error) { | ||
this.emit('error', Object.assign(message, data)) | ||
else | ||
} else { | ||
this.emit('error', Object.assign(new Error(`${code}: ${message}`), data)) | ||
} | ||
} | ||
} |
@@ -11,4 +11,5 @@ 'use strict' | ||
const prefixPath = (path, prefix) => { | ||
if (!prefix) | ||
if (!prefix) { | ||
return normPath(path) | ||
} | ||
path = normPath(path).replace(/^\.(\/|$)/, '') | ||
@@ -48,4 +49,5 @@ return stripSlash(prefix) + '/' + path | ||
super(opt) | ||
if (typeof p !== 'string') | ||
if (typeof p !== 'string') { | ||
throw new TypeError('path is required') | ||
} | ||
this.path = normPath(p) | ||
@@ -77,4 +79,5 @@ // suppress atime, ctime, uid, gid, uname, gname | ||
if (typeof opt.onwarn === 'function') | ||
if (typeof opt.onwarn === 'function') { | ||
this.on('warn', opt.onwarn) | ||
} | ||
@@ -100,4 +103,5 @@ let pathWarn = false | ||
if (this.path === '') | ||
if (this.path === '') { | ||
this.path = './' | ||
} | ||
@@ -111,11 +115,13 @@ if (pathWarn) { | ||
if (this.statCache.has(this.absolute)) | ||
if (this.statCache.has(this.absolute)) { | ||
this[ONLSTAT](this.statCache.get(this.absolute)) | ||
else | ||
} else { | ||
this[LSTAT]() | ||
} | ||
} | ||
emit (ev, ...data) { | ||
if (ev === 'error') | ||
if (ev === 'error') { | ||
this[HAD_ERROR] = true | ||
} | ||
return super.emit(ev, ...data) | ||
@@ -126,4 +132,5 @@ } | ||
fs.lstat(this.absolute, (er, stat) => { | ||
if (er) | ||
if (er) { | ||
return this.emit('error', er) | ||
} | ||
this[ONLSTAT](stat) | ||
@@ -136,4 +143,5 @@ }) | ||
this.stat = stat | ||
if (!stat.isFile()) | ||
if (!stat.isFile()) { | ||
stat.size = 0 | ||
} | ||
this.type = getType(stat) | ||
@@ -163,4 +171,5 @@ this.emit('stat', stat) | ||
[HEADER] () { | ||
if (this.type === 'Directory' && this.portable) | ||
if (this.type === 'Directory' && this.portable) { | ||
this.noMtime = true | ||
} | ||
@@ -207,4 +216,5 @@ this.header = new Header({ | ||
[DIRECTORY] () { | ||
if (this.path.substr(-1) !== '/') | ||
if (this.path.slice(-1) !== '/') { | ||
this.path += '/' | ||
} | ||
this.stat.size = 0 | ||
@@ -217,4 +227,5 @@ this[HEADER]() | ||
fs.readlink(this.absolute, (er, linkpath) => { | ||
if (er) | ||
if (er) { | ||
return this.emit('error', er) | ||
} | ||
this[ONREADLINK](linkpath) | ||
@@ -243,4 +254,5 @@ }) | ||
const linkpath = this.linkCache.get(linkKey) | ||
if (linkpath.indexOf(this.cwd) === 0) | ||
if (linkpath.indexOf(this.cwd) === 0) { | ||
return this[HARDLINK](linkpath) | ||
} | ||
} | ||
@@ -251,4 +263,5 @@ this.linkCache.set(linkKey, this.absolute) | ||
this[HEADER]() | ||
if (this.stat.size === 0) | ||
if (this.stat.size === 0) { | ||
return this.end() | ||
} | ||
@@ -260,4 +273,5 @@ this[OPENFILE]() | ||
fs.open(this.absolute, 'r', (er, fd) => { | ||
if (er) | ||
if (er) { | ||
return this.emit('error', er) | ||
} | ||
this[ONOPENFILE](fd) | ||
@@ -269,4 +283,5 @@ }) | ||
this.fd = fd | ||
if (this[HAD_ERROR]) | ||
if (this[HAD_ERROR]) { | ||
return this[CLOSE]() | ||
} | ||
@@ -335,6 +350,7 @@ this.blockLen = 512 * Math.ceil(this.stat.size / 512) | ||
const flushed = this.write(writeBuf) | ||
if (!flushed) | ||
if (!flushed) { | ||
this[AWAITDRAIN](() => this[ONDRAIN]()) | ||
else | ||
} else { | ||
this[ONDRAIN]() | ||
} | ||
} | ||
@@ -361,4 +377,5 @@ | ||
if (!this.remain) { | ||
if (this.blockRemain) | ||
if (this.blockRemain) { | ||
super.write(Buffer.alloc(this.blockRemain)) | ||
} | ||
return this[CLOSE](er => er ? this.emit('error', er) : this.end()) | ||
@@ -431,4 +448,5 @@ } | ||
this.type = readEntry.type | ||
if (this.type === 'Directory' && this.portable) | ||
if (this.type === 'Directory' && this.portable) { | ||
this.noMtime = true | ||
} | ||
@@ -449,4 +467,5 @@ this.prefix = opt.prefix || null | ||
if (typeof opt.onwarn === 'function') | ||
if (typeof opt.onwarn === 'function') { | ||
this.on('warn', opt.onwarn) | ||
} | ||
@@ -521,4 +540,5 @@ let pathWarn = false | ||
const writeLen = data.length | ||
if (writeLen > this.blockRemain) | ||
if (writeLen > this.blockRemain) { | ||
throw new Error('writing more to entry than is appropriate') | ||
} | ||
this.blockRemain -= writeLen | ||
@@ -529,4 +549,5 @@ return super.write(data) | ||
end () { | ||
if (this.blockRemain) | ||
if (this.blockRemain) { | ||
super.write(Buffer.alloc(this.blockRemain)) | ||
} | ||
return super.end() | ||
@@ -533,0 +554,0 @@ } |
{ | ||
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", | ||
"author": "GitHub Inc.", | ||
"name": "tar", | ||
"description": "tar for node", | ||
"version": "6.1.11", | ||
"version": "6.1.12", | ||
"repository": { | ||
@@ -11,14 +11,10 @@ "type": "git", | ||
"scripts": { | ||
"test:posix": "tap", | ||
"test:win32": "tap --lines=98 --branches=98 --statements=98 --functions=98", | ||
"test": "node test/fixtures/test.js", | ||
"posttest": "npm run lint", | ||
"eslint": "eslint", | ||
"lint": "npm run eslint -- test lib", | ||
"genparse": "node scripts/generate-parse-fixtures.js", | ||
"template-oss-apply": "template-oss-apply --force", | ||
"lint": "eslint \"**/*.js\"", | ||
"postlint": "template-oss-check", | ||
"lintfix": "npm run lint -- --fix", | ||
"preversion": "npm test", | ||
"postversion": "npm publish", | ||
"prepublishOnly": "git push origin --follow-tags", | ||
"genparse": "node scripts/generate-parse-fixtures.js", | ||
"bench": "for i in benchmarks/*/*.js; do echo $i; for j in {1..5}; do node $i || break; done; done" | ||
"snap": "tap", | ||
"test": "tap", | ||
"posttest": "npm run lint" | ||
}, | ||
@@ -34,28 +30,48 @@ "dependencies": { | ||
"devDependencies": { | ||
"@npmcli/eslint-config": "^4.0.0", | ||
"@npmcli/template-oss": "4.8.0", | ||
"chmodr": "^1.2.0", | ||
"end-of-stream": "^1.4.3", | ||
"eslint": "^7.17.0", | ||
"eslint-plugin-import": "^2.22.1", | ||
"eslint-plugin-node": "^11.1.0", | ||
"eslint-plugin-promise": "^4.2.1", | ||
"eslint-plugin-standard": "^5.0.0", | ||
"events-to-array": "^1.1.2", | ||
"mutate-fs": "^2.1.1", | ||
"rimraf": "^2.7.1", | ||
"tap": "^15.0.9", | ||
"tar-fs": "^1.16.3", | ||
"tar-stream": "^1.6.2" | ||
"nock": "^13.2.9", | ||
"rimraf": "^3.0.2", | ||
"tap": "^16.0.1" | ||
}, | ||
"license": "ISC", | ||
"engines": { | ||
"node": ">= 10" | ||
"node": ">=10" | ||
}, | ||
"files": [ | ||
"index.js", | ||
"lib/*.js" | ||
"bin/", | ||
"lib/", | ||
"index.js" | ||
], | ||
"tap": { | ||
"coverage-map": "map.js", | ||
"check-coverage": true | ||
"timeout": 0, | ||
"nyc-arg": [ | ||
"--exclude", | ||
"tap-snapshots/**" | ||
] | ||
}, | ||
"templateOSS": { | ||
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", | ||
"version": "4.8.0", | ||
"content": "scripts/template-oss", | ||
"engines": ">=10", | ||
"distPaths": [ | ||
"index.js" | ||
], | ||
"allowPaths": [ | ||
"/index.js" | ||
], | ||
"ciVersions": [ | ||
"10.x", | ||
"12.x", | ||
"14.x", | ||
"16.x", | ||
"18.x" | ||
] | ||
} | ||
} |
# node-tar | ||
[Fast](./benchmarks) and full-featured Tar for Node.js | ||
Fast and full-featured Tar for Node.js | ||
@@ -212,2 +212,15 @@ The API is designed to mimic the behavior of `tar(1)` on unix systems. | ||
For example, to just get the list of filenames from an archive: | ||
```js | ||
const getEntryFilenames = async tarballFilename => { | ||
const filenames = [] | ||
await tar.t({ | ||
file: tarballFilename, | ||
onentry: entry => filenames.push(entry.path), | ||
}) | ||
return filenames | ||
} | ||
``` | ||
To replicate `cat my-tarball.tgz | tar t` do: | ||
@@ -227,2 +240,14 @@ | ||
```js | ||
const getEntryFilenamesSync = tarballFilename => { | ||
const filenames = [] | ||
tar.t({ | ||
file: tarballFilename, | ||
onentry: entry => filenames.push(entry.path), | ||
sync: true, | ||
}) | ||
return filenames | ||
} | ||
``` | ||
To filter entries, add `filter: <function>` to the options. | ||
@@ -434,11 +459,14 @@ Tar-creating methods call the filter with `filter(path, stat)`. | ||
Returns an event emitter that emits `entry` events with | ||
`tar.ReadEntry` objects. However, they don't emit `'data'` or `'end'` | ||
events. (If you want to get actual readable entries, use the | ||
`tar.Parse` class instead.) | ||
If the `file` option is _not_ provided, then returns an event emitter that | ||
emits `entry` events with `tar.ReadEntry` objects. However, they don't | ||
emit `'data'` or `'end'` events. (If you want to get actual readable | ||
entries, use the `tar.Parse` class instead.) | ||
If a `file` option _is_ provided, then the return value will be a promise | ||
that resolves when the file has been fully traversed in async mode, or | ||
`undefined` if `sync: true` is set. Thus, you _must_ specify an `onentry` | ||
method in order to do anything useful with the data it parses. | ||
The following options are supported: | ||
- `cwd` Extract files relative to the specified directory. Defaults | ||
to `process.cwd()`. [Alias: `C`] | ||
- `file` The archive file to list. If not specified, then a | ||
@@ -455,4 +483,4 @@ Writable stream is returned where the archive data should be | ||
- `onentry` A function that gets called with `(entry)` for each entry | ||
that passes the filter. This is important for when both `file` and | ||
`sync` are set, because it will be called synchronously. | ||
that passes the filter. This is important for when `file` is set, | ||
because there is no other way to do anything useful with this method. | ||
- `maxReadSize` The maximum buffer size for `fs.read()` operations. | ||
@@ -459,0 +487,0 @@ Defaults to 16 MB. |
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but they do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
163639
9
3771
1071
1