tar
Advanced tools
Comparing version 4.0.2 to 4.1.0
@@ -8,2 +8,3 @@ 'use strict' | ||
const fs = require('fs') | ||
const fsm = require('fs-minipass') | ||
const t = require('./list.js') | ||
@@ -40,15 +41,7 @@ const path = require('path') | ||
const p = new Pack.Sync(opt) | ||
let threw = true | ||
let fd | ||
try { | ||
fd = fs.openSync(opt.file, 'w', opt.mode || 0o666) | ||
p.on('data', chunk => fs.writeSync(fd, chunk, 0, chunk.length)) | ||
p.on('end', _ => fs.closeSync(fd)) | ||
addFilesSync(p, files) | ||
threw = false | ||
} finally { | ||
if (threw) | ||
try { fs.closeSync(fd) } catch (er) {} | ||
} | ||
const stream = new fsm.WriteStreamSync(opt.file, { | ||
mode: opt.mode || 0o666 | ||
}) | ||
p.pipe(stream) | ||
addFilesSync(p, files) | ||
} | ||
@@ -58,3 +51,5 @@ | ||
const p = new Pack(opt) | ||
const stream = fs.createWriteStream(opt.file, { mode: opt.mode || 0o666 }) | ||
const stream = new fsm.WriteStream(opt.file, { | ||
mode: opt.mode || 0o666 | ||
}) | ||
p.pipe(stream) | ||
@@ -61,0 +56,0 @@ |
@@ -7,2 +7,3 @@ 'use strict' | ||
const fs = require('fs') | ||
const fsm = require('fs-minipass') | ||
const path = require('path') | ||
@@ -68,24 +69,11 @@ | ||
let fd | ||
try { | ||
const stat = fs.statSync(file) | ||
const readSize = opt.maxReadSize || 16*1024*1024 | ||
if (stat.size < readSize) | ||
u.end(fs.readFileSync(file)) | ||
else { | ||
let pos = 0 | ||
const buf = Buffer.allocUnsafe(readSize) | ||
fd = fs.openSync(file, 'r') | ||
while (pos < stat.size) { | ||
let bytesRead = fs.readSync(fd, buf, 0, readSize, pos) | ||
pos += bytesRead | ||
u.write(buf.slice(0, bytesRead)) | ||
} | ||
u.end() | ||
fs.closeSync(fd) | ||
} | ||
threw = false | ||
} finally { | ||
if (threw && fd) | ||
try { fs.closeSync(fd) } catch (er) {} | ||
} | ||
const stat = fs.statSync(file) | ||
// This trades a zero-byte read() syscall for a stat | ||
// However, it will usually result in less memory allocation | ||
const readSize = opt.maxReadSize || 16*1024*1024 | ||
const stream = new fsm.ReadStreamSync(file, { | ||
readSize: readSize, | ||
size: stat.size | ||
}) | ||
stream.pipe(u) | ||
} | ||
@@ -102,14 +90,11 @@ | ||
// This trades a zero-byte read() syscall for a stat | ||
// However, it will usually result in less memory allocation | ||
fs.stat(file, (er, stat) => { | ||
if (er) | ||
reject(er) | ||
else if (stat.size < readSize) | ||
fs.readFile(file, (er, data) => { | ||
if (er) | ||
return reject(er) | ||
u.end(data) | ||
}) | ||
else { | ||
const stream = fs.createReadStream(file, { | ||
highWaterMark: readSize | ||
const stream = new fsm.ReadStream(file, { | ||
readSize: readSize, | ||
size: stat.size | ||
}) | ||
@@ -116,0 +101,0 @@ stream.on('error', reject) |
@@ -10,2 +10,3 @@ 'use strict' | ||
const fs = require('fs') | ||
const fsm = require('fs-minipass') | ||
const path = require('path') | ||
@@ -115,11 +116,6 @@ | ||
reject(er) | ||
else if (stat.size < readSize) | ||
fs.readFile(file, (er, data) => { | ||
if (er) | ||
return reject(er) | ||
parse.end(data) | ||
}) | ||
else { | ||
const stream = fs.createReadStream(file, { | ||
highWaterMark: readSize | ||
const stream = new fsm.ReadStream(file, { | ||
readSize: readSize, | ||
size: stat.size | ||
}) | ||
@@ -126,0 +122,0 @@ stream.on('error', reject) |
@@ -91,2 +91,3 @@ 'use strict' | ||
this.follow = !!opt.follow | ||
this.noMtime = !!opt.noMtime | ||
@@ -295,3 +296,4 @@ this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true | ||
linkCache: this.linkCache, | ||
statCache: this.statCache | ||
statCache: this.statCache, | ||
noMtime: this.noMtime | ||
} | ||
@@ -303,6 +305,5 @@ } | ||
try { | ||
return new this[WRITEENTRYCLASS]( | ||
job.path, this[ENTRYOPT](job)).on('end', _ => { | ||
this[JOBDONE](job) | ||
}).on('error', er => this.emit('error', er)) | ||
return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) | ||
.on('end', () => this[JOBDONE](job)) | ||
.on('error', er => this.emit('error', er)) | ||
} catch (er) { | ||
@@ -384,3 +385,2 @@ this.emit('error', er) | ||
const base = p === './' ? '' : p.replace(/\/*$/, '/') | ||
@@ -387,0 +387,0 @@ this[ADDFSENTRY](base + entry) |
@@ -8,2 +8,3 @@ 'use strict' | ||
const fs = require('fs') | ||
const fsm = require('fs-minipass') | ||
const t = require('./list.js') | ||
@@ -43,2 +44,4 @@ const path = require('path') | ||
let fd | ||
let position | ||
try { | ||
@@ -56,3 +59,2 @@ try { | ||
const headBuf = Buffer.alloc(512) | ||
let position | ||
@@ -84,11 +86,5 @@ POSITION: for (position = 0; position < st.size; position += 512) { | ||
} | ||
threw = false | ||
p.on('data', c => { | ||
fs.writeSync(fd, c, 0, c.length, position) | ||
position += c.length | ||
}) | ||
p.on('end', _ => fs.closeSync(fd)) | ||
addFilesSync(p, files) | ||
threw = false | ||
streamSync(opt, p, position, fd, files) | ||
} finally { | ||
@@ -100,2 +96,11 @@ if (threw) | ||
const streamSync = (opt, p, position, fd, files) => { | ||
const stream = new fsm.WriteStreamSync(opt.file, { | ||
fd: fd, | ||
start: position | ||
}) | ||
p.pipe(stream) | ||
addFilesSync(p, files) | ||
} | ||
const replace = (opt, files, cb) => { | ||
@@ -158,8 +163,12 @@ files = Array.from(files) | ||
p.on('error', reject) | ||
let flag = 'r+' | ||
const onopen = (er, fd) => { | ||
if (er) { | ||
if (er.code === 'ENOENT') | ||
return fs.open(opt.file, 'w+', onopen) | ||
if (er && er.code === 'ENOENT' && flag === 'r+') { | ||
flag = 'w+' | ||
return fs.open(opt.file, flag, onopen) | ||
} | ||
if (er) | ||
return reject(er) | ||
} | ||
fs.fstat(fd, (er, st) => { | ||
@@ -171,5 +180,4 @@ if (er) | ||
return reject(er) | ||
const stream = fs.createWriteStream(opt.file, { | ||
const stream = new fsm.WriteStream(opt.file, { | ||
fd: fd, | ||
flags: 'r+', | ||
start: position | ||
@@ -184,3 +192,3 @@ }) | ||
} | ||
fs.open(opt.file, 'r+', onopen) | ||
fs.open(opt.file, flag, onopen) | ||
}) | ||
@@ -187,0 +195,0 @@ |
@@ -7,2 +7,3 @@ 'use strict' | ||
const fs = require('fs') | ||
const fsm = require('fs-minipass') | ||
const path = require('path') | ||
@@ -36,2 +37,8 @@ const mkdir = require('./mkdir.js') | ||
// this.gid, entry.gid, this.processUid | ||
const uint32 = (a, b, c) => | ||
a === a >>> 0 ? a | ||
: b === b >>> 0 ? b | ||
: c | ||
class Unpack extends Parser { | ||
@@ -229,11 +236,7 @@ constructor (opt) { | ||
[UID] (entry) { | ||
return typeof this.uid === 'number' ? this.uid | ||
: typeof entry.uid === 'number' ? entry.uid | ||
: this.processUid | ||
return uint32(this.uid, entry.uid, this.processUid) | ||
} | ||
[GID] (entry) { | ||
return typeof this.gid === 'number' ? this.gid | ||
: typeof entry.gid === 'number' ? entry.gid | ||
: this.processGid | ||
return uint32(this.gid, entry.gid, this.processGid) | ||
} | ||
@@ -243,23 +246,33 @@ | ||
const mode = entry.mode & 0o7777 || this.fmode | ||
const stream = fs.createWriteStream(entry.absolute, { mode: mode }) | ||
const stream = new fsm.WriteStream(entry.absolute, { | ||
mode: mode, | ||
autoClose: false | ||
}) | ||
stream.on('error', er => this[ONERROR](er, entry)) | ||
const queue = [] | ||
const processQueue = _ => { | ||
const action = queue.shift() | ||
if (action) | ||
action(processQueue) | ||
else | ||
this[UNPEND]() | ||
let actions = 1 | ||
const done = er => { | ||
if (er) { | ||
this[ONERROR](er, entry) | ||
actions = 1 | ||
} | ||
if (--actions === 0) | ||
fs.close(stream.fd, _ => this[UNPEND]()) | ||
} | ||
stream.on('close', _ => { | ||
if (entry.mtime && !this.noMtime) | ||
queue.push(cb => | ||
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb)) | ||
if (this[DOCHOWN](entry)) | ||
queue.push(cb => | ||
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb)) | ||
processQueue() | ||
stream.on('finish', _ => { | ||
if (entry.mtime && !this.noMtime) { | ||
actions++ | ||
fs.futimes(stream.fd, entry.atime || new Date(), entry.mtime, done) | ||
} | ||
if (this[DOCHOWN](entry)) { | ||
actions++ | ||
fs.fchown(stream.fd, this[UID](entry), this[GID](entry), done) | ||
} | ||
done() | ||
}) | ||
entry.pipe(stream) | ||
@@ -274,8 +287,5 @@ } | ||
const queue = [] | ||
const processQueue = _ => { | ||
const action = queue.shift() | ||
if (action) | ||
action(processQueue) | ||
else { | ||
let actions = 1 | ||
const done = _ => { | ||
if (--actions === 0) { | ||
this[UNPEND]() | ||
@@ -286,10 +296,13 @@ entry.resume() | ||
if (entry.mtime && !this.noMtime) | ||
queue.push(cb => | ||
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, cb)) | ||
if (this[DOCHOWN](entry)) | ||
queue.push(cb => | ||
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), cb)) | ||
if (entry.mtime && !this.noMtime) { | ||
actions++ | ||
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done) | ||
} | ||
processQueue() | ||
if (this[DOCHOWN](entry)) { | ||
actions++ | ||
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done) | ||
} | ||
done() | ||
}) | ||
@@ -420,19 +433,38 @@ } | ||
const mode = entry.mode & 0o7777 || this.fmode | ||
const oner = er => { | ||
try { fs.closeSync(fd) } catch (_) {} | ||
this[ONERROR](er, entry) | ||
} | ||
let stream | ||
let fd | ||
try { | ||
const fd = fs.openSync(entry.absolute, 'w', mode) | ||
entry.on('data', buf => fs.writeSync(fd, buf, 0, buf.length, null)) | ||
entry.on('end', _ => { | ||
if (entry.mtime && !this.noMtime) { | ||
try { | ||
fs.futimesSync(fd, entry.atime || new Date(), entry.mtime) | ||
} catch (er) {} | ||
} | ||
if (this[DOCHOWN](entry)) { | ||
try { | ||
fs.fchownSync(fd, this[UID](entry), this[GID](entry)) | ||
} catch (er) {} | ||
} | ||
try { fs.closeSync(fd) } catch (er) { this[ONERROR](er, entry) } | ||
}) | ||
} catch (er) { this[ONERROR](er, entry) } | ||
fd = fs.openSync(entry.absolute, 'w', mode) | ||
} catch (er) { | ||
return oner(er) | ||
} | ||
entry.on('data', chunk => { | ||
try { | ||
fs.writeSync(fd, chunk, 0, chunk.length) | ||
} catch (er) { | ||
oner(er) | ||
} | ||
}) | ||
entry.on('end', _ => { | ||
try { | ||
if (entry.mtime && !this.noMtime) | ||
fs.futimesSync(fd, entry.atime || new Date(), entry.mtime) | ||
if (this[DOCHOWN](entry)) | ||
fs.fchownSync(fd, this[UID](entry), this[GID](entry)) | ||
fs.closeSync(fd) | ||
} catch (er) { | ||
return oner(er) | ||
} | ||
}) | ||
} | ||
@@ -439,0 +471,0 @@ |
@@ -47,2 +47,4 @@ 'use strict' | ||
this.noPax = !!opt.noPax | ||
this.noMtime = !!opt.noMtime | ||
if (typeof opt.onwarn === 'function') | ||
@@ -105,2 +107,5 @@ this.on('warn', opt.onwarn) | ||
[HEADER] () { | ||
if (this.type === 'Directory' && this.portable) | ||
this.noMtime = true | ||
this.header = new Header({ | ||
@@ -115,4 +120,3 @@ path: this.path, | ||
size: this.stat.size, | ||
mtime: this.type === 'Directory' && this.portable | ||
? null : this.stat.mtime, | ||
mtime: this.noMtime ? null : this.stat.mtime, | ||
type: this.type, | ||
@@ -130,3 +134,3 @@ uname: this.portable ? null : | ||
gid: this.portable ? null : this.header.gid, | ||
mtime: this.header.mtime, | ||
mtime: this.noMtime ? null : this.header.mtime, | ||
path: this.path, | ||
@@ -301,4 +305,13 @@ linkpath: this.linkpath, | ||
super(opt) | ||
this.preservePaths = !!opt.preservePaths | ||
this.portable = !!opt.portable | ||
this.strict = !!opt.strict | ||
this.noPax = !!opt.noPax | ||
this.noMtime = !!opt.noMtime | ||
this.readEntry = readEntry | ||
this.type = readEntry.type | ||
if (this.type === 'Directory' && this.portable) | ||
this.noMtime = true | ||
this.path = readEntry.path | ||
@@ -308,19 +321,12 @@ this.mode = readEntry.mode | ||
this.mode = this.mode & 0o7777 | ||
this.uid = readEntry.uid | ||
this.gid = readEntry.gid | ||
this.uname = readEntry.uname | ||
this.gname = readEntry.gname | ||
this.uid = this.portable ? null : readEntry.uid | ||
this.gid = this.portable ? null : readEntry.gid | ||
this.uname = this.portable ? null : readEntry.uname | ||
this.gname = this.portable ? null : readEntry.gname | ||
this.size = readEntry.size | ||
this.mtime = readEntry.mtime | ||
this.atime = readEntry.atime | ||
this.ctime = readEntry.ctime | ||
this.mtime = this.noMtime ? null : readEntry.mtime | ||
this.atime = this.portable ? null : readEntry.atime | ||
this.ctime = this.portable ? null : readEntry.ctime | ||
this.linkpath = readEntry.linkpath | ||
this.uname = readEntry.uname | ||
this.gname = readEntry.gname | ||
this.preservePaths = !!opt.preservePaths | ||
this.portable = !!opt.portable | ||
this.strict = !!opt.strict | ||
this.noPax = !!opt.noPax | ||
if (typeof opt.onwarn === 'function') | ||
@@ -350,3 +356,3 @@ this.on('warn', opt.onwarn) | ||
size: this.size, | ||
mtime: this.mtime, | ||
mtime: this.noMtime ? null : this.mtime, | ||
type: this.type, | ||
@@ -363,3 +369,3 @@ uname: this.portable ? null : this.uname, | ||
gid: this.portable ? null : this.gid, | ||
mtime: this.mtime, | ||
mtime: this.noMtime ? null : this.mtime, | ||
path: this.path, | ||
@@ -366,0 +372,0 @@ linkpath: this.linkpath, |
@@ -5,3 +5,3 @@ { | ||
"description": "tar for node", | ||
"version": "4.0.2", | ||
"version": "4.1.0", | ||
"repository": { | ||
@@ -12,3 +12,3 @@ "type": "git", | ||
"scripts": { | ||
"test": "tap test/*.js --100 -J --coverage-report=text", | ||
"test": "tap test/*.js --100 -J --coverage-report=text -c", | ||
"preversion": "npm test", | ||
@@ -22,2 +22,3 @@ "postversion": "npm publish", | ||
"chownr": "^1.0.1", | ||
"fs-minipass": "^1.2.3", | ||
"minipass": "^2.2.1", | ||
@@ -32,5 +33,5 @@ "minizlib": "^1.0.4", | ||
"events-to-array": "^1.1.2", | ||
"mutate-fs": "^1.1.0", | ||
"mutate-fs": "^2.1.1", | ||
"rimraf": "^2.6.2", | ||
"tap": "^10.7.2", | ||
"tap": "^11.0.0-rc.3", | ||
"tar-fs": "^1.16.0", | ||
@@ -37,0 +38,0 @@ "tar-stream": "^1.5.2" |
@@ -229,2 +229,6 @@ # node-tar | ||
numeric values may be interpreted incorrectly. | ||
- `noMtime` Set to true to omit writing `mtime` values for entries. | ||
Note that this prevents using other mtime-based features like | ||
`tar.update` or the `keepNewer` option with the resulting tar archive. | ||
[Alias: `m`, `no-mtime`] | ||
@@ -314,2 +318,4 @@ The following options are mostly internal, but can be modified in some | ||
`uid` option. | ||
- `noMtime` Set to true to omit writing `mtime` value for extracted | ||
entries. [Alias: `m`, `no-mtime`] | ||
@@ -410,2 +416,6 @@ The following options are mostly internal, but can be modified in some | ||
numeric values may be interpreted incorrectly. | ||
- `noMtime` Set to true to omit writing `mtime` values for entries. | ||
Note that this prevents using other mtime-based features like | ||
`tar.update` or the `keepNewer` option with the resulting tar archive. | ||
[Alias: `m`, `no-mtime`] | ||
@@ -456,3 +466,8 @@ ### tar.r(options, fileList, callback) [alias: tar.replace] | ||
numeric values may be interpreted incorrectly. | ||
- `noMtime` Set to true to omit writing `mtime` values for entries. | ||
Note that this prevents using other mtime-based features like | ||
`tar.update` or the `keepNewer` option with the resulting tar archive. | ||
[Alias: `m`, `no-mtime`] | ||
## Low-Level API | ||
@@ -503,3 +518,7 @@ | ||
numeric values may be interpreted incorrectly. | ||
- `noMtime` Set to true to omit writing `mtime` values for entries. | ||
Note that this prevents using other mtime-based features like | ||
`tar.update` or the `keepNewer` option with the resulting tar archive. | ||
#### add(path) | ||
@@ -591,2 +610,4 @@ | ||
`uid` option. | ||
- `noMtime` Set to true to omit writing `mtime` value for extracted | ||
entries. | ||
@@ -702,3 +723,7 @@ ### class tar.Unpack.Sync | ||
numeric values may be interpreted incorrectly. | ||
- `noMtime` Set to true to omit writing `mtime` values for entries. | ||
Note that this prevents using other mtime-based features like | ||
`tar.update` or the `keepNewer` option with the resulting tar archive. | ||
#### constructor(path, options) | ||
@@ -731,2 +756,5 @@ | ||
any warnings encountered. | ||
- `noMtime` Set to true to omit writing `mtime` values for entries. | ||
Note that this prevents using other mtime-based features like | ||
`tar.update` or the `keepNewer` option with the resulting tar archive. | ||
@@ -763,2 +791,5 @@ #### warn(message, data) | ||
any warnings encountered. | ||
- `noMtime` Set to true to omit writing `mtime` values for entries. | ||
Note that this prevents using other mtime-based features like | ||
`tar.update` or the `keepNewer` option with the resulting tar archive. | ||
@@ -765,0 +796,0 @@ ### class tar.Header |
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
125487
2808
915
6
14
+ Added fs-minipass@^1.2.3
+ Added fs-minipass@1.2.7 (transitive)