
cacache

Comparing version 2.0.0 to 3.0.0

lib/util/check-size-stream.js
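This diff adds a new helper, `lib/util/check-size-stream.js`, whose contents are not shown in this view. Based on how `put-stream.js` uses it below (`checkSizeStream(opts.size)`) and the `EBADSIZE` error documented in the README diff, a minimal sketch of what such a pass-through size check might look like -- the actual implementation is an assumption:

```javascript
// Hypothetical sketch -- the real file's contents are not shown in this diff.
var through = require('mississippi').through

module.exports = checkSizeStream
function checkSizeStream (expectedSize) {
  var seen = 0
  // pass chunks through untouched, counting bytes as they go by
  return through(function (chunk, enc, cb) {
    seen += chunk.length
    cb(null, chunk)
  }, function (cb) {
    // on stream end, fail if we saw more or less data than promised
    if (seen !== expectedSize) {
      var err = new Error('stream data size mismatch')
      err.code = 'EBADSIZE'
      err.expected = expectedSize
      err.found = seen
      return cb(err)
    }
    cb()
  })
}
```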


get.js

@@ -0,24 +1,52 @@

+var fs = require('graceful-fs')
 var index = require('./lib/entry-index')
+var pipe = require('mississippi').pipe
 var read = require('./lib/content/read')
+var through = require('mississippi').through
-module.exports.directory = directory
-module.exports.directory.byDigest = read.asDirectory
-function directory (cache, key, destination, opts, cb) {
+module.exports.stream = stream
+module.exports.stream.byDigest = read.readStream
+function stream (cache, key, opts) {
+  var stream = through()
   index.find(cache, key, function (err, data) {
-    if (err) { return cb(err) }
-    if (!data) { return cb(index.notFoundError(cache, key)) }
-    read.asDirectory(cache, data.digest, destination, opts, cb)
+    if (err) { return stream.emit('error', err) }
+    if (!data) {
+      return stream.emit(
+        'error', index.notFoundError(cache, key)
+      )
+    }
+    pipe(
+      read.readStream(cache, data.digest, opts),
+      stream
+    )
   })
+  return stream
 }
-module.exports.tarball = tarball
-module.exports.tarball.byDigest = read.asTarball
-function tarball (cache, key, destination, opts, cb) {
-  index.find(cache, key, function (err, data) {
-    if (err) { return cb(err) }
-    if (!data) { return cb(index.notFoundError(cache, key)) }
-    read.asTarball(cache, data.digest, destination, opts, cb)
-  })
+module.exports.file = file
+function file (cache, key, destination, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  pipe(
+    stream(cache, key, opts),
+    fs.createWriteStream(destination),
+    cb
+  )
+}
+file.byDigest = fileByDigest
+function fileByDigest (cache, digest, destination, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  pipe(
+    stream.byDigest(cache, digest, opts),
+    fs.createWriteStream(destination),
+    cb
+  )
+}
 module.exports.info = info

@@ -25,0 +53,0 @@ function info (cache, key, cb) {

index.js

 module.exports = {
   chownr: require('./lib/util/fix-owner').chownr,
   ls: require('./ls'),
   get: require('./get'),
   put: require('./put'),
-  rm: require('./rm')
+  rm: require('./rm'),
+  verify: require('./verify')
 }

lib/content/put-stream.js

@@ -0,3 +1,4 @@

+var checkSizeStream = require('../util/check-size-stream')
+var checksumStream = require('../util/checksum-stream')
 var contentPath = require('./path')
-var crypto = require('crypto')
 var dezalgo = require('dezalgo')

@@ -7,26 +8,25 @@ var fixOwner = require('../util/fix-owner')

 var hasContent = require('./read').hasContent
-var mv = require('mv')
+var move = require('../util/move')
+var once = require('once')
 var path = require('path')
-var pumpify = require('pumpify')
-var through = require('through2')
+var pipe = require('mississippi').pipe
 var randomstring = require('randomstring')
 var rimraf = require('rimraf')
-var tar = require('tar-fs')
-var zlib = require('zlib')
+var through = require('mississippi').through
 module.exports = putStream
 function putStream (cache, inputStream, opts, _cb) {
   if (!_cb) {
     _cb = opts
     opts = null
   }
   opts = opts || {}
   var logger = wrapLogger(opts.logger || Function.prototype)
   var startTime = +(new Date())
   var tmpTarget = path.join(cache, 'tmp', (opts.tmpPrefix || '') + randomstring.generate())
-  var cb = dezalgo(function (err, digest) {
+  var cb = dezalgo(once(function (err, digest) {
     rimraf(tmpTarget, function (err2) {
       var timeDiff = +(new Date()) - startTime
       logger('verbose', 'processed', digest, 'in', timeDiff + 'ms')
       return _cb(err2 || err, digest)
     })
-  })
+  }))
+  inputStream.on('error', cb)
   hasContent(cache, opts.digest, function (err, exists) {

@@ -36,19 +36,9 @@ if (err) { return cb(err) }

 if (exists) {
   logger('silly', 'content already present. Skipping write.')
   return cb(err, opts.digest)
 }
-fixOwner.mkdirfix(tmpTarget, opts.uid, opts.gid, function (err) {
+pipeToTmp(inputStream, tmpTarget, opts, function (err, digest) {
   if (err) { return cb(err) }
-  pipeToTmp(inputStream, tmpTarget, opts, function (err, digest) {
+  moveToDestination(tmpTarget, cache, digest, opts, function (err) {
     if (err) { return cb(err) }
-    logger('silly', 'Temporary file written. Verifying.')
-    var verifier = opts.verify || function (target, digest, cb) { cb() }
-    verifier(tmpTarget, digest, function (err) {
-      if (err) { return cb(err) }
-      logger('silly', 'Verified. Moving to final cache destination')
-      moveToDestination(tmpTarget, cache, digest, logger, opts, function (err) {
-        if (err) { return cb(err) }
-        cb(null, digest)
-      })
-    })
+    cb(null, digest)
   })

@@ -59,112 +49,48 @@ })

 function wrapLogger (logObj) {
   return function () {
     if (logObj[arguments[0]]) {
       logObj[arguments[0]].apply(logObj, [].slice.call(arguments, 1))
     } else if (logObj.log) {
       logObj.log.apply(logObj, arguments)
     } else if (typeof logObj === 'function') {
       logObj.apply(null, arguments)
     }
   }
 }
 function pipeToTmp (inputStream, tmpTarget, opts, cb) {
-  var hash = crypto.createHash(opts.hash || 'sha256')
-  var hashStream = through(function (chunk, enc, cb) {
-    hash.update(chunk, enc)
-    cb(null, chunk)
-  })
+  fixOwner.mkdirfix(path.dirname(tmpTarget), opts.uid, opts.gid, function (err) {
+    if (err) { return cb(err) }
+    var hashStream = checksumStream(opts.digest, opts.hashAlgorithm)
+    var sizeStream = opts.size !== undefined
+      ? checkSizeStream(opts.size)
+      : through()
+    var digest
+    hashStream.on('digest', function (d) { digest = d })
-  var gotData = false
-  inputStream.on('data', function headerCheck (c) {
-    gotData = true
-    pumpify(
+    var gotData = false
+    var outStream = fs.createWriteStream(tmpTarget)
+    inputStream.on('data', function dataCheck () {
+      gotData = true
+      inputStream.removeListener('data', dataCheck)
+    })
+    pipe(
       inputStream,
+      sizeStream,
       hashStream,
-      makeOutStream(c, tmpTarget, opts).on('finish', function () {
-        var digest = hash.digest('hex')
-        if (opts.digest && (opts.digest !== digest)) {
-          var er = new Error('digests did not match')
-          er.found = digest
-          er.expected = opts.digest
-          return cb(er)
+      outStream,
+      function (err) {
+        if (err) { return cb(err) }
+        if (!gotData) {
+          var e = new Error('Input stream empty')
+          e.code = 'ENODATA'
+          e.stream = inputStream
+          return cb(e)
         }
         cb(null, digest)
-      })
+      }
     )
-    // remove and re-emit
-    inputStream.removeListener('data', headerCheck)
-    inputStream.emit('data', c)
-  }).on('error', cb).on('finish', function () {
-    if (!gotData) {
-      var err = new Error('Input stream empty')
-      err.code = 'ENODATA'
-      cb(new Error('input stream empty'))
-    }
-  })
 }
-function makeOutStream (c, target, opts) {
-  if (opts.filename) {
-    return fs.createWriteStream(path.join(target, opts.filename))
-  } else if (c[0] === 0x1F && c[1] === 0x8B && c[2] === 0x08) {
-    return pumpify(zlib.Unzip(), makeTarStream(target, opts))
-  } else if (hasTarHeader(c)) {
-    return makeTarStream(target, opts)
-  } else {
-    return fs.createWriteStream(path.join(target, opts.filename || 'index.js'))
-  }
-}
+function moveToDestination (tmpTarget, cache, digest, opts, cb) {
+  var destination = contentPath(cache, digest)
+  var destDir = path.dirname(destination)
-function makeTarStream (target, opts) {
-  return tar.extract(target, {
-    map: function (header) {
-      if (process.platform !== 'win32') {
-        header.uid = typeof opts.uid === 'number' ? opts.uid : header.uid
-        header.gid = typeof opts.gid === 'number' ? opts.gid : header.gid
-      }
-      return header
-    },
-    ignore: opts.ignore,
-    dmode: opts.dmode,
-    fmode: opts.fmode,
-    umask: opts.umask,
-    strip: opts.strip
+  fixOwner.mkdirfix(destDir, opts.uid, opts.gid, function (err) {
+    if (err) { return cb(err) }
+    move(tmpTarget, destination, function (err) {
+      if (err) { return cb(err) }
+      fixOwner.chownr(destination, opts.uid, opts.gid, cb)
+    })
   })
 }
-function hasTarHeader (c) {
-  return c[257] === 0x75 && // tar archives have 7573746172 at position
-    c[258] === 0x73 &&      // 257 and 003030 or 202000 at position 262
-    c[259] === 0x74 &&
-    c[260] === 0x61 &&
-    c[261] === 0x72 &&
-    ((c[262] === 0x00 &&
-      c[263] === 0x30 &&
-      c[264] === 0x30) ||
-     (c[262] === 0x20 &&
-      c[263] === 0x20 &&
-      c[264] === 0x00))
-}
-function moveToDestination (tmpTarget, cache, digest, logger, opts, cb) {
-  var destination = contentPath(cache, digest)
-  mv(tmpTarget, destination, {
-    mkdirp: true, clobber: !!opts.clobber
-  }, function (err) {
-    if (err) {
-      if (err.code === 'EEXIST') {
-        logger('silly', digest, 'already has an entry in the cache. Skipping move. Use the clobber option to force copy')
-      } else if (err.code === 'EBUSY') {
-        logger('silly', digest, 'exists and is already being accessed. Skipping move.')
-      } else {
-        return cb(err)
-      }
-    }
-    fixOwner.chownr(destination, opts.uid, opts.gid, cb)
-  })
-}

lib/content/read.js

@@ -0,28 +1,27 @@

+var checksumStream = require('../util/checksum-stream')
 var contentPath = require('./path')
-var copy = require('fs-extra/lib/copy')
 var dezalgo = require('dezalgo')
 var fs = require('graceful-fs')
-var pumpify = require('pumpify')
-var tar = require('tar-fs')
+var pipe = require('mississippi').pipe
-module.exports.asDirectory = asDirectory
-function asDirectory (cache, address, destination, opts, cb) {
+module.exports.readStream = readStream
+function readStream (cache, address, opts) {
+  opts = opts || {}
+  var stream = checksumStream(address, opts.hashAlgorithm || 'sha256')
   var cpath = contentPath(cache, address)
-  copy(cpath, destination, opts, cb)
+  hasContent(cache, address, function (err, exists) {
+    if (err) { return stream.emit('error', err) }
+    if (!exists) {
+      err = new Error('content not found')
+      err.code = 'ENOENT'
+      err.cache = cache
+      err.digest = address
+      return stream.emit('error', err)
+    } else {
+      pipe(fs.createReadStream(cpath), stream)
+    }
+  })
+  return stream
 }
-module.exports.asTarball = asTarball
-function asTarball (cache, address, destination, opts, cb) {
-  var cpath = contentPath(cache, address)
-  pumpify(
-    tar.pack(cpath, {
-      map: opts.prefix && function (header) {
-        header.name = opts.prefix + '/' + header.name
-        return header
-      }
-    }),
-    fs.createWriteStream(destination)
-  ).on('error', cb).on('finish', function () { cb() })
-}
 module.exports.hasContent = hasContent

@@ -35,2 +34,4 @@ function hasContent (cache, address, cb) {

 return cb(null, false)
+} else if (err && process.platform === 'win32' && err.code === 'EPERM') {
+  return cb(null, false)
 } else if (err) {

@@ -37,0 +38,0 @@ return cb(err)

lib/content/rm.js

 var contentPath = require('./path')
 var rimraf = require('rimraf')
-module.export = rm
+module.exports = rm
 function rm (cache, address, cb) {
   rimraf(contentPath(cache, address), cb)
 }

lib/entry-index.js

@@ -0,3 +1,3 @@

+var asyncMap = require('slide/lib/async-map')
 var contentPath = require('./content/path')
-var crypto = require('crypto')
 var fixOwner = require('./util/fix-owner')

@@ -7,5 +7,5 @@ var fs = require('graceful-fs')

 var path = require('path')
-var pumpify = require('pumpify')
-var asyncMap = require('slide/lib/async-map')
+var pipe = require('mississippi').pipe
 var split = require('split')
+var through = require('mississippi').through

@@ -30,9 +30,4 @@ module.exports.insert = insert

 if (err) { return _cb(err) }
-find(cache, key, function (err, existing) {
-  if (err) { cb(err) }
-  if (existing && existing.digest === digest) {
-    if (typeof opts.override === 'undefined' || !opts.override) {
-      return cb()
-    }
-  }
+fs.stat(bucket, function (err, existing) {
+  if (err && err.code !== 'ENOENT' && err.code !== 'EPERM') { cb(err) }
   var entry = {

@@ -44,3 +39,19 @@ key: key,

 }
-fs.appendFile(bucket, JSON.stringify(entry) + '\n', function (err) {
+// Because of the way these entries work,
+// the index is safe from fs.appendFile stopping
+// mid-write so long as newlines are *prepended*
+//
+// That is, if a write fails, it will be ignored
+// by `find`, and the next successful one will be
+// used.
+//
+// This should be -very rare-, since `fs.appendFile`
+// will often be atomic on most platforms unless
+// very large metadata has been included, but caches
+// like this one tend to last a long time. :)
+// Most corrupted reads are likely to be from attempting
+// to read the index while it's being written to --
+// which is safe, but not guaranteed to be atomic.
+var e = (existing ? '\n' : '') + JSON.stringify(entry)
+fs.appendFile(bucket, e, function (err) {
   if (err) { return cb(err) }
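The comment above is the crux of the index's crash tolerance: a torn write can only corrupt the *last* line of a bucket, and because the newline is prepended, the next successful append still starts on a fresh line. A minimal sketch of why the reader side can simply skip a corrupt trailing entry (hypothetical, condensed from the `find`/`ls` code below):

```javascript
// Simulate a bucket whose last append was cut off mid-write.
var bucketData = '{"key":"a","digest":"deadbeef"}\n{"key":"a","dig'

var entries = []
bucketData.split('\n').forEach(function (line) {
  if (!line) { return }
  try {
    entries.push(JSON.parse(line))
  } catch (e) {
    // incomplete/corrupt entry -- JSON.parse throws, so we
    // ignore it; a later successful append supersedes it
  }
})
// entries now holds only the one complete record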

@@ -59,4 +70,3 @@ fixOwner.chownr(bucket, opts.uid, opts.gid, cb)

 var ret
-pumpify(stream, split('\n', null, {trailing: true}).on('data', function (l) {
-  if (!l) { return }
+pipe(stream, split('\n', null, {trailing: true}).on('data', function (l) {
   try {

@@ -70,9 +80,7 @@ var obj = JSON.parse(l)

 }
-})).on('finish', function () {
-  cb(null, ret)
-}).on('error', function (e) {
-  if (e.code === 'ENOENT') {
-    cb(null)
+}), function (err) {
+  if (err && err.code === 'ENOENT') {
+    cb(null, null)
   } else {
-    cb(e)
+    cb(err, ret)
   }

@@ -87,15 +95,16 @@ })

-module.exports.ls = ls
-function ls (cache, cb) {
+module.exports.lsStream = lsStream
+function lsStream (cache) {
   var indexPath = path.join(cache, 'index')
+  var stream = through.obj()
   fs.readdir(indexPath, function (err, files) {
     if (err && err.code === 'ENOENT') {
-      return cb(null, {})
+      return stream.end()
     } else if (err) {
-      return cb(err)
+      return stream.emit('error', err)
     } else {
-      var entries = {}
-      asyncMap(files, function (f, next) {
+      asyncMap(files, function (f, cb) {
        fs.readFile(path.join(indexPath, f), 'utf8', function (err, data) {
          if (err) { return cb(err) }
+          var entries = {}
          data.split('\n').forEach(function (entry) {

@@ -106,2 +115,5 @@ try {

 }
+// NOTE - it's possible for an entry to be
+// incomplete/corrupt. So we just skip it.
+// See comment on `insert()` for deets.
 if (parsed) {

@@ -111,11 +123,26 @@ entries[parsed.key] = formatEntry(cache, parsed)

 })
-next()
+Object.keys(entries).forEach(function (k) {
+  stream.write(entries[k])
+})
+cb()
 })
 }, function (err) {
-  cb(err, entries)
+  if (err) { stream.emit('error') }
+  stream.end()
 })
 }
 })
+return stream
 }
+module.exports.ls = ls
+function ls (cache, cb) {
+  var entries = {}
+  lsStream(cache).on('finish', function () {
+    cb(null, entries)
+  }).on('data', function (d) {
+    entries[d.key] = d
+  }).on('error', cb)
+}
module.exports.notFoundError = notFoundError

@@ -134,20 +161,13 @@ function notFoundError (cache, key) {

 module.exports._hashKey = hashKey
 function hashKey (key) {
-  var hash = crypto.createHash('sha1')
-  hash.update(key)
-  return hash.digest('hex')
+  // relatively readable key. Conflicts handled by buckets.
+  return key.replace(/[^a-z0-9]/ig, '').toLowerCase().slice(0, 30)
 }
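So where 2.x bucketed entries under an opaque sha1 hex digest, 3.x derives the bucket name from the key itself. Worked by hand from the regex above:

```javascript
// 3.x: strip non-alphanumerics, lowercase, cap at 30 chars
hashKey('my-unique-key-1234') // => 'myuniquekey1234'
// (2.x would have produced an opaque sha1 hex string instead)
```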
 function formatEntry (cache, entry) {
-  var prefix = cache
-  if (prefix.indexOf(process.env.HOME) === 0) {
-    prefix = '~' + prefix.slice(process.env.HOME.length)
-  }
   return {
     key: entry.key,
     digest: entry.digest,
-    path: path.join(prefix, path.relative(
-      cache,
-      contentPath(cache, entry.digest)
-    )),
+    path: contentPath(cache, entry.digest),
     time: entry.time,

@@ -154,0 +174,0 @@ metadata: entry.metadata

lib/util/fix-owner.js

@@ -22,3 +22,3 @@ var chownr = require('chownr')

 }
-cb = inflight('fixOwner: fixing ownership on ' + filepath)
+cb = inflight('fixOwner: fixing ownership on ' + filepath, cb)
 if (!cb) {

@@ -30,3 +30,8 @@ // We're inflight! whoosh!

 // *now* we override perms
-chownr(filepath, uid, gid, cb)
+chownr(
+  filepath,
+  typeof uid === 'number' ? uid : process.getuid(),
+  typeof gid === 'number' ? gid : process.getgid(),
+  cb
+)
 }

@@ -38,4 +43,4 @@

 if (err || !made) { return cb(err, made) }
-fixOwner(p, uid, gid, cb)
+fixOwner(made, uid, gid, cb)
 })
 }

ls.js

@@ -1,1 +0,4 @@

-module.exports = require('./lib/entry-index').ls
+var index = require('./lib/entry-index')
+module.exports = index.ls
+module.exports.stream = index.lsStream
package.json

 {
   "name": "cacache",
-  "version": "2.0.0",
+  "version": "3.0.0",
   "description": "General content-addressable cache system that maintains a filesystem registry of file data.",

@@ -13,5 +13,6 @@ "main": "index.js",

   "postversion": "npm publish && git push --follow-tags",
-  "pretest": "standard",
+  "pretest": "standard lib test *.js",
   "test": "nyc -- tap test/*.js"
 },
 "repository": "https://github.com/zkat/cacache",
 "keywords": [

@@ -27,20 +28,17 @@ "cache",

   "dezalgo": "^1.0.3",
-  "from2": "^2.3.0",
-  "fs-extra": "^1.0.0",
   "graceful-fs": "^4.1.10",
   "inflight": "^1.0.6",
+  "lockfile": "^1.0.2",
+  "mississippi": "^1.2.0",
   "mkdirp": "^0.5.1",
-  "mv": "^2.1.1",
-  "pumpify": "^1.3.5",
+  "once": "^1.4.0",
   "randomstring": "^1.1.5",
   "rimraf": "^2.5.4",
   "slide": "^1.1.6",
-  "split": "^1.0.0",
-  "tar-fs": "^1.14.0",
-  "through2": "^2.0.1"
+  "split": "^1.0.0"
 },
 "devDependencies": {
-  "nyc": "^9.0.1",
-  "standard": "^8.5.0",
+  "nyc": "^10.0.0",
+  "require-inject": "^1.4.0",
+  "standard": "^8.6.0",
   "tacks": "^1.2.2",

@@ -47,0 +45,0 @@ "tap": "^8.0.1"

put.js

 var dezalgo = require('dezalgo')
-var from = require('from2')
+var from = require('mississippi').from
 var fs = require('graceful-fs')

@@ -32,3 +32,2 @@ var index = require('./lib/entry-index')

 }
-cb = dezalgo(cb)
 opts = Object.create(opts || {})

@@ -55,3 +54,5 @@ opts.filename = filename

 if (err) { cb(err) }
-index.insert(cache, key, digest, opts, cb)
+index.insert(cache, key, digest, opts, function (err) {
+  cb(err, digest)
+})
 })

@@ -68,6 +69,3 @@ }

 opts.metadata = metadata
-opts.override = true
-console.log('what the fuck tho')
 index.find(cache, key, function (err, info) {
-  console.log('ok i read the thing', err, info)
   if (err) { return cb(err) }

@@ -74,0 +72,0 @@ if (!info) { return cb(index.notFoundError(cache, key)) }

README.md

@@ -1,2 +0,2 @@

-# cacache [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache)
+# cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache)

@@ -10,3 +10,3 @@ [`cacache`](https://github.com/zkat/cacache) is a Node.js library for managing

-`$ npm install cacache`
+`$ npm install --save cacache`

@@ -36,4 +36,4 @@ ## Table of Contents

 * [`rm.content`](#rm-content)
-* [`rm.gc`](#rm-gc)
-* [`chownr`](#chownr)
+* [`verify`](#verify)
+* [`verify.lastRun`](#verify-last-run)

@@ -48,2 +48,3 @@ ### Example

 const key = 'my-unique-key-1234'
+let tarballDigest = null

@@ -53,12 +54,19 @@ // Cache it! Use `cachePath` as the root of the content cache

   if (err) { return console.error('Error saving your file!', err.code) }
+  tarballDigest = digest // we'll use this later
   console.log(`Saved ${tarball} to ${cachePath} as ${digest}.`)
 })
-const destination = '/tmp/extract-to-here'
+const destination = '/tmp/mytar.tgz'
 // Copy the contents out of the cache and into their destination!
-cacache.get.directory(cachePath, key, destination, (err) => {
+cacache.get.file(cachePath, key, destination, (err) => {
   if (err) { return console.error('Error extracting data!', err.code) }
   console.log(`data extracted to ${destination}.`)
 })
+// The same thing, but skip the key index.
+cacache.get.file.byDigest(cachePath, tarballDigest, destination, (err) => {
+  if (err) { return console.error('Error extracting data!', err.code) }
+  console.log(`data extracted to ${destination}.`)
+})
 ```

@@ -68,9 +76,8 @@

-* Stores tarball data (expanded) or single files
 * Extraction by key or by content digest (shasum, etc).
 * Deduplicated content by digest -- two inputs with same key are only saved once
-* Manipulate tarball data on expansion and save the updated version
-* Data validation
-* Streaming support
-* Metadata storage
+* Consistency checks, both on insert and extract.
+* (Kinda) concurrency-safe and fault tolerant.
+* Streaming support.
+* Metadata storage.

@@ -87,3 +94,3 @@ ### Guide

 entry in the object will be keyed by the unique index key, with corresponding
-`get.info` objects as the values.
+[`get.info`](#get-info) objects as the values.
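To make that shape concrete, here's a hedged sketch of consuming `ls` -- plus the stream variant that `ls.js` now exposes as `cacache.ls.stream` (the stream form isn't documented in this README diff, so treat it as inferred from the code above):

```javascript
const cacache = require('cacache')

// Object form: one `get.info`-style entry per key.
cacache.ls(cachePath, (err, entries) => {
  if (err) { throw err }
  Object.keys(entries).forEach((k) => {
    console.log(k, '->', entries[k].digest)
  })
})

// Stream form (inferred from lib/entry-index.js#lsStream):
cacache.ls.stream(cachePath).on('data', (entry) => {
  console.log(entry.key, '->', entry.digest)
})
```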

@@ -102,3 +109,3 @@ ##### Example

   digest: 'deadbeef',
-  path: '~/.testcache/content/deadbeef',
+  path: '.testcache/content/deadbeef',
   time: 12345698490,

@@ -114,3 +121,3 @@ metadata: {

   digest: 'bada55',
-  path: '~/.testcache/content/bada55',
+  path: '.testcache/content/bada55',
   time: 11992309289

@@ -121,10 +128,10 @@ }

-#### <a name="get-directory"></a> `> cacache.get.directory(cache, key, destination, [opts], cb)`
+#### <a name="get-file"></a> `> cacache.get.file(cache, key, destination, [opts], cb)`
-Copies cached data identified by `key` to a directory named `destination`. The
-latter will be created if it does not already exist.
+Copies cached data identified by `key` to a file named `destination`.
-If there is no content identified by `key`, it will error.
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, an error will be returned through the callback.
-A sub-function, `get.directory.byDigest` may be used for identical behavior,
+A sub-function, `get.file.byDigest` may be used for identical behavior,
 except lookup will happen by content digest, bypassing the index entirely.

@@ -135,39 +142,37 @@

 ```javascript
-cacache.get.directory(cachePath, 'my-thing', './put/it/here', (err) => {
+cacache.get.file(cachePath, 'my-thing', './put/it/here', (err) => {
   if (err) { throw err }
-  console.log(`my-thing contents extracted to ./put/it/here`)
+  console.log(`my-thing contents copied to ./put/it/here`)
 })
-cacache.get.directory.byDigest(cachePath, pkg.sha, './put/it/here', (err) => {
+cacache.get.file.byDigest(cachePath, pkg.sha, './put/it/here', (err) => {
   if (err) { throw err }
-  console.log(`pkg contents extracted to ./put/it/here`)
+  console.log(`pkg contents copied to ./put/it/here`)
 })
 ```
-#### <a name="get-tarball"></a> `> cacache.get.tarball(cache, key, destination, [opts], cb)`
+#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts], cb)`
-Creates a tarball from cached data identified by `key` and writes it to a file
-named by `destination`.
+Returns a stream of the cached data identified by `key`.
-If there is no content identified by `key`, it will error.
+If there is no content identified by `key`, or if the locally-stored data does
+not pass the validity checksum, an error will be emitted.
-A sub-function, `get.tarball.byDigest` may be used for identical behavior,
+A sub-function, `get.stream.byDigest` may be used for identical behavior,
 except lookup will happen by content digest, bypassing the index entirely.
-**NOTE**: The extracted tarball is not guaranteed to have an identical digest to
-a tarball that was inserted into the cache. What you get out is not
-necessarily what you put in.
 ##### Example
 ```javascript
-cacache.get.directory(cachePath, 'my-thing', './put/it/here', (err) => {
-  if (err) { throw err }
-  console.log(`my-thing contents extracted to ./put/it/here`)
-})
+cacache.get.stream(
+  cachePath, 'my-thing'
+).pipe(
+  fs.createWriteStream('./x.tgz')
+)
-cacache.get.directory.byDigest(cachePath, pkg.sha, './put/it/here', (err) => {
-  if (err) { throw err }
-  console.log(`pkg contents extracted to ./put/it/here`)
-})
+cacache.get.stream.byDigest(
+  cachePath, 'deadbeef'
+).pipe(
+  fs.createWriteStream('./x.tgz')
+)
 ```

@@ -181,2 +186,10 @@

+##### Fields
+* `key` - Key the entry was looked up under. Matches the `key` argument.
+* `digest` - Content digest the entry refers to.
+* `path` - Filesystem path relative to `cache` argument where content is stored.
+* `time` - Timestamp the entry was first added on.
+* `metadata` - User-assigned metadata associated with the entry/content.
 ##### Example

@@ -193,3 +206,3 @@

   digest: 'deadbeef',
-  path: '~/.testcache/content/deadbeef',
+  path: '.testcache/content/deadbeef',
   time: 12345698490,

@@ -206,6 +219,3 @@ metadata: {

-Inserts a file into the cache by pathname. If `file` refers to a tarball, it
-will be expanded and stored in the cache that way. The tarball may optionally
-be gzipped. Any other files will be stored as single files inside the cache
-directory.
+Inserts a file into the cache by pathname.
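A hedged usage sketch -- the `(cache, key, file, [opts], cb)` shape and the `(err, digest)` callback are inferred from the main example above and the `put.js` changes, not shown directly in this diff:

```javascript
cacache.put.file(cachePath, 'my-unique-key-1234', './my-local-file.tgz', (err, digest) => {
  if (err) { return console.error('put failed', err.code) }
  // as of 3.0.0, the callback receives the content digest
  console.log(`stored as ${digest}`)
})
```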

@@ -236,6 +246,3 @@ ##### Example

-Inserts data from a stream into the cache. If the stream contains tarball data,
-it will be expanded and stored in the cache that way. The tar data may
-optionally be gzipped. Any other data type will be stored as single files inside
-the cache directory.
+Inserts data from a stream into the cache.
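A hedged sketch of the stream form, assuming a `cacache.put.stream(cache, key, stream, [opts], cb)` shape (the exact top-level signature isn't shown in this diff; the digest-bearing callback follows the `index.insert` change in `put.js` above):

```javascript
const fs = require('fs')

cacache.put.stream(
  cachePath,
  'my-unique-key-1234',
  fs.createReadStream('./my-local-file.tgz'),
  (err, digest) => {
    if (err) { return console.error('put failed', err.code) }
    console.log(`stream stored as ${digest}`)
  }
)
```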

@@ -275,20 +282,8 @@ ##### Example

-##### `clobber`
-Default: false
-If true, this insertion will overwrite the existing content directory in case
-of a race. Note that in general, content digests are treated as absolute
-identifiers for all content data, so cacache assumes it doesn't need to touch
-anything that was already written.
-If false, will likely prevent race conditions where cache contents might already
-be in the process of being read when the new cache content is renamed, causing
-serious errors for running processes.
-##### `filename`
-When inserting non-tarball data, the filename to use for the sole file to be
-stored.
+##### `size`
+If provided, the data stream will be verified to check that enough data was
+passed through. If there's more or less data than expected, an `EBADSIZE` error
+will be returned.
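A hedged illustration of the `size` option (the option name and `EBADSIZE` code come from the text above; the call shape is assumed):

```javascript
cacache.put.stream(
  cachePath,
  'tarball-key',
  fs.createReadStream('./file.tgz'),
  { size: 1024 }, // known length of the data, in bytes
  (err, digest) => {
    if (err && err.code === 'EBADSIZE') {
      return console.error('data was shorter or longer than expected')
    }
    if (err) { throw err }
    console.log('stored as', digest)
  }
)
```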
##### `digest`

@@ -299,5 +294,5 @@

-To control the hashing algorithm, use `opts.hash`.
+To control the hashing algorithm, use `opts.hashAlgorithm`.
-##### `hash`
+##### `hashAlgorithm`

@@ -309,57 +304,9 @@ Default: 'sha256'

 ##### `logger`
 Will be called with a loglevel as its first argument on any internal log events.
-##### `strip`
-Default: 0
-When inserting tarballs, the number of directories to strip from the beginning
-of the contents' paths.
-##### `dmode`/`fmode`/`umask`
-Modes applied to expanded content files. Does not affect the rest of the cache.
 ##### `uid`/`gid`
-uid and gid for any new content added to the cache.
+If provided, cacache will do its best to make sure any new files added to the
+cache use this particular `uid`/`gid` combination. This can be used,
+for example, to drop permissions when someone uses `sudo`, but cacache makes
+no assumptions about your needs here.
-##### `ignore`
-Function that receives the filename and header information for expanded tarball
-files. If it returns true, the file will be skipped during expansion.
-```javascript
-ignore: (name, header) => {
-  return name.startsWith('.')
-}
-```
-##### `verifier`
-Receives the internal path to the expanded cache contents. Can be used to verify
-and arbitrarily modify the data to be stored.
-If the callback receives an error, content insertion will fail and the content
-will be deleted.
-```javascript
-verifier: (path, digest, cb) => {
-  fs.lstat(path + '/.sekrit', (err) => {
-    if (err) {
-      cb()
-    } else {
-      cb(new Error('sekrit file should not be there!'))
-    }
-  })
-}
-```
 ##### `tmpPrefix`
 Useful for debugging the cache -- prefix to use for randomly-named temporary
 cache directories.
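Putting the surviving options together -- a hedged sketch (the option names come from this README; the overall call shape is assumed):

```javascript
cacache.put.stream(
  cachePath,
  'my-key',
  fs.createReadStream('./data.bin'),
  {
    digest: 'deadbeef',      // fail insertion if the computed digest differs
    hashAlgorithm: 'sha256', // algorithm used to compute/verify digests
    logger: (level, ...args) => console.log(level, ...args),
    uid: 1000,               // ownership applied to newly written cache files
    gid: 1000,
    tmpPrefix: 'debug-'      // easier-to-spot temporary directories
  },
  (err, digest) => {
    if (err) { throw err }
    console.log('stored as', digest)
  }
)
```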
#### <a name="rm-all"></a> `> cacache.rm.all(cache, cb)`

@@ -407,20 +354,45 @@

-#### <a name="rm-gc"></a> `> cacache.rm.gc(cache, cb)`
+#### <a name="verify"></a> `> cacache.verify(cache, opts, cb)`
-Navigates the entry index, cleaning up inaccessible entries (due to appends),
-and removes any content entries that are no longer reachable from index entries.
+Checks out and fixes up your cache:
+* Cleans up corrupted or invalid index entries.
+* Garbage collects any content entries not referenced by the index.
+* Checks digests for all content entries and removes invalid content.
+* Fixes cache ownership.
+* Removes the `tmp` directory in the cache and all its contents.
+When it's done, it'll return an object with various stats about the verification
+process, including amount of storage reclaimed, number of valid entries, number
+of entries removed, etc.
+This function should not be run while other processes are running `cacache`. It
+assumes it'll be used offline by a human or a coordinated process. Concurrent
+verifies are protected by a lock, but there's no guarantee others won't be
+reading/writing on the cache.
+##### Options
+* `opts.uid` - uid to assign to cache and its contents
+* `opts.gid` - gid to assign to cache and its contents
+* `opts.hashAlgorithm` - defaults to `'sha256'`. Hash to use for content checks.
 ##### Example
+```sh
+echo somegarbage >> $CACHEPATH/content/deadbeef
+```
 ```javascript
-cacache.rm.gc(cachePath, (err) => {
+cacache.verify(cachePath, (err, stats) => {
   if (err) { throw err }
-  console.log('less data in the cache now, and everything still works')
+  // deadbeef collected, because of invalid checksum.
+  console.log('cache is much nicer now! stats:', stats)
 })
 ```
-#### <a name="chownr"></a> `> cacache.chownr(cache, uid, gid, cb)`
+#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache, cb)`
-Fixes ownership for the entire cache, including contents, such that it belongs
-to a specific user.
+Returns a `Date` representing the last time `cacache.verify` was run on `cache`.

@@ -430,6 +402,9 @@ ##### Example

 ```javascript
-cacache.chownr(cachePath, uid, gid, (err) => {
+cacache.verify(cachePath, (err) => {
   if (err) { throw err }
-  console.log('fewer permission issues now')
+  cacache.verify.lastRun(cachePath, (err, lastTime) => {
+    if (err) { throw err }
+    console.log('cacache.verify was last called on ' + lastTime)
+  })
 })
 ```

rm.js

@@ -19,6 +19,1 @@ var rmContent = require('./lib/content/rm')

 }
-module.exports.gc = gc
-function gc (cache) {
-  throw new Error('garbage collection not implemented yet')
-}