cacache
Comparing version 5.0.3 to 6.0.0
get.js
'use strict'
var index = require('./lib/entry-index')
var finished = require('mississippi').finished
var pipe = require('mississippi').pipe
var read = require('./lib/content/read')
var through = require('mississippi').through
const Promise = require('bluebird')
module.exports = function get (cache, key, opts, cb) {
  return getData(false, cache, key, opts, cb)
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const pipe = require('mississippi').pipe
const pipeline = require('mississippi').pipeline
const read = require('./lib/content/read')
const through = require('mississippi').through
module.exports = function get (cache, key, opts) {
  return getData(false, cache, key, opts)
}
module.exports.byDigest = function getByDigest (cache, digest, opts, cb) {
  return getData(true, cache, digest, opts, cb)
module.exports.byDigest = function getByDigest (cache, digest, opts) {
  return getData(true, cache, digest, opts)
}
function getData (byDigest, cache, key, opts, cb) {
  if (!cb) {
    cb = opts
    opts = null
function getData (byDigest, cache, key, opts) {
  opts = opts || {}
  opts.hashAlgorithm = opts.hashAlgorithm || 'sha512'
  const memoized = (
    byDigest
    ? memo.get.byDigest(cache, key, opts.hashAlgorithm)
    : memo.get(cache, key)
  )
  if (memoized && opts.memoize !== false) {
    return Promise.resolve(byDigest ? memoized : {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      digest: memoized.entry.digest,
      hashAlgorithm: memoized.entry.hashAlgorithm
    })
  }
  opts = opts || {}
  var src = (byDigest ? getStream.byDigest : getStream)(cache, key, opts)
  var data = ''
  var meta
  src.on('data', function (d) { data += d })
  src.on('metadata', function (m) { meta = m })
  finished(src, function (err) {
    cb(err, data, meta)
  return (
    byDigest ? Promise.resolve(null) : index.find(cache, key, opts)
  ).then(entry => {
    if (!entry && !byDigest) {
      throw index.notFoundError(cache, key)
    }
    return read(cache, byDigest ? key : entry.digest, {
      hashAlgorithm: byDigest ? opts.hashAlgorithm : entry.hashAlgorithm,
      size: opts.size
    }).then(data => byDigest ? data : {
      metadata: entry.metadata,
      data: data,
      digest: entry.digest,
      hashAlgorithm: entry.hashAlgorithm
    }).then(res => {
      if (opts.memoize && byDigest) {
        memo.put.byDigest(cache, key, opts.hashAlgorithm, res)
      } else if (opts.memoize) {
        memo.put(cache, entry, res.data)
      }
      return res
    })
  })
@@ -32,8 +60,17 @@ }
module.exports.stream = getStream
module.exports.stream.byDigest = read.readStream
function getStream (cache, key, opts) {
  var stream = through()
  index.find(cache, key, function (err, data) {
    if (err) { return stream.emit('error', err) }
    if (!data) {
  opts = opts || {}
  let stream = through()
  const memoized = memo.get(cache, key)
  if (memoized && opts.memoize !== false) {
    stream.on('newListener', function (ev, cb) {
      ev === 'metadata' && cb(memoized.entry.metadata)
      ev === 'digest' && cb(memoized.entry.digest)
      ev === 'hashAlgorithm' && cb(memoized.entry.hashAlgorithm)
    })
    stream.write(memoized.data, () => stream.end())
    return stream
  }
  index.find(cache, key).then(entry => {
    if (!entry) {
      return stream.emit(
@@ -43,17 +80,80 @@ 'error', index.notFoundError(cache, key)
    }
    stream.emit('metadata', data)
    let memoStream
    if (opts.memoize) {
      let memoData = []
      let memoLength = 0
      memoStream = through((c, en, cb) => {
        memoData && memoData.push(c)
        memoLength += c.length
        cb(null, c, en)
      }, cb => {
        memoData && memo.put(cache, entry, Buffer.concat(memoData, memoLength))
        cb()
      })
    } else {
      memoStream = through()
    }
    // TODO - don't overwrite someone else's `opts`.
    opts.hashAlgorithm = entry.hashAlgorithm
    stream.emit('metadata', entry.metadata)
    stream.emit('hashAlgorithm', entry.hashAlgorithm)
    stream.emit('digest', entry.digest)
    stream.on('newListener', function (ev, cb) {
      ev === 'metadata' && cb(data)
      ev === 'metadata' && cb(entry.metadata)
      ev === 'digest' && cb(entry.digest)
      ev === 'hashAlgorithm' && cb(entry.hashAlgorithm)
    })
    pipe(
      read.readStream(cache, data.digest, opts),
      read.readStream(cache, entry.digest, opts),
      memoStream,
      stream
    )
  })
  }, err => stream.emit('error', err))
  return stream
}
module.exports.stream.byDigest = getStreamDigest
function getStreamDigest (cache, digest, opts) {
  opts = opts || {}
  opts.hashAlgorithm = opts.hashAlgorithm || 'sha512'
  const memoized = memo.get.byDigest(cache, digest, opts.hashAlgorithm)
  if (memoized && opts.memoize !== false) {
    const stream = through()
    stream.write(memoized, () => stream.end())
    return stream
  } else {
    let stream = read.readStream(cache, digest, opts)
    if (opts.memoize) {
      let memoData = []
      let memoLength = 0
      const memoStream = through((c, en, cb) => {
        memoData && memoData.push(c)
        memoLength += c.length
        cb(null, c, en)
      }, cb => {
        memoData && memo.put.byDigest(
          cache,
          digest,
          opts.hashAlgorithm,
          Buffer.concat(memoData, memoLength)
        )
        cb()
      })
      stream = pipeline(stream, memoStream)
    }
    return stream
  }
}
module.exports.info = info
function info (cache, key, cb) {
  index.find(cache, key, cb)
function info (cache, key, opts) {
  opts = opts || {}
  const memoized = memo.get(cache, key)
  if (memoized && opts.memoize !== false) {
    return Promise.resolve(memoized.entry)
  } else {
    return index.find(cache, key)
  }
}
module.exports.hasContent = read.hasContent
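A subtlety worth noting in the streaming path above: the `newListener` hook replays `metadata`/`digest`/`hashAlgorithm` to consumers that subscribe after those events have already fired. A stripped-down sketch of the pattern (the event name and payload here are made up):

```javascript
const EventEmitter = require('events')

const ee = new EventEmitter()
const payload = { hello: 'world' } // hypothetical value
ee.emit('metadata', payload) // fires before anyone is listening

// 'newListener' is emitted just before a listener is attached, so we
// can hand late subscribers the already-emitted value directly.
ee.on('newListener', (ev, listener) => {
  ev === 'metadata' && listener(payload)
})

ee.on('metadata', m => console.log('got', m)) // still logs the payload
```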
index.js
@@ -8,3 +8,4 @@ 'use strict'
  rm: require('./rm'),
  verify: require('./verify')
  verify: require('./verify'),
  clearMemoized: require('./lib/memoization').clearMemoized
}
lib/content/path.js
'use strict'
var contentVer = require('../../package.json')['cache-version'].content
var path = require('path')
// Current format of content file path:
//
// ~/.my-cache/content-v1/sha512/ba/da55deadbeefc0ffee
//
module.exports = contentPath
function contentPath (cache, address) {
  return path.join(cache, 'content', address)
function contentPath (cache, address, hashAlgorithm) {
  address = address && address.toLowerCase()
  hashAlgorithm = hashAlgorithm ? hashAlgorithm.toLowerCase() : 'sha512'
  return path.join(
    cache,
    `content-v${contentVer}`,
    hashAlgorithm,
    address.slice(0, 2),
    address.slice(2)
  )
}
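To make the new sharded layout concrete, here is a minimal sketch of what `contentPath` now produces (the cache root and digest below are made up):

```javascript
const contentPath = require('./lib/content/path')

// With "cache-version": { "content": "1" }, the address is lowercased
// and sharded on its first two hex characters, which keeps the fan-out
// of any single directory small:
contentPath('/home/me/.my-cache', 'bada55deadbeefc0ffee', 'sha512')
// => '/home/me/.my-cache/content-v1/sha512/ba/da55deadbeefc0ffee'
```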
lib/content/read.js
'use strict'
var checksumStream = require('checksum-stream')
var contentPath = require('./path')
var dezalgo = require('dezalgo')
var fs = require('graceful-fs')
var pipe = require('mississippi').pipe
const Promise = require('bluebird')
module.exports.readStream = readStream
function readStream (cache, address, opts) {
const checksumStream = require('checksum-stream')
const contentPath = require('./path')
const crypto = require('crypto')
const fs = require('graceful-fs')
const pipeline = require('mississippi').pipeline
Promise.promisifyAll(fs)
module.exports = read
function read (cache, address, opts) {
  opts = opts || {}
  var stream = checksumStream({
    digest: address,
    algorithm: opts.hashAlgorithm || 'sha1'
  })
  var cpath = contentPath(cache, address)
  hasContent(cache, address, function (err, exists) {
    if (err) { return stream.emit('error', err) }
    if (!exists) {
      err = new Error('content not found')
      err.code = 'ENOENT'
      err.cache = cache
      err.digest = address
      return stream.emit('error', err)
  const algo = opts.hashAlgorithm || 'sha512'
  const cpath = contentPath(cache, address, algo)
  return fs.readFileAsync(cpath, null).then(data => {
    const digest = crypto.createHash(algo).update(data).digest('hex')
    if (typeof opts.size === 'number' && opts.size !== data.length) {
      throw sizeError(opts.size, data.length)
    } else if (digest !== address) {
      throw checksumError(address, digest)
    } else {
      pipe(fs.createReadStream(cpath), stream)
      return data
    }
  })
  return stream
}
module.exports.stream = readStream
module.exports.readStream = readStream
function readStream (cache, address, opts) {
  opts = opts || {}
  const cpath = contentPath(cache, address, opts.hashAlgorithm || 'sha512')
  return pipeline(
    fs.createReadStream(cpath), checksumStream({
      digest: address,
      algorithm: opts.hashAlgorithm || 'sha512',
      size: opts.size
    })
  )
}
module.exports.hasContent = hasContent
function hasContent (cache, address, cb) {
  cb = dezalgo(cb)
  if (!address) { return cb(null, false) }
  fs.lstat(contentPath(cache, address), function (err) {
function hasContent (cache, address, algorithm) {
  if (!address) { return Promise.resolve(false) }
  return fs.lstatAsync(
    contentPath(cache, address, algorithm || 'sha512')
  ).then(() => true).catch(err => {
    if (err && err.code === 'ENOENT') {
      return cb(null, false)
      return Promise.resolve(false)
    } else if (err && process.platform === 'win32' && err.code === 'EPERM') {
      return cb(null, false)
    } else if (err) {
      return cb(err)
      return Promise.resolve(false)
    } else {
      return cb(null, true)
      throw err
    }
  })
}
function sizeError (expected, found) {
  var err = new Error('stream data size mismatch')
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}
function checksumError (expected, found) {
  var err = new Error('checksum failed')
  err.code = 'EBADCHECKSUM'
  err.expected = expected
  err.found = found
  return err
}
lib/content/rm.js
'use strict'
var Promise = require('bluebird')
var contentPath = require('./path')
var rimraf = require('rimraf')
var rimraf = Promise.promisify(require('rimraf'))
module.exports = rm
function rm (cache, address, cb) {
  rimraf(contentPath(cache, address), cb)
function rm (cache, address, algorithm) {
  address = address.toLowerCase()
  algorithm = algorithm && algorithm.toLowerCase()
  return rimraf(contentPath(cache, address, algorithm || 'sha512'))
}
lib/entry-index.js
'use strict'
var asyncMap = require('slide/lib/async-map')
var contentPath = require('./content/path')
var fixOwner = require('./util/fix-owner')
var fs = require('graceful-fs')
var lockfile = require('lockfile')
var path = require('path')
var pipe = require('mississippi').pipe
var split = require('split')
var through = require('mississippi').through
const asyncMap = require('slide/lib/async-map')
const contentPath = require('./content/path')
const crypto = require('crypto')
const fixOwner = require('./util/fix-owner')
const fs = require('graceful-fs')
const path = require('path')
const Promise = require('bluebird')
const through = require('mississippi').through
const indexV = require('../package.json')['cache-version'].index
const appendFileAsync = Promise.promisify(fs.appendFile)
const readFileAsync = Promise.promisify(fs.readFile)
module.exports.insert = insert
function insert (cache, key, digest, opts, _cb) {
  if (!_cb) {
    _cb = opts
    opts = null
  }
function insert (cache, key, digest, opts) {
  opts = opts || {}
  var bucket = indexPath(cache, key)
  var lock = bucket + '.lock'
  var cb = function (err, entry) {
    lockfile.unlock(lock, function (er) {
      _cb(er || err, entry)
    })
  }
  fixOwner.mkdirfix(path.dirname(bucket), opts.uid, opts.gid, function (err) {
    if (err) { return _cb(err) }
    lockfile.lock(lock, {
      stale: 60000,
      retries: 10,
      wait: 10000
    }, function (err) {
      if (err) { return _cb(err) }
      fs.stat(bucket, function (err, existing) {
        if (err && err.code !== 'ENOENT' && err.code !== 'EPERM') { cb(err) }
        var entry = {
          key: key,
          digest: digest,
          time: +(new Date()),
          metadata: opts.metadata
        }
        // Because of the way these entries work,
        // the index is safe from fs.appendFile stopping
        // mid-write so long as newlines are *prepended*
        //
        // That is, if a write fails, it will be ignored
        // by `find`, and the next successful one will be
        // used.
        //
        // This should be -very rare-, since `fs.appendFile`
        // will often be atomic on most platforms unless
        // very large metadata has been included, but caches
        // like this one tend to last a long time. :)
        // Most corrupted reads are likely to be from attempting
        // to read the index while it's being written to --
        // which is safe, but not guaranteed to be atomic.
        var e = (existing ? '\n' : '') + JSON.stringify(entry)
        fs.appendFile(bucket, e, function (err) {
          if (err) { return cb(err) }
          fixOwner.chownr(bucket, opts.uid, opts.gid, function (err) {
            cb(err, entry)
          })
        })
      })
    })
  })
  const bucket = bucketPath(cache, key)
  return fixOwner.mkdirfix(
    path.dirname(bucket), opts.uid, opts.gid
  ).then(() => {
    const entry = {
      key: key,
      digest: digest,
      hashAlgorithm: opts.hashAlgorithm,
      time: +(new Date()),
      metadata: opts.metadata
    }
    const stringified = JSON.stringify(entry)
    // NOTE - Cleverness ahoy!
    //
    // This works because it's tremendously unlikely for an entry to corrupt
    // another while still preserving the string length of the JSON in
    // question. So, we just slap the length in there and verify it on read.
    //
    // Thanks to @isaacs for the whiteboarding session that ended up with this.
    return appendFileAsync(
      bucket, `\n${stringified.length}\t${stringified}`
    ).then(() => entry)
  }).then(entry => (
    fixOwner.chownr(bucket, opts.uid, opts.gid).then(() => (
      formatEntry(cache, entry)
    ))
  ))
}
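To make the length-prefix trick concrete, here is roughly what a bucket file looks like on disk after two inserts, and the read-side check that rejects torn writes (the entry values and lengths below are illustrative, not computed):

```javascript
// Each appended record is '\n' + <length of JSON> + '\t' + <JSON>, e.g.:
//
//   59	{"key":"my-thing","digest":"deadbeef","time":1490000000000}
//   57	{"key":"my-thing","digest":"bada55","time":1490000000001}
//
// A record only counts if its JSON is exactly as long as its prefix
// claims, so a write that was cut off mid-append is simply skipped:
function validRecord (line) {
  const pieces = line.split('\t')
  return Boolean(pieces[1]) && pieces[1].length === parseInt(pieces[0], 10)
}
```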
module.exports.find = find
function find (cache, key, cb) {
  var bucket = indexPath(cache, key)
  var stream = fs.createReadStream(bucket)
  var ret
  pipe(stream, split('\n', null, {trailing: true}).on('data', function (l) {
    try {
      var obj = JSON.parse(l)
    } catch (e) {
      return
    }
    if (obj && (obj.key === key)) {
      ret = formatEntry(cache, obj)
    }
  }), function (err) {
    if (err && err.code === 'ENOENT') {
      cb(null, null)
function find (cache, key) {
  const bucket = bucketPath(cache, key)
  return bucketEntries(cache, bucket).then(entries => {
    return entries.reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  }).catch(err => {
    if (err.code === 'ENOENT') {
      return null
    } else {
      cb(err, ret)
      throw err
    }
@@ -94,4 +70,4 @@ })
module.exports.delete = del
function del (cache, key, cb) {
  insert(cache, key, null, cb)
function del (cache, key) {
  return insert(cache, key, null)
}
@@ -101,5 +77,5 @@
function lsStream (cache) {
  var indexPath = path.join(cache, 'index')
  var stream = through.obj()
  fs.readdir(indexPath, function (err, files) {
  const indexDir = bucketDir(cache)
  const stream = through.obj()
  fs.readdir(indexDir, function (err, buckets) {
    if (err && err.code === 'ENOENT') {
@@ -110,22 +86,29 @@ return stream.end()
    } else {
      asyncMap(files, function (f, cb) {
        fs.readFile(path.join(indexPath, f), 'utf8', function (err, data) {
          if (err) { return cb(err) }
          var entries = {}
          data.split('\n').forEach(function (entry) {
            try {
              var parsed = JSON.parse(entry)
            } catch (e) {
            }
            // NOTE - it's possible for an entry to be
            // incomplete/corrupt. So we just skip it.
            // See comment on `insert()` for deets.
            if (parsed) {
              entries[parsed.key] = formatEntry(cache, parsed)
            }
          })
          Object.keys(entries).forEach(function (k) {
            stream.write(entries[k])
          })
          cb()
      asyncMap(buckets, (bucket, cb) => {
        fs.readdir(path.join(indexDir, bucket), (err, files) => {
          if (err && err.code === 'ENOENT') {
            return cb()
          } else if (err) {
            return cb(err)
          } else {
            asyncMap(files, function (f, cb) {
              const bpath = path.join(indexDir, bucket, f)
              bucketEntries(cache, bpath).then(_entries => {
                const entries = _entries.reduce((acc, entry) => {
                  acc[entry.key] = entry
                  return acc
                }, {})
                Object.keys(entries).forEach(function (k) {
                  stream.write(formatEntry(cache, entries[k]))
                })
                cb()
              }, err => {
                if (err.code === 'ENOENT') {
                  cb()
                } else {
                  cb(err)
                }
              })
            }, cb)
          }
        })
@@ -142,9 +125,11 @@ }, function (err) {
module.exports.ls = ls
function ls (cache, cb) {
  var entries = {}
  lsStream(cache).on('finish', function () {
    cb(null, entries)
  }).on('data', function (d) {
    entries[d.key] = d
  }).on('error', cb)
function ls (cache) {
  const entries = {}
  return Promise.fromNode(cb => {
    lsStream(cache).on('finish', function () {
      cb(null, entries)
    }).on('data', function (d) {
      entries[d.key] = d
    }).on('error', cb)
  })
}
@@ -154,3 +139,3 @@
function notFoundError (cache, key) {
  var err = new Error('content not found')
  const err = new Error('content not found')
  err.code = 'ENOENT'
@@ -162,10 +147,44 @@ err.cache = cache
function indexPath (cache, key) {
  return path.join(cache, 'index', hashKey(key))
function bucketEntries (cache, bucket, filter) {
  return readFileAsync(
    bucket, 'utf8'
  ).then(data => {
    let entries = []
    data.split('\n').forEach(entry => {
      const pieces = entry.split('\t')
      if (!pieces[1] || pieces[1].length !== parseInt(pieces[0], 10)) {
        // Length is no good! Corruption ahoy!
        return
      }
      let obj
      try {
        obj = JSON.parse(pieces[1])
      } catch (e) {
        // Entry is corrupted!
        return
      }
      if (obj) {
        entries.push(obj)
      }
    })
    return entries
  })
}
function bucketDir (cache) {
  return path.join(cache, `index-v${indexV}`)
}
module.exports._bucketPath = bucketPath
function bucketPath (cache, key) {
  const hashed = hashKey(key)
  return path.join(bucketDir(cache), hashed.slice(0, 2), hashed.slice(2))
}
module.exports._hashKey = hashKey
function hashKey (key) {
  // relatively readable key. Conflicts handled by buckets.
  return key.replace(/[^a-z0-9_-]+/ig, '_').toLowerCase().slice(0, 120)
  return crypto
    .createHash('sha256')
    .update(key)
    .digest('hex')
}
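Taken together, `hashKey` and `bucketPath` mean a key's bucket location is derived entirely from its sha256, so arbitrary key characters never leak into filenames. A sketch with a made-up key:

```javascript
const crypto = require('crypto')
const path = require('path')

const hashed = crypto.createHash('sha256').update('my-thing').digest('hex')
// Same two-character sharding as the content store, applied to the
// hashed key (assuming "cache-version": { "index": "1" }):
console.log(path.join('index-v1', hashed.slice(0, 2), hashed.slice(2)))
```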
@@ -177,3 +196,4 @@
  digest: entry.digest,
  path: contentPath(cache, entry.digest),
  hashAlgorithm: entry.hashAlgorithm,
  path: contentPath(cache, entry.digest, entry.hashAlgorithm),
  time: entry.time,
@@ -180,0 +200,0 @@ metadata: entry.metadata
lib/util/fix-owner.js
'use strict'
var chownr = require('chownr')
var dezalgo = require('dezalgo')
var inflight = require('inflight')
var mkdirp = require('mkdirp')
const Promise = require('bluebird')
const chownr = Promise.promisify(require('chownr'))
const mkdirp = Promise.promisify(require('mkdirp'))
const inflight = require('promise-inflight')
module.exports.chownr = fixOwner
function fixOwner (filepath, uid, gid, cb) {
  cb = dezalgo(cb)
function fixOwner (filepath, uid, gid) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return cb()
    return Promise.resolve()
  }
  if (typeof uid !== 'number' && typeof gid !== 'number') {
    // There's no permissions override. Nothing to do here.
    return cb()
    return Promise.resolve()
  }
@@ -22,16 +22,11 @@ if ((typeof uid === 'number' && process.getuid() === uid) &&
    // No need to override if it's already what we used.
    return cb()
    return Promise.resolve()
  }
  cb = inflight('fixOwner: fixing ownership on ' + filepath, cb)
  if (!cb) {
    // We're inflight! whoosh!
    return
  }
  // *now* we override perms
  chownr(
    filepath,
    typeof uid === 'number' ? uid : process.getuid(),
    typeof gid === 'number' ? gid : process.getgid(),
    cb
  return inflight(
    'fixOwner: fixing ownership on ' + filepath,
    () => chownr(
      filepath,
      typeof uid === 'number' ? uid : process.getuid(),
      typeof gid === 'number' ? gid : process.getgid()
    )
  )
@@ -42,6 +37,7 @@ }
function mkdirfix (p, uid, gid, cb) {
  mkdirp(p, function (err, made) {
    if (err || !made) { return cb(err, made) }
    fixOwner(made, uid, gid, cb)
  return mkdirp(p).then(made => {
    if (made) {
      return fixOwner(made, uid, gid).then(() => made)
    }
  })
}
lib/util/move-file.js
'use strict'
var fs = require('graceful-fs')
const fs = require('graceful-fs')
const Promise = require('bluebird')
let move
let pinflight
module.exports = moveFile
function moveFile (src, dest, cb) {
function moveFile (src, dest) {
  // This isn't quite an fs.rename -- the assumption is that
@@ -16,14 +19,33 @@ // if `dest` already exists, and we get certain errors while
  // Note that, as the name suggests, this strictly only supports file moves.
  fs.link(src, dest, function (err) {
    if (err) {
      if (err.code === 'EEXIST' || err.code === 'EBUSY') {
        // file already exists, so whatever
      } else if (err.code === 'EPERM' && process.platform === 'win32') {
        // file handle stayed open even past graceful-fs limits
      } else {
        return cb(err)
  return Promise.fromNode(cb => {
    fs.link(src, dest, err => {
      if (err) {
        if (err.code === 'EEXIST' || err.code === 'EBUSY') {
          // file already exists, so whatever
        } else if (err.code === 'EPERM' && process.platform === 'win32') {
          // file handle stayed open even past graceful-fs limits
        } else {
          return cb(err)
        }
      }
      return fs.unlink(src, cb)
    })
  }).catch(err => {
    if (process.platform !== 'win32') {
      throw err
    } else {
      if (!pinflight) { pinflight = require('promise-inflight') }
      return pinflight('cacache-move-file:' + dest, () => {
        return Promise.promisify(fs.stat)(dest).catch(err => {
          if (err.code !== 'ENOENT') {
            // Something else is wrong here. Bail bail bail
            throw err
          }
          // file doesn't already exist! let's try a rename -> copy fallback
          if (!move) { move = require('@npmcorp/move') }
          return move(src, dest, { Promise, fs })
        })
      })
    }
  fs.unlink(src, cb)
  })
}
lib/verify.js
'use strict'
var asyncMap = require('slide').asyncMap
const Promise = require('bluebird')
var checksumStream = require('checksum-stream')
@@ -8,28 +9,24 @@ var fixOwner = require('./util/fix-owner')
var index = require('./entry-index')
var lockfile = require('lockfile')
var lockfile = Promise.promisifyAll(require('lockfile'))
var path = require('path')
var pipe = require('mississippi').pipe
var rimraf = require('rimraf')
var pipe = Promise.promisify(require('mississippi').pipe)
var rimraf = Promise.promisify(require('rimraf'))
Promise.promisifyAll(fs)
module.exports = verify
function verify (cache, opts, _cb) {
  if (!_cb) {
    _cb = opts
    opts = null
  }
function verify (cache, opts) {
  opts = opts || {}
  var lock = path.join(cache, 'verify.lock')
  var cb = function (err, stats) {
    lockfile.unlock(lock, function (er) {
      _cb(er || err, stats)
  opts.log && opts.log.verbose('verify', 'verifying content cache at', cache)
  const startTime = +(new Date())
  return fixOwner.mkdirfix(
    cache, opts.uid, opts.gid
  ).then(() => {
    const lockPath = path.join(cache, 'verify.lock')
    const lock = lockfile.lockAsync(lockPath).disposer(() => {
      return lockfile.unlock(lockPath)
    })
  }
  fixOwner.mkdirfix(cache, opts.uid, opts.gid, function (err) {
    if (err) { return _cb(err) }
    lockfile.lock(lock, function (err) {
      if (err) { return _cb(err) }
      garbageCollect(cache, opts, function (err, gcStats) {
        if (err) { return cb(err) }
        tidyIndex(cache, opts, function (err, tidyStats) {
          if (err) { return cb(err) }
    return Promise.using(lock, () => {
      return garbageCollect(cache, opts).then(gcStats => {
        return tidyIndex(cache, opts).then(tidyStats => {
          var stats = tidyStats
@@ -39,24 +36,29 @@ Object.keys(gcStats).forEach(function (key) {
          })
          var verifile = path.join(cache, '_lastverified')
          fs.writeFile(verifile, '' + (+(new Date())), function (err) {
            if (err) { return cb(err) }
            fixOwner.chownr(cache, opts.uid, opts.gid, function (err) {
              if (err) { return cb(err) }
              rimraf(path.join(cache, 'tmp'), function (err) {
                if (err) { return cb(err) }
                cb(null, stats)
              })
            })
          })
          return stats
        })
      }).then(stats => {
        var verifile = path.join(cache, '_lastverified')
        opts.log && opts.log.verbose('verify', 'writing verifile to ' + verifile)
        return fs.writeFileAsync(
          verifile, '' + (+(new Date()))
        ).then(() => {
          opts.log && opts.log.verbose('verify', 'fixing cache ownership')
          return fixOwner.chownr(cache, opts.uid, opts.gid)
        }).then(() => {
          opts.log && opts.log.verbose('verify', 'clearing out tmp')
          return rimraf(path.join(cache, 'tmp'))
        }).then(() => stats)
      })
    })
  }).then(stats => {
    stats.runTime = (+(new Date()) - startTime) / 1000
    opts.log && opts.log.verbose('verify', 'final stats:', stats)
    return stats
  })
}
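The lock handling above leans on Bluebird's disposer pattern: `Promise.using` guarantees the disposer runs once the handler settles, so the lockfile is released even when verification fails. A minimal sketch of the same pattern (paths and names here are made up):

```javascript
const Promise = require('bluebird')
const lockfile = Promise.promisifyAll(require('lockfile'))

function withLock (lockPath, work) {
  const lock = lockfile.lockAsync(lockPath).disposer(() => {
    return lockfile.unlockAsync(lockPath)
  })
  // The disposer runs whether `work` resolves or rejects.
  return Promise.using(lock, work)
}

// Hypothetical usage:
withLock('/tmp/my-cache/verify.lock', () => {
  return Promise.resolve('verified')
}).then(console.log)
```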
function tidyIndex (cache, opts, cb) {
  index.ls(cache, function (err, entries) {
    if (err) { return cb(err) }
    rimraf(path.join(cache, 'index'), function (err) {
      if (err) { return cb(err) }
function tidyIndex (cache, opts) {
  opts.log && opts.log.verbose('verify', 'tidying index')
  return index.ls(cache).then(entries => {
    return rimraf(path.join(cache, 'index')).then(() => {
      var stats = {
@@ -67,26 +69,23 @@ entriesRemoved: 0,
      }
      asyncMap(Object.keys(entries), function (key, cb) {
      return Promise.reduce(Object.keys(entries), (stats, key) => {
        var entry = entries[key]
        if (!entry.digest) {
          stats.digestMissing++
          return cb()
          return stats
        }
        var content = path.join(cache, 'content', entries[key].digest)
        fs.stat(content, function (err) {
          if (err && err.code === 'ENOENT') {
        return fs.statAsync(content).catch(err => {
          if (err.code === 'ENOENT') {
            stats.entriesRemoved++
            return cb()
          } else {
            stats.totalEntries++
            index.insert(cache, key, entry.digest, {
              uid: opts.uid,
              gid: opts.gid,
              metadata: entry.metadata
            }, cb)
            return stats
          }
        }).then(() => {
          stats.totalEntries++
          return index.insert(cache, key, entry.digest, {
            uid: opts.uid,
            gid: opts.gid,
            metadata: entry.metadata
          }).then(() => stats)
        })
      }, function (err) {
        if (err) { return cb(err) }
        cb(null, stats)
      })
      }, stats)
    })
@@ -96,4 +95,5 @@ })
function garbageCollect (cache, opts, cb) {
  index.ls(cache, function (err, entries) {
function garbageCollect (cache, opts) {
  opts.log && opts.log.verbose('verify', 'garbage collecting content')
  return index.ls(cache).then(entries => {
    var byDigest = {}
@@ -103,42 +103,38 @@ Object.keys(entries).forEach(function (k) {
    })
    if (err) { return cb(err) }
    var stats = {
      verifiedContent: 0,
      collectedCount: 0,
      reclaimedSize: 0
    }
    var contentDir = path.join(cache, 'content')
    fs.readdir(contentDir, function (err, files) {
      if (err && err.code === 'ENOENT') {
        return cb(null, stats)
      } else if (err) {
        return cb(err)
    return fs.readdirAsync(contentDir).catch(err => {
      if (err.code === 'ENOENT') {
        return
      } else {
        asyncMap(files, function (f, cb) {
          var fullPath = path.join(contentDir, f)
          if (byDigest[f]) {
            var algo = opts.hashAlgorithm || 'sha1'
            verifyContent(fullPath, algo, function (err, collected) {
              if (err) { return cb(err) }
              if (collected != null) {
                stats.collectedCount++
                stats.reclaimedSize += collected
              } else {
                stats.verifiedContent++
              }
              cb()
            })
          } else {
            stats.collectedCount++
            fs.stat(fullPath, function (err, s) {
              if (err) { return cb(err) }
              stats.reclaimedSize += s.size
              rimraf(path.join(contentDir, f), cb)
            })
          }
        }, function (err) {
          if (err) { return cb(err) }
          cb(null, stats)
        })
        throw err
      }
    }).then(files => {
      var stats = {
        verifiedContent: 0,
        collectedCount: 0,
        reclaimedSize: 0,
        keptSize: 0
      }
      return Promise.reduce(files, (stats, f) => {
        var fullPath = path.join(contentDir, f)
        if (byDigest[f]) {
          var algo = opts.hashAlgorithm || 'sha512'
          return verifyContent(fullPath, algo).then(info => {
            if (!info.valid) {
              stats.collectedCount++
              stats.reclaimedSize += info.size
            } else {
              stats.verifiedContent++
              stats.keptSize += info.size
            }
            return stats
          })
        } else {
          stats.collectedCount++
          return fs.statAsync(fullPath).then(s => {
            stats.reclaimedSize += s.size
            return rimraf(path.join(contentDir, f)).then(() => stats)
          })
        }
      }, stats)
    })
@@ -148,5 +144,4 @@ })
function verifyContent (filepath, algo, cb) {
  fs.stat(filepath, function (err, stat) {
    if (err) { return cb(err) }
function verifyContent (filepath, algo) {
  return fs.statAsync(filepath).then(stat => {
    var reader = fs.createReadStream(filepath)
@@ -157,15 +152,16 @@ var checksummer = checksumStream({
    })
    checksummer.on('data', function () {})
    pipe(reader, checksummer, function (err) {
    var contentInfo = {
      size: stat.size,
      valid: true
    }
    checksummer.on('data', () => {})
    return pipe(reader, checksummer).catch(err => {
      if (err && err.code === 'EBADCHECKSUM') {
        rimraf(filepath, function (err) {
          if (err) { return cb(err) }
          cb(null, stat.size)
        return rimraf(filepath).then(() => {
          contentInfo.valid = false
        })
      } else if (err) {
        return cb(err)
      } else {
        cb(null, null)
        throw err
      }
    })
    }).then(() => contentInfo)
  })
@@ -175,7 +171,6 @@ }
module.exports.lastRun = lastRun
function lastRun (cache, cb) {
  fs.readFile(path.join(cache, '_lastverified'), 'utf8', function (err, data) {
    if (err) { return cb(err) }
    cb(null, new Date(+data))
  })
function lastRun (cache) {
  return fs.readFileAsync(
    path.join(cache, '_lastverified'), 'utf8'
  ).then(data => new Date(+data))
}
package.json
{
  "name": "cacache",
  "version": "5.0.3",
  "version": "6.0.0",
  "cache-version": {
    "content": "1",
    "index": "1"
  },
  "description": "General content-addressable cache system that maintains a filesystem registry of file data.",
@@ -11,6 +15,9 @@ "main": "index.js",
  "scripts": {
    "preversion": "npm t",
    "postversion": "npm publish && git push --follow-tags",
    "benchmarks": "node test/benchmarks",
    "prerelease": "npm t",
    "postrelease": "npm publish && git push --follow-tags",
    "pretest": "standard lib test *.js",
    "test": "nyc -- tap -j8 test/*.js",
    "release": "standard-version -s",
    "test": "nyc -- tap -J test/*.js",
    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
    "update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
@@ -43,2 +50,4 @@ "update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
  "dependencies": {
    "@npmcorp/move": "^1.0.0",
    "bluebird": "^3.4.7",
    "checksum-stream": "^1.0.2",
@@ -48,3 +57,2 @@ "chownr": "^1.0.1",
    "graceful-fs": "^4.1.10",
    "inflight": "^1.0.6",
    "lockfile": "^1.0.2",
@@ -54,16 +62,19 @@ "mississippi": "^1.2.0",
    "once": "^1.4.0",
    "rimraf": "^2.5.4",
    "promise-inflight": "^1.0.1",
    "rimraf": "^2.6.1",
    "slide": "^1.1.6",
    "split": "^1.0.0",
    "unique-filename": "^1.1.0"
  },
  "devDependencies": {
    "benchmark": "^2.1.3",
    "chalk": "^1.1.3",
    "glob": "^7.1.1",
    "nyc": "^10.0.0",
    "require-inject": "^1.4.0",
    "standard": "^8.6.0",
    "standard": "^9.0.0",
    "standard-version": "^4.0.0",
    "tacks": "^1.2.2",
    "tap": "^10.0.2",
    "tap": "^10.3.0",
    "weallbehave": "^1.0.0",
    "weallcontribute": "^1.0.7"
    "weallcontribute": "^1.0.8"
  },
@@ -70,0 +81,0 @@ "config": {
put.js
'use strict'
var index = require('./lib/entry-index')
var pipe = require('mississippi').pipe
var putContent = require('./lib/content/put-stream')
var through = require('mississippi').through
var to = require('mississippi').to
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const write = require('./lib/content/write')
const to = require('mississippi').to
module.exports = putData
function putData (cache, key, data, opts, cb) {
  if (!cb) {
    cb = opts
    opts = null
  }
function putData (cache, key, data, opts) {
  opts = opts || {}
  var src = through()
  var meta
  var dest = putStream(cache, key, opts)
  dest.on('metadata', function (m) { meta = m })
  pipe(src, dest, function (err) {
    cb(err, meta)
  return write(cache, data, opts).then(digest => {
    return index.insert(cache, key, digest, opts).then(entry => {
      if (opts.memoize) {
        memo.put(cache, entry, data)
      }
      return digest
    })
  })
  src.write(data, function () {
    src.end()
  })
}
@@ -31,15 +24,24 @@
  opts = opts || {}
  var digest
  var contentStream = putContent(cache, opts).on('digest', function (d) {
  let digest
  const contentStream = write.stream(cache, opts).on('digest', d => {
    digest = d
  })
  var errored = false
  var stream = to(function (chunk, enc, cb) {
    contentStream.write(chunk, enc, cb)
  }, function (cb) {
    contentStream.end(function () {
      index.insert(cache, key, digest, opts, function (err, entry) {
        if (err) { return cb(err) }
  let memoData
  let memoTotal = 0
  const stream = to((chunk, enc, cb) => {
    contentStream.write(chunk, enc, () => {
      if (opts.memoize) {
        if (!memoData) { memoData = [] }
        memoData.push(chunk)
        memoTotal += chunk.length
      }
      cb()
    })
  }, cb => {
    contentStream.end(() => {
      index.insert(cache, key, digest, opts).then(entry => {
        if (opts.memoize) {
          memo.put(cache, entry, Buffer.concat(memoData, memoTotal))
        }
        stream.emit('digest', digest)
        stream.emit('metadata', entry)
        cb()
@@ -49,10 +51,11 @@ })
  })
  stream.on('error', function (err) {
    if (errored) { return }
    errored = true
  let erred = false
  stream.once('error', err => {
    if (erred) { return }
    erred = true
    contentStream.emit('error', err)
  })
  contentStream.on('error', function (err) {
    if (errored) { return }
    errored = true
  contentStream.once('error', err => {
    if (erred) { return }
    erred = true
    stream.emit('error', err)
@@ -59,0 +62,0 @@ })
README.md
@@ -19,9 +19,12 @@ # cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)
* [`ls`](#ls)
* [`get`](#get-data)
* [`get.stream`](#get-stream)
* [`get.info`](#get-info)
* [`put`](#put-data)
* [`put.stream`](#put-stream)
* [`put options`](#put-options)
* [`put*` opts](#put-options)
* [`rm.all`](#rm-all)
* [`rm.entry`](#rm-entry)
* [`rm.content`](#rm-content)
* [`clearMemoized`](#clear-memoized)
* [`verify`](#verify)
@@ -48,3 +51,3 @@ * [`verify.lastRun`](#verify-last-run)
).on('digest', (d) => tarballDigest = d)
).on('end', function () {
).on('finish', function () {
  console.log(`Saved ${tarball} to ${cachePath}.`)
@@ -60,3 +63,3 @@ })
  fs.createWriteStream(destination)
).on('end', () => {
).on('finish', () => {
  console.log('done extracting!')
@@ -70,3 +73,3 @@ })
  fs.createWriteStream(destination)
).on('end', () => {
).on('finish', () => {
  console.log('done extracting using sha1!')
@@ -91,3 +94,3 @@ })
#### <a name="ls"></a> `> cacache.ls(cache, cb)`
#### <a name="ls"></a> `> cacache.ls(cache) -> Promise`
@@ -101,6 +104,3 @@ Lists info for all entries currently in the cache as a single large object. Each
```javascript
cacache.ls(cachePath, (err, allEntries) => {
  if (err) { throw err }
  console.log(allEntries)
})
cacache.ls(cachePath).then(console.log)
// Output
@@ -128,11 +128,58 @@ {
#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts])`
#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, digest})`

Returns a stream of the cached data identified by `key`.
Returns an object with the cached data, digest, and metadata identified by
`key`. The `data` property of this object will be a `Buffer` instance that
presumably holds some data that means something to you. I'm sure you know what
to do with it! cacache just won't care.

If there is no content identified by `key`, or if the locally-stored data does
not pass the validity checksum, the promise will be rejected.

A sub-function, `get.byDigest` may be used for identical behavior, except lookup
will happen by content digest, bypassing the index entirely. This version of the
function *only* returns `data` itself, without any wrapper.

##### Note

This function loads the entire cache entry into memory before returning it. If
you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
instead.

##### Example

```javascript
// Look up by key
cache.get(cachePath, 'my-thing').then(console.log)
// Output:
{
  metadata: {
    thingName: 'my'
  },
  digest: 'deadbeef',
  hashAlgorithm: 'sha512',
  data: Buffer#<deadbeef>
}

// Look up by digest
cache.get.byDigest(cachePath, 'deadbeef', {
  hashAlgorithm: 'sha512'
}).then(console.log)
// Output:
Buffer#<deadbeef>
```

#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`

Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.

If there is no content identified by `key`, or if the locally-stored data does
not pass the validity checksum, an error will be emitted.

`metadata` and `digest` events will be emitted before the stream closes, if
you need to collect that extra data about the cached entry.

A sub-function, `get.stream.byDigest` may be used for identical behavior,
except lookup will happen by content digest, bypassing the index entirely.
except lookup will happen by content digest, bypassing the index entirely. This
version does not emit the `metadata` and `digest` events at all.
@@ -142,10 +189,22 @@ ##### Example
```javascript
// Look up by key
cache.get.stream(
  cachePath, 'my-thing'
).pipe(
).on('metadata', metadata => {
  console.log('metadata:', metadata)
}).on('hashAlgorithm', algo => {
  console.log('hashAlgorithm:', algo)
}).on('digest', digest => {
  console.log('digest:', digest)
}).pipe(
  fs.createWriteStream('./x.tgz')
)
// Outputs:
metadata: { ... }
hashAlgorithm: 'sha512'
digest: deadbeef

// Look up by digest
cache.get.stream.byDigest(
  cachePath, 'deadbeef'
  cachePath, 'deadbeef', { hashAlgorithm: 'sha512' }
).pipe(
@@ -156,3 +215,3 @@ fs.createWriteStream('./x.tgz')
#### <a name="get-info"></a> `> cacache.get.info(cache, key, cb)`
#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
@@ -167,2 +226,3 @@ Looks up `key` in the cache index, returning information about the entry if
* `digest` - Content digest the entry refers to.
* `hashAlgorithm` - Hashing algorithm used to generate `digest`.
* `path` - Filesystem path relative to `cache` argument where content is stored.
@@ -175,6 +235,4 @@ * `time` - Timestamp the entry was first added on.
```javascript
cacache.get.info(cachePath, 'my-thing', (err, info) => {
  if (err) { throw err }
  console.log(info)
})
cacache.get.info(cachePath, 'my-thing').then(console.log)
// Output
@@ -194,6 +252,7 @@ {
#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, stream, [opts])`
#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`

Inserts data from a stream into the cache. Emits a `digest` event with the
digest of written contents when it succeeds.
Inserts data passed to it into the cache. The returned Promise resolves with a
digest (generated according to [`opts.hashAlgorithm`](#hashalgorithm)) after the
cache entry has been successfully written.
@@ -203,2 +262,23 @@ ##### Example
```javascript
fetch(
  'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
).then(data => {
  return cacache.put(
    cachePath, 'registry.npmjs.org|cacache@1.0.0', data
  )
}).then(digest => {
  console.log('digest is', digest)
})
```

#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`

Returns a [Writable
Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
data written to it into the cache. Emits a `digest` event with the digest of
written contents when it succeeds.

##### Example

```javascript
request.get(
@@ -209,3 +289,3 @@ 'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
  cachePath, 'registry.npmjs.org|cacache@1.0.0'
).on('digest', d => console.log(`digest is ${d}`))
)
@@ -225,4 +305,4 @@ ```
If provided, the data stream will be verified to check that enough data was
passed through. If there's more or less data than expected, an `EBADSIZE` error
will be returned.
passed through. If there's more or less data than expected, insertion will fail
with an `EBADSIZE` error.
@@ -232,3 +312,4 @@ ##### `digest`
If present, the pre-calculated digest for the inserted content. If this option
is provided and does not match the post-insertion digest, insertion will fail.
is provided and does not match the post-insertion digest, insertion will fail
with an `EBADCHECKSUM` error.
@@ -239,6 +320,8 @@ To control the hashing algorithm, use `opts.hashAlgorithm`.
Default: 'sha1'
Default: 'sha512'

Hashing algorithm to use when calculating the digest for inserted data. Can use
any algorithm supported by Node.js' `crypto` module.
any algorithm listed in `crypto.getHashes()` or `'omakase'`/`'お任せします'` to
pick a random hash algorithm on each insertion. You may also use any anagram of
`'modnar'` to use this feature.
@@ -252,4 +335,20 @@ ##### `uid`/`gid`
#### <a name="rm-all"></a> `> cacache.rm.all(cache, cb)`
##### `memoize`

Default: null

If provided, cacache will memoize the given cache insertion in memory, bypassing
any filesystem checks for that key or digest in future cache fetches. Nothing
will be written to the in-memory cache unless this option is explicitly truthy.

There is no facility for limiting memory usage short of
[`cacache.clearMemoized()`](#clear-memoized), so be mindful of the sort of data
you ask to get memoized!

Reading from disk data can be forced by explicitly passing `memoize: false` to
the reader functions, but their default will be to read from memory.
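A minimal sketch of how the `memoize` option plays with the reader functions (the cache path and key below are made up):

```javascript
const cacache = require('cacache')
const cachePath = '/tmp/my-toy-cache' // hypothetical

cacache.put(cachePath, 'my-key', 'hello', { memoize: true }).then(() => {
  // Served from the in-memory cache -- no filesystem reads involved.
  return cacache.get(cachePath, 'my-key')
}).then(res => {
  console.log(res.data.toString()) // 'hello'
  // Force a read from disk, skipping the memoized entry.
  return cacache.get(cachePath, 'my-key', { memoize: false })
})
```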
#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`

Clears the entire cache. Mainly by blowing away the cache directory itself.
@@ -260,4 +359,3 @@
```javascript
cacache.rm.all(cachePath, (err) => {
  if (err) { throw err }
cacache.rm.all(cachePath).then(() => {
  console.log('THE APOCALYPSE IS UPON US 😱')
@@ -267,6 +365,6 @@ })
#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key) -> Promise`

Removes the index entry for `key`. Content will still be accessible if
requested directly.
requested directly by content address ([`get.stream.byDigest`](#get-stream)).
@@ -276,4 +374,3 @@ ##### Example
```javascript
cacache.rm.entry(cachePath, 'my-thing', (err) => {
  if (err) { throw err }
cacache.rm.entry(cachePath, 'my-thing').then(() => {
  console.log('I did not like it anyway')
@@ -283,3 +380,3 @@ })
#### <a name="rm-content"></a> `> cacache.rm.content(cache, digest, cb)`
#### <a name="rm-content"></a> `> cacache.rm.content(cache, digest) -> Promise`
@@ -293,4 +390,3 @@ Removes the content identified by `digest`. Any index entries referring to it
```javascript
cacache.rm.content(cachePath, 'deadbeef', (err) => {
  if (err) { throw err }
cacache.rm.content(cachePath, 'deadbeef').then(() => {
  console.log('data for my-thing is gone!')
@@ -300,4 +396,8 @@ })
#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`

Completely resets the in-memory entry cache.

#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`

Checks out and fixes up your cache:
@@ -324,3 +424,3 @@
* `opts.gid` - gid to assign to cache and its contents
* `opts.hashAlgorithm` - defaults to `'sha256'`. Hash to use for content checks.
* `opts.hashAlgorithm` - defaults to `'sha512'`. Hash to use for content checks.
@@ -335,4 +435,3 @@
```javascript
cacache.verify(cachePath, (err, stats) => {
  if (err) { throw err }
cacache.verify(cachePath).then(stats => {
  // deadbeef collected, because of invalid checksum.
@@ -343,3 +442,3 @@ console.log('cache is much nicer now! stats:', stats)
#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache, cb)`
#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`
@@ -351,6 +450,4 @@ Returns a `Date` representing the last time `cacache.verify` was run on `cache`.
```javascript
cacache.verify(cachePath, (err) => {
  if (err) { throw err }
  cacache.verify.lastRun(cachePath, (err, lastTime) => {
    if (err) { throw err }
cacache.verify(cachePath).then(() => {
  cacache.verify.lastRun(cachePath).then(lastTime => {
    console.log('cacache.verify was last called on ' + lastTime)
@@ -357,0 +454,0 @@ })
rm.js
'use strict'
var rmContent = require('./lib/content/rm')
var index = require('./lib/entry-index')
var rimraf = require('rimraf')
const Promise = require('bluebird')
const index = require('./lib/entry-index')
const memo = require('./lib/memoization')
const rimraf = Promise.promisify(require('rimraf'))
const rmContent = require('./lib/content/rm')
module.exports.all = all
function all (cache, cb) {
  rimraf(cache, cb)
function all (cache) {
  memo.clearMemoized()
  return rimraf(cache)
}
module.exports.entry = entry
function entry (cache, key, cb) {
  index.delete(cache, key, cb)
function entry (cache, key) {
  memo.clearMemoized()
  return index.delete(cache, key)
}
module.exports.content = content
function content (cache, address, cb) {
  rmContent(cache, address, cb)
function content (cache, address) {
  memo.clearMemoized()
  return rmContent(cache, address)
}
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
+ Added @npmcorp/move@^1.0.0
+ Added bluebird@^3.4.7
+ Added promise-inflight@^1.0.1
+ Added @npmcorp/copy@1.0.0 (transitive)
+ Added @npmcorp/move@1.0.0 (transitive)
+ Added aproba@1.2.0 (transitive)
+ Added bluebird@3.7.2 (transitive)
+ Added fs-write-stream-atomic@1.0.10 (transitive)
+ Added iferr@0.1.5 (transitive)
+ Added promise-inflight@1.0.1 (transitive)
+ Added run-queue@1.0.3 (transitive)
- Removed inflight@^1.0.6
- Removed split@^1.0.0
- Removed split@1.0.1 (transitive)
- Removed through@2.3.8 (transitive)
Updated rimraf@^2.6.1