cacache - npm Package Compare versions

Comparing version 6.0.2 to 6.1.0


CHANGELOG.md

@@ -5,2 +5,19 @@ # Change Log

+<a name="6.1.0"></a>
+# [6.1.0](https://github.com/zkat/cacache/compare/v6.0.2...v6.1.0) (2017-03-12)
+
+### Bug Fixes
+
+* **coverage:** more coverage for content reads (#70) ([ef4f70a](https://github.com/zkat/cacache/commit/ef4f70a))
+* **tests:** use safe-buffer because omfg (#69) ([6ab8132](https://github.com/zkat/cacache/commit/6ab8132))
+
+### Features
+
+* **rm:** limited rm.all and fixed bugs (#66) ([d5d25ba](https://github.com/zkat/cacache/commit/d5d25ba)), closes [#66](https://github.com/zkat/cacache/issues/66)
+* **verify:** tested, working cache verifier/gc (#68) ([45ad77a](https://github.com/zkat/cacache/commit/45ad77a))
+
 <a name="6.0.2"></a>

@@ -7,0 +24,0 @@ ## [6.0.2](https://github.com/zkat/cacache/compare/v6.0.1...v6.0.2) (2017-03-11)


get.js
 'use strict'
-const Promise = require('bluebird')
+const BB = require('bluebird')

@@ -27,3 +27,3 @@ const index = require('./lib/entry-index')

   if (memoized && opts.memoize !== false) {
-    return Promise.resolve(byDigest ? memoized : {
+    return BB.resolve(byDigest ? memoized : {
       metadata: memoized.entry.metadata,

@@ -36,3 +36,3 @@ data: memoized.data,

   return (
-    byDigest ? Promise.resolve(null) : index.find(cache, key, opts)
+    byDigest ? BB.resolve(null) : index.find(cache, key, opts)
   ).then(entry => {

@@ -153,3 +153,3 @@ if (!entry && !byDigest) {

   if (memoized && opts.memoize !== false) {
-    return Promise.resolve(memoized.entry)
+    return BB.resolve(memoized.entry)
   } else {

@@ -156,0 +156,0 @@ return index.find(cache, key)

lib/content/path.js

@@ -16,6 +16,10 @@ 'use strict'

   return path.join.apply(path, [
-    cache,
-    `content-v${contentVer}`,
-    hashAlgorithm,
+    contentDir(cache),
+    hashAlgorithm
   ].concat(hashToSegments(address)))
 }
+
+module.exports._contentDir = contentDir
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
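Extracting `contentDir` lets `verify.js` locate the content tree via `contentPath._contentDir` instead of hard-coding the layout. For orientation, here is a minimal sketch of how such a content path gets assembled; the two-character `hashToSegments` split is an assumption based on cacache's conventions, not part of this diff:

    'use strict'
    const path = require('path')

    // Assumed helper: splits a digest into short directory segments so a
    // single directory never holds the entire cache's content files.
    function hashToSegments (hash) {
      return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
    }

    // e.g. contentPath('/cache', 'deadbeef...', 'sha512')
    //   => /cache/content-v2/sha512/de/ad/beef...
    function contentPath (cache, address, hashAlgorithm) {
      return path.join.apply(path, [
        path.join(cache, 'content-v2'),
        hashAlgorithm
      ].concat(hashToSegments(address)))
    }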
lib/content/read.js

 'use strict'
-const Promise = require('bluebird')
+const BB = require('bluebird')

@@ -11,3 +11,3 @@ const checksumStream = require('checksum-stream')

-Promise.promisifyAll(fs)
+BB.promisifyAll(fs)

@@ -47,11 +47,9 @@ module.exports = read

 function hasContent (cache, address, algorithm) {
-  if (!address) { return Promise.resolve(false) }
+  if (!address) { return BB.resolve(false) }
   return fs.lstatAsync(
     contentPath(cache, address, algorithm || 'sha512')
-  ).then(() => true).catch(err => {
-    if (err && err.code === 'ENOENT') {
-      return Promise.resolve(false)
-    } else if (err && process.platform === 'win32' && err.code === 'EPERM') {
-      return Promise.resolve(false)
-    } else {
+  ).then(() => true)
+  .catch({code: 'ENOENT'}, () => false)
+  .catch({code: 'EPERM'}, err => {
+    if (process.platform !== 'win32') {
+      throw err

@@ -58,0 +56,0 @@ }
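The rewrite leans on Bluebird's filtered catch: when `.catch()` is given a plain object before the handler, the handler only fires for errors whose properties shallow-match that object, and everything else keeps propagating. A minimal sketch of the pattern (the path is illustrative):

    'use strict'
    const BB = require('bluebird')
    const fs = BB.promisifyAll(require('graceful-fs'))

    // Only ENOENT is swallowed; any other error still rejects the chain.
    fs.lstatAsync('/no/such/file')
      .then(() => true)
      .catch({code: 'ENOENT'}, () => false)
      .then(exists => console.log('exists?', exists))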

lib/content/rm.js

 'use strict'
-var Promise = require('bluebird')
+var BB = require('bluebird')
 var contentPath = require('./path')
-var rimraf = Promise.promisify(require('rimraf'))
+var rimraf = BB.promisify(require('rimraf'))

@@ -8,0 +8,0 @@ module.exports = rm

lib/content/write.js

 'use strict'
-const Promise = require('bluebird')
+const BB = require('bluebird')

@@ -13,3 +13,3 @@ const checksumStream = require('checksum-stream')

 const pipe = require('mississippi').pipe
-const rimraf = Promise.promisify(require('rimraf'))
+const rimraf = BB.promisify(require('rimraf'))
 const through = require('mississippi').through

@@ -19,3 +19,3 @@ const to = require('mississippi').to

-const writeFileAsync = Promise.promisify(fs.writeFile)
+const writeFileAsync = BB.promisify(fs.writeFile)

@@ -29,8 +29,8 @@ module.exports = write

   if (typeof opts.size === 'number' && data.length !== opts.size) {
-    return Promise.reject(sizeError(opts.size, data.length))
+    return BB.reject(sizeError(opts.size, data.length))
   }
   if (opts.digest && digest !== opts.digest) {
-    return Promise.reject(checksumError(opts.digest, digest))
+    return BB.reject(checksumError(opts.digest, digest))
   }
-  return Promise.using(makeTmp(cache, opts), tmp => (
+  return BB.using(makeTmp(cache, opts), tmp => (
     writeFileAsync(

@@ -81,3 +81,3 @@ tmp.target, data, {flag: 'wx'}

 function handleContent (inputStream, cache, opts, errCheck) {
-  return Promise.using(makeTmp(cache, opts), tmp => {
+  return BB.using(makeTmp(cache, opts), tmp => {
     errCheck()

@@ -104,3 +104,3 @@ return pipeToTmp(

-  let outStream = new Promise((resolve, reject) => {
+  let outStream = new BB((resolve, reject) => {
     errCheck()

@@ -111,5 +111,5 @@ resolve(fs.createWriteStream(tmpTarget, {

   })
-  return Promise.using(outStream, outStream => {
+  return BB.using(outStream, outStream => {
     errCheck()
-    return new Promise((resolve, reject) => {
+    return new BB((resolve, reject) => {
       errCheck()

@@ -116,0 +116,0 @@ inputStream.on('error', reject)
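write.js relies on Bluebird's disposer pattern throughout: `makeTmp` hands back a promise with a `.disposer()` attached, and `BB.using` guarantees the cleanup runs whether the body resolves or rejects. A stripped-down sketch of the idea; `makeTmpDir` here is hypothetical, not cacache's `makeTmp`:

    'use strict'
    const BB = require('bluebird')
    const rimraf = BB.promisify(require('rimraf'))

    // Hypothetical tmp allocator: the disposer removes the directory
    // even if the work inside BB.using fails.
    function makeTmpDir (base) {
      return BB.resolve(`${base}/.tmp-${process.pid}`)
        .disposer(tmp => rimraf(tmp))
    }

    BB.using(makeTmpDir('/tmp/cache-demo'), tmp => {
      console.log('writing into', tmp)
      return BB.resolve('done') // write + atomic move would happen here
    }).then(result => console.log(result))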

lib/entry-index.js

@@ -8,3 +8,3 @@ 'use strict'

 const path = require('path')
-const Promise = require('bluebird')
+const BB = require('bluebird')
 const ms = require('mississippi')

@@ -15,5 +15,5 @@ const hashToSegments = require('./util/hash-to-segments')

-const appendFileAsync = Promise.promisify(fs.appendFile)
-const readFileAsync = Promise.promisify(fs.readFile)
-const readdirAsync = Promise.promisify(fs.readdir)
+const appendFileAsync = BB.promisify(fs.appendFile)
+const readFileAsync = BB.promisify(fs.readFile)
+const readdirAsync = BB.promisify(fs.readdir)
 const concat = ms.concat

@@ -129,3 +129,3 @@ const from = ms.from

 function ls (cache) {
-  return Promise.fromNode(cb => {
+  return BB.fromNode(cb => {
     lsStream(cache).on('error', cb).pipe(concat(entries => {

@@ -166,2 +166,3 @@ cb(null, entries.reduce((acc, xs) => {

+module.exports._bucketDir = bucketDir
 function bucketDir (cache) {

@@ -188,2 +189,4 @@ return path.join(cache, `index-v${indexV}`)

 function formatEntry (cache, entry) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.digest) { return null }
   return {

@@ -190,0 +193,0 @@ key: entry.key,
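The `formatEntry` guard gives the append-only index a deletion mechanism: a record whose digest is null reads back as `null`, shadowing any earlier entry for the same key when a bucket is folded into a map. A schematic sketch of that fold, with bucket parsing simplified to an in-memory array:

    'use strict'

    // A bucket is append-only; later records win over earlier ones.
    const records = [
      {key: 'my-pkg', digest: 'aaa111'},
      {key: 'my-pkg', digest: null} // tombstone written by a delete
    ]

    const entries = records.reduce((acc, entry) => {
      // Mirrors formatEntry: a null digest means "this key is gone".
      if (!entry.digest) {
        delete acc[entry.key]
      } else {
        acc[entry.key] = entry
      }
      return acc
    }, {})

    console.log(entries) // => {} -- the tombstone shadowed the first write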

lib/util/fix-owner.js

 'use strict'
-const Promise = require('bluebird')
+const BB = require('bluebird')
-const chownr = Promise.promisify(require('chownr'))
-const mkdirp = Promise.promisify(require('mkdirp'))
+const chownr = BB.promisify(require('chownr'))
+const mkdirp = BB.promisify(require('mkdirp'))
 const inflight = require('promise-inflight')

@@ -13,7 +13,7 @@

     // This platform doesn't need ownership fixing
-    return Promise.resolve()
+    return BB.resolve()
   }
   if (typeof uid !== 'number' && typeof gid !== 'number') {
     // There's no permissions override. Nothing to do here.
-    return Promise.resolve()
+    return BB.resolve()
   }

@@ -23,3 +23,3 @@ if ((typeof uid === 'number' && process.getuid() === uid) &&

     // No need to override if it's already what we used.
-    return Promise.resolve()
+    return BB.resolve()
   }

@@ -26,0 +26,0 @@ return inflight(
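Context for the `inflight(...)` call at the end of this hunk: `promise-inflight` keys asynchronous work by a unique string, so concurrent callers asking for the same ownership fix share one in-flight promise instead of racing. A minimal sketch:

    'use strict'
    const BB = require('bluebird')
    const inflight = require('promise-inflight')

    function fixOwner (filePath, uid, gid) {
      // Concurrent calls with the same key share one underlying run.
      return inflight(`fix-owner:${filePath}:${uid}:${gid}`, () => {
        console.log('chowning', filePath, 'exactly once')
        return BB.resolve() // chownr would run here
      })
    }

    // Both resolve from the same promise; the log line prints once.
    BB.join(fixOwner('/cache', 500, 500), fixOwner('/cache', 500, 500))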

lib/util/move-file.js

 'use strict'
 const fs = require('graceful-fs')
-const Promise = require('bluebird')
+const BB = require('bluebird')
 let move

@@ -19,3 +19,3 @@ let pinflight

   // Note that, as the name suggests, this strictly only supports file moves.
-  return Promise.fromNode(cb => {
+  return BB.fromNode(cb => {
     fs.link(src, dest, err => {

@@ -39,3 +39,3 @@ if (err) {

   return pinflight('cacache-move-file:' + dest, () => {
-    return Promise.promisify(fs.stat)(dest).catch(err => {
+    return BB.promisify(fs.stat)(dest).catch(err => {
       if (err !== 'ENOENT') {

@@ -47,3 +47,3 @@ // Something else is wrong here. Bail bail bail

       if (!move) { move = require('@npmcorp/move') }
-      return move(src, dest, { Promise, fs })
+      return move(src, dest, { BB, fs })
     })

@@ -50,0 +50,0 @@ })
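The overall strategy visible across these hunks: try `fs.link` first (cheap and atomic on the same filesystem), unlink the source on success, and only fall back to a userland move for cross-device or link-hostile cases. A condensed sketch of that shape, with the fallback elided; the specific error codes handled here are an assumption, not taken from this diff:

    'use strict'
    const BB = require('bluebird')
    const fs = require('graceful-fs')

    function moveFile (src, dest) {
      // Hardlink + unlink behaves like an atomic same-device move.
      return BB.fromNode(cb => fs.link(src, dest, cb))
        .then(() => BB.fromNode(cb => fs.unlink(src, cb)))
        .catch(err => {
          if (err.code !== 'EXDEV' && err.code !== 'EEXIST') { throw err }
          // A real implementation falls back to a copy-based move here,
          // as the diff does with @npmcorp/move.
        })
    }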

lib/verify.js

 'use strict'
-const Promise = require('bluebird')
+const BB = require('bluebird')

-var checksumStream = require('checksum-stream')
-var fixOwner = require('./util/fix-owner')
-var fs = require('graceful-fs')
-var index = require('./entry-index')
-var lockfile = Promise.promisifyAll(require('lockfile'))
-var path = require('path')
-var pipe = Promise.promisify(require('mississippi').pipe)
-var rimraf = Promise.promisify(require('rimraf'))
+const checksumStream = require('checksum-stream')
+const contentPath = require('./content/path')
+const finished = BB.promisify(require('mississippi').finished)
+const fixOwner = require('./util/fix-owner')
+const fs = require('graceful-fs')
+const glob = BB.promisify(require('glob'))
+const index = require('./entry-index')
+const path = require('path')
+const pipe = BB.promisify(require('mississippi').pipe)
+const rimraf = BB.promisify(require('rimraf'))

-Promise.promisifyAll(fs)
+BB.promisifyAll(fs)
-module.exports.lastRun = lastRun
-function lastRun (cache) {
-  return fs.readFileAsync(
-    path.join(cache, '_lastverified'), 'utf8'
-  ).then(data => new Date(+data))
-}
 module.exports = verify
 function verify (cache, opts) {
   opts = opts || {}
-  opts.log && opts.log.verbose('verify', 'verifying content cache at', cache)
-  const startTime = +(new Date())
-  return fixOwner.mkdirfix(
-    cache, opts.uid, opts.gid
-  ).then(() => {
-    const lockPath = path.join(cache, 'verify.lock')
-    const lock = lockfile.lockAsync(lockPath).disposer(() => {
-      return lockfile.unlock(lockPath)
-    })
-    return Promise.using(lock, () => {
-      return garbageCollect(cache, opts).then(gcStats => {
-        return tidyIndex(cache, opts).then(tidyStats => {
-          var stats = tidyStats
-          Object.keys(gcStats).forEach(function (key) {
-            stats[key] = gcStats[key]
-          })
-          return stats
-        })
-      }).then(stats => {
-        var verifile = path.join(cache, '_lastverified')
-        opts.log && opts.log.verbose('verify', 'writing verifile to ' + verifile)
-        return fs.writeFileAsync(
-          verifile, '' + (+(new Date()))
-        ).then(() => {
-          opts.log && opts.log.verbose('verify', 'fixing cache ownership')
-          return fixOwner.chownr(cache, opts.uid, opts.gid)
-        }).then(() => {
-          opts.log && opts.log.verbose('verify', 'clearing out tmp')
-          return rimraf(path.join(cache, 'tmp'))
-        }).then(() => stats)
+  opts.log && opts.log.silly('verify', 'verifying cache at', cache)
+  return BB.reduce([
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime
+  ], (stats, step, i) => {
+    const label = step.name || `step #${i}`
+    const start = new Date()
+    return BB.resolve(step(cache, opts)).then(s => {
+      s && Object.keys(s).forEach(k => {
+        stats[k] = s[k]
+      })
+      const end = new Date()
+      if (!stats.runTime) { stats.runTime = {} }
+      stats.runTime[label] = end - start
+      return stats
+    })
-  }).then(stats => {
-    stats.runTime = (+(new Date()) - startTime) / 1000
-    opts.log && opts.log.verbose('verify', 'final stats:', stats)
-    return stats
+  }, {}).tap(stats => {
+    stats.runTime.total = stats.endTime - stats.startTime
+    opts.log && opts.log.silly('verify', 'verification finished for', cache, 'in', `${stats.runTime.total}ms`)
+  })
 }
-function tidyIndex (cache, opts) {
-  opts.log && opts.log.verbose('verify', 'tidying index')
-  return index.ls(cache).then(entries => {
-    return rimraf(path.join(cache, 'index')).then(() => {
-      var stats = {
-        entriesRemoved: 0,
-        digestMissing: 0,
-        totalEntries: 0
-      }
-      return Promise.reduce(Object.keys(entries), (stats, key) => {
-        var entry = entries[key]
-        if (!entry.digest) {
-          stats.digestMissing++
-          return stats
-        }
-        var content = path.join(cache, 'content', entries[key].digest)
-        return fs.statAsync(content).catch(err => {
-          if (err.code === 'ENOENT') {
-            stats.entriesRemoved++
-            return stats
-          }
-        }).then(() => {
-          stats.totalEntries++
-          return index.insert(cache, key, entry.digest, {
-            uid: opts.uid,
-            gid: opts.gid,
-            metadata: entry.metadata
-          }).then(() => stats)
-        })
-      }, stats)
-    })
-  })
+function markStartTime (cache, opts) {
+  return { startTime: new Date() }
+}
+
+function markEndTime (cache, opts) {
+  return { endTime: new Date() }
+}
+
+function fixPerms (cache, opts) {
+  opts.log && opts.log.silly('verify', 'fixing cache permissions')
+  return fixOwner.mkdirfix(cache, opts.uid, opts.gid).then(() => {
+    // TODO - fix file permissions too
+    fixOwner.chownr(cache, opts.uid, opts.gid)
+  }).then(() => null)
+}
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each algo/digest combo as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rimraf it.
+//
 function garbageCollect (cache, opts) {
-  opts.log && opts.log.verbose('verify', 'garbage collecting content')
-  return index.ls(cache).then(entries => {
-    var byDigest = {}
-    Object.keys(entries).forEach(function (k) {
-      byDigest[entries[k].digest] = entries[k]
-    })
-    var contentDir = path.join(cache, 'content')
-    return fs.readdirAsync(contentDir).catch(err => {
-      if (err.code === 'ENOENT') {
-        return
-      } else {
-        throw err
-      }
+  opts.log && opts.log.silly('verify', 'garbage collecting content')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', entry => {
+    if (opts && opts.filter && !opts.filter(entry)) { return }
+    liveContent.add(`${entry.hashAlgorithm}-${entry.digest}`)
+  })
+  return finished(indexStream).then(() => {
+    const contentDir = contentPath._contentDir(cache)
+    return glob(path.join(contentDir, '**'), {
+      follow: false,
+      nodir: true,
+      nosort: true
+    }).then(files => {
-      var stats = {
+      return BB.resolve({
         verifiedContent: 0,
-        collectedCount: 0,
+        reclaimedCount: 0,
         reclaimedSize: 0,
+        badContentCount: 0,
         keptSize: 0
-      }
-      return Promise.reduce(files, (stats, f) => {
-        var fullPath = path.join(contentDir, f)
-        if (byDigest[f]) {
-          var algo = opts.hashAlgorithm || 'sha512'
-          return verifyContent(fullPath, algo).then(info => {
+      }).tap((stats) => BB.map(files, (f) => {
+        const split = f.split(/[/\\]/)
+        const digest = split.slice(split.length - 3).join('')
+        const algo = split[split.length - 4]
+        if (liveContent.has(`${algo}-${digest}`)) {
+          return verifyContent(f, digest, algo).then(info => {
             if (!info.valid) {
-              stats.collectedCount++
+              stats.reclaimedCount++
+              stats.badContentCount++
               stats.reclaimedSize += info.size

@@ -128,9 +118,12 @@ } else {

         } else {
-          stats.collectedCount++
-          return fs.statAsync(fullPath).then(s => {
-            stats.reclaimedSize += s.size
-            return rimraf(path.join(contentDir, f)).then(() => stats)
+          // No entries refer to this content. We can delete.
+          stats.reclaimedCount++
+          return fs.statAsync(f).then(s => {
+            return rimraf(f).then(() => {
+              stats.reclaimedSize += s.size
+              return stats
+            })
           })
         }
-      }, stats)
+      }, {concurrency: opts.concurrency || 20}))
     })

@@ -140,10 +133,7 @@ })

-function verifyContent (filepath, algo) {
+function verifyContent (filepath, digest, algorithm) {
   return fs.statAsync(filepath).then(stat => {
-    var reader = fs.createReadStream(filepath)
-    var checksummer = checksumStream({
-      digest: path.basename(filepath),
-      algorithm: algo
-    })
-    var contentInfo = {
+    const reader = fs.createReadStream(filepath)
+    const checksummer = checksumStream({digest, algorithm})
+    const contentInfo = {
       size: stat.size,

@@ -153,19 +143,74 @@ valid: true

     checksummer.on('data', () => {})
-    return pipe(reader, checksummer).catch(err => {
-      if (err && err.code === 'EBADCHECKSUM') {
-        return rimraf(filepath).then(() => {
-          contentInfo.valid = false
-        })
-      } else {
-        throw err
+    return pipe(reader, checksummer).catch({code: 'EBADCHECKSUM'}, () => {
+      return rimraf(filepath).then(() => {
+        contentInfo.valid = false
+      })
+    }).then(() => contentInfo)
+  }).catch({code: 'ENOENT'}, () => ({size: 0, valid: false}))
+}
+function rebuildIndex (cache, opts) {
+  opts.log && opts.log.silly('verify', 'rebuilding index')
+  return index.ls(cache).then(entries => {
+    const stats = {
+      missingContent: 0,
+      rejectedEntries: 0,
+      totalEntries: 0
+    }
+    const buckets = {}
+    for (let k in entries) {
+      if (entries.hasOwnProperty(k)) {
+        const hashed = index._hashKey(k)
+        const entry = entries[k]
+        const excluded = opts && opts.filter && !opts.filter(entry)
+        excluded && stats.rejectedEntries++
+        if (buckets[hashed] && !excluded) {
+          buckets[hashed].push(entry)
+        } else if (buckets[hashed] && excluded) {
+          // skip
+        } else if (excluded) {
+          buckets[hashed] = []
+          buckets[hashed]._path = index._bucketPath(cache, k)
+        } else {
+          buckets[hashed] = [entry]
+          buckets[hashed]._path = index._bucketPath(cache, k)
+        }
+      }
+    }
-  }).then(() => contentInfo)
-}
+    return BB.map(Object.keys(buckets), key => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    }, {concurrency: opts.concurrency || 20}).then(() => stats)
+  })
+}
+module.exports.lastRun = lastRun
+function lastRun (cache) {
+  return fs.readFileAsync(
+    path.join(cache, '_lastverified'), 'utf8'
+  ).then(data => new Date(+data))
+function rebuildBucket (cache, bucket, stats, opts) {
+  return fs.truncateAsync(bucket._path).then(() => {
+    // This needs to be serialized because cacache explicitly
+    // lets very racy bucket conflicts clobber each other.
+    return BB.mapSeries(bucket, entry => {
+      const content = contentPath(cache, entry.digest, entry.hashAlgorithm)
+      return fs.statAsync(content).then(() => {
+        return index.insert(cache, entry.key, entry.digest, {
+          uid: opts.uid,
+          gid: opts.gid,
+          hashAlgorithm: entry.hashAlgorithm,
+          metadata: entry.metadata
+        }).then(() => { stats.totalEntries++ })
+      }).catch({code: 'ENOENT'}, () => {
+        stats.rejectedEntries++
+        stats.missingContent++
+      })
+    })
+  })
+}
+
+function cleanTmp (cache, opts) {
+  opts.log && opts.log.silly('verify', 'cleaning tmp directory')
+  return rimraf(path.join(cache, 'tmp'))
+}
+
+function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log && opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return fs.writeFileAsync(verifile, '' + (+(new Date())))
+}
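The shape of the new verify is worth calling out: `BB.reduce` walks an array of step functions, merges whatever partial stats object each one returns into a single accumulator, and times every step by name under `stats.runTime`. A reduced sketch of that runner outside cacache, with toy steps:

    'use strict'
    const BB = require('bluebird')

    // Each step returns (a promise of) a partial stats object.
    function countThings () { return {counted: 42} }
    function sweepThings () { return BB.delay(10).then(() => ({swept: 7})) }

    BB.reduce([countThings, sweepThings], (stats, step, i) => {
      const label = step.name || `step #${i}`
      const start = new Date()
      return BB.resolve(step()).then(s => {
        Object.assign(stats, s)
        if (!stats.runTime) { stats.runTime = {} }
        stats.runTime[label] = new Date() - start
        return stats
      })
    }, {}).then(stats => console.log(stats))
    // => { counted: 42, swept: 7, runTime: { countThings: ..., sweepThings: ... } }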
package.json

 {
   "name": "cacache",
-  "version": "6.0.2",
+  "version": "6.1.0",
   "cache-version": {

@@ -55,3 +55,2 @@ "content": "2",

"graceful-fs": "^4.1.10",
"lockfile": "^1.0.2",
"mississippi": "^1.2.0",

@@ -70,3 +69,4 @@ "mkdirp": "^0.5.1",

"require-inject": "^1.4.0",
"standard": "^9.0.0",
"safe-buffer": "^5.0.1",
"standard": "^9.0.1",
"standard-version": "^4.0.0",

@@ -73,0 +73,0 @@ "tacks": "^1.2.2",

README.md

@@ -389,2 +389,4 @@ # cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/zkat/cacache.svg)](https://travis-ci.org/zkat/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/zkat/cacache?svg=true)](https://ci.appveyor.com/project/zkat/cacache) [![Coverage Status](https://coveralls.io/repos/github/zkat/cacache/badge.svg?branch=latest)](https://coveralls.io/github/zkat/cacache?branch=latest)

+Alias: `cacache.rm`
+
 Removes the index entry for `key`. Content will still be accessible if

@@ -424,2 +426,3 @@ requested directly by content address ([`get.stream.byDigest`](#get-stream)).

 * Cleans up corrupted or invalid index entries.
+* Custom entry filtering options.
 * Garbage collects any content entries not referenced by the index.

@@ -443,5 +446,5 @@ * Checks digests for all content entries and removes invalid content.

 * `opts.gid` - gid to assign to cache and its contents
-* `opts.hashAlgorithm` - defaults to `'sha512'`. Hash to use for content checks.
+* `opts.filter` - receives a formatted entry. Return false to remove it.
+  Note: might be called more than once on the same entry.
##### Example

@@ -448,0 +451,0 @@
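Putting the documented options together, a plausible usage sketch for the new verifier; the cache path and the filter predicate are illustrative, not taken from the docs:

    'use strict'
    const cacache = require('cacache')

    cacache.verify('/path/to/cache', {
      // Illustrative filter: drop index entries whose keys we no longer want.
      // Per the note above, it may be called more than once per entry.
      filter: entry => !entry.key.startsWith('registry.old:')
    }).then(stats => {
      console.log('bad content removed:', stats.badContentCount)
      console.log('bytes reclaimed:', stats.reclaimedSize)
      console.log('entries kept:', stats.totalEntries)
    })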

rm.js

 'use strict'
-const Promise = require('bluebird')
+const BB = require('bluebird')
 const index = require('./lib/entry-index')
 const memo = require('./lib/memoization')
-const rimraf = Promise.promisify(require('rimraf'))
+const path = require('path')
+const rimraf = BB.promisify(require('rimraf'))
 const rmContent = require('./lib/content/rm')
-module.exports.all = all
-function all (cache) {
-  memo.clearMemoized()
-  return rimraf(cache)
-}
+module.exports = entry
 module.exports.entry = entry

@@ -27,1 +23,7 @@ function entry (cache, key) {

 }
+
+module.exports.all = all
+function all (cache) {
+  memo.clearMemoized()
+  return rimraf(path.join(cache, '*(content-*|index-*)'))
+}
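A note on the new `rm.all`: instead of removing the whole cache directory, it now targets only cache-owned subdirectories via the extglob pattern `*(content-*|index-*)`, so tmp state and any unrelated files under the cache root survive. Usage is unchanged:

    'use strict'
    const cacache = require('cacache')

    // Clears the in-memory memoization cache, then removes content-* and
    // index-* directories under the cache root, leaving other files alone.
    cacache.rm.all('/path/to/cache').then(() => {
      console.log('cache cleared')
    })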