Socket
Socket
Sign inDemoInstall

cacache

Package Overview
Dependencies
Maintainers
5
Versions
101
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

cacache - npm Package Compare versions

Comparing version 16.1.3 to 17.0.0

lib/util/glob.js

77

lib/content/read.js
'use strict'
const fs = require('@npmcli/fs')
const fs = require('fs/promises')
const fsm = require('fs-minipass')

@@ -49,20 +49,2 @@ const ssri = require('ssri')

module.exports.sync = readSync
// Synchronously read the content addressed by `integrity` out of `cache`.
// Returns the raw Buffer. Throws a size-mismatch error when `opts.size` is
// provided and disagrees with the data length, and an integrity error when
// the bytes on disk fail the subresource-integrity check.
function readSync (cache, integrity, opts = {}) {
  const expectedSize = opts.size
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    // `encoding: null` guarantees a Buffer, never a decoded string.
    const data = fs.readFileSync(cpath, { encoding: null })
    if (typeof expectedSize === 'number' && expectedSize !== data.length) {
      throw sizeError(expectedSize, data.length)
    }
    if (!ssri.checkData(data, sri)) {
      throw integrityError(sri, cpath)
    }
    return data
  })
}
module.exports.stream = readStream

@@ -92,3 +74,2 @@ module.exports.readStream = readStream

module.exports.copy = copy
module.exports.copy.sync = copySync

@@ -101,8 +82,2 @@ function copy (cache, integrity, dest) {

// Synchronously copy the content addressed by `integrity` to `dest`.
function copySync (cache, integrity, dest) {
  return withContentSriSync(cache, integrity, (cpath, sri) => fs.copyFileSync(cpath, dest))
}
module.exports.hasContent = hasContent

@@ -136,30 +111,2 @@

module.exports.hasContent.sync = hasContentSync
// Synchronously check whether `cache` holds content for `integrity`.
// Returns `{ size, sri, stat }` when the content exists, `false` when it is
// missing (or unreadable due to EPERM on Windows), and `false`-y for a
// falsy `integrity`.
// NOTE(review): stat errors other than ENOENT/EPERM fall through and yield
// `undefined` (falsy) rather than throwing — presumably deliberate
// best-effort cache behavior; confirm before relying on it.
function hasContentSync (cache, integrity) {
  if (!integrity) {
    return false
  }
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    let stat
    try {
      stat = fs.statSync(cpath)
    } catch (err) {
      if (err.code === 'ENOENT') {
        return false
      }
      if (err.code === 'EPERM') {
        // Windows can report EPERM for paths that are simply inaccessible;
        // treat that as "not present" there, but rethrow elsewhere.
        /* istanbul ignore else */
        if (process.platform !== 'win32') {
          throw err
        }
        return false
      }
      // Any other error code falls through to an implicit undefined,
      // matching the original control flow.
      return undefined
    }
    return { size: stat.size, sri, stat }
  })
}
async function withContentSri (cache, integrity, fn) {

@@ -208,24 +155,2 @@ const sri = ssri.parse(integrity)

// Synchronous analogue of withContentSri: resolve `integrity` to concrete
// on-disk content and invoke `fn(cpath, sri)` for it.
function withContentSriSync (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]
  if (digests.length <= 1) {
    // Single candidate: call straight through.
    const meta = digests[0]
    return fn(contentPath(cache, meta), meta)
  }
  // Multiple candidates: try each digest in turn, surfacing the last
  // failure only when none of them has usable local data.
  let lastErr = null
  for (const meta of digests) {
    try {
      return withContentSriSync(cache, meta, fn)
    } catch (err) {
      lastErr = err
    }
  }
  throw lastErr
}
function sizeError (expected, found) {

@@ -232,0 +157,0 @@ /* eslint-disable-next-line max-len */

'use strict'
const util = require('util')
const fs = require('fs/promises')
const contentPath = require('./path')
const { hasContent } = require('./read')
const rimraf = util.promisify(require('rimraf'))

@@ -15,3 +13,3 @@ module.exports = rm

if (content && content.sri) {
await rimraf(contentPath(cache, content.sri))
await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
return true

@@ -18,0 +16,0 @@ } else {

'use strict'
const events = require('events')
const util = require('util')
const contentPath = require('./path')
const fixOwner = require('../util/fix-owner')
const fs = require('@npmcli/fs')
const fs = require('fs/promises')
const moveFile = require('../util/move-file')

@@ -14,3 +12,2 @@ const Minipass = require('minipass')

const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const ssri = require('ssri')

@@ -44,3 +41,3 @@ const uniqueFilename = require('unique-filename')

if (!tmp.moved) {
await rimraf(tmp.target)
await fs.rm(tmp.target, { recursive: true, force: true })
}

@@ -116,3 +113,3 @@ }

if (!tmp.moved) {
await rimraf(tmp.target)
await fs.rm(tmp.target, { recursive: true, force: true })
}

@@ -158,3 +155,3 @@ }

const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
await fixOwner.mkdirfix(cache, path.dirname(tmpTarget))
await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
return {

@@ -170,6 +167,5 @@ target: tmpTarget,

await fixOwner.mkdirfix(cache, destDir)
await fs.mkdir(destDir, { recursive: true })
await moveFile(tmp.target, destination)
tmp.moved = true
await fixOwner.chownr(cache, destination)
}

@@ -176,0 +172,0 @@

'use strict'
const util = require('util')
const crypto = require('crypto')
const fs = require('@npmcli/fs')
const {
appendFile,
mkdir,
readFile,
readdir,
rm,
writeFile,
} = require('fs/promises')
const Minipass = require('minipass')

@@ -12,9 +18,5 @@ const path = require('path')

const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const hashToSegments = require('./util/hash-to-segments')
const indexV = require('../package.json')['cache-version'].index
const moveFile = require('@npmcli/move-file')
const _rimraf = require('rimraf')
const rimraf = util.promisify(_rimraf)
rimraf.sync = _rimraf.sync

@@ -70,3 +72,3 @@ module.exports.NotFoundError = class NotFoundError extends Error {

const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
await fixOwner.mkdirfix(cache, path.dirname(target))
await mkdir(path.dirname(target), { recursive: true })
return {

@@ -80,3 +82,3 @@ target,

if (!tmp.moved) {
return rimraf(tmp.target)
return rm(tmp.target, { recursive: true, force: true })
}

@@ -86,4 +88,4 @@ }

const write = async (tmp) => {
await fs.writeFile(tmp.target, newIndex, { flag: 'wx' })
await fixOwner.mkdirfix(cache, path.dirname(bucket))
await writeFile(tmp.target, newIndex, { flag: 'wx' })
await mkdir(path.dirname(bucket), { recursive: true })
// we use @npmcli/move-file directly here because we

@@ -93,9 +95,2 @@ // want to overwrite the existing file

tmp.moved = true
try {
await fixOwner.chownr(cache, bucket)
} catch (err) {
if (err.code !== 'ENOENT') {
throw err
}
}
}

@@ -132,3 +127,3 @@

try {
await fixOwner.mkdirfix(cache, path.dirname(bucket))
await mkdir(path.dirname(bucket), { recursive: true })
const stringified = JSON.stringify(entry)

@@ -143,4 +138,3 @@ // NOTE - Cleverness ahoy!

// this.
await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
await fixOwner.chownr(cache, bucket)
await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
} catch (err) {

@@ -152,7 +146,2 @@ if (err.code === 'ENOENT') {

throw err
// There's a class of race conditions that happen when things get deleted
// during fixOwner, or between the two mkdirfix/chownr calls.
//
// It's perfectly fine to just not bother in those cases and lie
// that the index entry was written. Because it's a cache.
}

@@ -162,27 +151,2 @@ return formatEntry(cache, entry)

module.exports.insert.sync = insertSync
// Synchronous version of insert: append an index entry for `key` to its
// bucket file and return the formatted entry.
function insertSync (cache, key, integrity, opts = {}) {
  const { metadata, size } = opts
  const bucket = bucketPath(cache, key)
  // Key order matters here: the serialized form below is hashed and
  // written to disk verbatim.
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size,
    metadata,
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  const payload = JSON.stringify(entry)
  fs.appendFileSync(bucket, `\n${hashEntry(payload)}\t${payload}`)
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    // The bucket may have been deleted out from under us — acceptable for
    // a cache; rethrow anything else.
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}
module.exports.find = find

@@ -210,23 +174,2 @@

module.exports.find.sync = findSync
// Synchronously look up the newest index entry for `key`. Returns null when
// the bucket file does not exist or contains no matching entry.
function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    // Later entries supersede earlier ones, so the last match wins.
    let latest = null
    for (const candidate of bucketEntriesSync(bucket)) {
      if (candidate && candidate.key === key) {
        latest = formatEntry(cache, candidate)
      }
    }
    return latest
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    }
    throw err
  }
}
module.exports.delete = del

@@ -240,16 +183,5 @@

const bucket = bucketPath(cache, key)
return rimraf(bucket)
return rm(bucket, { recursive: true, force: true })
}
module.exports.delete.sync = delSync
// Synchronously delete the entry for `key`. By default this writes a
// tombstone (null-integrity) entry; with `opts.removeFully` the whole
// bucket file is removed instead.
function delSync (cache, key, opts = {}) {
  if (opts.removeFully) {
    const bucket = bucketPath(cache, key)
    return rimraf.sync(bucket)
  }
  return insertSync(cache, key, null, opts)
}
module.exports.lsStream = lsStream

@@ -318,13 +250,6 @@

async function bucketEntries (bucket, filter) {
const data = await fs.readFile(bucket, 'utf8')
const data = await readFile(bucket, 'utf8')
return _bucketEntries(data, filter)
}
module.exports.bucketEntries.sync = bucketEntriesSync
// Synchronously read a bucket file and parse its entries.
function bucketEntriesSync (bucket, filter) {
  const raw = fs.readFileSync(bucket, 'utf8')
  return _bucketEntries(raw, filter)
}
function _bucketEntries (data, filter) {

@@ -346,6 +271,7 @@ const entries = []

obj = JSON.parse(pieces[1])
} catch (e) {
// Entry is corrupted!
return
} catch (_) {
// eslint-ignore-next-line no-empty-block
}
// coverage disabled here, no need to test with an entry that parses to something falsey
// istanbul ignore else
if (obj) {

@@ -410,3 +336,3 @@ entries.push(obj)

function readdirOrEmpty (dir) {
return fs.readdir(dir).catch((err) => {
return readdir(dir).catch((err) => {
if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {

@@ -413,0 +339,0 @@ return []

@@ -56,57 +56,2 @@ 'use strict'

// Synchronously fetch both the index metadata and the content for `key`.
// Serves from the memoization cache when possible; throws
// index.NotFoundError when the key has no index entry. Memoizes the result
// when `opts.memoize` is truthy.
function getDataSync (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    // Serve straight from memory without touching disk.
    return {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      integrity: memoized.entry.integrity,
      size: memoized.entry.size,
    }
  }
  const entry = index.find.sync(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  const data = read.sync(cache, entry.integrity, { integrity, size })
  const res = {
    metadata: entry.metadata,
    data,
    size: entry.size,
    integrity: entry.integrity,
  }
  if (memoize) {
    memo.put(cache, entry, res.data, opts)
  }
  return res
}
module.exports.sync = getDataSync
// Synchronously fetch raw content by digest, bypassing the index entirely.
// Serves from the memoization cache when possible and memoizes the result
// when `opts.memoize` is truthy.
function getDataByDigestSync (cache, digest, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get.byDigest(cache, digest, opts)
  if (memoized && memoize !== false) {
    return memoized
  }
  const res = read.sync(cache, digest, { integrity, size })
  if (memoize) {
    memo.put.byDigest(cache, digest, res, opts)
  }
  return res
}
module.exports.sync.byDigest = getDataByDigestSync
const getMemoizedStream = (memoized) => {

@@ -113,0 +58,0 @@ const stream = new Minipass()

@@ -20,4 +20,2 @@ 'use strict'

module.exports.get.byDigest = get.byDigest
module.exports.get.sync = get.sync
module.exports.get.sync.byDigest = get.sync.byDigest
module.exports.get.stream = get.stream

@@ -29,3 +27,2 @@ module.exports.get.stream.byDigest = get.stream.byDigest

module.exports.get.hasContent = get.hasContent
module.exports.get.hasContent.sync = get.hasContent.sync

@@ -32,0 +29,0 @@ module.exports.put = put

'use strict'
const util = require('util')
const { rm } = require('fs/promises')
const glob = require('./util/glob.js')
const index = require('./entry-index')
const memo = require('./memoization')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const rmContent = require('./content/rm')

@@ -28,5 +27,6 @@

// Clear the entire cache: drop the in-memory memoization table, then remove
// every `content-*` and `index-*` directory under `cache` in parallel.
//
// Fix: this span fused both sides of a diff hunk — a duplicate `function
// all` header and a leftover unconditional `return rimraf(...)` that made
// the glob/rm code unreachable. Reconstructed as the single async version.
async function all (cache) {
  memo.clearMemoized()
  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
}
'use strict'
const fs = require('@npmcli/fs')
const fs = require('fs/promises')
const move = require('@npmcli/move-file')

@@ -5,0 +5,0 @@ const pinflight = require('promise-inflight')

'use strict'
const fs = require('@npmcli/fs')
const fixOwner = require('./fix-owner')
const { withTempDir } = require('@npmcli/fs')
const fs = require('fs/promises')
const path = require('path')

@@ -26,9 +25,3 @@

}
return fs.withTempDir(path.join(cache, 'tmp'), cb, opts)
return withTempDir(path.join(cache, 'tmp'), cb, opts)
}
module.exports.fix = fixtmpdir
// Run ownership fixup over the cache's tmp directory.
function fixtmpdir (cache) {
  const tmpDir = path.join(cache, 'tmp')
  return fixOwner(cache, tmpDir)
}
'use strict'
const util = require('util')
const {
mkdir,
readFile,
rm,
stat,
truncate,
writeFile,
} = require('fs/promises')
const pMap = require('p-map')
const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const fs = require('@npmcli/fs')
const fsm = require('fs-minipass')
const glob = util.promisify(require('glob'))
const glob = require('./util/glob.js')
const index = require('./entry-index')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const ssri = require('ssri')
const globify = pattern => pattern.split('\\').join('/')
const hasOwnProperty = (obj, key) =>

@@ -80,5 +81,3 @@ Object.prototype.hasOwnProperty.call(obj, key)

opts.log.silly('verify', 'fixing cache permissions')
await fixOwner.mkdirfix(cache, cache)
// TODO - fix file permissions too
await fixOwner.chownr(cache, cache)
await mkdir(cache, { recursive: true })
return null

@@ -94,3 +93,3 @@ }

// 4. If content is live, verify its checksum and delete it if it fails
// 5. If content is not marked as live, rimraf it.
// 5. If content is not marked as live, rm it.
//

@@ -112,3 +111,3 @@ async function garbageCollect (cache, opts) {

const contentDir = contentPath.contentDir(cache)
const files = await glob(globify(path.join(contentDir, '**')), {
const files = await glob(path.join(contentDir, '**'), {
follow: false,

@@ -145,4 +144,4 @@ nodir: true,

stats.reclaimedCount++
const s = await fs.stat(f)
await rimraf(f)
const s = await stat(f)
await rm(f, { recursive: true, force: true })
stats.reclaimedSize += s.size

@@ -160,3 +159,3 @@ }

try {
const { size } = await fs.stat(filepath)
const { size } = await stat(filepath)
contentInfo.size = size

@@ -173,3 +172,3 @@ contentInfo.valid = true

await rimraf(filepath)
await rm(filepath, { recursive: true, force: true })
contentInfo.valid = false

@@ -220,3 +219,3 @@ }

async function rebuildBucket (cache, bucket, stats, opts) {
await fs.truncate(bucket._path)
await truncate(bucket._path)
// This needs to be serialized because cacache explicitly

@@ -227,3 +226,3 @@ // lets very racy bucket conflicts clobber each other.

try {
await fs.stat(content)
await stat(content)
await index.insert(cache, entry.key, entry.integrity, {

@@ -247,13 +246,9 @@ metadata: entry.metadata,

opts.log.silly('verify', 'cleaning tmp directory')
return rimraf(path.join(cache, 'tmp'))
return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
}
// Record the timestamp of this verification run in `<cache>/_lastverified`
// (read back later by lastRun).
//
// Fix: this span fused both sides of a diff hunk — duplicate function
// headers plus an old try/finally calling `fixOwner.chownr.sync` that made
// the new `return writeFile(...)` unreachable. Reconstructed as the single
// async version that just writes the file.
async function writeVerifile (cache, opts) {
  const verifile = path.join(cache, '_lastverified')
  opts.log.silly('verify', 'writing verifile to ' + verifile)
  return writeFile(verifile, `${Date.now()}`)
}

@@ -264,4 +259,4 @@

// Return the Date of the last successful verification, as recorded by
// writeVerifile in `<cache>/_lastverified`.
//
// Fix: the span contained two `const data` declarations (the pre-diff
// `fs.readFile` line fused with the post-diff `readFile` line) — a
// SyntaxError. Kept the `fs/promises` destructured form used elsewhere in
// the v17 module.
async function lastRun (cache) {
  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
  return new Date(+data)
}
{
"name": "cacache",
"version": "16.1.3",
"version": "17.0.0",
"cache-version": {

@@ -15,5 +15,2 @@ "content": "2",

"scripts": {
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags",
"test": "tap",

@@ -52,8 +49,6 @@ "snap": "tap",

"dependencies": {
"@npmcli/fs": "^2.1.0",
"@npmcli/fs": "^3.0.0",
"@npmcli/move-file": "^2.0.0",
"chownr": "^2.0.0",
"fs-minipass": "^2.1.0",
"glob": "^8.0.1",
"infer-owner": "^1.0.4",
"lru-cache": "^7.7.1",

@@ -64,6 +59,4 @@ "minipass": "^3.1.6",

"minipass-pipeline": "^1.2.4",
"mkdirp": "^1.0.4",
"p-map": "^4.0.0",
"promise-inflight": "^1.0.1",
"rimraf": "^3.0.2",
"ssri": "^9.0.0",

@@ -75,7 +68,7 @@ "tar": "^6.1.11",

"@npmcli/eslint-config": "^3.0.1",
"@npmcli/template-oss": "3.5.0",
"@npmcli/template-oss": "4.5.1",
"tap": "^16.0.0"
},
"engines": {
"node": "^12.13.0 || ^14.15.0 || >=16.0.0"
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
},

@@ -85,5 +78,11 @@ "templateOSS": {

"windowsCI": false,
"version": "3.5.0"
"version": "4.5.1"
},
"author": "GitHub Inc."
"author": "GitHub Inc.",
"tap": {
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
]
}
}

@@ -604,3 +604,3 @@ # cacache [![npm version](https://img.shields.io/npm/v/cacache.svg)](https://npm.im/cacache) [![license](https://img.shields.io/npm/l/cacache.svg)](https://npm.im/cacache) [![Travis](https://img.shields.io/travis/npm/cacache.svg)](https://travis-ci.org/npm/cacache) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/cacache?svg=true)](https://ci.appveyor.com/project/npm/cacache) [![Coverage Status](https://coveralls.io/repos/github/npm/cacache/badge.svg?branch=latest)](https://coveralls.io/github/npm/cacache?branch=latest)

cacache.tmp.withTmp(cache, dir => {
return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
return fs.writeFile(path.join(dir, 'blablabla'), 'blabla contents', { encoding: 'utf8' })
}).then(() => {

@@ -607,0 +607,0 @@ // `dir` no longer exists

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc