cacache
Advanced tools
Comparing version 11.2.0 to 11.3.0
@@ -5,2 +5,12 @@ # Change Log | ||
<a name="11.3.0"></a> | ||
# [11.3.0](https://github.com/zkat/cacache/compare/v11.2.0...v11.3.0) (2018-11-05) | ||
### Features | ||
* **get:** add sync API for reading ([db1e094](https://github.com/zkat/cacache/commit/db1e094)) | ||
<a name="11.2.0"></a> | ||
@@ -7,0 +17,0 @@ # [11.2.0](https://github.com/zkat/cacache/compare/v11.1.0...v11.2.0) (2018-08-08) |
49
get.js
@@ -66,2 +66,51 @@ 'use strict' | ||
// Synchronous variant of `get`: resolve `key` through the index and
// return `{ metadata, data, integrity, size }` (getDataSync throws a
// NotFoundError when the key has no entry).
module.exports.sync = function get (cache, key, opts) {
  const byDigest = false
  return getDataSync(byDigest, cache, key, opts)
}
// Synchronous variant of `get.byDigest`: fetch content data directly by
// its integrity digest, bypassing the index; returns the raw data.
module.exports.sync.byDigest = function getByDigest (cache, digest, opts) {
  const byDigest = true
  return getDataSync(byDigest, cache, digest, opts)
}
// Shared implementation behind get.sync and get.sync.byDigest.
//
// When `byDigest` is true, `key` is an integrity digest and the index is
// skipped entirely; the raw content data is returned. Otherwise the key
// is resolved through the index first and an object of the shape
// `{ metadata, data, integrity, size }` is returned.
//
// Throws index.NotFoundError when a keyed lookup finds no entry.
function getDataSync (byDigest, cache, key, opts) {
  opts = GetOpts(opts)

  // Serve from the in-memory memoization cache when allowed and present.
  const memoized = byDigest
    ? memo.get.byDigest(cache, key, opts)
    : memo.get(cache, key, opts)
  if (memoized && opts.memoize !== false) {
    if (byDigest) {
      return memoized
    }
    return {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      integrity: memoized.entry.integrity,
      size: memoized.entry.size
    }
  }

  // Keyed lookups go through the index; byDigest reads skip it.
  const entry = !byDigest && index.find.sync(cache, key, opts)
  if (!entry && !byDigest) {
    throw new index.NotFoundError(cache, key)
  }
  const data = read.sync(cache, byDigest ? key : entry.integrity, {
    integrity: opts.integrity,
    size: opts.size
  })
  let res
  if (byDigest) {
    res = data
  } else {
    res = {
      metadata: entry.metadata,
      data: data,
      size: entry.size,
      integrity: entry.integrity
    }
  }

  // Optionally populate the memoization cache for subsequent reads.
  if (opts.memoize) {
    if (byDigest) {
      memo.put.byDigest(cache, key, res, opts)
    } else {
      memo.put(cache, entry, res.data, opts)
    }
  }
  return res
}
module.exports.stream = getStream | ||
@@ -68,0 +117,0 @@ function getStream (cache, key, opts) { |
@@ -78,6 +78,32 @@ 'use strict' | ||
module.exports.insert.sync = insertSync | ||
// Synchronously append an index entry for `key` to its bucket file and
// return the formatted entry. A null/undefined `integrity` records an
// entry with `integrity: null` (how delSync marks deletions).
function insertSync (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucketFile = bucketPath(cache, key)
  // NOTE: property order matters — it fixes the serialized JSON, and the
  // per-line hash written next to it is computed over that exact string.
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  fixOwner.mkdirfix.sync(path.dirname(bucketFile), opts.uid, opts.gid)
  const json = JSON.stringify(entry)
  // Buckets are append-only: each line is `<hash>\t<json>`, newline-led;
  // malformed lines are rejected later by the hash check when reading.
  fs.appendFileSync(bucketFile, `\n${hashEntry(json)}\t${json}`)
  try {
    fixOwner.chownr.sync(bucketFile, opts.uid, opts.gid)
  } catch (err) {
    // A bucket that vanished out from under us is fine; anything else
    // is a genuine failure.
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}
module.exports.find = find | ||
function find (cache, key) { | ||
const bucket = bucketPath(cache, key) | ||
return bucketEntries(cache, bucket).then(entries => { | ||
return bucketEntries(bucket).then(entries => { | ||
return entries.reduce((latest, next) => { | ||
@@ -99,2 +125,22 @@ if (next && next.key === key) { | ||
module.exports.find.sync = findSync | ||
// Synchronous version of find: scan `key`'s bucket and return the last
// matching entry (formatted), or null when the key has no entry or the
// bucket file does not exist yet.
function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  let latest = null
  try {
    for (const candidate of bucketEntriesSync(bucket)) {
      // Buckets are append-only, so a later line supersedes earlier ones.
      if (candidate && candidate.key === key) {
        latest = formatEntry(cache, candidate)
      }
    }
  } catch (err) {
    // A missing bucket just means this key was never written.
    if (err.code === 'ENOENT') {
      return null
    }
    throw err
  }
  return latest
}
module.exports.delete = del | ||
@@ -105,2 +151,7 @@ function del (cache, key, opts) { | ||
module.exports.delete.sync = delSync | ||
// Synchronous delete. The index is append-only, so a deletion is just a
// new entry inserted with a null integrity rather than a rewrite of the
// bucket file.
function delSync (cache, key, opts) {
  const tombstoneIntegrity = null
  return insertSync(cache, key, tombstoneIntegrity, opts)
}
module.exports.lsStream = lsStream | ||
@@ -122,3 +173,2 @@ function lsStream (cache) { | ||
const getKeyToEntry = bucketEntries( | ||
cache, | ||
path.join(subbucketPath, entry) | ||
@@ -159,28 +209,35 @@ ).reduce((acc, entry) => { | ||
// Asynchronously read a bucket file and parse its entry lines. Resolves
// to an array of entry objects; lines that fail the hash check or
// JSON.parse are silently dropped (see _bucketEntries).
// NOTE(review): `filter` is forwarded but _bucketEntries never uses it —
// confirm whether it can be dropped.
function bucketEntries (bucket, filter) {
  return readFileAsync(
    bucket, 'utf8'
  ).then(data => _bucketEntries(data, filter))
}
// Synchronous twin of bucketEntries: read the bucket file and parse its
// lines with the shared _bucketEntries helper.
function bucketEntriesSync (bucket, filter) {
  return _bucketEntries(fs.readFileSync(bucket, 'utf8'), filter)
}
// Parse the raw text of a bucket file into an array of entry objects.
//
// Each non-empty line has the form `<hash>\t<json>`. A line is silently
// dropped when:
//   * it is empty,
//   * its recorded hash does not match hashEntry(<json>) — corruption or
//     tampering; either way the line is untrusted,
//   * its JSON payload fails to parse, or
//   * the payload parses to a falsy value.
// NOTE(review): `filter` is accepted but currently unused — confirm
// before removing it from the signature.
function _bucketEntries (data, filter) {
  const entries = []
  for (const line of data.split('\n')) {
    if (!line) {
      continue
    }
    const [recordedHash, payload] = line.split('\t')
    if (!payload || hashEntry(payload) !== recordedHash) {
      // Hash is no good! Corruption or malice? Doesn't matter — skip it.
      continue
    }
    let parsed
    try {
      parsed = JSON.parse(payload)
    } catch (e) {
      // Entry is corrupted JSON — skip it.
      continue
    }
    if (parsed) {
      entries.push(parsed)
    }
  }
  return entries
}
@@ -187,0 +244,0 @@ |
@@ -34,2 +34,30 @@ 'use strict' | ||
module.exports.chownr.sync = fixOwnerSync | ||
// Synchronously chown `filepath` (recursively, via chownr) to the
// requested uid/gid, substituting the current process ids for whichever
// of the two is not a number.
//
// Returns undefined when nothing needed doing (or on success), and null
// when the target path does not exist (ENOENT is tolerated — a missing
// path is treated as nothing-to-fix). Other chown failures are rethrown.
function fixOwnerSync (filepath, uid, gid) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return
  }
  if (typeof uid !== 'number' && typeof gid !== 'number') {
    // There's no permissions override. Nothing to do here.
    return
  }
  if ((typeof uid === 'number' && process.getuid() === uid) &&
      (typeof gid === 'number' && process.getgid() === gid)) {
    // No need to override if it's already what we used.
    return
  }
  try {
    chownr.sync(
      filepath,
      typeof uid === 'number' ? uid : process.getuid(),
      typeof gid === 'number' ? gid : process.getgid()
    )
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    }
    // FIX: previously every other chown failure (EPERM, EACCES, …) was
    // silently swallowed here, leaving the cache mis-owned with no
    // signal. Rethrow so real permission problems surface to callers.
    throw err
  }
}
module.exports.mkdirfix = mkdirfix | ||
@@ -46,1 +74,19 @@ function mkdirfix (p, uid, gid, cb) { | ||
} | ||
module.exports.mkdirfix.sync = mkdirfixSync | ||
// Synchronously mkdir -p `p` and fix ownership of what was created.
//
// Returns the topmost directory mkdirp actually created, null when the
// path already existed (EEXIST — ownership is still normalized on `p`
// itself), and undefined when mkdirp made nothing without erroring.
// Any non-EEXIST failure is rethrown.
function mkdirfixSync (p, uid, gid) {
  try {
    const created = mkdirp.sync(p)
    if (!created) {
      // mkdirp made nothing (and didn't error): nothing to chown.
      return
    }
    fixOwnerSync(created, uid, gid)
    return created
  } catch (err) {
    if (err.code !== 'EEXIST') {
      throw err
    }
    // Path already existed — still make sure its ownership is right.
    fixOwnerSync(p, uid, gid)
    return null
  }
}
// Public read API facade: thin delegating wrappers over lib/get.js. The
// 11.3.0 addition is the two sync variants (`x.get.sync[.byDigest]`).
x.get.byDigest = (cache, hash, opts) => get.byDigest(cache, hash, opts)
x.get.sync = (cache, key, opts) => get.sync(cache, key, opts)
x.get.sync.byDigest = (cache, key, opts) => get.sync.byDigest(cache, key, opts)
x.get.stream = (cache, key, opts) => get.stream(cache, key, opts)
x.get.stream.byDigest = (cache, hash, opts) => get.stream.byDigest(cache, hash, opts)
// Spanish-locale facade: mirrors x.get.* under localized names (saca =
// get, porHacheo = byDigest, sinc = sync, flujo = stream). The localized
// names are public API and must not be translated.
x.saca.porHacheo = (cache, hacheo, ops) => get.byDigest(cache, hacheo, ops)
x.saca.sinc = (cache, clave, ops) => get.sync(cache, clave, ops)
x.saca.sinc.porHacheo = (cache, hacheo, ops) => get.sync.byDigest(cache, hacheo, ops)
x.saca.flujo = (cache, clave, ops) => get.stream(cache, clave, ops)
x.saca.flujo.porHacheo = (cache, hacheo, ops) => get.stream.byDigest(cache, hacheo, ops)
{ | ||
"name": "cacache", | ||
"version": "11.2.0", | ||
"version": "11.3.0", | ||
"cache-version": { | ||
@@ -5,0 +5,0 @@ "content": "2", |
110919
1541