hyperdrive
Advanced tools
Comparing version 11.2.0 to 11.3.0
151
index.js
@@ -11,2 +11,4 @@ const Hyperbee = require('hyperbee') | ||
const keyEncoding = new SubEncoder('files', 'utf-8') | ||
module.exports = class Hyperdrive extends ReadyResource { | ||
@@ -20,8 +22,5 @@ constructor (corestore, key, opts = {}) { | ||
} | ||
const { _checkout, _db, _files, onwait } = opts | ||
this._onwait = onwait || null | ||
this.corestore = corestore | ||
this.db = _db || makeBee(key, corestore, this._onwait) | ||
this.files = _files || this.db.sub('files') | ||
this.db = opts._db || makeBee(key, corestore, opts) | ||
this.core = this.db.core | ||
@@ -32,4 +31,5 @@ this.blobs = null | ||
this._openingBlobs = null | ||
this._checkout = _checkout || null | ||
this._batching = !!_files | ||
this._onwait = opts.onwait || null | ||
this._batching = !!(opts._checkout === null && opts._db) | ||
this._checkout = opts._checkout || null | ||
@@ -87,4 +87,3 @@ this.ready().catch(safetyCatch) | ||
_checkout: this._checkout || this, | ||
_db: snapshot, | ||
_files: null | ||
_db: snapshot | ||
}) | ||
@@ -101,4 +100,3 @@ } | ||
_checkout: null, | ||
_db: this.db, | ||
_files: this.files.batch() | ||
_db: this.db.batch() | ||
}) | ||
@@ -108,3 +106,3 @@ } | ||
async flush () { | ||
await this.files.flush() | ||
await this.db.flush() | ||
return this.close() | ||
@@ -114,17 +112,10 @@ } | ||
async _close () { | ||
try { | ||
if (this.blobs !== null && (this._checkout === null || this.blobs !== this._checkout.blobs)) { | ||
await this.blobs.core.close() | ||
} | ||
if (!this._batching) await this.db.close() | ||
await this.files.close() // workaround to flush the batches for now. TODO: kill the sub! | ||
} catch (e) { | ||
safetyCatch(e) | ||
if (this.blobs && (!this._checkout || this.blobs !== this._checkout.blobs)) { | ||
await this.blobs.core.close() | ||
} | ||
if (this._checkout || this._batching) return | ||
try { | ||
await this.db.close() | ||
if (!this._checkout && !this._batching) { | ||
await this.corestore.close() | ||
} catch (e) { | ||
safetyCatch(e) | ||
} | ||
@@ -136,3 +127,3 @@ } | ||
const header = await this.db.getHeader(opts) | ||
const header = await getBee(this.db).getHeader(opts) | ||
if (!header) return false | ||
@@ -183,3 +174,3 @@ | ||
this.blobs = new Hyperblobs(blobsCore) | ||
this.db.metadata.contentFeed = this.blobs.core.key | ||
getBee(this.db).metadata.contentFeed = this.blobs.core.key | ||
@@ -221,8 +212,8 @@ this.emit('blobs', this.blobs) | ||
await this.getBlobs() | ||
const id = await this.blobs.put(buf) | ||
return this.files.put(normalizePath(name), { executable, linkname: null, blob: id, metadata }) | ||
const blob = await this.blobs.put(buf) | ||
return this.db.put(std(name), { executable, linkname: null, blob, metadata }, { keyEncoding }) | ||
} | ||
async del (name) { | ||
return this.files.del(normalizePath(name)) | ||
return this.db.del(std(name), { keyEncoding }) | ||
} | ||
@@ -264,5 +255,3 @@ | ||
async purge () { | ||
if (this._checkout || this._batching) { | ||
throw new Error('Can only purge the main session') | ||
} | ||
if (this._checkout || this._batch) throw new Error('Can only purge the main session') | ||
@@ -278,9 +267,9 @@ await this.ready() // Ensure blobs loaded if present | ||
async symlink (name, dst, { metadata = null } = {}) { | ||
return this.files.put(normalizePath(name), { executable: false, linkname: dst, blob: null, metadata }) | ||
return this.db.put(std(name), { executable: false, linkname: dst, blob: null, metadata }, { keyEncoding }) | ||
} | ||
entry (name, opts) { | ||
return typeof name === 'string' | ||
? this.files.get(normalizePath(name), opts) | ||
: Promise.resolve(name) | ||
async entry (name, opts) { | ||
if (typeof name !== 'string') return name | ||
return this.db.get(std(name), { ...opts, keyEncoding }) | ||
} | ||
@@ -293,11 +282,5 @@ | ||
watch (folder) { | ||
folder = normalizePath(folder || '/') | ||
folder = std(folder || '/', true) | ||
if (folder.endsWith('/')) folder = folder.slice(0, -1) | ||
const encoder = new SubEncoder() | ||
const files = encoder.sub('files', this.db.keyEncoding) | ||
const options = { map: (snap) => this._makeCheckout(snap) } | ||
return this.db.watch({ gt: files.encode(folder + '/'), lt: files.encode(folder + '0') }, options) | ||
return this.db.watch(prefixRange(folder), { keyEncoding, map: (snap) => this._makeCheckout(snap) }) | ||
} | ||
@@ -307,8 +290,6 @@ | ||
if (typeof folder === 'object' && folder && !opts) return this.diff(length, null, folder) | ||
if (folder) { | ||
if (folder.endsWith('/')) folder = folder.slice(0, -1) | ||
if (folder) folder = normalizePath(folder) | ||
opts = { gt: folder + '/', lt: folder + '0', ...opts } | ||
} | ||
return this.files.createDiffStream(length, opts) | ||
folder = std(folder || '/', true) | ||
return this.db.createDiffStream(length, prefixRange(folder), { ...opts, keyEncoding }) | ||
} | ||
@@ -354,4 +335,4 @@ | ||
entries (opts) { | ||
return this.files.createReadStream(opts) | ||
entries (range, opts) { | ||
return this.db.createReadStream(range, { ...opts, keyEncoding }) | ||
} | ||
@@ -379,18 +360,15 @@ | ||
// atm always recursive, but we should add some depth thing to it | ||
list (folder = '/', { recursive = true } = {}) { | ||
list (folder, opts) { | ||
if (typeof folder === 'object') return this.list(undefined, folder) | ||
if (folder.endsWith('/')) folder = folder.slice(0, -1) | ||
if (folder) folder = normalizePath(folder) | ||
folder = std(folder || '/', true) | ||
if (recursive === false) return shallowReadStream(this.files, folder, false) | ||
// '0' is binary +1 of / | ||
return folder ? this.entries({ gt: folder + '/', lt: folder + '0' }) : this.entries() | ||
if (opts && opts.recursive === false) return shallowReadStream(this.db, folder, false) | ||
return this.entries(prefixRange(folder)) | ||
} | ||
readdir (folder = '/') { | ||
if (folder.endsWith('/')) folder = folder.slice(0, -1) | ||
if (folder) folder = normalizePath(folder) | ||
return shallowReadStream(this.files, folder, true) | ||
readdir (folder) { | ||
folder = std(folder || '/', true) | ||
return shallowReadStream(this.db, folder, true) | ||
} | ||
@@ -516,3 +494,3 @@ | ||
if (err) return cb(err) | ||
self.files.put(normalizePath(name), { executable, linkname: null, blob: ws.id, metadata }).then(() => cb(null), cb) | ||
self.db.put(std(name), { executable, linkname: null, blob: ws.id, metadata }, { keyEncoding }).then(() => cb(null), cb) | ||
} | ||
@@ -530,3 +508,3 @@ | ||
static normalizePath (name) { | ||
return normalizePath(name) | ||
return std(name) | ||
} | ||
@@ -542,6 +520,3 @@ } | ||
try { | ||
node = await files.peek({ | ||
gt: folder + prev, | ||
lt: folder + '0' | ||
}) | ||
node = await files.peek(prefixRange(folder, prev), { keyEncoding }) | ||
} catch (err) { | ||
@@ -568,13 +543,33 @@ return cb(err) | ||
function makeBee (key, corestore, onwait) { | ||
const metadataOpts = key | ||
? { key, cache: true, exclusive: true, onwait } | ||
: { name: 'db', cache: true, exclusive: true, onwait } | ||
const core = corestore.get(metadataOpts) | ||
const metadata = { contentFeed: null } | ||
return new Hyperbee(core, { keyEncoding: 'utf-8', valueEncoding: 'json', metadata }) | ||
// Create the metadata Hyperbee for a drive.
// When no `key` is given the core is resolved by name ('db', the writable
// original); otherwise it is loaded by its public key (reader/clone).
// `opts.onwait` is forwarded so callers can observe blocking reads.
function makeBee (key, corestore, opts) {
  const name = key ? undefined : 'db'
  const descriptor = { key, name, cache: true, exclusive: true, onwait: opts.onwait }
  const core = corestore.get(descriptor)
  const beeOpts = {
    keyEncoding: 'utf-8',
    valueEncoding: 'json',
    metadata: { contentFeed: null }
  }
  return new Hyperbee(core, beeOpts)
}
function normalizePath (name) { | ||
return unixPathResolve('/', name) | ||
// Unwrap a Hyperbee batch to the underlying tree.
// A Batch instance exposes the actual Hyperbee via its `.tree` property;
// a plain Hyperbee (no truthy `.tree`) is returned as-is.
function getBee (bee) {
  if (bee.tree) return bee.tree
  return bee
}
// Normalize `name` to an absolute unix-style path and validate it.
// Pass `removeSlash` only when the result is used to build a prefix
// range: it strips one trailing '/' so range math stays consistent.
// Throws if the normalized path is not a valid filename (e.g. '/').
function std (name, removeSlash) {
  let normalized = unixPathResolve('/', name)
  const trailing = removeSlash && normalized.endsWith('/')
  if (trailing) normalized = normalized.slice(0, -1)
  validateFilename(normalized)
  return normalized
}
// Reject names that resolve to the drive root — '/' is never a valid
// file entry, only a folder prefix.
function validateFilename (name) {
  const isRoot = name === '/'
  if (isRoot) throw new Error('Invalid filename: ' + name)
}
// Build a Hyperbee range covering every key under the `name` prefix.
// '0' is the byte immediately after '/', so (name + prev, name + '0')
// exclusively brackets all keys that start with name + '/'.
function prefixRange (name, prev = '/') {
  const gt = name + prev
  const lt = name + '0'
  return { gt, lt }
}
{ | ||
"name": "hyperdrive", | ||
"version": "11.2.0", | ||
"version": "11.3.0", | ||
"description": "Hyperdrive is a secure, real-time distributed file system", | ||
@@ -31,3 +31,3 @@ "main": "index.js", | ||
"streamx": "^2.12.4", | ||
"sub-encoder": "^2.1.0", | ||
"sub-encoder": "^2.1.1", | ||
"unix-path-resolve": "^1.0.2" | ||
@@ -34,0 +34,0 @@ }, |
@@ -213,7 +213,7 @@ # Hyperdrive | ||
#### `const stream = await drive.entries([options])` | ||
#### `const stream = await drive.entries([range], [options])` | ||
Returns a read stream of entries in the drive. | ||
`options` are the same as the `options` to `Hyperbee().createReadStream(options)`. | ||
`range` and `options` are the same as `Hyperbee().createReadStream([range], [options])`. | ||
@@ -220,0 +220,0 @@ #### `const mirror = drive.mirror(out, [options])` |
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
35495
424
Updatedsub-encoder@^2.1.1