New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

autobase

Package Overview
Dependencies
Maintainers
3
Versions
91
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

autobase - npm Package Compare versions

Comparing version 6.0.0-rc1 to 6.0.0-rc10

lib/topolist.js

458

index.js

@@ -18,11 +18,12 @@ const b4a = require('b4a')

const { AUTOBASE_VERSION } = require('./lib/constants')
const inspect = Symbol.for('nodejs.util.inspect.custom')
const AUTOBASE_VERSION = 1
// default is to automatically ack
const DEFAULT_ACK_INTERVAL = 10 * 1000
const DEFAULT_ACK_INTERVAL = 10_000
const DEFAULT_ACK_THRESHOLD = 4
const FF_THRESHOLD = 16
const DEFAULT_FF_TIMEOUT = 60_000

@@ -43,2 +44,3 @@ const REMOTE_ADD_BATCH = 64

this.bootstrap = bootstrap ? toKey(bootstrap) : null
this.keyPair = handlers.keyPair || null
this.valueEncoding = c.from(handlers.valueEncoding || 'binary')

@@ -54,4 +56,5 @@ this.store = store

this.local = Autobase.getLocalCore(this.store, handlers, this.encryptionKey)
this.local = null
this.localWriter = null
this.activeWriters = new ActiveWriters()

@@ -61,5 +64,10 @@ this.linearizer = null

this.fastForwardingTo = handlers.fastForward !== false ? 0 : -1
this.fastForwardEnabled = handlers.fastForward !== false
this.fastForwarding = false
this.fastForwardTo = null
if (this.fastForwardEnabled && isObject(handlers.fastForward)) {
this.fastForwardTo = handlers.fastForward
}
this._checkWriters = []

@@ -74,3 +82,2 @@ this._appending = null

this._localDigest = null
this._maybeUpdateDigest = true
this._needsWakeup = true

@@ -87,5 +94,7 @@ this._needsWakeupHeads = true

this._handlers = handlers || {}
this._warn = emitWarning.bind(this)
this._advancing = null
this._advanced = null
this.reindexing = false

@@ -135,3 +144,5 @@ this._bump = debounceify(() => {

this.ready().catch(safetyCatch)
if (handlers.eagerReady !== false) {
this.ready().catch(safetyCatch)
}
}

@@ -208,2 +219,10 @@

const opts = {
valueEncoding: this.valueEncoding,
keyPair: this.keyPair,
key: this._primaryBootstrap ? await this._primaryBootstrap.getUserData('autobase/local') : null
}
this.local = Autobase.getLocalCore(this.store, opts, this.encryptionKey)
await this.local.ready()

@@ -233,3 +252,6 @@

await this._primaryBootstrap.ready()
this._primaryBootstrap.setUserData('autobase/local', this.local.key)
if (this.encryptionKey) await this._primaryBootstrap.setUserData('autobase/encryption', this.encryptionKey)
} else {
this.local.setUserData('autobase/local', this.local.key)
}

@@ -242,3 +264,3 @@

: this.bootstrap && !b4a.equals(this.bootstrap, this.local.key)
? 0
? -1
: this.maxSupportedVersion

@@ -253,7 +275,7 @@

async _loadSystemInfo () {
const pointer = await this.local.getUserData('autobase/system')
const pointer = await this.local.getUserData('autobase/boot')
const bootstrap = this.bootstrap || (await this.local.getUserData('referrer')) || this.local.key
if (!pointer) return { bootstrap, system: null }
const { indexed, views } = c.decode(messages.SystemPointer, pointer)
const { indexed, views } = c.decode(messages.BootRecord, pointer)
const { key, length } = indexed

@@ -275,2 +297,6 @@

if (system.version > this.maxSupportedVersion) {
throw new Error('Autobase upgrade required')
}
this._initialViews = [{ name: '_system', key, length }]

@@ -296,3 +322,4 @@

safetyCatch(err)
await this.local.setUserData('autobase/system', null)
await this.local.setUserData('autobase/boot', null)
this.store.close().catch(safetyCatch)
throw err

@@ -308,2 +335,6 @@ }

this._initialViews = null
} else {
// check if this is a v0 base
const record = await this.getUserData('autobase/system')
if (record !== null) this.emit('reindexing')
}

@@ -313,8 +344,25 @@

if (this.localWriter && !this.system.bootstrapping) {
await this._updateDigest()
await this._restoreLocalState()
}
if (this.fastForwardTo !== null) {
const { key, length, timeout } = this.fastForwardTo
this.fastForwardTo = null // will get reset once ready
this.initialFastForward(key, length, timeout || DEFAULT_FF_TIMEOUT)
}
if (this.localWriter && this._ackInterval) this._startAckTimer()
}
async _restoreLocalState () {
const version = await this.localWriter.getVersion()
if (version > this.maxSupportedVersion) {
this.store.close().catch(safetyCatch)
throw new Error('Autobase version cannot be downgraded')
}
await this._updateDigest()
}
async _open () {

@@ -328,5 +376,8 @@ this._prebump = this._openPreBump()

// set reindexing for initial bump
this.reindexing = true
// queue a full bump that handles wakeup etc (not legal to wait for that here)
this._queueBump()
this._advanced = this._advancing
this._advanced = this._advancing.then(() => { this.reindexing = false }, safetyCatch)

@@ -357,2 +408,10 @@ this.queueFastForward()

this.close().catch(safetyCatch)
// if no one is listening we should crash! we cannot rely on the EE here
// as this is wrapped in a promise so instead of nextTick throw it
if (ReadyResource.listenerCount(this, 'error') === 0) {
crashSoon(err)
return
}
this.emit('error', err)

@@ -434,3 +493,3 @@ }

if (this.fastForwardTo !== null) return true
return this.fastForwardingTo > 0 && this.fastForwardingTo > this.system.core.getBackingCore().length
return this.fastForwardEnabled && this.fastForwarding
}

@@ -485,2 +544,5 @@

if (!this.opened) await this.ready()
if (this.closing) throw new Error('Autobase is closing')
await this._advanced // ensure all local state has been applied, only needed until persistent batches

@@ -570,3 +632,5 @@

await this._presystem
await this._primaryBootstrap.setUserData(key, val)
const core = this._primaryBootstrap === null ? this.local : this._primaryBootstrap
await core.setUserData(key, val)
}

@@ -576,3 +640,5 @@

await this._presystem
return await this._primaryBootstrap.getUserData(key)
const core = this._primaryBootstrap === null ? this.local : this._primaryBootstrap
return await core.getUserData(key)
}

@@ -634,3 +700,3 @@

const p = []
for (const w of this.activeWriters) p.push(w.update())
for (const w of this.activeWriters) p.push(w.update().catch(this._warn))
return Promise.all(p)

@@ -669,3 +735,2 @@ }

this._addCheckpoints = !!(this.localWriter && (this.localWriter.isIndexer || this._isPending()))
this._maybeUpdateDigest = true
this._updateAckThreshold()

@@ -684,8 +749,5 @@ }

this._updateLinearizer([bootstrap], [])
this._updateDigest([bootstrap])
}
async _makeLinearizer (sys) {
this._maybeUpdateDigest = true
if (sys === null) {

@@ -715,5 +777,3 @@ return this._bootstrapLinearizer()

async _reindex (nodes) {
this._maybeUpdateDigest = true
if (nodes) {
if (nodes && nodes.length) {
this._undoAll()

@@ -723,5 +783,9 @@ await this.system.update()

const sameIndexers = this.system.sameIndexers(this.linearizer.indexers)
await this._makeLinearizer(this.system)
await this._viewStore.migrate()
if (!sameIndexers) await this._viewStore.migrate()
this.version = this.system.version
this.queueFastForward()

@@ -750,9 +814,3 @@

_onUpgrade (version) {
if (version > this.maxSupportedVersion) {
this._onError(new Error('Autobase upgrade required'))
return false
}
this.version = version
return true
if (version > this.maxSupportedVersion) throw new Error('Autobase upgrade required')
}

@@ -807,3 +865,3 @@

async _advanceSystemPointer (length) {
async _advanceBootRecord (length) {
if (length) { // TODO: remove when we are 100% we never hit the return in this if

@@ -829,8 +887,13 @@ const { views } = await this.system.getIndexedInfo(length)

await this._setSystemPointer(this.system.core.key, length, this.system.heads, views)
await this._setBootRecord(this.system.core.key, length, this.system.heads, views)
}
async _setSystemPointer (key, length, heads, views) {
const pointer = c.encode(messages.SystemPointer, { indexed: { key, length }, heads, views })
await this.local.setUserData('autobase/system', pointer)
// Persist the boot record — the indexed system checkout ({ key, length }),
// the current heads and the view list — under 'autobase/boot' in the
// local core's user data, so the base can resume from it on next open.
async _setBootRecord (key, length, heads, views) {
const pointer = c.encode(messages.BootRecord, {
indexed: { key, length },
heads,
views
})
await this.local.setUserData('autobase/boot', pointer)
}

@@ -867,3 +930,3 @@

const flushed = (await this._flushIndexes()) ? this.system.core.getBackingCore().flushedLength : this._systemPointer
if (this.updating || flushed > this._systemPointer) await this._advanceSystemPointer(flushed)
if (this.updating || flushed > this._systemPointer) await this._advanceBootRecord(flushed)

@@ -877,3 +940,4 @@ if (indexed) await this.onindex(this)

this._queueViewReset = false
await this._forceResetViews()
const sysCore = this.system.core.getBackingCore()
await this._forceResetViews(sysCore.indexedLength)
continue

@@ -896,6 +960,21 @@ }

progress () {
let processed = 0
let total = 0
for (const w of this.activeWriters) {
processed += w.length
total += w.core.length
}
return {
processed,
total
}
}
async _getLocallyStoredHeads () {
const buffer = await this.local.getUserData('autobase/system')
const buffer = await this.local.getUserData('autobase/boot')
if (!buffer) return []
return c.decode(messages.SystemPointer, buffer).heads
return c.decode(messages.BootRecord, buffer).heads
}

@@ -1006,4 +1085,3 @@

async _forceResetViews () {
const length = this.system.core.getBackingCore().indexedLength
async _forceResetViews (length) {
const info = await this.system.getIndexedInfo(length)

@@ -1014,6 +1092,6 @@

const pointer = await this.local.getUserData('autobase/system')
const { views } = c.decode(messages.SystemPointer, pointer)
const pointer = await this.local.getUserData('autobase/boot')
const { views } = c.decode(messages.BootRecord, pointer)
await this._setSystemPointer(this.system.core.key, length, info.heads, views)
await this._setBootRecord(this.system.core.key, length, info.heads, views)

@@ -1034,6 +1112,24 @@ for (const { key, length } of info.views) {

// NOTE: runs in parallel with everything, can never fail
// Kick off a fast-forward to a checkout supplied at construction time
// (handlers.fastForward as { key, length, timeout } — see _open).
async initialFastForward (key, length, timeout) {
// derive the block key used for the system view so the target core can be read
const encryptionKey = this._viewStore.getBlockKey(this._viewStore.getSystemCore().name)
const core = this.store.get({ key, encryptionKey, isBlockKey: true })
const target = await this._preFastForward(core, length, timeout)
await core.close()
// initial fast-forward failed
if (target === null) return
// pause all writers so the linearizer doesn't advance while we jump ahead
for (const w of this.activeWriters) w.pause()
this.fastForwarding = false
this.fastForwardTo = target
this._bumpAckTimer()
this._queueBump()
}
async queueFastForward () {
// if already FFing, let the finish. TODO: auto kill the attempt after a while and move to latest?
if (this.fastForwardingTo !== 0) return
if (!this.fastForwardEnabled || this.fastForwarding) return

@@ -1045,18 +1141,31 @@ const core = this.system.core.getBackingCore()

this.fastForwardingTo = core.session.length
const target = await this._preFastForward(core.session, core.session.length, null)
// fast-forward failed
if (target === null) return
// if it migrated underneath us, ignore for now
if (core !== this.system.core.getBackingCore()) return
for (const w of this.activeWriters) w.pause()
this.fastForwardTo = target
this._bumpAckTimer()
this._queueBump()
}
const fastForwardTo = {
key: core.session.key,
length: this.fastForwardingTo,
migrate: null
}
// NOTE: runs in parallel with everything, can never fail
async _preFastForward (core, length, timeout) {
this.fastForwarding = true
const info = { key: core.key, length }
try {
// sys runs open with wait false, so get head block first for low complexity
if (!(await core.session.has(this.fastForwardingTo - 1))) {
await core.session.get(this.fastForwardingTo - 1)
if (!(await core.has(length - 1))) {
await core.get(length - 1, { timeout })
}
const system = new SystemView(core.session.session(), this.fastForwardingTo)
const system = new SystemView(core.session(), length)
await system.ready()

@@ -1067,11 +1176,12 @@

version: system.version,
length: this.fastForwardingTo
length
}
this.fastForwardingTo = 0
this.fastForwarding = false
this.emit('upgrade-available', upgrade)
return
return null
}
const migrated = system.sameIndexers(this.linearizer.indexers)
const systemShouldMigrate = b4a.equals(core.key, this.system.core.key) &&
!system.sameIndexers(this.linearizer.indexers)

@@ -1088,22 +1198,2 @@ const indexers = []

let sysCore = system.core
// handle system migration
if (migrated) {
const idx = []
for (const { key } of system.indexers) idx.push(await this._getWriterByKey(key))
const hash = sysCore.core.tree.hash()
const name = this.system.core._source.name
const prologue = { hash, length: this.fastForwardingTo }
const key = this.deriveKey(name, idx, prologue)
await sysCore.close() // close unused session
fastForwardTo.migrate = { key }
sysCore = this.store.get(key)
}
pendingViews.push({ core: sysCore, length: this.fastForwardingTo })
// handle rest of views

@@ -1122,5 +1212,4 @@ for (const v of system.views) {

const promises = []
for (const { core, length } of indexers) {
if (core.length === 0 && length > 0) promises.push(core.get(length - 1))
if (core.length === 0 && length > 0) promises.push(core.get(length - 1, { timeout }))
}

@@ -1130,3 +1219,3 @@

// we could just get the hash here, but likely user wants the block so yolo
promises.push(core.get(length - 1))
promises.push(core.get(length - 1, { timeout }))
}

@@ -1136,26 +1225,33 @@

const closing = []
// handle system migration
if (systemShouldMigrate) {
const hash = system.core.core.tree.hash()
const name = this.system.core._source.name
const prologue = { hash, length }
info.key = this.deriveKey(name, indexers, prologue)
const core = this.store.get(info.key)
await core.get(length - 1)
closing.push(core.close())
}
for (const { core } of pendingViews) {
await core.close()
closing.push(core.close())
}
await system.close()
closing.push(system.close())
await Promise.allSettled(closing)
} catch (err) {
this.fastForwardingTo = 0
safetyCatch(err)
return
return null
} finally {
this.fastForwarding = false
}
// if it migrated underneath us, ignore for now
if (core !== this.system.core.getBackingCore()) {
this.fastForwardingTo = 0
return
}
for (const w of this.activeWriters) w.pause()
this.fastForwardingTo = 0
this.fastForwardTo = fastForwardTo
this._bumpAckTimer()
this._queueBump()
return info
}

@@ -1170,12 +1266,18 @@

async _applyFastForward () {
const core = this.system.core.getBackingCore()
const from = core.length
// remember these in case another fast forward gets queued
const { key, length, migrate } = this.fastForwardTo
const { key, length } = this.fastForwardTo
const migrated = !b4a.equals(key, this.system.core.key)
const encryptionKey = this._viewStore.getBlockKey(this._viewStore.getSystemCore().name)
const core = this.store.get({ key, encryptionKey, isBlockKey: true })
await core.ready()
const from = this.system.core.getBackingCore().length
// just extra sanity check
// TODO: if we simply load the core from the corestore the key check isn't needed
// getting rid of that is essential for dbl ff, but for now its ok with some safety from migrations
if (!b4a.equals(core.key, key) || length <= from) {
if (length <= from) {
this._clearFastForward()

@@ -1185,10 +1287,23 @@ return

const system = new SystemView(core.session.session(), length)
const system = new SystemView(core.session(), length)
await system.ready()
const indexers = [] // only used in migrate branch
if (migrate) {
const prologues = [] // only used in migrate branch
// preload async state
if (migrated) {
for (const { key } of system.indexers) {
indexers.push(await this._getWriterByKey(key))
const core = this.store.get(key)
await core.ready()
indexers.push({ core })
await core.close()
}
for (const { key } of system.views) {
const core = this.store.get(key)
await core.ready()
prologues.push(core.manifest.prologue)
await core.close()
}
}

@@ -1199,3 +1314,3 @@

const sysView = this.system.core._source
const sysInfo = { key: migrate ? migrate.key : key, length }
const sysInfo = { key, length }

@@ -1211,7 +1326,5 @@ views.set(sysView, sysInfo)

// search for corresponding view
if (!view && migrate) {
if (!view) {
for (view of this._viewStore.opened.values()) {
const hash = view.core.session.core.tree.hash()
const key = this.deriveKey(view.name, indexers, { hash, length: v.length })
const key = this.deriveKey(view.name, indexers, prologues[i])
if (b4a.equals(key, v.key)) break

@@ -1222,3 +1335,3 @@ view = null

if (!view || view.core.session.length < v.length) {
if (!view) {
this._clearFastForward() // something wrong somewhere, likely a bug, just safety

@@ -1249,4 +1362,7 @@ return

await this._makeLinearizer(this.system)
await this._advanceSystemPointer(length)
await this._advanceBootRecord(length)
// manually set the digest
if (migrated) this._setDigest(key)
const to = length

@@ -1306,3 +1422,2 @@

if (writer === this.localWriter && isIndexer) this._addCheckpoints = true
if (isIndexer) this._maybeUpdateDigest = true

@@ -1316,3 +1431,3 @@ // fetch any nodes needed for dependents

assert(this._applying !== null, 'System changes are only allowed in apply')
const wasIndexer = await this.system.remove(key)
await this.system.remove(key)

@@ -1324,4 +1439,2 @@ if (b4a.equals(key, this.local.key)) {

if (wasIndexer) this._maybeUpdateDigest = true
this._queueBump()

@@ -1387,3 +1500,3 @@ }

if (u.popped) this._undo(u.popped)
if (u.undo) this._undo(u.undo)

@@ -1414,3 +1527,3 @@ // if anything was indexed reset the ticks

if (update.version > this.version) {
if (!this._onUpgrade(update.version)) return // failed
this._onUpgrade(update.version) // throws if not supported
upgraded = true

@@ -1423,2 +1536,6 @@ }

// we have to set the digest here so it is
// flushed to local appends in same iteration
await this._updateDigest()
return u.indexed.slice(i).concat(u.tip)

@@ -1445,3 +1562,3 @@ }

if (this.system.addHead(node)) this._maybeUpdateDigest = true
this.system.addHead(node)

@@ -1461,3 +1578,4 @@ if (node.value !== null) {

if (versionUpgrade) {
this.system.version = await this._checkVersion()
const version = await this._checkVersion()
this.system.version = version === -1 ? node.version : version
}

@@ -1478,8 +1596,3 @@

if (applyBatch.length && this._hasApply === true) {
try {
await this._handlers.apply(applyBatch, this.view, this)
} catch (err) {
this._onError(err)
return null
}
await this._handlers.apply(applyBatch, this.view, this)
}

@@ -1509,3 +1622,3 @@

if (update.version > this.version) {
if (!this._onUpgrade(update.version)) return // failed
this._onUpgrade(update.version) // throws if not supported
upgraded = true

@@ -1518,2 +1631,4 @@ }

this._queueIndexFlush(i + 1)
await this._updateDigest() // see above
return u.indexed.slice(i + 1).concat(u.tip)

@@ -1524,2 +1639,3 @@ }

this._queueIndexFlush(u.indexed.length)
await this._updateDigest() // see above
}

@@ -1547,4 +1663,7 @@

const needsKey = !length || indexerUpdate
const key = needsKey ? await view.deriveKey(indexers, length) : null
const key = indexers.length > 1 && indexerUpdate
? await view.deriveKey(indexers, length + view.appending)
: view.systemIndex === -1
? view.key
: null

@@ -1558,2 +1677,4 @@ views.push({ view, key })

async _checkVersion () {
if (!this.system.indexers.length) return -1
const maj = (this.system.indexers.length >> 1) + 1

@@ -1583,3 +1704,3 @@

for (const { versionSignal: version } of heads) {
for (const { maxSupportedVersion: version } of heads) {
let v = tally.get(version)

@@ -1630,3 +1751,2 @@

async _updateDigest () {
this._maybeUpdateDigest = false
if (!this._addCheckpoints) return

@@ -1637,42 +1757,33 @@

// no previous digest available
if (this._localDigest === null) {
this._localDigest = {
pointer: 0,
indexers: []
}
this._setDigest(this.system.core.key)
}
return
}
const indexers = []
// we predict what the system key will be after flushing
const pending = this.system.core._source.pendingIndexedLength
const info = await this.system.getIndexedInfo(pending)
const p = []
for (const { key } of info.indexers) {
const w = await this._getWriterByKey(key)
indexers.push({
signature: 0,
namespace: w.core.manifest.signers[0].namespace,
publicKey: w.core.manifest.signers[0].publicKey
})
p.push(await this._getWriterByKey(key))
}
let same = indexers.length === this._localDigest.indexers.length
const indexers = await p
for (let i = 0; i < indexers.length; i++) {
if (!same) break
const sys = this._viewStore.getSystemCore()
const key = await sys.deriveKey(indexers, pending)
const a = indexers[i]
const b = this._localDigest.indexers[i]
if (this._localDigest.key && b4a.equals(key, this._localDigest.key)) return
if (a.signature !== b.signature || !b4a.equals(a.namespace, b.namespace) || !b4a.equals(a.publicKey, b.publicKey)) {
same = false
}
}
this._setDigest(key)
}
if (same) return
_setDigest (key) {
if (this._localDigest === null) this._localDigest = {}
this._localDigest.key = key
this._localDigest.pointer = 0
this._localDigest.indexers = indexers
}

@@ -1683,3 +1794,3 @@

pointer: this._localDigest.pointer,
indexers: this._localDigest.indexers
key: this._localDigest.key
}

@@ -1701,3 +1812,3 @@ }

async _flushLocal (localNodes) {
if (this._maybeUpdateDigest) await this._updateDigest()
if (!this._localDigest) await this._updateDigest()

@@ -1711,5 +1822,6 @@ const cores = this._addCheckpoints ? this._viewStore.getIndexedCores() : []

blocks[i] = {
version: 0,
version: 1,
maxSupportedVersion: this.maxSupportedVersion,
checkpoint: this._addCheckpoints ? await generateCheckpoint(cores) : null,
digest: this._addCheckpoints ? this._generateDigest() : null,
checkpoint: this._addCheckpoints ? await generateCheckpoint(cores) : null,
node: {

@@ -1720,7 +1832,3 @@ heads,

},
additional: {
pointer: 0,
data: {}
},
versionSignal: this.maxSupportedVersion
trace: []
}

@@ -1774,1 +1882,15 @@

function noop () {}
// Crash the process asynchronously. Throwing from a microtask escapes the
// surrounding promise machinery, so the error cannot be swallowed as a
// rejection — a synchronous throw here would be caught by the caller's
// promise wrapper, which is exactly what this helper exists to avoid.
function crashSoon (err) {
  queueMicrotask(() => { throw err })
}
// True for any non-null value whose typeof is 'object' (arrays included,
// functions excluded).
function isObject (obj) {
  if (obj === null) return false
  return typeof obj === 'object'
}
// Bound to the Autobase instance as this._warn (see the constructor);
// routes a recoverable error through safetyCatch and surfaces it as a
// 'warning' event instead of failing the operation.
function emitWarning (err) {
safetyCatch(err)
this.emit('warning', err)
}

@@ -5,2 +5,6 @@ const BufferMap = require('tiny-buffer-map')

const UNSEEN = 0
const NEWER = 1
const ACKED = 2
// Consensus machine for Autobase. Sort DAG nodes using

@@ -62,3 +66,3 @@ // vector clocks to determine a globally consistent view

const head = idx.get(seq)
if (!head || head.yielded) continue
if (!head || this.removed.includes(head.writer.core.key, head.length)) continue

@@ -94,3 +98,3 @@ let isDep = true

const head = idx.get(length)
if (!head || head.yielded) continue
if (!head || this.removed.includes(head.writer.core.key, head.length)) continue

@@ -222,3 +226,3 @@ let isTail = true

confirms (indexer, target, acks, length) {
if (!length || this.removed.get(indexer.core.key) >= length) return false
if (!length || this.removed.get(indexer.core.key) >= length) return UNSEEN
// def feels like there is a smarter way of doing this part

@@ -229,6 +233,7 @@ // ie we just wanna find a node from the indexer that is strictly newer than target

let jump = true
let newer = true
for (let i = length - 1; i >= 0; i--) {
const head = indexer.get(i)
if (head === null) return false
if (head === null) return UNSEEN

@@ -243,4 +248,4 @@ let seen = 0

if (seen < this.majority) {
return false
if (!newer && seen < this.majority) {
break
}

@@ -271,9 +276,12 @@

newer = false
continue
} else if (seen < this.majority) {
return NEWER
}
return true
return ACKED
}
return false
return UNSEEN
}

@@ -286,2 +294,3 @@

if (acks.length < this.majority) return false
let allNewer = true

@@ -293,3 +302,5 @@ for (const indexer of this.indexers) {

if (this.confirms(indexer, target, acks, length)) {
const result = this.confirms(indexer, target, acks, length)
if (result === ACKED) {
confs.add(indexer)

@@ -300,2 +311,4 @@ if (confs.size >= this.majority) {

}
if (result === UNSEEN) allNewer = false
}

@@ -305,3 +318,3 @@

return false
return allNewer
}

@@ -337,4 +350,11 @@

// this can get called multiple times for same node
// Mark a node as yielded: drop it from the pending merge set and record
// its length under the writer's key in the removed clock. Returns the
// node so callers can chain (e.g. `[this.remove(tail)]`).
remove (node) {
this.merges.delete(node)
// assumes node lengths only grow per writer, so the latest set wins — TODO confirm
this.removed.set(node.writer.core.key, node.length)
return node
}
shift () {
if (!this.updated) return null
if (!this.updated) return []

@@ -345,3 +365,3 @@ const tails = this._indexerTails()

if (this._isConfirmed(tail)) {
return tail
return [this.remove(tail)]
}

@@ -357,3 +377,3 @@ }

this.updated = false
return null
return []
}

@@ -375,14 +395,17 @@

// otherwise tiebreak between current tails
if (!next) {
for (const t of this._tails(node, tails)) {
if (next && next.tieBreak(t)) continue
next = t
}
if (next) {
node = next
continue
}
node = next
// otherwise yield all tails
const tailSet = []
for (const t of this._tails(node, tails)) {
tailSet.push(this.remove(t))
}
return tailSet
}
return node
return [this.remove(node)]
}

@@ -414,3 +437,3 @@

if (acks.length >= this.majority) {
return !this.confirms(writer, target, acks, writer.length)
return this.confirms(writer, target, acks, writer.length) === UNSEEN
}

@@ -417,0 +440,0 @@

@@ -769,10 +769,16 @@ const { EventEmitter } = require('events')

} else {
const sourceLength = this.core.indexedLength
// copy state over
const additional = []
for (let i = this.core.indexedLength; i < length; i++) {
additional.push(await this.core.get(i))
for (let i = sourceLength; i < length; i++) {
additional.push(await this.core.get(i, { raw: true }))
}
if (this.core.session.core !== next.core) {
await next.core.copyFrom(this.core.session.core, null, { length, additional })
await next.core.copyFrom(this.core.session.core, null, {
sourceLength,
length,
additional
})
}

@@ -779,0 +785,0 @@ }

@@ -6,2 +6,3 @@ const b4a = require('b4a')

const Consensus = require('./consensus')
const Topolist = require('./topolist')

@@ -24,4 +25,5 @@ class Node {

this.clock = new Clock()
this.yielded = false
this.ordering = 0
this.yielding = false
}

@@ -38,2 +40,3 @@

this.yielded = false
this.yielding = false
for (const dep of this.dependents) dep.dependencies.add(this)

@@ -76,3 +79,3 @@ this.dependents.clear()

this.tails = new Set()
this.tip = []
this.tip = new Topolist()
this.size = 0 // useful for debugging

@@ -124,2 +127,3 @@ this.updated = false

this.tip.add(node)
if (node.writer.isIndexer) this.consensus.addHead(node)

@@ -130,9 +134,2 @@

if (this.heads.size === 1 && (this.updated === false || this._strictlyAdded !== null)) {
if (this._strictlyAdded === null) this._strictlyAdded = []
this._strictlyAdded.push(node)
} else {
this._strictlyAdded = null
}
this.updated = true

@@ -147,90 +144,14 @@

// get the indexed nodes
const indexed = []
// get the indexed nodes
while (true) {
const node = this.consensus.shift()
if (!node) break
const nodes = this.consensus.shift()
if (!nodes.length) break
const batch = this._yield(node)
if (batch.length === 0) break
for (const node of batch) {
indexed.push(node)
}
this._yield(nodes, indexed)
}
const diff = this._maybeStrictlyAdded(indexed)
if (diff !== null) return diff
let pushed = 0
let popped = 0
const list = this._orderTip()
const dirtyList = indexed.length ? indexed.concat(list) : list
const min = Math.min(dirtyList.length, this.tip.length)
let same = true
let shared = 0
for (; shared < min; shared++) {
if (dirtyList[shared] === this.tip[shared]) {
continue
}
same = false
popped = this.tip.length - shared
pushed = dirtyList.length - shared
break
}
if (same) {
pushed = dirtyList.length - this.tip.length
}
this.tip = list
const update = {
shared,
popped,
pushed,
length: shared + pushed,
indexed,
tip: list
}
return update
return this.tip.flush(indexed)
}
// Fast path for updates where new nodes were only ever appended after the
// current tip (tracked via this._strictlyAdded). Returns a cheap update
// object without a full tip diff, or null when the fast path does not apply.
_maybeStrictlyAdded (indexed) {
if (this._strictlyAdded === null) return null
const added = this._strictlyAdded
this._strictlyAdded = null
// the indexed prefix must line up exactly with tip followed by added, else bail
for (let i = 0; i < indexed.length; i++) {
const node = indexed[i]
const other = i < this.tip.length ? this.tip[i] : added[i - this.tip.length]
if (node !== other) return null
}
const shared = this.tip.length
this.tip.push(...added)
const length = this.tip.length
// indexed nodes leave the tip; only the unindexed suffix remains
if (indexed.length) this.tip = this.tip.slice(indexed.length)
return {
shared,
popped: 0,
pushed: added.length,
length,
indexed,
tip: this.tip
}
}
_updateInitialHeads (node) {

@@ -246,35 +167,2 @@ for (const head of node.actualHeads) {

/* Tip ordering methods */
// Deterministic topological ordering of the unindexed DAG (Kahn's
// algorithm: in-degree counting plus a ready stack, with keySort as the
// tie-breaker so every peer produces the same order).
_orderTip () {
const tip = []
// pass 1: walk from the tails and store each node's in-degree in node.ordering
const stack = [...this.tails]
while (stack.length) {
const node = stack.pop()
// nonzero ordering means this node was already visited
if (node.ordering) continue
node.ordering = node.dependencies.size
stack.push(...node.dependents)
}
// pass 2: repeatedly pop ready nodes (in-degree 0) in key order
stack.push(...this.tails)
stack.sort(keySort)
while (stack.length) {
const node = stack.pop()
tip.push(node)
const batch = []
for (const dep of node.dependents) {
if (--dep.ordering === 0) batch.push(dep)
}
if (batch.length > 0) stack.push(...batch.sort(keySort))
}
return tip
}
/* Ack methods */

@@ -385,32 +273,31 @@

// yields full dags including non-indexer nodes
_yield (node) {
const nodes = []
_yield (nodes, indexed = []) {
const offset = indexed.length
const tails = []
while (true) {
let current = node
while (current.dependencies.size) {
// easy with only one dependency
if (current.dependencies.size === 1) {
current = getFirst(current.dependencies)
continue
}
// determine which nodes are yielded
while (nodes.length) {
const node = nodes.pop()
let next = null
for (const t of current.dependencies) {
if (next && next.tieBreak(t)) continue
next = t
}
if (node.yielding) continue
node.yielding = true
current = next
if (!node.dependencies.size) tails.push(node)
nodes.push(...node.dependencies)
}
while (tails.length) {
let tail = tails.pop()
for (tail of this._removeBatch(tail)) {
Topolist.add(tail, indexed, offset)
}
for (const removed of this._removeBatch(current)) {
nodes.push(removed)
for (const dep of tail.dependents) {
if (!dep.dependencies.size && dep.yielding) tails.push(dep)
}
if (node === current) break
}
return nodes
return indexed
}

@@ -429,10 +316,5 @@

this.tails.delete(node)
this.consensus.merges.delete(node)
this.heads.delete(node)
this.consensus.remove(node)
// update the removed clock
if (node.writer.isIndexer) {
this.consensus.removed.set(node.writer.core.key, node.length)
}
// update the tailset

@@ -456,3 +338,3 @@ for (const d of node.dependents) {

_removeBatch (node) {
const batch = [node]
const batch = [this._removeNode(node)]

@@ -468,9 +350,5 @@ while (node.batch !== 1) { // its a batch!

node = getFirst(node.dependents)
batch.push(node)
batch.push(this._removeNode(node))
}
for (const node of batch) {
this._removeNode(node)
}
return batch

@@ -480,5 +358,4 @@ }

// if same key, earlier node is first
// True when `a` should be ordered before `b`.
function tieBreak (a, b) {
  // keySort sorts high to low, so a positive comparison means `a` sorts first
  return keySort(a, b) > 0
}

@@ -490,9 +367,4 @@

// Comparator sorting nodes high-to-low: by writer key descending (the
// negated b4a.compare), then by length descending for nodes from the
// same writer. A positive return places `a` after `b`.
function keySort (a, b) {
const cmp = b4a.compare(a.writer.core.key, b.writer.core.key)
return cmp === 0 ? b.length - a.length : -cmp
}
// Does the plain { key, length } reference `a` point at DAG node `b`?
function sameNode (a, b) {
  if (a.length !== b.length) return false
  return b4a.equals(a.key, b.writer.core.key)
}
const c = require('compact-encoding')
const assert = require('nanoassert')

@@ -75,4 +76,5 @@ const keys = c.array(c.fixed32)

const SystemPointer = {
const BootRecord = {
preencode (state, m) {
c.uint.preencode(state, 0) // version
Checkout.preencode(state, m.indexed)

@@ -83,2 +85,3 @@ Clock.preencode(state, m.heads)

encode (state, m) {
c.uint.encode(state, 0) // version
Checkout.encode(state, m.indexed)

@@ -89,10 +92,10 @@ Clock.encode(state, m.heads)

decode (state) {
const indexed = Checkout.decode(state)
if (state.start === state.end) {
return { indexed, heads: [], views: [] } // just compat, can be removed in a week
const v = c.uint.decode(state)
assert(v === 0, 'Unsupported version: ' + v)
return {
indexed: Checkout.decode(state),
heads: Clock.decode(state),
views: Views.decode(state)
}
const heads = Clock.decode(state)
const views = Views.decode(state)
return { indexed, heads, views }
}

@@ -121,22 +124,2 @@ }

const Node = {
preencode (state, m) {
Clock.preencode(state, m.heads)
c.uint.preencode(state, m.batch)
c.buffer.preencode(state, m.value)
},
encode (state, m) {
Clock.encode(state, m.heads)
c.uint.encode(state, m.batch)
c.buffer.encode(state, m.value)
},
decode (state, m) {
return {
heads: Clock.decode(state),
batch: c.uint.decode(state),
value: c.buffer.decode(state)
}
}
}
const Indexer = {

@@ -161,5 +144,6 @@ preencode (state, m) {

}
const Indexers = c.array(Indexer)
const Digest = {
const DigestV0 = {
preencode (state, m) {

@@ -186,2 +170,44 @@ c.uint.preencode(state, m.pointer)

// Codec for a digest reference: a back-pointer plus, only on the
// pointer === 0 entry, the inlined 32-byte key. Later entries omit
// the key and decode it as null.
const Digest = {
  preencode (state, m) {
    c.uint.preencode(state, m.pointer)
    if (m.pointer === 0) c.fixed32.preencode(state, m.key)
  },
  encode (state, m) {
    c.uint.encode(state, m.pointer)
    if (m.pointer === 0) c.fixed32.encode(state, m.key)
  },
  decode (state) {
    const pointer = c.uint.decode(state)
    const key = pointer === 0 ? c.fixed32.decode(state) : null
    return { pointer, key }
  }
}
// Codec for an oplog node: the causal heads clock, the batch length,
// and the opaque user value (a raw buffer, decoded later with the
// base's valueEncoding).
const Node = {
  preencode (state, m) {
    Clock.preencode(state, m.heads)
    c.uint.preencode(state, m.batch)
    c.buffer.preencode(state, m.value)
  },
  encode (state, m) {
    Clock.encode(state, m.heads)
    c.uint.encode(state, m.batch)
    c.buffer.encode(state, m.value)
  },
  // decode takes only `state`, matching the compact-encoding convention
  // used by the other codecs in this file (the previous unused second
  // parameter was dropped).
  decode (state) {
    return {
      heads: Clock.decode(state),
      batch: c.uint.decode(state),
      value: c.buffer.decode(state)
    }
  }
}
const Additional = {

@@ -228,8 +254,79 @@ preencode (state, m) {

c.uint.preencode(state, m.version)
c.uint.preencode(state, m.maxSupportedVersion)
const isCheckpointer = m.digest !== null && m.checkpoint !== null
let flags = 0
if (isCheckpointer) flags |= 1
c.uint.preencode(state, flags)
if (isCheckpointer) {
Checkpoint.preencode(state, m.checkpoint)
Digest.preencode(state, m.digest)
}
Node.preencode(state, m.node)
},
encode (state, m) {
c.uint.encode(state, m.version)
c.uint.encode(state, m.maxSupportedVersion)
const isCheckpointer = m.digest !== null && m.checkpoint !== null
let flags = 0
if (isCheckpointer) flags |= 1
c.uint.encode(state, flags)
if (isCheckpointer) {
Checkpoint.encode(state, m.checkpoint)
Digest.encode(state, m.digest)
}
Node.encode(state, m.node)
},
decode (state) {
// leading varint selects the message layout
const version = c.uint.decode(state)
// version 0 uses the legacy layout: delegate to OplogMessageV0 and
// normalise to the current shape (v0 messages carry no digest here)
if (version === 0) {
const m = OplogMessageV0.decode(state)
return {
version,
maxSupportedVersion: m.maxSupportedVersion,
digest: null,
checkpoint: m.checkpoint,
node: m.node
}
}
// current layout: maxSupportedVersion, flags, optional checkpoint+digest, node
const maxSupportedVersion = c.uint.decode(state)
const flags = c.uint.decode(state)
// flag bit 0 marks a checkpointer entry (checkpoint and digest present)
const isCheckpointer = (flags & 1) !== 0
// decode order must mirror encode: checkpoint before digest, node last
const checkpoint = isCheckpointer ? Checkpoint.decode(state) : null
const digest = isCheckpointer ? Digest.decode(state) : null
const node = Node.decode(state)
return {
version,
maxSupportedVersion,
digest,
checkpoint,
node
}
}
}
const OplogMessageV0 = {
preencode (state, m) {
const isCheckpointer = m.digest !== null && m.checkpoint !== null
c.uint.preencode(state, isCheckpointer ? 1 : 0)
if (isCheckpointer) {
Digest.preencode(state, m.digest)
DigestV0.preencode(state, m.digest)
Checkpoint.preencode(state, m.checkpoint)

@@ -241,3 +338,3 @@ }

Additional.preencode(state, m.additional) // at the btm so it can be edited
c.uint.preencode(state, m.versionSignal)
c.uint.preencode(state, m.maxSupportedVersion)
},

@@ -251,3 +348,3 @@ encode (state, m) {

if (isCheckpointer) {
Digest.encode(state, m.digest)
DigestV0.encode(state, m.digest)
Checkpoint.encode(state, m.checkpoint)

@@ -259,6 +356,5 @@ }

Additional.encode(state, m.additional)
c.uint.encode(state, m.versionSignal)
c.uint.encode(state, m.maxSupportedVersion)
},
decode (state) {
const version = c.uint.decode(state)
const flags = c.uint.decode(state)

@@ -268,10 +364,10 @@

const digest = isCheckpointer ? Digest.decode(state) : null
const digest = isCheckpointer ? DigestV0.decode(state) : null
const checkpoint = isCheckpointer ? Checkpoint.decode(state) : null
const node = Node.decode(state)
const additional = Additional.decode(state)
const versionSignal = state.start < state.end ? c.uint.decode(state) : 0
const maxSupportedVersion = state.start < state.end ? c.uint.decode(state) : 0
return {
version,
version: 0,
digest,

@@ -281,3 +377,3 @@ checkpoint,

additional,
versionSignal
maxSupportedVersion
}

@@ -341,3 +437,3 @@ }

Checkout,
SystemPointer,
BootRecord,
OplogMessage,

@@ -344,0 +440,0 @@ Checkpoint,

@@ -8,3 +8,2 @@ const Hyperbee = require('hyperbee')

const { Info, Member } = require('./messages')
const { AUTOBASE_VERSION } = require('./constants')

@@ -24,3 +23,3 @@ const subs = new SubEncoder()

this.version = 0
this.version = -1 // set version in apply
this.members = 0

@@ -98,9 +97,9 @@ this.pendingIndexers = []

sameIndexers (indexers) {
if (this.indexers.length !== indexers.length) return true
if (this.indexers.length !== indexers.length) return false
for (let i = 0; i < this.indexers.length; i++) {
if (!b4a.equals(this.indexers[i].key, indexers[i].core.key)) return true
if (!b4a.equals(this.indexers[i].key, indexers[i].core.key)) return false
}
return false
return true
}

@@ -116,3 +115,3 @@

async _reset (info) {
this.version = info === null ? AUTOBASE_VERSION : info.value.version
this.version = info === null ? -1 : info.value.version
this.members = info === null ? 0 : info.value.members

@@ -119,0 +118,0 @@ this.pendingIndexers = info === null ? [] : info.value.pendingIndexers

@@ -7,2 +7,3 @@ const Linearizer = require('./linearizer')

const safetyCatch = require('safety-catch')
const assert = require('nanoassert')

@@ -159,2 +160,12 @@ const MAX_PRELOAD = 4

async getVersion (length = this.core.length) {
if (this.opened === false) await this.ready()
if (length === 0) return -1
const node = await this.core.get(length - 1)
return node.maxSupportedVersion
}
async getDigest (length = this.core.length) {

@@ -248,3 +259,3 @@ if (this.opened === false) await this.ready()

if (!(await this.core.has(seq))) return false
const { node, checkpoint, versionSignal } = await this.core.get(seq, { wait: false })
const { node, checkpoint, maxSupportedVersion } = await this.core.get(seq, { wait: false })

@@ -256,3 +267,3 @@ if (this.isIndexer && checkpoint) {

const value = node.value == null ? null : c.decode(this.base.valueEncoding, node.value)
this.node = Linearizer.createNode(this, seq + 1, value, node.heads, node.batch, new Set(), versionSignal)
this.node = Linearizer.createNode(this, seq + 1, value, node.heads, node.batch, new Set(), maxSupportedVersion)
return true

@@ -283,5 +294,3 @@ }

// TODO: generalise DAG validation and freeze the writer
if (this.node.dependencies.has(headNode)) {
throw new Error('Corrupted DAG') // sanity check
}
assert(!this.node.dependencies.has(headNode), 'Corrupted DAG')

@@ -297,2 +306,8 @@ // TODO: better way to solve the stub check is to never mutate heads below

// always link previous node if it's not indexed
const offset = this.node.length - 1
if (offset > this.indexed) {
this.node.dependencies.add(this.nodes.get(offset - 1))
}
return true

@@ -299,0 +314,0 @@ }

{
"name": "autobase",
"version": "6.0.0-rc1",
"version": "6.0.0-rc10",
"description": "",

@@ -8,2 +8,3 @@ "main": "index.js",

"test": "standard && brittle test/*.js",
"test:encrypted": "standard && brittle test/*.js --encrypt-all",
"fuzz:generated": "brittle test/reference/fuzz/generated/*.js",

@@ -25,3 +26,3 @@ "fuzz:main": "node test/fuzz/index.js",

"type": "git",
"url": "git+https://github.com/holepunchto/autobase-next.git"
"url": "git+https://github.com/holepunchto/autobase.git"
},

@@ -31,7 +32,8 @@ "author": "Holepunch",

"bugs": {
"url": "https://github.com/holepunchto/autobase-next/issues"
"url": "https://github.com/holepunchto/autobase/issues"
},
"homepage": "https://github.com/holepunchto/autobase-next#readme",
"homepage": "https://github.com/holepunchto/autobase#readme",
"dependencies": {
"b4a": "^1.6.1",
"bare-events": "^2.2.0",
"debounceify": "^1.0.0",

@@ -48,5 +50,2 @@ "hyperbee": "^2.15.0",

},
"optionalDependencies": {
"bare-events": "^2.2.0"
},
"devDependencies": {

@@ -53,0 +52,0 @@ "autobase-test-helpers": "^2.0.1",

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc