@peerbit/indexer-simple
Comparing version 1.0.4 to 1.1.0-0b8baa8
dist/src/index.d.ts
 import * as types from "@peerbit/indexer-interface";
 export declare class HashmapIndex<T extends Record<string, any>, NestedType = any> implements types.Index<T, NestedType> {
     private _index;
-    private _resultsCollectQueue;
     private indexByArr;
@@ -10,16 +9,15 @@ private properties;
     put(value: T, id?: types.IdKey): void;
-    del(query: types.DeleteRequest): Promise<types.IdKey[]>;
+    del(query: types.DeleteOptions): Promise<types.IdKey[]>;
     getSize(): number | Promise<number>;
-    iterator(): IterableIterator<[string | number | bigint, types.IndexedValue<T>]>;
+    iterator(): MapIterator<[string | number | bigint, types.IndexedValue<T>]>;
     start(): void | Promise<void>;
     stop(): void | Promise<void>;
     drop(): void;
-    sum(query: types.SumRequest): Promise<number | bigint>;
-    count(query: types.CountRequest): Promise<number>;
+    sum(query: types.SumOptions): Promise<number | bigint>;
+    count(query: types.CountOptions): Promise<number>;
     private queryAll;
-    query(query: types.SearchRequest, properties: {
+    iterate<S extends types.Shape | undefined>(query: types.IterateOptions, properties: {
+        shape?: S;
         reference?: boolean;
-    }): Promise<types.IndexedResults<T>>;
-    next(query: types.CollectNextRequest): Promise<types.IndexedResults<T>>;
-    close(query: types.CloseIteratorRequest): void;
+    }): types.IndexIterator<T, S>;
     private handleFieldQuery;
@@ -29,4 +27,2 @@ private handleQueryObject;
     private _queryDocuments;
-    getPending(cursorId: string): number | undefined;
-    get cursorCount(): number;
 }
@@ -33,0 +29,0 @@ export declare class HashmapIndices implements types.Indices {
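The headline change is visible already in the type declarations: the cursor-style trio query()/next()/close(), which kept per-request state addressed by an idString, is replaced by a single iterate() call that returns a self-contained types.IndexIterator. A rough consumer-side migration sketch in TypeScript (hypothetical: `index` stands for an initialized HashmapIndex, and `filters` for whatever query list the interface package accepts):

// Hypothetical consumer code against the new 1.1.0 iterator API.
const iterator = index.iterate({ query: filters }, {});
while (iterator.done() !== true) {
    const batch = await iterator.next(10); // pull up to 10 results per call
    for (const result of batch) {
        console.log(result.value);
    }
}
iterator.close(); // drop the buffered result queue

Under the old API the same loop meant threading the request's idString and an amount through next(new types.CollectNextRequest(...)) and remembering to close() the server-side cursor.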
dist/src/index.js
 import { deserialize, serialize } from "@dao-xyz/borsh";
 import { Cache } from "@peerbit/cache";
 import * as types from "@peerbit/indexer-interface";
@@ -23,10 +22,2 @@ import { logger as loggerFn } from "@peerbit/logger";
 };
-/* const resolveNestedAliasesRecursively = (request: types.SearchRequest) => {
-    const map = new Map();
-    for (const query of request.query) {
-        _resolveNestedAliasesRecursively(query, map);
-    }
-    return map;
-}
- */
 const cloneResults = (indexed, schema) => {
@@ -37,30 +28,4 @@ return indexed.map((x) => {
 };
-/*
-const _resolveNestedAliasesRecursively = (query: types.Query, aliases: Map<string, string>) => {
-    if (query instanceof types.Nested) {
-        aliases.set(query.id, query.path);
-        for (const subQuery of query.query) {
-            _resolveNestedAliasesRecursively(subQuery, aliases);
-        }
-    }
-    else if (query instanceof types.And) {
-        for (const subQuery of query.and) {
-            _resolveNestedAliasesRecursively(subQuery, aliases);
-        }
-    }
-    else if (query instanceof types.Or) {
-        for (const subQuery of query.or) {
-            _resolveNestedAliasesRecursively(subQuery, aliases);
-        }
-    }
-    else if (query instanceof types.Not) {
-        _resolveNestedAliasesRecursively(query.not, aliases);
-    }
-}
- */
 export class HashmapIndex {
     _index;
-    _resultsCollectQueue;
     indexByArr;
@@ -71,3 +36,2 @@ properties;
     this._index = new Map();
-    this._resultsCollectQueue = new Cache({ max: 10000 }); // TODO choose limit better
     if (properties.indexBy) {
@@ -119,8 +83,5 @@ this.indexByArr = Array.isArray(properties.indexBy)
     }
-    stop() {
-        this._resultsCollectQueue.clear();
-    }
+    stop() { }
     drop() {
         this._index.clear();
-        this._resultsCollectQueue.clear();
         /* for (const subindex of this.subIndices) {
@@ -141,3 +102,3 @@ subindex[1].clear()
     let value = doc.value;
-    for (const path of query.key) {
+    for (const path of Array.isArray(query.key) ? query.key : [query.key]) {
         value = value[path];
@@ -161,7 +122,8 @@ if (!value) {
     async queryAll(query) {
-        if (query.query.length === 1 &&
-            (query.query[0] instanceof types.ByteMatchQuery ||
-                query.query[0] instanceof types.StringMatch) &&
-            types.stringArraysEquals(query.query[0].key, this.indexByArr)) {
-            const firstQuery = query.query[0];
+        const queryCoerced = types.toQuery(query?.query);
+        if (queryCoerced.length === 1 &&
+            (queryCoerced[0] instanceof types.ByteMatchQuery ||
+                queryCoerced[0] instanceof types.StringMatch) &&
+            types.stringArraysEquals(queryCoerced[0].key, this.indexByArr)) {
+            const firstQuery = queryCoerced[0];
             if (firstQuery instanceof types.ByteMatchQuery) {
@@ -180,3 +142,3 @@ const doc = this._index.get(types.toId(firstQuery.value).primitive);
         const indexedDocuments = await this._queryDocuments(async (doc) => {
-            for (const f of query.query) {
+            for (const f of queryCoerced) {
                 if (!(await this.handleQueryObject(f, doc.value))) {
@@ -190,51 +152,64 @@ return false;
     }
-    async query(query, properties) {
-        const indexedDocuments = await this.queryAll(query);
-        if (indexedDocuments.length <= 1) {
-            return {
-                kept: 0,
-                results: indexedDocuments,
-            };
-        }
-        /* const aliases = resolveNestedAliasesRecursively(query) */
-        // Sort
-        indexedDocuments.sort((a, b) => types.extractSortCompare(a.value, b.value, query.sort));
-        const batch = getBatchFromResults(indexedDocuments, query.fetch);
-        if (indexedDocuments.length > 0) {
-            this._resultsCollectQueue.add(query.idString, {
-                arr: indexedDocuments,
-                reference: properties?.reference,
-            }); // cache resulst not returned
-        }
+    iterate(query, properties) {
+        let done = undefined;
+        let queue = undefined;
+        const fetch = async (n) => {
+            if (!queue && !done) {
+                const indexedDocuments = await this.queryAll(query);
+                if (indexedDocuments.length > 1) {
+                    // Sort
+                    if (query.sort) {
+                        const sortArr = Array.isArray(query.sort)
+                            ? query.sort
+                            : [query.sort];
+                        sortArr.length > 0 &&
+                            indexedDocuments.sort((a, b) => types.extractSortCompare(a.value, b.value, sortArr));
+                    }
+                }
+                if (indexedDocuments.length > 0) {
+                    queue = {
+                        arr: indexedDocuments,
+                        reference: properties?.reference,
+                    }; // cache resulst not returned
+                    done = false;
+                }
+                else {
+                    done = true;
+                }
+            }
+            if (queue && queue.arr.length <= n) {
+                done = true;
+            }
+            if (!queue) {
+                return [];
+            }
+            const batch = getBatchFromResults(queue.arr, n);
+            return (queue.reference ? batch : cloneResults(batch, this.properties.schema));
+        };
         // TODO dont leak kept if canRead is defined, or return something random
-        return {
+        /* return {
             kept: indexedDocuments.length,
-            results: properties?.reference
+            results: (properties?.reference
                 ? batch
-                : cloneResults(batch, this.properties.schema),
-        };
-    }
-    async next(query) {
-        const results = this._resultsCollectQueue.get(query.idString);
-        if (!results) {
-            return {
-                results: [],
-                kept: 0,
-            };
-        }
-        const batch = getBatchFromResults(results.arr, query.amount);
-        if (results.arr.length === 0) {
-            this._resultsCollectQueue.del(query.idString); // TODO add tests for proper cleanup/timeouts
-        }
-        // TODO dont leak kept if canRead is defined, or return something random
+                : cloneResults(batch, this.properties.schema)) as any, // TODO fix this type,
+        }; */
         return {
-            results: results.reference
-                ? batch
-                : cloneResults(batch, this.properties.schema),
-            kept: results.arr.length,
+            all: async () => {
+                const results = await fetch(Infinity);
+                return results;
+            },
+            next: (n) => fetch(n),
+            done: () => done,
+            pending: async () => {
+                if (done == null) {
+                    await fetch(0);
+                }
+                return done ? 0 : (queue?.arr.length ?? 0);
+            },
+            close: () => {
+                done = true;
+                queue = undefined;
+            },
         };
     }
-    close(query) {
-        this._resultsCollectQueue.del(query.idString);
-    }
     async handleFieldQuery(f, obj, startIndex) {
@@ -268,3 +243,5 @@ // this clause is needed if we have a field that is of type [][] (we will recursively go through each subarray)
         queryCloned.key.splice(0, i + 1); // remove key path until the document store
-        const results = await this.properties.nested.query(obj, new types.SearchRequest({ query: [queryCloned] }));
+        const results = await this.properties.nested.iterate(obj, {
+            query: [queryCloned],
+        });
         return results.length > 0 ? true : false; // TODO return INNER HITS?
@@ -395,8 +372,2 @@ }
     }
-    getPending(cursorId) {
-        return this._resultsCollectQueue.get(cursorId)?.arr.length;
-    }
-    get cursorCount() {
-        return this._resultsCollectQueue.size;
-    }
 }
@@ -403,0 +374,0 @@ export class HashmapIndices {
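Note the contract of the rewritten iterate() above: evaluation is lazy. Nothing is queried until the first next(), all(), or pending() call; pending() issues an empty fetch(0) purely to materialize the result set; done() reports undefined before that first fetch; and close() just marks the iterator done and drops the in-memory queue. A small sketch of that behavior (same hypothetical `index` as above):

// Hypothetical: observing the iterator's lazy-evaluation contract.
const it = index.iterate({}, {});    // no query work performed yet
console.log(it.done());              // undefined: the query has not run
console.log(await it.pending());     // runs fetch(0); how many results remain
const firstFive = await it.next(5);  // drains up to 5 results from the queue
const rest = await it.all();         // fetch(Infinity) drains whatever is left
it.close();                          // marks done and releases the queue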
package.json
 {
   "name": "@peerbit/indexer-simple",
-  "version": "1.0.4",
+  "version": "1.1.0-0b8baa8",
   "description": "Simple in memory index for document store",
   "sideEffects": false,
   "type": "module",
   "types": "./dist/src/index.d.ts",
   "typesVersions": {
     "*": {
       "*": [
         "*",
         "dist/*",
         "dist/src/*",
         "dist/src/*/index"
       ],
       "src/*": [
         "*",
         "dist/*",
         "dist/src/*",
         "dist/src/*/index"
       ]
     }
   },
   "files": [
     "src",
     "dist",
     "!dist/e2e",
     "!dist/test",
     "!**/*.tsbuildinfo"
   ],
   "exports": {
     ".": {
       "types": "./dist/src/index.d.ts",
       "import": "./dist/src/index.js"
     }
   },
   "eslintConfig": {
     "extends": "peerbit",
     "parserOptions": {
       "project": true,
       "sourceType": "module"
     },
     "ignorePatterns": [
       "!.aegir.js",
       "test/ts-use",
       "*.d.ts"
     ]
   },
   "publishConfig": {
     "access": "public"
   },
   "scripts": {
     "clean": "aegir clean",
     "build": "aegir build --no-bundle",
     "test": "aegir test",
     "lint": "aegir lint"
   },
   "author": "dao.xyz",
   "license": "MIT",
   "dependencies": {
-    "@peerbit/indexer-interface": "^1.0.3"
+    "@peerbit/indexer-interface": "1.1.0-0b8baa8"
   },
   "devDependencies": {
-    "@peerbit/indexer-tests": "^1.0.4"
+    "@peerbit/indexer-tests": "1.1.0-0b8baa8"
   }
 }
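Both workspace dependencies also switch from caret ranges to exact prerelease pins. That follows from semver semantics rather than taste: a prerelease like 1.1.0-0b8baa8 does not satisfy a plain ^1.0.3 range, so snapshot builds have to pin exactly. A quick check, sketched with the separate semver npm package (not a dependency of this module):

import semver from "semver";

// Prerelease versions are excluded from plain caret ranges...
console.log(semver.satisfies("1.1.0-0b8baa8", "^1.0.3")); // false
// ...and only match when the range opts in to prereleases.
console.log(
    semver.satisfies("1.1.0-0b8baa8", "^1.0.3", { includePrerelease: true }),
); // true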
src/index.ts
 import { deserialize, serialize } from "@dao-xyz/borsh";
 import { Cache } from "@peerbit/cache";
 import * as types from "@peerbit/indexer-interface";
@@ -29,11 +28,2 @@ import { logger as loggerFn } from "@peerbit/logger";
 };
-/* const resolveNestedAliasesRecursively = (request: types.SearchRequest) => {
-    const map = new Map();
-    for (const query of request.query) {
-        _resolveNestedAliasesRecursively(query, map);
-    }
-    return map;
-}
- */
 const cloneResults = <T>(
@@ -47,39 +37,10 @@ indexed: types.IndexedValue<T>[],
 };
-/*
-const _resolveNestedAliasesRecursively = (query: types.Query, aliases: Map<string, string>) => {
-    if (query instanceof types.Nested) {
-        aliases.set(query.id, query.path);
-        for (const subQuery of query.query) {
-            _resolveNestedAliasesRecursively(subQuery, aliases);
-        }
-    }
-    else if (query instanceof types.And) {
-        for (const subQuery of query.and) {
-            _resolveNestedAliasesRecursively(subQuery, aliases);
-        }
-    }
-    else if (query instanceof types.Or) {
-        for (const subQuery of query.or) {
-            _resolveNestedAliasesRecursively(subQuery, aliases);
-        }
-    }
-    else if (query instanceof types.Not) {
-        _resolveNestedAliasesRecursively(query.not, aliases);
-    }
-}
- */
 export class HashmapIndex<T extends Record<string, any>, NestedType = any>
     implements types.Index<T, NestedType>
 {
-    private _index: Map<string | bigint | number, types.IndexedValue<T>>;
-    private _resultsCollectQueue: Cache<{
-        arr: types.IndexedValue<T>[];
-        reference: boolean | undefined;
-    }>;
-    private indexByArr: string[];
-    private properties: types.IndexEngineInitProperties<T, NestedType>;
+    private _index!: Map<string | bigint | number, types.IndexedValue<T>>;
+    private indexByArr!: string[];
+    private properties!: types.IndexEngineInitProperties<T, NestedType>;
@@ -89,3 +50,2 @@ init(properties: types.IndexEngineInitProperties<T, NestedType>) {
     this._index = new Map();
-    this._resultsCollectQueue = new Cache({ max: 10000 }); // TODO choose limit better
     if (properties.indexBy) {
@@ -128,3 +88,3 @@ this.indexByArr = Array.isArray(properties.indexBy)
-    async del(query: types.DeleteRequest): Promise<types.IdKey[]> {
+    async del(query: types.DeleteOptions): Promise<types.IdKey[]> {
         let deleted: types.IdKey[] = [];
@@ -152,9 +112,6 @@ for (const doc of await this.queryAll(query)) {
-    stop(): void | Promise<void> {
-        this._resultsCollectQueue.clear();
-    }
+    stop(): void | Promise<void> {}
     drop() {
         this._index.clear();
-        this._resultsCollectQueue.clear();
         /* for (const subindex of this.subIndices) {
@@ -173,7 +130,7 @@ subindex[1].clear()
-    async sum(query: types.SumRequest): Promise<number | bigint> {
+    async sum(query: types.SumOptions): Promise<number | bigint> {
         let sum: undefined | number | bigint = undefined;
         outer: for (const doc of await this.queryAll(query)) {
             let value: any = doc.value;
-            for (const path of query.key) {
+            for (const path of Array.isArray(query.key) ? query.key : [query.key]) {
                 value = value[path];
@@ -194,3 +151,3 @@ if (!value) {
-    async count(query: types.CountRequest): Promise<number> {
+    async count(query: types.CountOptions): Promise<number> {
         return (await this.queryAll(query)).length;
@@ -200,15 +157,16 @@ }
     private async queryAll(
-        query:
-            | types.SearchRequest
-            | types.DeleteRequest
-            | types.CountRequest
-            | types.SumRequest,
+        query?:
+            | types.IterateOptions
+            | types.DeleteOptions
+            | types.CountOptions
+            | types.SumOptions,
     ): Promise<types.IndexedValue<T>[]> {
+        const queryCoerced = types.toQuery(query?.query);
         if (
-            query.query.length === 1 &&
-            (query.query[0] instanceof types.ByteMatchQuery ||
-                query.query[0] instanceof types.StringMatch) &&
-            types.stringArraysEquals(query.query[0].key, this.indexByArr)
+            queryCoerced.length === 1 &&
+            (queryCoerced[0] instanceof types.ByteMatchQuery ||
+                queryCoerced[0] instanceof types.StringMatch) &&
+            types.stringArraysEquals(queryCoerced[0].key, this.indexByArr)
         ) {
-            const firstQuery = query.query[0];
+            const firstQuery = queryCoerced[0];
             if (firstQuery instanceof types.ByteMatchQuery) {
@@ -229,3 +187,3 @@ const doc = this._index.get(types.toId(firstQuery.value).primitive);
         const indexedDocuments = await this._queryDocuments(async (doc) => {
-            for (const f of query.query) {
+            for (const f of queryCoerced) {
                 if (!(await this.handleQueryObject(f, doc.value))) {
@@ -241,76 +199,89 @@ return false;
-    async query(
-        query: types.SearchRequest,
-        properties: { reference?: boolean },
-    ): Promise<types.IndexedResults<T>> {
-        const indexedDocuments = await this.queryAll(query);
-        if (indexedDocuments.length <= 1) {
-            return {
-                kept: 0,
-                results: indexedDocuments,
-            };
-        }
-        /* const aliases = resolveNestedAliasesRecursively(query) */
-        // Sort
-        indexedDocuments.sort((a, b) =>
-            types.extractSortCompare(a.value, b.value, query.sort),
-        );
-        const batch = getBatchFromResults<T>(
-            indexedDocuments,
-            query.fetch,
-            /* this.properties.iterator.batch, */
-        );
-        if (indexedDocuments.length > 0) {
-            this._resultsCollectQueue.add(query.idString, {
-                arr: indexedDocuments,
-                reference: properties?.reference,
-            }); // cache resulst not returned
-        }
+    iterate<S extends types.Shape | undefined>(
+        query: types.IterateOptions,
+        properties: { shape?: S; reference?: boolean },
+    ): types.IndexIterator<T, S> {
+        let done: boolean | undefined = undefined;
+        let queue:
+            | {
+                  arr: types.IndexedValue<T>[];
+                  reference: boolean | undefined;
+              }
+            | undefined = undefined;
+        const fetch = async (
+            n: number,
+        ): Promise<types.IndexedResults<types.ReturnTypeFromShape<T, S>>> => {
+            if (!queue && !done) {
+                const indexedDocuments = await this.queryAll(query);
+                if (indexedDocuments.length > 1) {
+                    // Sort
+                    if (query.sort) {
+                        const sortArr = Array.isArray(query.sort)
+                            ? query.sort
+                            : [query.sort];
+                        sortArr.length > 0 &&
+                            indexedDocuments.sort((a, b) =>
+                                types.extractSortCompare(a.value, b.value, sortArr),
+                            );
+                    }
+                }
+                if (indexedDocuments.length > 0) {
+                    queue = {
+                        arr: indexedDocuments,
+                        reference: properties?.reference,
+                    }; // cache resulst not returned
+                    done = false;
+                } else {
+                    done = true;
+                }
+            }
+            if (queue && queue.arr.length <= n) {
+                done = true;
+            }
+            if (!queue) {
+                return [];
+            }
+            const batch = getBatchFromResults<T>(
                queue.arr,
                n,
                /* this.properties.iterator.batch */
+            );
+            return (
+                queue.reference ? batch : cloneResults(batch, this.properties.schema)
+            ) as types.IndexedResults<types.ReturnTypeFromShape<T, S>>;
+        };
         // TODO dont leak kept if canRead is defined, or return something random
-        return {
+        /* return {
             kept: indexedDocuments.length,
-            results: properties?.reference
+            results: (properties?.reference
                 ? batch
-                : cloneResults(batch, this.properties.schema),
-        };
-    }
-    async next(
-        query: types.CollectNextRequest,
-    ): Promise<types.IndexedResults<T>> {
-        const results = this._resultsCollectQueue.get(query.idString);
-        if (!results) {
-            return {
-                results: [],
-                kept: 0,
-            };
-        }
-        const batch = getBatchFromResults<T>(
-            results.arr,
-            query.amount,
-            /* this.properties.iterator.batch */
-        );
-        if (results.arr.length === 0) {
-            this._resultsCollectQueue.del(query.idString); // TODO add tests for proper cleanup/timeouts
-        }
-        // TODO dont leak kept if canRead is defined, or return something random
+                : cloneResults(batch, this.properties.schema)) as any, // TODO fix this type,
+        }; */
         return {
-            results: results.reference
-                ? batch
-                : cloneResults(batch, this.properties.schema),
-            kept: results.arr.length,
+            all: async () => {
+                const results = await fetch(Infinity);
+                return results;
+            },
+            next: (n: number) => fetch(n),
+            done: () => done,
+            pending: async () => {
+                if (done == null) {
+                    await fetch(0);
+                }
+                return done ? 0 : (queue?.arr.length ?? 0);
+            },
+            close: () => {
+                done = true;
+                queue = undefined;
+            },
         };
     }
-    close(query: types.CloseIteratorRequest): void {
-        this._resultsCollectQueue.del(query.idString);
-    }
     private async handleFieldQuery(
@@ -354,6 +325,5 @@ f: types.StateFieldQuery,
         queryCloned.key.splice(0, i + 1); // remove key path until the document store
-        const results = await this.properties.nested.query(
-            obj,
-            new types.SearchRequest({ query: [queryCloned] }),
-        );
+        const results = await this.properties.nested.iterate(obj, {
+            query: [queryCloned],
+        });
         return results.length > 0 ? true : false; // TODO return INNER HITS?
@@ -494,10 +464,2 @@ }
     }
-    getPending(cursorId: string): number | undefined {
-        return this._resultsCollectQueue.get(cursorId)?.arr.length;
-    }
-    get cursorCount(): number {
-        return this._resultsCollectQueue.size;
-    }
 }
@@ -504,0 +466,0 @@
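The same pass loosens the request classes into plain options objects: queryAll() now takes an optional query and normalizes it through types.toQuery(), and sum()/count() accept key as either a single property name or a key-path array. Roughly, calls like the following become possible (hypothetical shapes, inferred from this diff rather than from @peerbit/indexer-interface itself):

// Hypothetical calls against the loosened *Options shapes.
await index.count({});                            // query is optional; counts everything
await index.sum({ key: "balance" });              // a single top-level key...
await index.sum({ key: ["nested", "balance"] });  // ...or a path into nested fields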
Network access
Supply chain risk: This module accesses the network.
Found 1 instance in 1 package.
No v1
Quality: Package is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package.
+ Added @peerbit/cache@2.1.0-0b8baa8 (transitive)
+ Added @peerbit/crypto@2.3.2-0b8baa8 (transitive)
+ Added @peerbit/indexer-interface@1.1.0-0b8baa8 (transitive)
- Removed @peerbit/cache@2.1.2 (transitive)
- Removed @peerbit/crypto@2.3.5 (transitive)
- Removed @peerbit/indexer-interface@1.1.1 (transitive)