@orama/orama
Comparing version 1.0.0-beta.4 to 1.0.0-beta.5
@@ -1,6 +0,6 @@
export * from './components/defaults.js';
export * as documentsStore from './components/documents-store.js';
export * as index from './components/index.js';
export * as tokenizer from './components/tokenizer/index.js';
export * from "./components/defaults.js";
export * as documentsStore from "./components/documents-store.js";
export * as index from "./components/index.js";
export * as tokenizer from "./components/tokenizer/index.js";
//# sourceMappingURL=components.js.map
@@ -1,2 +0,2 @@
import { createError } from '../errors.js';
import { createError } from "../errors.js";
export function prioritizeTokenScores(arrays, boost) {
@@ -3,0 +3,0 @@ if (boost === 0) {
@@ -1,6 +0,5 @@
import { Document, ElapsedTime, Schema, SimpleComponents } from '../types.js';
import { Document, ElapsedTime, Schema } from '../types.js';
export { getDocumentProperties } from '../utils.js';
export declare function formatElapsedTime(n: bigint): ElapsedTime;
export declare function getDocumentIndexId(doc: Document): string;
export declare function validateSchema<S extends Schema = Schema>(doc: Document, schema: S): boolean;
export declare function getDefaultComponents(): SimpleComponents;
export declare function formatElapsedTime(n: bigint): Promise<ElapsedTime>;
export declare function getDocumentIndexId(doc: Document): Promise<string>;
export declare function validateSchema<S extends Schema = Schema>(doc: Document, schema: S): Promise<boolean>;
@@ -1,11 +0,11 @@
import { createError } from '../errors.js';
import { getDocumentProperties, uniqueId, formatNanoseconds } from '../utils.js';
export { getDocumentProperties } from '../utils.js';
export function formatElapsedTime(n) {
import { createError } from "../errors.js";
import { uniqueId, formatNanoseconds } from "../utils.js";
export { getDocumentProperties } from "../utils.js";
export async function formatElapsedTime(n) {
return {
raw: Number(n),
formatted: formatNanoseconds(n)
formatted: await formatNanoseconds(n)
};
}
export function getDocumentIndexId(doc) {
export async function getDocumentIndexId(doc) {
if (doc.id) {
@@ -17,5 +17,5 @@ if (typeof doc.id !== 'string') {
}
return uniqueId();
return await uniqueId();
}
export function validateSchema(doc, schema) {
export async function validateSchema(doc, schema) {
for (const [prop, type] of Object.entries(schema)){
@@ -36,11 +36,3 @@ if (typeof type === 'object') {
}
export function getDefaultComponents() {
return {
formatElapsedTime,
getDocumentIndexId,
getDocumentProperties,
validateSchema
};
}
//# sourceMappingURL=defaults.js.map
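The defaults.js hunks above show the core of the beta.5 change: the default function components became async and `getDefaultComponents()` was dropped (the methods/create.js hunk further down imports the four functions directly instead). A minimal sketch of overriding one of them under the new signatures — the schema and the custom ID format are illustrative, not part of the package:

```js
import { create, insert } from '@orama/orama'

const db = await create({
  schema: { title: 'string' }, // illustrative schema
  components: {
    // getDocumentIndexId is one of the FUNCTION_COMPONENTS validated in methods/create.js.
    // The SyncOrAsyncValue types later in this diff allow it to be sync or async.
    async getDocumentIndexId(doc) {
      return `note-${doc.title.toLowerCase().replace(/\s+/g, '-')}`
    }
  }
})

await insert(db, { title: 'Hello world' })
```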
@@ -7,10 +7,10 @@ import { Document, IDocumentsStore, OpaqueDocumentStore } from '../types.js';
export type DefaultDocumentsStore = IDocumentsStore<DocumentsStore>;
export declare function create(): DocumentsStore;
export declare function get(store: DocumentsStore, id: string): Document | undefined;
export declare function getMultiple(store: DocumentsStore, ids: string[]): (Document | undefined)[];
export declare function store(store: DocumentsStore, id: string, doc: Document): boolean;
export declare function remove(store: DocumentsStore, id: string): boolean;
export declare function count(store: DocumentsStore): number;
export declare function load<R = unknown>(raw: R): DocumentsStore;
export declare function save<R = unknown>(docs: DocumentsStore): R;
export declare function createDocumentsStore(): DefaultDocumentsStore;
export declare function create(): Promise<DocumentsStore>;
export declare function get(store: DocumentsStore, id: string): Promise<Document | undefined>;
export declare function getMultiple(store: DocumentsStore, ids: string[]): Promise<(Document | undefined)[]>;
export declare function store(store: DocumentsStore, id: string, doc: Document): Promise<boolean>;
export declare function remove(store: DocumentsStore, id: string): Promise<boolean>;
export declare function count(store: DocumentsStore): Promise<number>;
export declare function load<R = unknown>(raw: R): Promise<DocumentsStore>;
export declare function save<R = unknown>(docs: DocumentsStore): Promise<R>;
export declare function createDocumentsStore(): Promise<DefaultDocumentsStore>;
@@ -1,2 +0,2 @@
export function create() {
export async function create() {
return {
@@ -7,6 +7,6 @@ docs: {},
}
export function get(store, id) {
export async function get(store, id) {
return store.docs[id];
}
export function getMultiple(store, ids) {
export async function getMultiple(store, ids) {
const found = Array.from({
@@ -20,3 +20,3 @@ length: ids.length
}
export function store(store, id, doc) {
export async function store(store, id, doc) {
if (typeof store.docs[id] !== 'undefined') {
@@ -29,3 +29,3 @@ return false;
}
export function remove(store, id) {
export async function remove(store, id) {
if (typeof store.docs[id] === 'undefined') {
@@ -38,6 +38,6 @@ return false;
}
export function count(store) {
export async function count(store) {
return store.count;
}
export function load(raw) {
export async function load(raw) {
const rawDocument = raw;
@@ -49,3 +49,3 @@ return {
}
export function save(docs) {
export async function save(docs) {
return {
@@ -56,3 +56,3 @@ docs: docs.docs,
}
export function createDocumentsStore() {
export async function createDocumentsStore() {
return {
@@ -59,0 +59,0 @@ create,
@@ -1,2 +0,2 @@
import { getNested } from '../utils.js';
import { getNested } from "../utils.js";
function sortingPredicate(order = 'desc', a, b) {
@@ -39,3 +39,3 @@ if (order.toLowerCase() === 'asc') {
for (const facet of facetKeys){
const facetValue = facet.includes('.') ? getNested(doc, facet) : doc[facet];
const facetValue = facet.includes('.') ? await getNested(doc, facet) : doc[facet];
// Range facets based on numbers
@@ -42,0 +42,0 @@ if (properties[facet] === 'number') {
import { Document, MultipleCallbackComponent, Orama, SingleCallbackComponent } from '../types.js';
export declare const COMPLEX_COMPONENTS: string[];
export declare const SIMPLE_COMPONENTS: string[];
export declare const SIMPLE_OR_ARRAY_COMPONENTS: string[];
export declare const OBJECT_COMPONENTS: string[];
export declare const FUNCTION_COMPONENTS: string[];
export declare const SINGLE_OR_ARRAY_COMPONENTS: string[];
export declare function runSingleHook(hooks: SingleCallbackComponent[], orama: Orama, id: string, doc?: Document): Promise<void>;
export declare function runMultipleHook(hooks: MultipleCallbackComponent[], orama: Orama, docsOrIds: Document[] | string[]): Promise<void>;
@@ -1,2 +0,2 @@
export const COMPLEX_COMPONENTS = [
export const OBJECT_COMPONENTS = [
'tokenizer',
@@ -6,3 +6,3 @@ 'index',
];
export const SIMPLE_COMPONENTS = [
export const FUNCTION_COMPONENTS = [
'validateSchema',
@@ -13,3 +13,3 @@ 'getDocumentIndexId',
];
export const SIMPLE_OR_ARRAY_COMPONENTS = [
export const SINGLE_OR_ARRAY_COMPONENTS = [
'beforeInsert',
@@ -16,0 +16,0 @@ 'afterInsert',
@@ -27,11 +27,11 @@ import { Node as AVLNode } from '../trees/avl.js';
Index: Index;
}>, schema: Schema, index?: Index, prefix?: string): Index;
export declare function insert(index: Index, prop: string, id: string, value: SearchableValue, language: string | undefined, tokenizer: Tokenizer, docsCount: number): void;
export declare function remove(index: Index, prop: string, id: string, value: SearchableValue, language: string | undefined, tokenizer: Tokenizer, docsCount: number): boolean;
export declare function search(index: Index, prop: string, term: string, context: SearchContext): TokenScore[];
export declare function searchByWhereClause(index: Index, filters: Record<string, boolean | ComparisonOperator>): string[];
export declare function getSearchableProperties(index: Index): string[];
export declare function getSearchablePropertiesWithTypes(index: Index): Record<string, 'string' | 'number' | 'boolean'>;
export declare function load<R = unknown>(raw: R): Index;
export declare function save<R = unknown>(index: Index): R;
export declare function createIndex(): DefaultIndex;
}>, schema: Schema, index?: Index, prefix?: string): Promise<Index>;
export declare function insert(index: Index, prop: string, id: string, value: SearchableValue, language: string | undefined, tokenizer: Tokenizer, docsCount: number): Promise<void>;
export declare function remove(index: Index, prop: string, id: string, value: SearchableValue, language: string | undefined, tokenizer: Tokenizer, docsCount: number): Promise<boolean>;
export declare function search(index: Index, prop: string, term: string, context: SearchContext): Promise<TokenScore[]>;
export declare function searchByWhereClause(index: Index, filters: Record<string, boolean | ComparisonOperator>): Promise<string[]>;
export declare function getSearchableProperties(index: Index): Promise<string[]>;
export declare function getSearchablePropertiesWithTypes(index: Index): Promise<Record<string, 'string' | 'number' | 'boolean'>>;
export declare function load<R = unknown>(raw: R): Promise<Index>;
export declare function save<R = unknown>(index: Index): Promise<R>;
export declare function createIndex(): Promise<DefaultIndex>;
@@ -1,7 +0,7 @@
import { createError } from '../errors.js';
import { create as avlCreate, find as avlFind, greaterThan as avlGreaterThan, insert as avlInsert, lessThan as avlLessThan, rangeSearch as avlRangeSearch, removeDocument as avlRemoveDocument } from '../trees/avl.js';
import { create as radixCreate, find as radixFind, insert as radixInsert, removeDocumentByWord as radixRemoveDocument } from '../trees/radix.js';
import { intersect } from '../utils.js';
import { BM25 } from './algorithms.js';
export function create(orama, schema, index, prefix = '') {
import { createError } from "../errors.js";
import { create as avlCreate, find as avlFind, greaterThan as avlGreaterThan, insert as avlInsert, lessThan as avlLessThan, rangeSearch as avlRangeSearch, removeDocument as avlRemoveDocument } from "../trees/avl.js";
import { create as radixCreate, find as radixFind, insert as radixInsert, removeDocumentByWord as radixRemoveDocument } from "../trees/radix.js";
import { intersect } from "../utils.js";
import { BM25 } from "./algorithms.js";
export async function create(orama, schema, index, prefix = '') {
if (!index) {
@@ -51,3 +51,3 @@ index = {
}
export function insert(index, prop, id, value, language, tokenizer, docsCount) {
export async function insert(index, prop, id, value, language, tokenizer, docsCount) {
if (typeof value === 'number') {
@@ -62,3 +62,3 @@ avlInsert(index.indexes[prop], value, [
}
const tokens = tokenizer.tokenize(value, language);
const tokens = await tokenizer.tokenize(value, language);
if (!(id in index.frequencies[prop])) {
@@ -86,3 +86,3 @@ index.frequencies[prop][id] = {};
}
export function remove(index, prop, id, value, language, tokenizer, docsCount) {
export async function remove(index, prop, id, value, language, tokenizer, docsCount) {
if (typeof value === 'number') {
@@ -97,3 +97,3 @@ avlRemoveDocument(index.indexes[prop], id, value);
}
const tokens = tokenizer.tokenize(value, language);
const tokens = await tokenizer.tokenize(value, language);
index.avgFieldLength[prop] = (index.avgFieldLength[prop] * docsCount - index.fieldLengths[prop][id]) / (docsCount - 1);
@@ -108,3 +108,3 @@ index.fieldLengths[prop][id] = undefined;
}
export function search(index, prop, term, context) {
export async function search(index, prop, term, context) {
if (!(prop in index.tokenOccurrencies)) {
@@ -150,3 +150,3 @@ return [];
}
export function searchByWhereClause(index, filters) {
export async function searchByWhereClause(index, filters) {
const filterKeys = Object.keys(filters);
@@ -215,9 +215,9 @@ const filtersMap = filterKeys.reduce((acc, key)=>({
}
export function getSearchableProperties(index) {
export async function getSearchableProperties(index) {
return index.searchableProperties;
}
export function getSearchablePropertiesWithTypes(index) {
export async function getSearchablePropertiesWithTypes(index) {
return index.searchablePropertiesWithTypes;
}
export function load(raw) {
export async function load(raw) {
const { indexes , searchableProperties , searchablePropertiesWithTypes , frequencies , tokenOccurrencies , avgFieldLength , fieldLengths } = raw;
@@ -234,3 +234,3 @@ return {
}
export function save(index) {
export async function save(index) {
const { indexes , searchableProperties , searchablePropertiesWithTypes , frequencies , tokenOccurrencies , avgFieldLength , fieldLengths } = index;
@@ -247,3 +247,3 @@ return {
}
export function createIndex() {
export async function createIndex() {
return {
@@ -250,0 +250,0 @@ create,
@@ -11,3 +11,4 @@ export type BoundedMetric = {
*/
export declare function boundedLevenshtein(a: string, b: string, tolerance: number): BoundedMetric;
export declare function boundedLevenshtein(a: string, b: string, tolerance: number): Promise<BoundedMetric>;
export declare function syncBoundedLevenshtein(a: string, b: string, tolerance: number): BoundedMetric;
export declare function levenshtein(a: string, b: string): number;
@@ -99,3 +99,3 @@ /**
* - tolerance >= ||a| - |b|| >= 0
*/ export function boundedLevenshtein(a, b, tolerance) {
*/ export async function boundedLevenshtein(a, b, tolerance) {
const distance = _boundedLevenshtein(a, b, tolerance);
@@ -107,2 +107,10 @@ return {
}
// This is only used internally, keep in sync with the previous one
export function syncBoundedLevenshtein(a, b, tolerance) {
const distance = _boundedLevenshtein(a, b, tolerance);
return {
distance,
isBounded: distance >= 0
};
}
export function levenshtein(a, b) {
@@ -109,0 +117,0 @@ /* c8 ignore next 3 */ if (!a.length) {
var _globalThis_process;
import { kInsertions, kRemovals } from '../types.js';
import { kInsertions, kRemovals } from "../types.js";
// Web platforms don't have process. React-Native doesn't have process.emitWarning.
@@ -4,0 +4,0 @@ const warn = ((_globalThis_process = globalThis.process) === null || _globalThis_process === void 0 ? void 0 : _globalThis_process.emitWarning) ?? function emitWarning(message, options) {
@@ -1,5 +0,5 @@
import { createError } from '../../errors.js';
import { replaceDiacritics } from './diacritics.js';
import { SPLITTERS, STEMMERS, SUPPORTED_LANGUAGES } from './languages.js';
import { stopWords as defaultStopWords } from './stop-words/index.js';
import { createError } from "../../errors.js";
import { replaceDiacritics } from "./diacritics.js";
import { SPLITTERS, STEMMERS, SUPPORTED_LANGUAGES } from "./languages.js";
import { stopWords as defaultStopWords } from "./stop-words/index.js";
function normalizeToken(token) {
@@ -68,7 +68,11 @@ var _this_stopWords;
// with vite.
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This fails when verifying CJS but it's actually correct
const stemmersPath = import.meta.url.endsWith('ts') ? '../../stemmer/lib' : '../stemmer';
const stemmerImport = await import(`../${stemmersPath}/${STEMMERS[config.language]}.js`);
stemmer = stemmerImport.stemmer;
try {
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore This fails when verifying CJS but it's actually correct
const stemmersPath = import.meta.url.endsWith('ts') ? '../../stemmers/lib' : '../stemmers';
const stemmerImport = await import(`../${stemmersPath}/${STEMMERS[config.language]}.js`);
stemmer = stemmerImport.stemmer;
} catch (e) {
throw createError('BUNDLED_ORAMA', config.language);
}
}
@@ -75,0 +79,0 @@ }
@@ -1,13 +0,13 @@
import { en } from './en.js';
import { it } from './it.js';
import { fr } from './fr.js';
import { es } from './es.js';
import { pt } from './pt.js';
import { nl } from './nl.js';
import { se } from './se.js';
import { ru } from './ru.js';
import { no } from './no.js';
import { de } from './de.js';
import { dk } from './dk.js';
import { fi } from './fi.js';
import { en } from "./en.js";
import { it } from "./it.js";
import { fr } from "./fr.js";
import { es } from "./es.js";
import { pt } from "./pt.js";
import { nl } from "./nl.js";
import { se } from "./se.js";
import { ru } from "./ru.js";
import { no } from "./no.js";
import { de } from "./de.js";
import { dk } from "./dk.js";
import { fi } from "./fi.js";
export const stopWords = {
@@ -14,0 +14,0 @@ english: en,
declare const errors: {
NO_LANGUAGE_WITH_CUSTOM_TOKENIZER: string;
BUNDLED_ORAMA: string;
LANGUAGE_NOT_SUPPORTED: string;
@@ -4,0 +5,0 @@ INVALID_STEMMER_FUNCTION_TYPE: string;
@@ -1,6 +0,7 @@
import { SUPPORTED_LANGUAGES } from './components/tokenizer/languages.js';
import { sprintf } from './utils.js';
import { SUPPORTED_LANGUAGES } from "./components/tokenizer/languages.js";
import { sprintf } from "./utils.js";
const allLanguages = SUPPORTED_LANGUAGES.join('\n - ');
const errors = {
NO_LANGUAGE_WITH_CUSTOM_TOKENIZER: 'Do not pass the language option to create when using a custom tokenizer.',
BUNDLED_ORAMA: 'Cannot find the stemmer for the locale "%s". This can happen if you are using Orama within a bundler like webpack. To solve this issue please look at https://docs.oramasearch.com/usage/bundlers#using-stemming-with-bundlers.',
LANGUAGE_NOT_SUPPORTED: `Language "%s" is not supported.\nSupported languages are:\n - ${allLanguages}`,
@@ -7,0 +8,0 @@ INVALID_STEMMER_FUNCTION_TYPE: `config.stemmer property must be a function.`,
@@ -9,3 +9,6 @@ export { create } from './methods/create.js';
export * from './types.js';
export * as components from './components.js';
export * as internals from './internals.js';
export * from './components/tokenizer/stemmers.js';
export type { RawData } from './methods/serialization.js';
export type { Language } from './components/tokenizer/languages.js';
@@ -1,10 +0,13 @@
export { create } from './methods/create.js';
export { count, getByID } from './methods/docs.js';
export { insert, insertMultiple } from './methods/insert.js';
export { remove, removeMultiple } from './methods/remove.js';
export { search } from './methods/search.js';
export { load, save } from './methods/serialization.js';
export { update, updateMultiple } from './methods/update.js';
export * from './types.js';
export { create } from "./methods/create.js";
export { count, getByID } from "./methods/docs.js";
export { insert, insertMultiple } from "./methods/insert.js";
export { remove, removeMultiple } from "./methods/remove.js";
export { search } from "./methods/search.js";
export { load, save } from "./methods/serialization.js";
export { update, updateMultiple } from "./methods/update.js";
export * from "./types.js";
export * as components from "./components.js";
export * as internals from "./internals.js";
export * from "./components/tokenizer/stemmers.js";
//# sourceMappingURL=index.js.map
export { boundedLevenshtein } from './components/levenshtein.js';
export { sprintf, formatBytes, formatNanoseconds, getNanosecondsTime, uniqueId } from './utils.js';
export { formatBytes, formatNanoseconds, getNanosecondsTime, uniqueId } from './utils.js';
@@ -1,4 +0,4 @@
export { boundedLevenshtein } from './components/levenshtein.js';
export { sprintf, formatBytes, formatNanoseconds, getNanosecondsTime, uniqueId } from './utils.js';
export { boundedLevenshtein } from "./components/levenshtein.js";
export { formatBytes, formatNanoseconds, getNanosecondsTime, uniqueId } from "./utils.js";
//# sourceMappingURL=internals.js.map
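The internals hunks above drop the `sprintf` re-export and, like the rest of the diff, turn the remaining helpers async: `boundedLevenshtein` now resolves to a `Promise<BoundedMetric>`. A small usage sketch against the `@orama/orama/internals` entry point (which the package.json exports below keep in place); the input strings and values are illustrative:

```js
import { boundedLevenshtein, formatNanoseconds } from '@orama/orama/internals'

// boundedLevenshtein now returns Promise<BoundedMetric>, so it must be awaited.
const { distance, isBounded } = await boundedLevenshtein('orama', 'oramas', 2)
console.log(distance, isBounded) // e.g. 1 true

// formatNanoseconds is async in beta.5 as well (see the utils hunks below).
console.log(await formatNanoseconds(1_500_000n))
```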
@@ -1,10 +0,15 @@
import { getDefaultComponents } from '../components/defaults.js';
import { createDocumentsStore } from '../components/documents-store.js';
import { COMPLEX_COMPONENTS, SIMPLE_COMPONENTS, SIMPLE_OR_ARRAY_COMPONENTS } from '../components/hooks.js';
import { createIndex } from '../components/index.js';
import { createTokenizer } from '../components/tokenizer/index.js';
import { createError } from '../errors.js';
import { formatElapsedTime, getDocumentIndexId, getDocumentProperties, validateSchema } from "../components/defaults.js";
import { createDocumentsStore } from "../components/documents-store.js";
import { OBJECT_COMPONENTS, FUNCTION_COMPONENTS, SINGLE_OR_ARRAY_COMPONENTS } from "../components/hooks.js";
import { createIndex } from "../components/index.js";
import { createTokenizer } from "../components/tokenizer/index.js";
import { createError } from "../errors.js";
function validateComponents(components) {
const defaultComponents = getDefaultComponents();
for (const rawKey of SIMPLE_COMPONENTS){
const defaultComponents = {
formatElapsedTime,
getDocumentIndexId,
getDocumentProperties,
validateSchema
};
for (const rawKey of FUNCTION_COMPONENTS){
const key = rawKey;
@@ -20,3 +25,3 @@ if (components[key]) {
}
for (const rawKey of SIMPLE_OR_ARRAY_COMPONENTS){
for (const rawKey of SINGLE_OR_ARRAY_COMPONENTS){
const key = rawKey;
@@ -38,3 +43,3 @@ if (!components[key]) {
for (const rawKey of Object.keys(components)){
if (!COMPLEX_COMPONENTS.includes(rawKey) && !SIMPLE_COMPONENTS.includes(rawKey) && !SIMPLE_OR_ARRAY_COMPONENTS.includes(rawKey)) {
if (!OBJECT_COMPONENTS.includes(rawKey) && !FUNCTION_COMPONENTS.includes(rawKey) && !SINGLE_OR_ARRAY_COMPONENTS.includes(rawKey)) {
throw createError('UNSUPPORTED_COMPONENT', rawKey);
@@ -65,6 +70,6 @@ }
if (!index) {
index = createIndex();
index = await createIndex();
}
if (!documentsStore) {
documentsStore = createDocumentsStore();
documentsStore = await createDocumentsStore();
}
@@ -71,0 +76,0 @@ // Validate all other components
@@ -1,4 +0,4 @@
import { runMultipleHook, runSingleHook } from '../components/hooks.js';
import { trackInsertion } from '../components/sync-blocking-checker.js';
import { createError } from '../errors.js';
import { runMultipleHook, runSingleHook } from "../components/hooks.js";
import { trackInsertion } from "../components/sync-blocking-checker.js";
import { createError } from "../errors.js";
export async function insert(orama, doc, language, skipHooks) {
@@ -5,0 +5,0 @@ await orama.validateSchema(doc, orama.schema);
@@ -1,4 +0,4 @@
import { runMultipleHook, runSingleHook } from '../components/hooks.js';
import { trackRemoval } from '../components/sync-blocking-checker.js';
import { createError } from '../errors.js';
import { runMultipleHook, runSingleHook } from "../components/hooks.js";
import { trackRemoval } from "../components/sync-blocking-checker.js";
import { createError } from "../errors.js";
export async function remove(orama, id, language, skipHooks) {
@@ -5,0 +5,0 @@ let result = true;
@@ -1,6 +0,6 @@
import { prioritizeTokenScores } from '../components/algorithms.js';
import { getFacets } from '../components/facets.js';
import { intersectFilteredIDs } from '../components/filters.js';
import { createError } from '../errors.js';
import { getNanosecondsTime, sortTokenScorePredicate } from '../utils.js';
import { prioritizeTokenScores } from "../components/algorithms.js";
import { getFacets } from "../components/facets.js";
import { intersectFilteredIDs } from "../components/filters.js";
import { createError } from "../errors.js";
import { getNanosecondsTime, sortTokenScorePredicate } from "../utils.js";
const defaultBM25Params = {
@@ -11,3 +11,3 @@ k: 1.2,
};
function createSearchContext(properties, tokens, params, docsCount) {
async function createSearchContext(properties, tokens, params, docsCount) {
// If filters are enabled, we need to get the IDs of the documents that match the filters.
@@ -52,3 +52,3 @@ // const hasFilters = Object.keys(params.where ?? {}).length > 0;
return {
timeStart: getNanosecondsTime(),
timeStart: await getNanosecondsTime(),
params,
@@ -66,3 +66,3 @@ docsCount,
const { index , docs } = orama.data;
const tokens = orama.tokenizer.tokenize(term, language);
const tokens = await orama.tokenizer.tokenize(term, language);
// Get searchable string properties
@@ -85,3 +85,3 @@ let propertiesToSearch = orama.caches['propertiesToSearch'];
// Create the search context and the results
const context = createSearchContext(propertiesToSearch, tokens, params, await orama.documentsStore.count(docs));
const context = await createSearchContext(propertiesToSearch, tokens, params, await orama.documentsStore.count(docs));
const results = Array.from({
@@ -94,3 +94,3 @@ length: limit
if (hasFilters) {
whereFiltersIDs = orama.index.searchByWhereClause(index, params.where);
whereFiltersIDs = await orama.index.searchByWhereClause(index, params.where);
}
@@ -156,3 +156,3 @@ // Now it's time to loop over all the indices and get the documents IDs for every single term
const searchResult = {
elapsed: await orama.formatElapsedTime(getNanosecondsTime() - context.timeStart),
elapsed: await orama.formatElapsedTime(await getNanosecondsTime() - context.timeStart),
hits: results.filter(Boolean),
@@ -159,0 +159,0 @@ count: uniqueDocsArray.length
@@ -1,4 +0,4 @@
import { runMultipleHook, runSingleHook } from '../components/hooks.js';
import { insert, insertMultiple } from './insert.js';
import { remove, removeMultiple } from './remove.js';
import { runMultipleHook, runSingleHook } from "../components/hooks.js";
import { insert, insertMultiple } from "./insert.js";
import { remove, removeMultiple } from "./remove.js";
export async function update(orama, id, doc, language, skipHooks) {
@@ -5,0 +5,0 @@ if (!skipHooks) {
@@ -1,3 +0,3 @@
import { boundedLevenshtein } from '../components/levenshtein.js';
import { getOwnProperty, uniqueId } from '../utils.js';
import { syncBoundedLevenshtein } from "../components/levenshtein.js";
import { getOwnProperty, syncUniqueId } from "../utils.js";
/* c8 ignore next 5 */ function serialize() {
@@ -42,3 +42,3 @@ const { word , subWord , children , docs , end } = this;
// In that case, we don't need to add the word to the output
if (difference <= tolerance && boundedLevenshtein(term, word, tolerance).isBounded) {
if (difference <= tolerance && syncBoundedLevenshtein(term, word, tolerance).isBounded) {
output[word] = [];
@@ -82,3 +82,3 @@ }
const node = {
id: uniqueId(),
id: syncUniqueId(),
key,
@@ -85,0 +85,0 @@ subWord,
@@ -202,3 +202,3 @@ import { Language } from './components/tokenizer/languages.js';
Index: I;
}>, schema: Schema) => I;
}>, schema: Schema) => SyncOrAsyncValue<I>;
beforeInsert?: IIndexInsertOrRemoveFunction<I>;
@@ -211,7 +211,7 @@ insert: IIndexInsertOrRemoveFunction<I>;
search(index: I, prop: string, term: string, context: SearchContext): SyncOrAsyncValue<TokenScore[]>;
searchByWhereClause(index: I, filters: Record<string, boolean | ComparisonOperator>): string[];
searchByWhereClause(index: I, filters: Record<string, boolean | ComparisonOperator>): SyncOrAsyncValue<string[]>;
getSearchableProperties(index: I): SyncOrAsyncValue<string[]>;
getSearchablePropertiesWithTypes(index: I): SyncOrAsyncValue<Record<string, SearchableType>>;
load<R = unknown>(raw: R): I | Promise<I>;
save<R = unknown>(index: I): R | Promise<R>;
load<R = unknown>(raw: R): SyncOrAsyncValue<I>;
save<R = unknown>(index: I): SyncOrAsyncValue<R>;
}
@@ -221,3 +221,3 @@ export interface IDocumentsStore<D extends OpaqueDocumentStore = OpaqueDocumentStore> {
DocumentStore: D;
}>) => D;
}>) => SyncOrAsyncValue<D>;
get(store: D, id: string): SyncOrAsyncValue<Document | undefined>;
@@ -228,4 +228,4 @@ getMultiple(store: D, ids: string[]): SyncOrAsyncValue<(Document | undefined)[]>;
count(store: D): SyncOrAsyncValue<number>;
load<R = unknown>(raw: R): D | Promise<D>;
save<R = unknown>(store: D): R | Promise<R>;
load<R = unknown>(raw: R): SyncOrAsyncValue<D>;
save<R = unknown>(store: D): SyncOrAsyncValue<R>;
}
@@ -243,5 +243,5 @@ export type Stemmer = (word: string) => string;
normalizationCache: Map<string, string>;
tokenize: (raw: string, language?: string) => string[];
tokenize: (raw: string, language?: string) => SyncOrAsyncValue<string[]>;
}
export interface ComplexComponent {
export interface ObjectComponents {
tokenizer: Tokenizer | TokenizerConfig;
@@ -251,3 +251,3 @@ index: IIndex;
}
export interface SimpleComponents<S extends Schema = Schema> {
export interface FunctionComponents<S extends Schema = Schema> {
validateSchema(doc: Document, schema: S): SyncOrAsyncValue<boolean>;
@@ -258,3 +258,3 @@ getDocumentIndexId(doc: Document): SyncOrAsyncValue<string>;
}
export interface SimpleOrArrayCallbackComponents {
export interface SingleOrArrayCallbackComponents {
beforeInsert: SingleOrArray<SingleCallbackComponent>;
@@ -287,3 +287,3 @@ afterInsert: SingleOrArray<SingleCallbackComponent>;
}
export type Components = Partial<ComplexComponent & SimpleComponents & SimpleOrArrayCallbackComponents>;
export type Components = Partial<ObjectComponents & FunctionComponents & SingleOrArrayCallbackComponents>;
export declare const kInsertions: unique symbol;
@@ -296,2 +296,6 @@ export declare const kRemovals: unique symbol;
}>;
interface Data<I extends OpaqueIndex, D extends OpaqueDocumentStore> {
index: I;
docs: D;
}
type Internals<S extends Schema, I extends OpaqueIndex, D extends OpaqueDocumentStore> = {
@@ -302,6 +306,3 @@ schema: S;
documentsStore: IDocumentsStore<D>;
data: {
index: I;
docs: D;
};
data: Data<I, D>;
caches: Record<string, unknown>;
@@ -315,3 +316,3 @@ [kInsertions]: number | undefined;
DocumentStore: OpaqueDocumentStore;
}> = SimpleComponents & ArrayCallbackComponents & Internals<Schema & P['Schema'], OpaqueIndex & P['Index'], OpaqueDocumentStore & P['DocumentStore']>;
}> = FunctionComponents & ArrayCallbackComponents & Internals<Schema & P['Schema'], OpaqueIndex & P['Index'], OpaqueDocumentStore & P['DocumentStore']>;
export {};
import type { Document, TokenScore } from './types.js';
export declare const isServer: boolean;
export declare function sprintf(template: string, ...args: (string | number)[]): string;
export declare function formatBytes(bytes: number, decimals?: number): string;
export declare function formatNanoseconds(value: number | bigint): string;
export declare function getNanosecondsTime(): bigint;
export declare function uniqueId(): string;
export declare function formatBytes(bytes: number, decimals?: number): Promise<string>;
export declare function formatNanoseconds(value: number | bigint): Promise<string>;
export declare function getNanosecondsTime(): Promise<bigint>;
export declare function uniqueId(): Promise<string>;
export declare function syncUniqueId(): string;
export declare function getOwnProperty<T = unknown>(object: Record<string, T>, property: string): T | undefined;
@@ -13,4 +14,4 @@ export declare function getTokenFrequency(token: string, tokens: string[]): number;
export declare function intersect<T>(arrays: ReadonlyArray<T>[]): T[];
export declare function getDocumentProperties(doc: Document, paths: string[]): Record<string, string | number | boolean>;
export declare function getNested<T = 'string' | 'number' | 'boolean'>(obj: object, path: string): T | undefined;
export declare function getDocumentProperties(doc: Document, paths: string[]): Promise<Record<string, string | number | boolean>>;
export declare function getNested<T = 'string' | 'number' | 'boolean'>(obj: object, path: string): Promise<T | undefined>;
export declare function flattenObject(obj: object, prefix?: string): Document;
@@ -34,3 +34,3 @@ const baseId = Date.now().toString().slice(5);
}
export function formatBytes(bytes, decimals = 2) {
export async function formatBytes(bytes, decimals = 2) {
if (bytes === 0) {
@@ -54,3 +54,3 @@ return '0 Bytes';
}
export function formatNanoseconds(value) {
export async function formatNanoseconds(value) {
if (typeof value === 'number') {
@@ -68,3 +68,3 @@ value = BigInt(value);
}
export function getNanosecondsTime() {
export async function getNanosecondsTime() {
if (typeof process !== 'undefined' && process.hrtime !== undefined) {
@@ -79,5 +79,9 @@ return process.hrtime.bigint();
}
export function uniqueId() {
export async function uniqueId() {
return `${baseId}-${lastId++}`;
}
// This is only used internally, keep in sync with the previous one
export function syncUniqueId() {
return `${baseId}-${lastId++}`;
}
export function getOwnProperty(object, property) {
@@ -149,3 +153,3 @@ return hasOwn(object, property) ? object[property] : undefined;
}
export function getDocumentProperties(doc, paths) {
export async function getDocumentProperties(doc, paths) {
const properties = {};
@@ -176,4 +180,4 @@ const pathsLength = paths.length;
}
export function getNested(obj, path) {
const props = getDocumentProperties(obj, [
export async function getNested(obj, path) {
const props = await getDocumentProperties(obj, [
path
@@ -180,0 +184,0 @@ ]);
{
"name": "@orama/orama",
"version": "1.0.0-beta.4",
"version": "1.0.0-beta.5",
"type": "module",
"description": "Next generation full-text search engine, written in TypeScript",
"sideEffects": false,
"main": "./dist/cjs/index.cjs",
"exports": {
@@ -13,6 +14,2 @@ ".": {
},
"./wasm": {
"types": "./dist/wasm.d.ts",
"import": "./dist/wasm.js"
},
"./internals": {
@@ -28,22 +25,6 @@ "types": "./dist/internals.d.ts",
},
"./stemmer/*": {
"types": "./dist/stemmer/*.d.ts",
"import": "./dist/stemmer/*.js",
"require": "./dist/cjs/stemmer/*.cjs"
},
"./cjs": {
"types": "./dist/cjs/index.d.cts",
"require": "./dist/cjs/index.cjs"
},
"./cjs/internals": {
"types": "./dist/cjs/internals.d.cts",
"require": "./dist/cjs/internals.cjs"
},
"./cjs/components": {
"types": "./dist/cjs/components.d.cts",
"require": "./dist/cjs/components.cjs"
},
"./cjs/stemmer/*": {
"types": "./dist/cjs/stemmer/*.d.cts",
"require": "./dist/cjs/stemmer/*.cjs"
"./stemmers/*": {
"types": "./dist/stemmers/*.d.ts",
"import": "./dist/stemmers/*.js",
"require": "./dist/cjs/stemmers/*.cjs"
}
@@ -108,3 +89,3 @@ },
"scripts": {
"predev": "rm -rf dist && mkdir dist && cp -a stemmer/lib dist/stemmer",
"predev": "rm -rf dist && mkdir dist && cp -a stemmers/lib dist/stemmers",
"dev": "swc -s -w --extensions .ts,.cts -d dist src",
@@ -111,0 +92,0 @@ "prebuild": "npm run lint",
@@ -192,6 +192,6 @@ ![Orama. Search, everywhere.](https://github.com/oramasearch/orama/blob/main/misc/oramasearch.gif?raw=true)
Orama methods can be required as CommonJS modules by requiring from `@orama/orama/cjs`.
Orama methods can be required as CommonJS modules by requiring from `@orama/orama`.
```js
const { create, insert } = require("@orama/orama/cjs")
const { create, insert } = require("@orama/orama")
@@ -203,4 +203,2 @@ create(/* ... */)
Note that only main methods are supported so for internals and other supported exports you still have to use `await import`.
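Put together, the beta.5 CommonJS usage the README describes looks roughly like this; the schema and search term are illustrative, and the internals entry point still goes through `await import` as the note above says:

```js
// Main methods can now be required straight from the package root.
const { create, insert, search } = require('@orama/orama')

async function run() {
  // Internals and the other extra entry points still need a dynamic import in CJS.
  const internals = await import('@orama/orama/internals')

  const db = await create({ schema: { title: 'string' } })
  await insert(db, { title: 'Hello world' })

  const results = await search(db, { term: 'hello' })
  console.log(results.count, await internals.formatNanoseconds(results.elapsed.raw))
}

run()
```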
## Language
@@ -223,3 +221,3 @@
import { create } from '@orama/orama'
import { stemmer } from '@orama/orama/components/stemmer/it'
import { stemmer } from '@orama/orama/stemmers/it'
@@ -231,3 +229,3 @@ const db = await create({
},
defaultLanguage: 'italian',
language: 'italian',
components: {
@@ -246,3 +244,3 @@ tokenizer: {
const { create } = await import('@orama/orama')
const { stemmer } = await import('@orama/orama/components/stemmer/it')
const { stemmer } = await import('@orama/orama/stemmers/it')
@@ -254,3 +252,3 @@ const db = await create({
},
defaultLanguage: 'italian',
language: 'italian',
components: {
@@ -257,0 +255,0 @@ tokenizer: {
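Reassembled from the fragments above, the updated Italian-stemmer example would look roughly like this; the schema field is illustrative, and the `stemmer` key is inferred from the `INVALID_STEMMER_FUNCTION_TYPE` message in the errors.js hunk ("config.stemmer property must be a function"):

```js
import { create } from '@orama/orama'
import { stemmer } from '@orama/orama/stemmers/it'

const db = await create({
  schema: {
    description: 'string' // illustrative field
  },
  language: 'italian', // renamed from defaultLanguage in beta.4
  components: {
    tokenizer: {
      stemmer // config.stemmer must be a function (see errors.js above)
    }
  }
})
```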