@llamaindex/core
Comparing version 0.2.5 to 0.2.6
@@ -5,2 +5,3 @@ import { CustomEvent, Tokenizer } from '@llamaindex/env';
 import { SynthesizeStartEvent, SynthesizeEndEvent } from '../response-synthesizers/index.js';
+import { RetrieveStartEvent, RetrieveEndEvent } from '../retriever/index.js';
 import { TextNode } from '../schema/index.js';
@@ -78,2 +79,4 @@ import { EventCaller } from '../utils/index.js';
     "synthesize-end": SynthesizeEndEvent;
+    "retrieve-start": RetrieveStartEvent;
+    "retrieve-end": RetrieveEndEvent;
 }
@@ -80,0 +83,0 @@ declare class LlamaIndexCustomEvent<T = any> extends CustomEvent<T> {
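
With these two new entries in the event map, retrieval now emits dedicated callback events alongside the existing synthesize ones. A minimal sketch of subscribing to them; the handlers receive LlamaIndexCustomEvent instances, so the payload sits on event.detail. The import path (top-level llamaindex package) and the exact detail fields are assumptions, not shown in this diff:

import { Settings } from "llamaindex";

// Sketch: field names on event.detail are assumptions based on the
// RetrieveStartEvent / RetrieveEndEvent types defined in ../retriever.
Settings.callbackManager.on("retrieve-start", (event) => {
  console.log("retrieve started:", event.detail.query);
});
Settings.callbackManager.on("retrieve-end", (event) => {
  console.log("retrieve finished, nodes:", event.detail.nodes?.length);
});
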
 import { MessageContent } from '../llms/index.js';
 import { PromptMixin } from '../prompts/index.js';
-import { EngineResponse } from '../schema/index.js';
+import { NodeWithScore, EngineResponse } from '../schema/index.js';
@@ -16,2 +16,11 @@ /**
 type QueryType = string | QueryBundle;
+type BaseQueryParams = {
+    query: QueryType;
+};
+interface StreamingQueryParams extends BaseQueryParams {
+    stream: true;
+}
+interface NonStreamingQueryParams extends BaseQueryParams {
+    stream?: false;
+}
 type QueryFn = (strOrQueryBundle: QueryType, stream?: boolean) => Promise<AsyncIterable<EngineResponse> | EngineResponse>;
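
The split into StreamingQueryParams and NonStreamingQueryParams is what lets the query overloads in the class declaration just below pick their return type from a single params object: stream: true is a required literal, while stream?: false covers both false and omitted. A standalone sketch of the same overload pattern, with illustrative names and a dummy implementation, purely to show the type behaviour:

// Standalone reproduction of the overload pattern (names are illustrative only).
interface StreamingParams { query: string; stream: true; }
interface NonStreamingParams { query: string; stream?: false; }

function run(params: StreamingParams): Promise<AsyncIterable<string>>;
function run(params: NonStreamingParams): Promise<string>;
async function run(params: StreamingParams | NonStreamingParams): Promise<AsyncIterable<string> | string> {
  if (params.stream) {
    // Streaming case: hand back an async iterable of chunks.
    return (async function* () { yield "streamed chunk"; })();
  }
  // Non-streaming case: resolve to the full response.
  return "full response";
}

// run({ query: "q" })               -> Promise<string>
// run({ query: "q", stream: true }) -> Promise<AsyncIterable<string>>
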
@@ -21,4 +30,5 @@ declare abstract class BaseQueryEngine extends PromptMixin {
     protected constructor(_query: QueryFn);
-    query(strOrQueryBundle: QueryType, stream: true): Promise<AsyncIterable<EngineResponse>>;
-    query(strOrQueryBundle: QueryType, stream?: false): Promise<EngineResponse>;
+    retrieve(params: QueryType): Promise<NodeWithScore[]>;
+    query(params: StreamingQueryParams): Promise<AsyncIterable<EngineResponse>>;
+    query(params: NonStreamingQueryParams): Promise<EngineResponse>;
 }
@@ -25,0 +35,0 @@
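
For callers, the declaration change means query now takes a single params object, and the engine additionally exposes retrieve for getting the scored source nodes directly. A minimal usage sketch; constructing the engine is out of scope here, and both the @llamaindex/core/query-engine import path and the toString() rendering of EngineResponse are assumptions. Note that the abstract base throws on retrieve unless the concrete engine overrides it, as the compiled hunk further down shows:

import type { BaseQueryEngine } from "@llamaindex/core/query-engine";

async function ask(engine: BaseQueryEngine) {
  // retrieve() returns the scored source nodes without running synthesis.
  const nodes = await engine.retrieve("What does the project do?");
  console.log(`retrieved ${nodes.length} nodes, top score ${nodes[0]?.score}`);

  // Non-streaming overload: resolves to a single EngineResponse.
  const response = await engine.query({ query: "What does the project do?" });
  console.log(response.toString()); // assumption: stringifies to the answer text

  // Streaming overload: resolves to AsyncIterable<EngineResponse>.
  const stream = await engine.query({ query: "What does the project do?", stream: true });
  for await (const chunk of stream) {
    process.stdout.write(chunk.toString());
  }
}
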
@@ -394,3 +394,7 @@ import { randomUUID } from '@llamaindex/env';
     }
-    async query(strOrQueryBundle, stream = false) {
+    async retrieve(params) {
+        throw new Error("This query engine does not support retrieve, use query directly");
+    }
+    async query(params) {
+        const { stream, query } = params;
         const id = randomUUID();
@@ -400,5 +404,5 @@ const callbackManager = Settings.callbackManager;
             id,
-            query: strOrQueryBundle
+            query
        });
-        const response = await this._query(strOrQueryBundle, stream);
+        const response = await this._query(query, stream);
         callbackManager.dispatchEvent("query-end", {
@@ -405,0 +409,0 @@ id,
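
The compiled implementation still dispatches query-start and query-end through Settings.callbackManager, now forwarding the query field destructured from the params object. A sketch of observing those events; only id and query appear in the dispatch calls above, so anything else on detail would be an assumption:

import { Settings } from "llamaindex";

Settings.callbackManager.on("query-start", (event) => {
  const { id, query } = event.detail; // both fields appear in the dispatch above
  console.log(`[query ${id}] started`, query);
});
Settings.callbackManager.on("query-end", (event) => {
  console.log(`[query ${event.detail.id}] finished`);
});
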
 {
   "name": "@llamaindex/core",
   "type": "module",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "description": "LlamaIndex Core Module",
@@ -202,2 +202,30 @@ "exports": {
       }
     },
+    "./chat-engine": {
+      "require": {
+        "types": "./dist/chat-engine/index.d.cts",
+        "default": "./dist/chat-engine/index.cjs"
+      },
+      "import": {
+        "types": "./dist/chat-engine/index.d.ts",
+        "default": "./dist/chat-engine/index.js"
+      },
+      "default": {
+        "types": "./dist/chat-engine/index.d.ts",
+        "default": "./dist/chat-engine/index.js"
+      }
+    },
+    "./retriever": {
+      "require": {
+        "types": "./dist/retriever/index.d.cts",
+        "default": "./dist/retriever/index.cjs"
+      },
+      "import": {
+        "types": "./dist/retriever/index.d.ts",
+        "default": "./dist/retriever/index.js"
+      },
+      "default": {
+        "types": "./dist/retriever/index.d.ts",
+        "default": "./dist/retriever/index.js"
+      }
+    }
@@ -204,0 +232,0 @@ },
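
The two new export entries make chat-engine and retriever importable as subpaths, with type declarations wired up for both ESM ("import") and CommonJS ("require") consumers. A sketch of importing through the new subpaths; the exported symbol names are assumptions about what those modules expose:

// ESM consumers resolve ./dist/retriever/index.js and ./dist/chat-engine/index.js:
import { BaseRetriever } from "@llamaindex/core/retriever";
import { BaseChatEngine } from "@llamaindex/core/chat-engine";

// CommonJS consumers get the .cjs builds via the "require" condition:
// const { BaseRetriever } = require("@llamaindex/core/retriever");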