@singlestore/ai
Comparing version 0.0.18 to 0.0.19
@@ -1,2 +0,2 @@
import { z } from 'zod';
import z$1, { z } from 'zod';
import { ChatCompletionMessageParam } from 'openai/resources/chat/completions';
@@ -18,3 +18,4 @@ import { OpenAI } from 'openai';
}
type AnyChatCompletionTool = ChatCompletionTool<string, z.AnyZodObject | undefined, any>;
type AnyChatCompletionTool = ChatCompletionTool<string, z.AnyZodObject | undefined, ChatCompletionToolCall<string, any | undefined, any>>;
type MergeChatCompletionTools<T extends AnyChatCompletionTool[] | undefined, U extends AnyChatCompletionTool[] | undefined> = T extends AnyChatCompletionTool[] ? U extends AnyChatCompletionTool[] ? [...T, ...U] : T : U extends AnyChatCompletionTool[] ? U : undefined;
declare class ChatCompletionTool<T extends string, U extends z.AnyZodObject | undefined, K extends ChatCompletionToolCall<T, U, any>> {
@@ -32,17 +33,22 @@ name: T;
type ChatCompletionStream = AsyncGenerator<ChatCompletion>;
type OnChatCompletionChunk = (chunk: ChatCompletion) => Promise<void> | void;
interface ChatCompletionMessage {
role: ChatCompletionMessageParam["role"];
content: string;
role: Extract<ChatCompletionMessageParam["role"], "system" | "assistant" | "user">;
content: string | null;
}
interface CreateChatCompletionOptions {
interface CreateChatCompletionParams<T extends boolean | undefined, U extends AnyChatCompletionTool[] | undefined> {
prompt?: string;
model?: string;
systemRole?: string;
stream?: boolean;
stream?: T;
messages?: ChatCompletionMessage[];
tools?: AnyChatCompletionTool[];
tools?: U;
toolCallHandlers?: U extends AnyChatCompletionTool[] ? {
[K in U[number] as K["name"]]?: (tool: K, params: K["schema"] extends z$1.AnyZodObject ? z$1.infer<K["schema"]> : undefined) => Promise<void>;
} : undefined;
toolCallResultHandlers?: U extends AnyChatCompletionTool[] ? {
[K in U[number] as K["name"]]?: (tool: K, result: Awaited<ReturnType<K["call"]>>, params: K["schema"] extends z$1.AnyZodObject ? z$1.infer<K["schema"]> : undefined) => Promise<void>;
} : undefined;
}
type CreateChatCompletionResult<T extends CreateChatCompletionOptions> = T extends {
stream: true;
} ? ChatCompletionStream : ChatCompletion;
type OnChatCompletionChunk = (chunk: ChatCompletion) => Promise<void> | void;
type CreateChatCompletionResult<T extends boolean | undefined> = T extends true ? ChatCompletionStream : ChatCompletion;
declare abstract class ChatCompletions<T extends AnyChatCompletionTool[] | undefined> {
@@ -53,8 +59,9 @@ tools: T;
handleStream(stream: ChatCompletionStream, onChunk?: OnChatCompletionChunk): Promise<ChatCompletion>;
abstract create(prompt: string, options?: CreateChatCompletionOptions): Promise<CreateChatCompletionResult<any>>;
abstract create(params: CreateChatCompletionParams<any, any>): Promise<CreateChatCompletionResult<any>>;
}
type OpenAIChatCompletionModel = ChatCompletionCreateParamsBase["model"];
type _OpenAICreateChatCompletionOptions = Omit<Partial<ChatCompletionCreateParamsBase>, keyof CreateChatCompletionOptions>;
interface OpenAICreateChatCompletionOptions extends CreateChatCompletionOptions, _OpenAICreateChatCompletionOptions {
interface _OpenAICreateChatCompletionParams extends Omit<Partial<ChatCompletionCreateParamsBase>, keyof CreateChatCompletionParams<any, any>> {
}
interface OpenAICreateChatCompletionParams<T extends boolean | undefined, U extends AnyChatCompletionTool[] | undefined> extends CreateChatCompletionParams<T, U>, _OpenAICreateChatCompletionParams {
model?: OpenAIChatCompletionModel;
@@ -66,7 +73,7 @@ }
getModels(): OpenAIChatCompletionModel[];
create<U extends OpenAICreateChatCompletionOptions>(prompt: string, options?: U): Promise<CreateChatCompletionResult<U>>;
create<U extends boolean | undefined = false, K extends AnyChatCompletionTool[] | undefined = undefined>({ prompt, systemRole, messages, tools, toolCallHandlers, toolCallResultHandlers, ...params }: OpenAICreateChatCompletionParams<U, MergeChatCompletionTools<T, K>>): Promise<CreateChatCompletionResult<U>>;
}
type Embedding = number[];
interface CreateEmbeddingsOptions {
interface CreateEmbeddingsParams {
model?: string;
@@ -76,8 +83,8 @@ }
abstract getModels(): string[];
abstract create(input: string | string[], options?: CreateEmbeddingsOptions): Promise<Embedding[]>;
abstract create(input: string | string[], params?: CreateEmbeddingsParams): Promise<Embedding[]>;
}
type _OpenAICreateEmbeddingsOptions = Omit<Partial<EmbeddingCreateParams>, "input" | keyof CreateEmbeddingsOptions>;
type _OpenAICreateEmbeddingsParams = Omit<Partial<EmbeddingCreateParams>, "input" | keyof CreateEmbeddingsParams>;
type OpenAIEmbeddingModel = EmbeddingCreateParams["model"];
interface OpenAICreateEmbeddingsOptions extends CreateEmbeddingsOptions, _OpenAICreateEmbeddingsOptions {
interface OpenAICreateEmbeddingsParams extends CreateEmbeddingsParams, _OpenAICreateEmbeddingsParams {
model?: OpenAIEmbeddingModel;
@@ -89,3 +96,3 @@ }
getModels(): OpenAIEmbeddingModel[];
create<T extends OpenAICreateEmbeddingsOptions>(input: string | string[], options?: T): Promise<CreateChatCompletionResult<T>>;
create(input: string | string[], params?: OpenAICreateEmbeddingsParams): Promise<Embedding[]>;
}
@@ -106,2 +113,2 @@
export { AI, type AIConfig, type AnyAI, type AnyChatCompletionTool, type ChatCompletion, type ChatCompletionMessage, type ChatCompletionStream, ChatCompletionTool, type ChatCompletionToolCall, ChatCompletions, type CreateChatCompletionOptions, type CreateChatCompletionResult, type CreateEmbeddingsOptions, type Embedding, Embeddings, type OnChatCompletionChunk, type OpenAIChatCompletionModel, OpenAIChatCompletions, type OpenAICreateChatCompletionOptions, type OpenAICreateEmbeddingsOptions, type OpenAIEmbeddingModel, OpenAIEmbeddings };
export { AI, type AIConfig, type AnyAI, type AnyChatCompletionTool, type ChatCompletion, type ChatCompletionMessage, type ChatCompletionStream, ChatCompletionTool, type ChatCompletionToolCall, ChatCompletions, type CreateChatCompletionParams, type CreateChatCompletionResult, type CreateEmbeddingsParams, type Embedding, Embeddings, type MergeChatCompletionTools, type OnChatCompletionChunk, type OpenAIChatCompletionModel, OpenAIChatCompletions, type OpenAICreateChatCompletionParams, type OpenAICreateEmbeddingsParams, type OpenAIEmbeddingModel, OpenAIEmbeddings };
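Taken together, the new typings replace the old (prompt, options) call with a single params object and thread the tool types through the create generics. A minimal usage sketch follows; it assumes a ChatCompletionTool constructor that accepts { name, description, schema, call } and an AI client that exposes the chat-completions instance as ai.chatCompletions, neither of which is visible in this diff:

import { AI, ChatCompletionTool } from "@singlestore/ai";
import { z } from "zod";

// Hypothetical client setup; the AIConfig shape is not part of this diff.
const ai = new AI({ openAIApiKey: process.env.OPENAI_API_KEY });

const weatherTool = new ChatCompletionTool({
  name: "get_weather",
  description: "Returns the current weather for a city",
  schema: z.object({ city: z.string() }),
  // Assumed return shape: the compiled output only reads result.value.
  call: async (params) => ({ name: "get_weather", value: JSON.stringify({ city: params.city, tempC: 21 }) }),
});

const completion = await ai.chatCompletions.create({
  prompt: "What is the weather in Lisbon?",
  systemRole: "You are a helpful assistant",
  stream: false,
  tools: [weatherTool],
  // Handler keys are the tool names, per the mapped types above.
  toolCallHandlers: {
    get_weather: async (tool, params) => console.log(`calling ${tool.name} with`, params),
  },
  toolCallResultHandlers: {
    get_weather: async (tool, result, params) => console.log(`${tool.name} returned`, result, params),
  },
});

With stream omitted or false the result narrows to ChatCompletion; with stream: true it narrows to ChatCompletionStream, since CreateChatCompletionResult now keys off the boolean type parameter instead of the whole options object.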
@@ -84,16 +84,22 @@ "use strict";
}
async create(prompt, options) {
const { systemRole = "You are a helpful assistant", messages = [], tools, ..._options } = options ?? {};
const _messages = [
{ role: "system", content: systemRole },
...messages || [],
{ role: "user", content: prompt }
];
async create({
prompt,
systemRole,
messages,
tools,
toolCallHandlers,
toolCallResultHandlers,
...params
}) {
let _messages = [];
if (systemRole) _messages.push({ role: "system", content: systemRole });
if (messages?.length) _messages = [..._messages, ...messages];
if (prompt) _messages.push({ role: "user", content: prompt });
let _tools = [];
if (this.tools) _tools = [..._tools, ...this.tools];
if (tools) _tools = [..._tools, ...tools];
if (this.tools?.length) _tools = [..._tools, ...this.tools];
if (tools?.length) _tools = [..._tools, ...tools];
const response = await this._openai.chat.completions.create({
model: "gpt-4o-mini",
temperature: 0,
..._options,
...params,
messages: _messages,
@@ -105,52 +111,99 @@ tools: _tools.length ? _tools.map(({ name, description, schema }) => ({
});
const handleToolCalls = (toolCalls) => {
const handleToolCalls = (toolCalls = []) => {
return Promise.all(
toolCalls.map((toolCall) => {
if (!toolCall.function) return "";
let params;
if (toolCall.function.arguments) {
toolCalls.map(async (call) => {
const _call = { ...call, id: call.id || "" };
if (!call.function) {
throw new Error(`Invalid tool call: ${JSON.stringify(call)}`);
}
let params2;
if (call.function.arguments) {
try {
params = JSON.parse(toolCall.function.arguments);
params2 = JSON.parse(call.function.arguments);
} catch (error) {
throw new Error(`Invalid parameters provided for the "${toolCall.function.name}" tool.`, { cause: error });
throw new Error(`Invalid parameters provided for the "${call.function.name}" tool.`, { cause: error });
}
}
const tool = _tools.find(({ name }) => name === toolCall.function?.name);
const tool = _tools.find(({ name }) => name === call.function?.name);
if (!tool) {
throw new Error(`The "${toolCall.function.name}" tool is undefined.`);
throw new Error(`The "${call.function.name}" tool is undefined.`);
}
return tool.call(params);
try {
await toolCallHandlers?.[tool.name]?.(tool, params2);
const result = await tool.call(params2);
await toolCallResultHandlers?.[tool.name]?.(tool, result, params2);
return [{ tool, params: params2, value: result.value }, _call];
} catch (error) {
let _error = error;
if (typeof error !== "string") {
if (error instanceof Error) {
_error = `Error: ${error.message}`;
} else {
try {
_error = JSON.stringify(error, Object.getOwnPropertyNames(error));
} catch (error2) {
_error = JSON.stringify(error2, Object.getOwnPropertyNames(error2));
}
}
}
return [{ tool, params: params2, error: _error }, _call];
}
})
);
};
const handleToolCallResults = (results, message) => {
return this.create({
...params,
tools,
messages: [
..._messages,
message,
...results.map(([result, { id }]) => {
return {
role: "tool",
tool_call_id: id,
content: "error" in result ? result.error : result.value
};
})
]
});
};
if (typeof response === "object" && response && "choices" in response) {
const message = response.choices[0]?.message;
if (message && "tool_calls" in message && message.tool_calls?.length) {
await handleToolCalls(message.tool_calls);
const toolCallResults = await handleToolCalls(message.tool_calls);
return await handleToolCallResults(toolCallResults, message);
}
return { content: message?.content || "" };
}
return async function* () {
const defaultToolCallRecord = { function: { arguments: "" } };
const toolCallRecords = {};
for await (const chunk of response) {
const delta = chunk.choices[0]?.delta;
if (delta && "tool_calls" in delta && delta.tool_calls?.length) {
delta.tool_calls.forEach((toolCall) => {
const toolCallRecord = toolCallRecords[toolCall.index] ?? defaultToolCallRecord;
toolCallRecords[toolCall.index] = {
...toolCallRecord,
async function* handleStream(stream) {
let delta = void 0;
for await (const chunk of stream) {
const _delta = chunk.choices[0]?.delta;
if (!delta) delta = { ..._delta, content: _delta?.content || "", tool_calls: [] };
if (_delta && "tool_calls" in _delta && _delta.tool_calls?.length) {
for (const toolCall of _delta.tool_calls) {
if (!delta || !delta.tool_calls) break;
const deltaToolCall = delta.tool_calls[toolCall.index] || { function: { arguments: "" } };
delta.tool_calls[toolCall.index] = {
...deltaToolCall,
...toolCall,
function: {
...toolCallRecord.function,
...deltaToolCall.function,
...toolCall.function,
arguments: `${toolCallRecord.function?.arguments || ""}${toolCall.function?.arguments || ""}`
arguments: `${deltaToolCall.function?.arguments || ""}${toolCall.function?.arguments || ""}`
}
};
});
}
} else {
yield { content: _delta?.content || "" };
}
yield { content: delta?.content || "" };
}
await handleToolCalls(Object.values(toolCallRecords));
}();
if (delta?.tool_calls?.length) {
const toolCallResults = await handleToolCalls(delta.tool_calls);
const toolCallResultsStream = await handleToolCallResults(toolCallResults, delta);
for await (const chunk of toolCallResultsStream) yield chunk;
}
}
return handleStream(response);
}
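On the streaming path, create now returns an async generator that accumulates tool_call fragments by index, concatenates their argument strings, runs the tools, and then yields the chunks of the follow-up completion. A consumption sketch, assuming the same ai client and weatherTool from the example above:

const stream = await ai.chatCompletions.create({
  prompt: "Compare the weather in Lisbon and Porto",
  stream: true,
  tools: [weatherTool],
});

// Iterate the chunks directly...
for await (const chunk of stream) {
  process.stdout.write(chunk.content || "");
}

// ...or collect them with the helper declared on ChatCompletions instead:
// const full = await ai.chatCompletions.handleStream(stream, (chunk) => console.log(chunk.content));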
@@ -186,5 +239,5 @@ };
}
async create(input, options) {
async create(input, params) {
const _input = Array.isArray(input) ? input : [input];
const response = await this._openai.embeddings.create({ model: "text-embedding-3-small", ...options, input: _input });
const response = await this._openai.embeddings.create({ model: "text-embedding-3-small", ...params, input: _input });
return response.data.map((data) => data.embedding);
@@ -191,0 +244,0 @@ }
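The embeddings side keeps the (input, params) shape, only renaming options to params. A quick sketch, assuming the embeddings instance is exposed as ai.embeddings (the property name is not visible in this diff):

// Returns one number[] embedding per input string; the model defaults to "text-embedding-3-small".
const embeddings = await ai.embeddings.create(["hello", "world"], { model: "text-embedding-3-small" });
console.log(embeddings.length, embeddings[0]?.length);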
{
"name": "@singlestore/ai",
"version": "0.0.18",
"version": "0.0.19",
"license": "Apache-2.0",
@@ -5,0 +5,0 @@ "sideEffects": false,
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package