@langchain/core
Comparing version 0.3.0-rc.0 to 0.3.0
@@ -50,3 +50,3 @@ import { getCallbackManagerForConfig, } from "../../runnables/config.js"; | ||
"\n |", | ||
"\n β-> https://js.langchain.com/v0.2/docs/how_to/custom_tools#tool-function", | ||
"\n β-> https://js.langchain.com/docs/how_to/custom_tools#tool-function", | ||
"\n", | ||
@@ -53,0 +53,0 @@ ].join(" ")); |
@@ -92,10 +92,2 @@ import type { TiktokenModel } from "js-tiktoken/lite"; | ||
description?: string; | ||
/** | ||
* Whether to enable strict schema adherence when generating the function call. If | ||
* set to true, the model will follow the exact schema defined in the `parameters` | ||
* field. Only a subset of JSON Schema is supported when `strict` is `true`. Learn | ||
* more about Structured Outputs in the | ||
* [function calling guide](https://platform.openai.com/docs/guides/function-calling). | ||
*/ | ||
strict?: boolean; | ||
} | ||
@@ -102,0 +94,0 @@ export interface ToolDefinition { |
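This hunk drops the `strict` field and its doc comment from the function-definition interface in `language_models/base`. For context, here is a minimal sketch of the OpenAI-style tool definition these interfaces describe (schema content is illustrative; `strict` is omitted to match the 0.3.0 shape shown above):

```ts
import type { ToolDefinition } from "@langchain/core/language_models/base";

// A JSON Schema function spec wrapped in the { type: "function", function }
// shape described by ToolDefinition. The schema below is purely illustrative.
const weatherTool: ToolDefinition = {
  type: "function",
  function: {
    name: "get_weather",
    description: "Look up the current weather for a city.",
    parameters: {
      type: "object",
      properties: {
        city: { type: "string", description: "City name, e.g. 'Paris'" },
      },
      required: ["city"],
    },
  },
};
```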
@@ -5,3 +5,3 @@ import { z } from "zod"; | ||
import { LLMResult, ChatGenerationChunk, type ChatResult, type Generation } from "../outputs.js"; | ||
import { BaseLanguageModel, StructuredOutputMethodOptions, ToolDefinition, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type BaseLanguageModelParams } from "./base.js"; | ||
import { BaseLanguageModel, type StructuredOutputMethodOptions, type ToolDefinition, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type BaseLanguageModelParams } from "./base.js"; | ||
import { type CallbackManagerForLLMRun, type Callbacks } from "../callbacks/manager.js"; | ||
@@ -8,0 +8,0 @@ import type { RunnableConfig } from "../runnables/config.js"; |
@@ -44,2 +44,3 @@ import { BaseMessage, BaseMessageChunk, type MessageType, BaseMessageFields } from "./base.js"; | ||
export declare function isAIMessage(x: BaseMessage): x is AIMessage; | ||
export declare function isAIMessageChunk(x: BaseMessageChunk): x is AIMessageChunk; | ||
export type AIMessageChunkFields = AIMessageFields & { | ||
@@ -46,0 +47,0 @@ tool_call_chunks?: ToolCallChunk[]; |
@@ -110,2 +110,5 @@ import { parsePartialJson } from "../utils/json.js"; | ||
} | ||
export function isAIMessageChunk(x) { | ||
return x._getType() === "ai"; | ||
} | ||
/** | ||
@@ -112,0 +115,0 @@ * Represents a chunk of an AI message, which can be concatenated with |
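0.3.0 adds an `isAIMessageChunk` type guard next to the existing `isAIMessage`; as the implementation shows, it simply checks `_getType() === "ai"` on the chunk. A minimal usage sketch, assuming the guard is re-exported from the `messages` entrypoint alongside `isAIMessage`:

```ts
import { isAIMessageChunk, type BaseMessageChunk } from "@langchain/core/messages";

// Narrow a generic streamed chunk to an AIMessageChunk before reading
// AI-only fields such as tool_call_chunks.
function describeChunk(chunk: BaseMessageChunk): string {
  if (isAIMessageChunk(chunk)) {
    const toolChunks = chunk.tool_call_chunks ?? [];
    return `AI chunk with ${toolChunks.length} tool call chunk(s)`;
  }
  return "non-AI chunk";
}
```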
@@ -62,3 +62,9 @@ import { Serializable } from "../load/serializable.js"; | ||
additional_kwargs?: { | ||
/** | ||
* @deprecated Use "tool_calls" field on AIMessages instead | ||
*/ | ||
function_call?: FunctionCall; | ||
/** | ||
* @deprecated Use "tool_calls" field on AIMessages instead | ||
*/ | ||
tool_calls?: OpenAIToolCall[]; | ||
@@ -123,2 +129,5 @@ [key: string]: unknown; | ||
} | ||
/** | ||
* @deprecated Use "tool_calls" field on AIMessages instead | ||
*/ | ||
export type OpenAIToolCall = { | ||
@@ -159,9 +168,13 @@ /** | ||
export declare function _isMessageFieldWithRole(x: BaseMessageLike): x is MessageFieldWithRole; | ||
export type BaseMessageLike = BaseMessage | ({ | ||
type: MessageType | "user" | "assistant" | "placeholder"; | ||
} & BaseMessageFields & Record<string, unknown>) | MessageFieldWithRole | [ | ||
export type BaseMessageLike = BaseMessage | MessageFieldWithRole | [ | ||
StringWithAutocomplete<MessageType | "user" | "assistant" | "placeholder">, | ||
MessageContent | ||
] | string; | ||
] | string | ||
/** | ||
* @deprecated Specifying "type" is deprecated and will be removed in 0.4.0. | ||
*/ | ||
| ({ | ||
type: MessageType | "user" | "assistant" | "placeholder"; | ||
} & BaseMessageFields & Record<string, unknown>); | ||
export declare function isBaseMessage(messageLike?: unknown): messageLike is BaseMessage; | ||
export declare function isBaseMessageChunk(messageLike?: unknown): messageLike is BaseMessageChunk; |
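The takeaway from this hunk: tool calls should be read from the first-class `tool_calls` field rather than `additional_kwargs`, and the bare `{ type: ... }` object form of `BaseMessageLike` is deprecated in favour of the tuple and role-object forms. A hedged sketch of both (message contents are illustrative):

```ts
import { AIMessage, type BaseMessageLike } from "@langchain/core/messages";

// Preferred BaseMessageLike forms: tuples or role objects, not the
// deprecated `{ type: "..." }` object shape slated for removal in 0.4.0.
const history: BaseMessageLike[] = [
  ["system", "You are a terse assistant."],
  { role: "user", content: "What's 2 + 2?" },
];

// Preferred way to carry and read tool calls: the top-level field,
// not additional_kwargs.tool_calls / function_call.
const ai = new AIMessage({
  content: "",
  tool_calls: [{ name: "add", args: { a: 2, b: 2 }, id: "call_1" }],
});
for (const call of ai.tool_calls ?? []) {
  console.log(call.name, call.args);
}
```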
import { z } from "zod"; | ||
import { ChatGeneration } from "../../outputs.js"; | ||
import { BaseLLMOutputParser } from "../base.js"; | ||
import { ChatGeneration, ChatGenerationChunk } from "../../outputs.js"; | ||
import { InvalidToolCall, ToolCall } from "../../messages/tool.js"; | ||
import { BaseCumulativeTransformOutputParser, BaseCumulativeTransformOutputParserInput } from "../transform.js"; | ||
export type ParsedToolCall = { | ||
@@ -9,6 +9,2 @@ id?: string; | ||
args: Record<string, any>; | ||
/** @deprecated Use `type` instead. Will be removed in 0.2.0. */ | ||
name: string; | ||
/** @deprecated Use `args` instead. Will be removed in 0.2.0. */ | ||
arguments: Record<string, any>; | ||
}; | ||
@@ -18,3 +14,3 @@ export type JsonOutputToolsParserParams = { | ||
returnId?: boolean; | ||
}; | ||
} & BaseCumulativeTransformOutputParserInput; | ||
export declare function parseToolCall(rawToolCall: Record<string, any>, options: { | ||
@@ -28,2 +24,6 @@ returnId?: boolean; | ||
}): ToolCall; | ||
export declare function parseToolCall(rawToolCall: Record<string, any>, options?: { | ||
returnId?: boolean; | ||
partial?: boolean; | ||
}): ToolCall | undefined; | ||
export declare function convertLangChainToolCallToOpenAI(toolCall: ToolCall): { | ||
@@ -41,3 +41,3 @@ id: string; | ||
*/ | ||
export declare class JsonOutputToolsParser extends BaseLLMOutputParser<ParsedToolCall[]> { | ||
export declare class JsonOutputToolsParser<T> extends BaseCumulativeTransformOutputParser<T> { | ||
static lc_name(): string; | ||
@@ -48,2 +48,5 @@ returnId: boolean; | ||
constructor(fields?: JsonOutputToolsParserParams); | ||
protected _diff(): void; | ||
parse(): Promise<T>; | ||
parseResult(generations: ChatGeneration[]): Promise<T>; | ||
/** | ||
@@ -55,3 +58,3 @@ * Parses the output and returns a JSON object. If `argsOnly` is true, | ||
*/ | ||
parseResult(generations: ChatGeneration[]): Promise<ParsedToolCall[]>; | ||
parsePartialResult(generations: ChatGenerationChunk[] | ChatGeneration[], partial?: boolean): Promise<any>; | ||
} | ||
@@ -61,6 +64,4 @@ export type JsonOutputKeyToolsParserParams<T extends Record<string, any> = Record<string, any>> = { | ||
returnSingle?: boolean; | ||
/** Whether to return the tool call id. */ | ||
returnId?: boolean; | ||
zodSchema?: z.ZodType<T>; | ||
}; | ||
} & JsonOutputToolsParserParams; | ||
/** | ||
@@ -70,3 +71,3 @@ * Class for parsing the output of a tool-calling LLM into a JSON object if you are | ||
*/ | ||
export declare class JsonOutputKeyToolsParser<T extends Record<string, any> = Record<string, any>> extends BaseLLMOutputParser<T> { | ||
export declare class JsonOutputKeyToolsParser<T extends Record<string, any> = Record<string, any>> extends JsonOutputToolsParser<T> { | ||
static lc_name(): string; | ||
@@ -80,7 +81,7 @@ lc_namespace: string[]; | ||
returnSingle: boolean; | ||
initialParser: JsonOutputToolsParser; | ||
zodSchema?: z.ZodType<T>; | ||
constructor(params: JsonOutputKeyToolsParserParams<T>); | ||
protected _validateResult(result: unknown): Promise<T>; | ||
parsePartialResult(generations: ChatGeneration[]): Promise<any>; | ||
parseResult(generations: ChatGeneration[]): Promise<any>; | ||
} |
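The net effect of this hunk is that both parsers now extend `BaseCumulativeTransformOutputParser`, so they can sit after a chat model in a chain and emit partial tool-call arguments while the generation streams. A minimal usage sketch (the model and tool binding are placeholders; any tool-calling chat model should work):

```ts
import { JsonOutputKeyToolsParser } from "@langchain/core/output_parsers/openai_tools";

// Extract the arguments of the "search" tool call; with streaming, each
// yielded value is a progressively more complete args object.
const parser = new JsonOutputKeyToolsParser({
  keyName: "search",
  returnSingle: true,
});

// Hypothetical wiring; substitute any chat model bound to a "search" tool:
// const chain = model.bindTools([searchTool]).pipe(parser);
// for await (const partialArgs of await chain.stream("look up LangChain")) {
//   console.log(partialArgs);
// }
```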
@@ -1,3 +0,5 @@ | ||
import { BaseLLMOutputParser, OutputParserException } from "../base.js"; | ||
import { OutputParserException } from "../base.js"; | ||
import { parsePartialJson } from "../json.js"; | ||
import { BaseCumulativeTransformOutputParser, } from "../transform.js"; | ||
import { isAIMessage } from "../../messages/ai.js"; | ||
export function parseToolCall( | ||
@@ -71,3 +73,3 @@ // eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
*/ | ||
export class JsonOutputToolsParser extends BaseLLMOutputParser { | ||
export class JsonOutputToolsParser extends BaseCumulativeTransformOutputParser { | ||
static lc_name() { | ||
@@ -98,2 +100,12 @@ return "JsonOutputToolsParser"; | ||
} | ||
_diff() { | ||
throw new Error("Not supported."); | ||
} | ||
async parse() { | ||
throw new Error("Not implemented."); | ||
} | ||
async parseResult(generations) { | ||
const result = await this.parsePartialResult(generations, false); | ||
return result; | ||
} | ||
/** | ||
@@ -105,30 +117,36 @@ * Parses the output and returns a JSON object. If `argsOnly` is true, | ||
*/ | ||
async parseResult(generations) { | ||
const toolCalls = generations[0].message.additional_kwargs.tool_calls; | ||
async parsePartialResult(generations, partial = true | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
) { | ||
const message = generations[0].message; | ||
let toolCalls; | ||
if (isAIMessage(message) && message.tool_calls?.length) { | ||
toolCalls = message.tool_calls.map((toolCall) => { | ||
const { id, ...rest } = toolCall; | ||
if (!this.returnId) { | ||
return rest; | ||
} | ||
return { | ||
id, | ||
...rest, | ||
}; | ||
}); | ||
} | ||
else if (message.additional_kwargs.tool_calls !== undefined) { | ||
const rawToolCalls = JSON.parse(JSON.stringify(message.additional_kwargs.tool_calls)); | ||
toolCalls = rawToolCalls.map((rawToolCall) => { | ||
return parseToolCall(rawToolCall, { returnId: this.returnId, partial }); | ||
}); | ||
} | ||
if (!toolCalls) { | ||
throw new Error(`No tools_call in message ${JSON.stringify(generations)}`); | ||
return []; | ||
} | ||
const clonedToolCalls = JSON.parse(JSON.stringify(toolCalls)); | ||
const parsedToolCalls = []; | ||
for (const toolCall of clonedToolCalls) { | ||
const parsedToolCall = parseToolCall(toolCall, { partial: true }); | ||
if (parsedToolCall !== undefined) { | ||
// backward-compatibility with previous | ||
// versions of Langchain JS, which uses `name` and `arguments` | ||
// @ts-expect-error name and arguments are defined by Object.defineProperty | ||
for (const toolCall of toolCalls) { | ||
if (toolCall !== undefined) { | ||
const backwardsCompatibleToolCall = { | ||
type: parsedToolCall.name, | ||
args: parsedToolCall.args, | ||
id: parsedToolCall.id, | ||
type: toolCall.name, | ||
args: toolCall.args, | ||
id: toolCall.id, | ||
}; | ||
Object.defineProperty(backwardsCompatibleToolCall, "name", { | ||
get() { | ||
return this.type; | ||
}, | ||
}); | ||
Object.defineProperty(backwardsCompatibleToolCall, "arguments", { | ||
get() { | ||
return this.args; | ||
}, | ||
}); | ||
parsedToolCalls.push(backwardsCompatibleToolCall); | ||
@@ -144,3 +162,3 @@ } | ||
*/ | ||
export class JsonOutputKeyToolsParser extends BaseLLMOutputParser { | ||
export class JsonOutputKeyToolsParser extends JsonOutputToolsParser { | ||
static lc_name() { | ||
@@ -183,8 +201,2 @@ return "JsonOutputKeyToolsParser"; | ||
}); | ||
Object.defineProperty(this, "initialParser", { | ||
enumerable: true, | ||
configurable: true, | ||
writable: true, | ||
value: void 0 | ||
}); | ||
Object.defineProperty(this, "zodSchema", { | ||
@@ -198,3 +210,2 @@ enumerable: true, | ||
this.returnSingle = params.returnSingle ?? this.returnSingle; | ||
this.initialParser = new JsonOutputToolsParser(params); | ||
this.zodSchema = params.zodSchema; | ||
@@ -215,7 +226,27 @@ } | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
async parsePartialResult(generations) { | ||
const results = await super.parsePartialResult(generations); | ||
const matchingResults = results.filter((result) => result.type === this.keyName); | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
let returnedValues = matchingResults; | ||
if (!matchingResults.length) { | ||
return undefined; | ||
} | ||
if (!this.returnId) { | ||
returnedValues = matchingResults.map((result) => result.args); | ||
} | ||
if (this.returnSingle) { | ||
return returnedValues[0]; | ||
} | ||
return returnedValues; | ||
} | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
async parseResult(generations) { | ||
const results = await this.initialParser.parseResult(generations); | ||
const results = await super.parsePartialResult(generations, false); | ||
const matchingResults = results.filter((result) => result.type === this.keyName); | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
let returnedValues = matchingResults; | ||
if (!matchingResults.length) { | ||
return undefined; | ||
} | ||
if (!this.returnId) { | ||
@@ -222,0 +253,0 @@ returnedValues = matchingResults.map((result) => result.args); |
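As the implementation above shows, each parsed tool call comes back as a `{ type, args, id }` record, with the legacy `name` and `arguments` keys exposed as getters for backwards compatibility. A small sketch of the resulting shape (getters written out explicitly for illustration; the real code uses `Object.defineProperty` as in the diff):

```ts
// One entry as returned by JsonOutputToolsParser.parseResult().
const parsedCall = {
  type: "add",
  args: { a: 2, b: 2 },
  id: "call_1",
  // Legacy aliases kept for older LangChain JS consumers.
  get name() {
    return this.type;
  },
  get arguments() {
    return this.args;
  },
};

console.log(parsedCall.type, parsedCall.name); // "add add"
```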
@@ -33,2 +33,3 @@ import { BaseOutputParser } from "./base.js"; | ||
_transform(inputGenerator: AsyncGenerator<string | BaseMessage>): AsyncGenerator<T>; | ||
getFormatInstructions(): string; | ||
} |
@@ -104,2 +104,5 @@ import { BaseOutputParser } from "./base.js"; | ||
} | ||
getFormatInstructions() { | ||
return ""; | ||
} | ||
} |
@@ -185,35 +185,35 @@ import { z } from "zod"; | ||
* ```md | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | event | name | chunk | input | output | | ||
* +======================+==================+=================================+===============================================+=================================================+ | ||
* | on_chat_model_start | [model name] | | {"messages": [[SystemMessage, HumanMessage]]} | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_chat_model_stream | [model name] | AIMessageChunk(content="hello") | | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_chat_model_end | [model name] | | {"messages": [[SystemMessage, HumanMessage]]} | AIMessageChunk(content="hello world") | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_llm_start | [model name] | | {'input': 'hello'} | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_llm_stream | [model name] | 'Hello' | | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_llm_end | [model name] | | 'Hello human!' | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_chain_start | some_runnable | | | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_chain_stream | some_runnable | "hello world!, goodbye world!" | | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_chain_end | some_runnable | | [Document(...)] | "hello world!, goodbye world!" | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_tool_start | some_tool | | {"x": 1, "y": "2"} | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_tool_end | some_tool | | | {"x": 1, "y": "2"} | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_retriever_start | [retriever name] | | {"query": "hello"} | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_retriever_end | [retriever name] | | {"query": "hello"} | [Document(...), ..] | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_prompt_start | [template_name] | | {"question": "hello"} | | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* | on_prompt_end | [template_name] | | {"question": "hello"} | ChatPromptValue(messages: [SystemMessage, ...]) | | ||
* +----------------------+------------------+---------------------------------+-----------------------------------------------+-------------------------------------------------+ | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | event | input | output/chunk | | ||
* +======================+=============================+==========================================+ | ||
* | on_chat_model_start | {"messages": BaseMessage[]} | | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_chat_model_stream | | AIMessageChunk("hello") | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_chat_model_end | {"messages": BaseMessage[]} | AIMessageChunk("hello world") | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_llm_start | {'input': 'hello'} | | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_llm_stream | | 'Hello' | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_llm_end | 'Hello human!' | | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_chain_start | | | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_chain_stream | | "hello world!" | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_chain_end | [Document(...)] | "hello world!, goodbye world!" | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_tool_start | {"x": 1, "y": "2"} | | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_tool_end | | {"x": 1, "y": "2"} | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_retriever_start | {"query": "hello"} | | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_retriever_end | {"query": "hello"} | [Document(...), ..] | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_prompt_start | {"question": "hello"} | | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* | on_prompt_end | {"question": "hello"} | ChatPromptValue(messages: BaseMessage[]) | | ||
* +----------------------+-----------------------------+------------------------------------------+ | ||
* ``` | ||
@@ -230,9 +230,9 @@ * | ||
* ```md | ||
* +-----------+------+-----------------------------------------------------------------------------------------------------------+ | ||
* | Attribute | Type | Description | | ||
* +===========+======+===========================================================================================================+ | ||
* | name | str | A user defined name for the event. | | ||
* +-----------+------+-----------------------------------------------------------------------------------------------------------+ | ||
* | data | Any | The data associated with the event. This can be anything, though we suggest making it JSON serializable. | | ||
* +-----------+------+-----------------------------------------------------------------------------------------------------------+ | ||
* +-----------+------+------------------------------------------------------------+ | ||
* | Attribute | Type | Description | | ||
* +===========+======+============================================================+ | ||
* | name | str | A user defined name for the event. | | ||
* +-----------+------+------------------------------------------------------------+ | ||
* | data | Any | The data associated with the event. This can be anything. | | ||
* +-----------+------+------------------------------------------------------------+ | ||
* ``` | ||
@@ -239,0 +239,0 @@ * |
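The reformatted table documents the payloads emitted by `streamEvents`. A minimal consumption sketch, assuming a Node environment and any runnable chain such as `prompt.pipe(model)` (names are illustrative):

```ts
import type { Runnable } from "@langchain/core/runnables";

// Print only the chat-model token chunks from a streamEvents run.
async function streamTokens(chain: Runnable, input: unknown) {
  for await (const event of chain.streamEvents(input, { version: "v2" })) {
    if (event.event === "on_chat_model_stream") {
      // Per the table above, the streamed value is an AIMessageChunk.
      process.stdout.write(String(event.data.chunk.content));
    }
  }
}
```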
@@ -50,2 +50,3 @@ import { zodToJsonSchema } from "zod-to-json-schema"; | ||
if (fieldsCopy?.strict !== undefined) { | ||
// eslint-disable-next-line @typescript-eslint/no-explicit-any | ||
toolDef.function.strict = fieldsCopy.strict; | ||
@@ -52,0 +53,0 @@ } |
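This hunk has the conversion utility copy a caller-supplied `strict` flag onto the generated tool definition (`toolDef.function.strict`). A hedged sketch of passing that flag through `convertToOpenAITool`, assuming a tool defined with a Zod schema:

```ts
import { z } from "zod";
import { tool } from "@langchain/core/tools";
import { convertToOpenAITool } from "@langchain/core/utils/function_calling";

// A structured tool defined with a Zod schema (illustrative).
const add = tool(async ({ a, b }: { a: number; b: number }) => String(a + b), {
  name: "add",
  description: "Add two numbers.",
  schema: z.object({ a: z.number(), b: z.number() }),
});

// Request strict schema adherence; per the hunk above, the flag is copied
// onto the generated definition's function entry.
const toolDef = convertToOpenAITool(add, { strict: true });
console.log(JSON.stringify(toolDef, null, 2));
```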
@@ -9,2 +9,3 @@ export type IterableReadableStreamInterface<T> = ReadableStream<T> & AsyncIterable<T>; | ||
[Symbol.asyncIterator](): this; | ||
[Symbol.asyncDispose](): Promise<void>; | ||
static fromReadableStream<T>(stream: ReadableStream<T>): IterableReadableStream<T>; | ||
@@ -29,5 +30,6 @@ static fromAsyncGenerator<T>(generator: AsyncGenerator<T>): IterableReadableStream<T>; | ||
next(...args: [] | [TNext]): Promise<IteratorResult<T>>; | ||
return(value: TReturn | PromiseLike<TReturn>): Promise<IteratorResult<T>>; | ||
return(value?: TReturn | PromiseLike<TReturn>): Promise<IteratorResult<T>>; | ||
throw(e: Error): Promise<IteratorResult<T>>; | ||
[Symbol.asyncIterator](): this; | ||
[Symbol.asyncDispose](): Promise<void>; | ||
} | ||
@@ -34,0 +36,0 @@ export declare function pipeGeneratorWithSetup<S, A extends unknown[], T, TReturn, TNext, U, UReturn, UNext>(to: (g: AsyncGenerator<T, TReturn, TNext>, s: S, ...args: A) => AsyncGenerator<U, UReturn, UNext>, generator: AsyncGenerator<T, TReturn, TNext>, startSetup: () => Promise<S>, signal: AbortSignal | undefined, ...args: A): Promise<{ |
@@ -68,2 +68,7 @@ import { AsyncLocalStorageProviderSingleton } from "../singletons/index.js"; | ||
} | ||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment | ||
// @ts-ignore Not present in Node 18 types, required in latest Node 22 | ||
async [Symbol.asyncDispose]() { | ||
await this.return(); | ||
} | ||
static fromReadableStream(stream) { | ||
@@ -247,2 +252,7 @@ // From https://developer.mozilla.org/en-US/docs/Web/API/Streams_API/Using_readable_streams#reading_the_stream | ||
} | ||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment | ||
// @ts-ignore Not present in Node 18 types, required in latest Node 22 | ||
async [Symbol.asyncDispose]() { | ||
await this.return(); | ||
} | ||
} | ||
@@ -249,0 +259,0 @@ export async function pipeGeneratorWithSetup(to, generator, startSetup, signal, ...args) { |
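`IterableReadableStream` now implements `Symbol.asyncDispose`, which delegates to `return()`. With TypeScript 5.2+ and a runtime that provides `Symbol.asyncDispose` (recent Node 22, per the comment in the diff), this enables `await using`, so a partially consumed stream is cleaned up when the enclosing block exits. A minimal sketch under those assumptions:

```ts
import { IterableReadableStream } from "@langchain/core/utils/stream";

async function firstChunkOnly() {
  async function* letters() {
    yield "a";
    yield "b";
    yield "c";
  }

  // `await using` invokes [Symbol.asyncDispose](), which per the diff
  // delegates to return(), when the block exits, even on an early break.
  await using stream = IterableReadableStream.fromAsyncGenerator(letters());
  for await (const chunk of stream) {
    console.log(chunk);
    break; // stop early; disposal closes the underlying stream
  }
}

void firstChunkOnly();
```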
{ | ||
"name": "@langchain/core", | ||
"version": "0.3.0-rc.0", | ||
"version": "0.3.0", | ||
"description": "Core LangChain.js abstractions and schemas", | ||
@@ -5,0 +5,0 @@ "type": "module", |
@@ -86,3 +86,3 @@ # π¦ποΈ @langchain/core | ||
For more check out the [LCEL docs](https://js.langchain.com/v0.2/docs/concepts#langchain-expression-language). | ||
For more check out the [LCEL docs](https://js.langchain.com/docs/concepts#langchain-expression-language). | ||
@@ -89,0 +89,0 @@ ![LangChain Stack](../docs/core_docs/static/svg/langchain_stack_062024.svg) |