@langchain/anthropic
Comparing version 0.2.1 to 0.2.2
@@ -7,3 +7,3 @@ import { Anthropic, type ClientOptions } from "@anthropic-ai/sdk";
import { BaseChatModel, LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import { StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, BaseLanguageModelInput } from "@langchain/core/language_models/base";
import { type StructuredOutputMethodOptions, type BaseLanguageModelCallOptions, type BaseLanguageModelInput, type ToolDefinition } from "@langchain/core/language_models/base";
import { StructuredToolInterface } from "@langchain/core/tools";
@@ -13,11 +13,4 @@ import { Runnable } from "@langchain/core/runnables";
import { z } from "zod";
import type { Tool as AnthropicTool } from "@anthropic-ai/sdk/resources/index.mjs";
import { AnthropicToolResponse } from "./types.js";
type AnthropicTool = {
name: string;
description: string;
/**
* JSON schema.
*/
input_schema: Record<string, unknown>;
};
type AnthropicMessageCreateParams = Anthropic.MessageCreateParamsNonStreaming;
@@ -31,4 +24,4 @@ type AnthropicStreamingMessageCreateParams = Anthropic.MessageCreateParamsStreaming;
} | "any" | "auto";
export interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions {
tools?: (StructuredToolInterface | AnthropicTool)[];
export interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions, Pick<AnthropicInput, "streamUsage"> {
tools?: (StructuredToolInterface | AnthropicTool | Record<string, unknown> | ToolDefinition)[];
/**
@@ -95,2 +88,7 @@ * Whether or not to specify what tool the model should use
invocationKwargs?: Kwargs;
/**
* Whether or not to include token usage data in streamed chunks.
* @default true
*/
streamUsage?: boolean;
}
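The hunk above adds a `streamUsage` call option (and, via `Pick<AnthropicInput, "streamUsage">`, a matching constructor field) that controls whether streamed chunks include token usage data; it defaults to `true`. A minimal sketch of opting out; the model name is illustrative, not taken from the diff:

```typescript
// Sketch: disabling the new streamUsage flag introduced in 0.2.2.
import { ChatAnthropic } from "@langchain/anthropic";

const model = new ChatAnthropic({
  model: "claude-3-haiku-20240307", // example model name
  streamUsage: false, // omit usage data from streamed chunks (default: true)
});
```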
@@ -148,2 +146,3 @@ /**
protected streamingClient: Anthropic;
streamUsage: boolean;
constructor(fields?: Partial<AnthropicInput> & BaseChatModelParams);
@@ -159,3 +158,3 @@ getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
formatStructuredToolToAnthropic(tools: ChatAnthropicCallOptions["tools"]): AnthropicTool[] | undefined;
bindTools(tools: (AnthropicTool | StructuredToolInterface)[], kwargs?: Partial<CallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
bindTools(tools: (AnthropicTool | Record<string, unknown> | StructuredToolInterface | ToolDefinition)[], kwargs?: Partial<CallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
/**
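`bindTools` now also accepts OpenAI-format `ToolDefinition` objects and plain `Record<string, unknown>` schemas alongside Anthropic tools and structured tools. A hedged sketch reusing the `model` instance from the previous example; the `get_weather` tool is hypothetical:

```typescript
// Sketch: binding an OpenAI-style ToolDefinition, which 0.2.2 converts to
// Anthropic's { name, description, input_schema } shape internally.
const modelWithTools = model.bindTools([
  {
    type: "function",
    function: {
      name: "get_weather", // hypothetical tool
      description: "Look up the current weather for a city.",
      parameters: {
        type: "object",
        properties: { city: { type: "string" } },
        required: ["city"],
      },
    },
  },
]);
```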
@@ -167,9 +166,11 @@ * Get the parameters used to invoke the model
_identifyingParams(): {
tools?: Anthropic.Messages.Tool[] | undefined;
tool_choice?: Anthropic.Messages.MessageCreateParams.ToolChoiceAuto | Anthropic.Messages.MessageCreateParams.ToolChoiceAny | Anthropic.Messages.MessageCreateParams.ToolChoiceTool | undefined;
metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
temperature?: number | undefined;
model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
system?: string | undefined;
metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
stream?: boolean | undefined;
max_tokens: number;
model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
stop_sequences?: string[] | undefined;
temperature?: number | undefined;
top_k?: number | undefined;
@@ -183,9 +184,11 @@ top_p?: number | undefined;
identifyingParams(): {
tools?: Anthropic.Messages.Tool[] | undefined;
tool_choice?: Anthropic.Messages.MessageCreateParams.ToolChoiceAuto | Anthropic.Messages.MessageCreateParams.ToolChoiceAny | Anthropic.Messages.MessageCreateParams.ToolChoiceTool | undefined;
metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
temperature?: number | undefined;
model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
system?: string | undefined;
metadata?: Anthropic.Messages.MessageCreateParams.Metadata | undefined;
stream?: boolean | undefined;
max_tokens: number;
model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
stop_sequences?: string[] | undefined;
temperature?: number | undefined;
top_k?: number | undefined;
@@ -202,3 +205,3 @@ top_p?: number | undefined;
model: string;
stop_reason: "max_tokens" | "stop_sequence" | "end_turn" | null;
stop_reason: "tool_use" | "max_tokens" | "stop_sequence" | "end_turn" | null;
stop_sequence: string | null;
@@ -205,0 +208,0 @@ usage: Anthropic.Messages.Usage;
@@ -6,2 +6,3 @@ import { Anthropic } from "@anthropic-ai/sdk";
import { BaseChatModel, } from "@langchain/core/language_models/chat_models";
import { isOpenAITool, } from "@langchain/core/language_models/base";
import { zodToJsonSchema } from "zod-to-json-schema";
@@ -380,2 +381,8 @@ import { RunnablePassthrough, RunnableSequence, } from "@langchain/core/runnables";
});
Object.defineProperty(this, "streamUsage", {
enumerable: true,
configurable: true,
writable: true,
value: true
});
this.anthropicApiKey =
@@ -404,2 +411,3 @@ fields?.apiKey ??
this.clientOptions = fields?.clientOptions ?? {};
this.streamUsage = fields?.streamUsage ?? this.streamUsage;
}
@@ -409,3 +417,3 @@ getLsParams(options) {
return {
ls_provider: "openai",
ls_provider: "anthropic",
ls_model_name: this.model,
@@ -435,4 +443,13 @@ ls_model_type: "chat",
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if (tools.every((tool) => isOpenAITool(tool))) {
// Formatted as OpenAI tool, convert to Anthropic tool
return tools.map((tc) => ({
name: tc.function.name,
description: tc.function.description,
input_schema: tc.function.parameters,
}));
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
if (tools.some((tool) => isAnthropicTool(tool))) {
throw new Error(`Can not pass in a mix of AnthropicTools and StructuredTools`);
throw new Error(`Can not pass in a mix of tool schemas to ChatAnthropic`);
}
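The new branch above converts a list of OpenAI-format tools into Anthropic's tool shape before the mixed-schema check. Restated as a standalone helper for clarity (a simplified sketch, not the library's actual export):

```typescript
// Simplified restatement of the OpenAI-to-Anthropic tool mapping shown in the diff.
type OpenAIToolLike = {
  function: {
    name: string;
    description?: string;
    parameters: Record<string, unknown>;
  };
};

const toAnthropicTool = (tool: OpenAIToolLike) => ({
  name: tool.function.name,
  description: tool.function.description,
  input_schema: tool.function.parameters,
});
```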
@@ -546,2 +563,10 @@ return tools.map((tool) => ({
usageData = usage;
let usageMetadata;
if (this.streamUsage || options.streamUsage) {
usageMetadata = {
input_tokens: usage.input_tokens,
output_tokens: usage.output_tokens,
total_tokens: usage.input_tokens + usage.output_tokens,
};
}
yield new ChatGenerationChunk({
@@ -551,2 +576,3 @@ message: new AIMessageChunk({
additional_kwargs: filteredAdditionalKwargs,
usage_metadata: usageMetadata,
}),
@@ -557,2 +583,10 @@ text: "",
else if (data.type === "message_delta") {
let usageMetadata;
if (this.streamUsage || options.streamUsage) {
usageMetadata = {
input_tokens: data.usage.output_tokens,
output_tokens: 0,
total_tokens: data.usage.output_tokens,
};
}
yield new ChatGenerationChunk({
@@ -562,2 +596,3 @@ message: new AIMessageChunk({
additional_kwargs: { ...data.delta },
usage_metadata: usageMetadata,
}),
@@ -570,3 +605,4 @@ text: "",
}
else if (data.type === "content_block_delta") {
else if (data.type === "content_block_delta" &&
data.delta.type === "text_delta") {
const content = data.delta?.text;
@@ -585,2 +621,10 @@ if (content !== undefined) {
}
let usageMetadata;
if (this.streamUsage || options.streamUsage) {
usageMetadata = {
input_tokens: usageData.input_tokens,
output_tokens: usageData.output_tokens,
total_tokens: usageData.input_tokens + usageData.output_tokens,
};
}
yield new ChatGenerationChunk({
@@ -590,2 +634,3 @@ message: new AIMessageChunk({
additional_kwargs: { usage: usageData },
usage_metadata: usageMetadata,
}),
@@ -592,0 +637,0 @@ text: "",
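With `streamUsage` left enabled, streamed `AIMessageChunk`s now carry a `usage_metadata` object with `input_tokens`, `output_tokens`, and `total_tokens`, as populated in the hunks above. A minimal consumer sketch, again assuming the `model` instance from the earlier example:

```typescript
// Sketch: reading usage_metadata from streamed chunks (field names per the diff).
const stream = await model.stream("Hello!");
for await (const chunk of stream) {
  if (chunk.usage_metadata) {
    console.log(chunk.usage_metadata.total_tokens);
  }
}
```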
{
"name": "@langchain/anthropic",
"version": "0.2.1",
"version": "0.2.2",
"description": "Anthropic integrations for LangChain.js",
@@ -38,4 +38,4 @@ "type": "module",
"dependencies": {
"@anthropic-ai/sdk": "^0.21.0",
"@langchain/core": ">=0.2.5 <0.3.0",
"@anthropic-ai/sdk": "^0.22.0",
"@langchain/core": ">=0.2.9 <0.3.0",
"fast-xml-parser": "^4.3.5",
@@ -49,3 +49,3 @@ "zod": "^3.22.4",
"@langchain/scripts": "~0.0.14",
"@langchain/standard-tests": "workspace:*",
"@langchain/standard-tests": "0.0.0",
"@swc/core": "^1.3.90",
@@ -66,2 +66,3 @@ "@swc/jest": "^0.2.29",
"rimraf": "^5.0.1",
"ts-jest": "^29.1.0",
"typescript": "~5.1.6"
@@ -68,0 +69,0 @@ },
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added @anthropic-ai/sdk@0.22.0 (transitive)
- Removed @anthropic-ai/sdk@0.21.1 (transitive)
Updated @anthropic-ai/sdk@^0.22.0