Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@langchain/anthropic

Package Overview
Dependencies
Maintainers
8
Versions
66
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/anthropic - npm Package Compare versions

Comparing version 0.2.3 to 0.2.4

12

dist/chat_models.d.ts

@@ -9,3 +9,3 @@ import { Anthropic, type ClientOptions } from "@anthropic-ai/sdk";

import { StructuredToolInterface } from "@langchain/core/tools";
import { Runnable } from "@langchain/core/runnables";
import { Runnable, RunnableToolLike } from "@langchain/core/runnables";
import { ToolCall } from "@langchain/core/messages/tool";

@@ -24,3 +24,3 @@ import { z } from "zod";

export interface ChatAnthropicCallOptions extends BaseLanguageModelCallOptions, Pick<AnthropicInput, "streamUsage"> {
tools?: (StructuredToolInterface | AnthropicTool | Record<string, unknown> | ToolDefinition)[];
tools?: (StructuredToolInterface | AnthropicTool | Record<string, unknown> | ToolDefinition | RunnableToolLike)[];
/**

@@ -155,3 +155,3 @@ * Whether or not to specify what tool the model should use

formatStructuredToolToAnthropic(tools: ChatAnthropicCallOptions["tools"]): AnthropicTool[] | undefined;
bindTools(tools: (AnthropicTool | Record<string, unknown> | StructuredToolInterface | ToolDefinition)[], kwargs?: Partial<CallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
bindTools(tools: (AnthropicTool | Record<string, unknown> | StructuredToolInterface | ToolDefinition | RunnableToolLike)[], kwargs?: Partial<CallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
/**

@@ -169,4 +169,4 @@ * Get the parameters used to invoke the model

system?: string | undefined;
max_tokens: number;
stream?: boolean | undefined;
max_tokens: number;
stop_sequences?: string[] | undefined;

@@ -187,4 +187,4 @@ top_k?: number | undefined;

system?: string | undefined;
max_tokens: number;
stream?: boolean | undefined;
max_tokens: number;
stop_sequences?: string[] | undefined;

@@ -202,3 +202,3 @@ top_k?: number | undefined;

model: string;
stop_reason: "tool_use" | "max_tokens" | "stop_sequence" | "end_turn" | null;
stop_reason: "tool_use" | "stop_sequence" | "end_turn" | "max_tokens" | null;
stop_sequence: string | null;

@@ -205,0 +205,0 @@ usage: Anthropic.Messages.Usage;

@@ -10,3 +10,7 @@ import { Anthropic } from "@anthropic-ai/sdk";

import { isZodSchema } from "@langchain/core/utils/types";
import { concat } from "@langchain/core/utils/stream";
import { AnthropicToolsOutputParser, extractToolCalls, } from "./output_parsers.js";
/**
 * Reports whether the outgoing request parameters include at least one tool.
 *
 * @param params - Anthropic request parameters; may carry a `tools` array.
 * @returns `true` when `params.tools` exists and is non-empty.
 */
function _toolsInParams(params) {
  const { tools } = params;
  if (!tools) {
    return false;
  }
  return tools.length > 0;
}
function _formatImage(imageUrl) {

@@ -45,2 +49,3 @@ const regex = /^data:(image\/.+);base64,(.+)$/;

usage_metadata: usageMetadata,
response_metadata: additionalKwargs,
}),

@@ -61,2 +66,3 @@ },

usage_metadata: usageMetadata,
response_metadata: additionalKwargs,
}),

@@ -72,2 +78,111 @@ },

}
/**
 * Convert one raw Anthropic streaming event into an `AIMessageChunk` plus the
 * updated running usage totals, or `null` for event types that carry no
 * message content (e.g. ping / content_block_stop).
 *
 * @param data - A single server-sent event from the Anthropic Messages stream.
 * @param fields.streamUsage - When true, attach `usage_metadata` to the chunk.
 * @param fields.coerceContentToString - When true, emit string content ("");
 *   otherwise emit an array of content blocks.
 * @param fields.usageData - Running `{ input_tokens, output_tokens }` totals;
 *   never mutated — an updated copy is returned as `usageData`.
 * @returns `{ chunk, usageData }` or `null` when the event is ignorable.
 */
function _makeMessageChunkFromAnthropicEvent(data, fields) {
  let usageDataCopy = { ...fields.usageData };
  if (data.type === "message_start") {
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    const { content, usage, ...additionalKwargs } = data.message;
    // Drop null/undefined fields so they don't pollute additional_kwargs.
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const filteredAdditionalKwargs = {};
    for (const [key, value] of Object.entries(additionalKwargs)) {
      if (value !== undefined && value !== null) {
        filteredAdditionalKwargs[key] = value;
      }
    }
    // message_start carries the authoritative starting usage counts.
    usageDataCopy = usage;
    let usageMetadata;
    if (fields.streamUsage) {
      usageMetadata = {
        input_tokens: usage.input_tokens,
        output_tokens: usage.output_tokens,
        total_tokens: usage.input_tokens + usage.output_tokens,
      };
    }
    return {
      chunk: new AIMessageChunk({
        content: fields.coerceContentToString ? "" : [],
        additional_kwargs: filteredAdditionalKwargs,
        usage_metadata: usageMetadata,
      }),
      usageData: usageDataCopy,
    };
  }
  else if (data.type === "message_delta") {
    let usageMetadata;
    // FIX: the delta's `usage` reports tokens GENERATED so far, i.e. output
    // tokens. The previous code labeled that count as `input_tokens` (with
    // `output_tokens: 0`), which double-counts input when per-chunk
    // usage_metadata values are summed downstream. Also guard on
    // `data.usage` before reading it, matching the guard below.
    if (fields.streamUsage && data?.usage !== undefined) {
      usageMetadata = {
        input_tokens: 0,
        output_tokens: data.usage.output_tokens,
        total_tokens: data.usage.output_tokens,
      };
    }
    if (data?.usage !== undefined) {
      usageDataCopy.output_tokens += data.usage.output_tokens;
    }
    return {
      chunk: new AIMessageChunk({
        content: fields.coerceContentToString ? "" : [],
        additional_kwargs: { ...data.delta },
        usage_metadata: usageMetadata,
      }),
      usageData: usageDataCopy,
    };
  }
  else if (data.type === "content_block_start" &&
    data.content_block.type === "tool_use") {
    // First chunk of a tool call: carries id/name but an empty input; the
    // JSON arguments arrive later as input_json_delta events.
    return {
      chunk: new AIMessageChunk({
        content: fields.coerceContentToString
          ? ""
          : [
            {
              index: data.index,
              ...data.content_block,
              input: "",
            },
          ],
        additional_kwargs: {},
      }),
      usageData: usageDataCopy,
    };
  }
  else if (data.type === "content_block_delta" &&
    data.delta.type === "text_delta") {
    const content = data.delta?.text;
    if (content !== undefined) {
      return {
        chunk: new AIMessageChunk({
          content: fields.coerceContentToString
            ? content
            : [
              {
                index: data.index,
                ...data.delta,
              },
            ],
          additional_kwargs: {},
        }),
        usageData: usageDataCopy,
      };
    }
  }
  else if (data.type === "content_block_delta" &&
    data.delta.type === "input_json_delta") {
    // Partial JSON fragment of a tool call's arguments.
    return {
      chunk: new AIMessageChunk({
        content: fields.coerceContentToString
          ? ""
          : [
            {
              index: data.index,
              input: data.delta.partial_json,
              type: data.delta.type,
            },
          ],
        additional_kwargs: {},
      }),
      usageData: usageDataCopy,
    };
  }
  // Unhandled event type (ping, content_block_stop, message_stop, ...).
  return null;
}
function _mergeMessages(messages) {

@@ -246,2 +361,86 @@ // Merge runs of human/tool messages into single human messages with content blocks.

}
/**
 * Derive a tool-call chunk from an AIMessageChunk's content blocks, if any.
 *
 * Anthropic streams tool calls in two phases: an initial `tool_use` block
 * carrying the id/name (with no input JSON), then `input_json_delta` blocks
 * carrying partial JSON argument fragments keyed by index.
 *
 * @param chunk - Message chunk whose `content` may be a content-block array.
 * @returns A tool_call_chunk object, or undefined when no tool data present.
 */
function extractToolCallChunk(chunk) {
  const blocks = Array.isArray(chunk.content) ? chunk.content : undefined;
  let result;
  // Phase 1: the identifying chunk (id + name, empty args).
  const startBlock = blocks?.find((c) => c.type === "tool_use");
  if (
    startBlock &&
    "index" in startBlock &&
    "name" in startBlock &&
    "id" in startBlock
  ) {
    result = {
      args: "",
      id: startBlock.id,
      name: startBlock.name,
      index: startBlock.index,
      type: "tool_call_chunk",
    };
  }
  // Phase 2: partial-JSON argument fragments (index + input only).
  const deltaBlock = blocks?.find((c) => c.type === "input_json_delta");
  if (deltaBlock && "index" in deltaBlock && "input" in deltaBlock) {
    const args =
      typeof deltaBlock.input === "string"
        ? deltaBlock.input
        : JSON.stringify(deltaBlock.input, null, 2);
    result = {
      args,
      index: deltaBlock.index,
      type: "tool_call_chunk",
    };
  }
  return result;
}
/**
 * Return the chunk's text when its content is a non-empty string.
 *
 * @param chunk - Message chunk; `content` may be a string or a block array.
 * @returns The string content, or undefined for empty/non-string content.
 */
function extractToken(chunk) {
  const { content } = chunk;
  if (typeof content !== "string" || content === "") {
    return undefined;
  }
  return content;
}
/**
 * Fold the incoming chunk into the running concatenation and, once a
 * `tool_use` content block with fully parseable JSON input is present,
 * surface it as a complete tool_use content object.
 *
 * @param chunk - The newly received message chunk.
 * @param concatenatedChunks - Accumulated chunks so far (may be undefined).
 * @returns `{ toolUseContent, concatenatedChunks }` where `toolUseContent`
 *   stays undefined while the JSON input is still partial; returns undefined
 *   when a tool_use block carries none of the expected fields.
 */
function extractToolUseContent(chunk, concatenatedChunks) {
  const merged = concatenatedChunks ? concat(concatenatedChunks, chunk) : chunk;
  let toolUseContent;
  const toolUseMsg = Array.isArray(merged.content)
    ? merged.content.find((c) => c.type === "tool_use")
    : undefined;
  if (toolUseMsg) {
    // Bail out entirely when the block has none of the identifying fields.
    if (!("input" in toolUseMsg || "name" in toolUseMsg || "id" in toolUseMsg)) {
      return;
    }
    try {
      const parsedArgs = JSON.parse(toolUseMsg.input);
      if (parsedArgs) {
        toolUseContent = {
          type: "tool_use",
          id: toolUseMsg.id,
          name: toolUseMsg.name,
          input: parsedArgs,
        };
      }
    }
    catch (_) {
      // Input JSON is still partial — wait for more chunks.
    }
  }
  return {
    toolUseContent,
    concatenatedChunks: merged,
  };
}
/**

@@ -520,115 +719,78 @@ * Wrapper around Anthropic large language models.

const formattedMessages = _formatMessagesForAnthropic(messages);
if (options.tools !== undefined && options.tools.length > 0) {
const { generations } = await this._generateNonStreaming(messages, params, {
signal: options.signal,
const coerceContentToString = !_toolsInParams({
...params,
...formattedMessages,
stream: false,
});
const stream = await this.createStreamWithRetry({
...params,
...formattedMessages,
stream: true,
});
let usageData = { input_tokens: 0, output_tokens: 0 };
let concatenatedChunks;
for await (const data of stream) {
if (options.signal?.aborted) {
stream.controller.abort();
throw new Error("AbortError: User aborted the request.");
}
const result = _makeMessageChunkFromAnthropicEvent(data, {
streamUsage: !!(this.streamUsage || options.streamUsage),
coerceContentToString,
usageData,
});
const result = generations[0].message;
const toolCallChunks = result.tool_calls?.map((toolCall, index) => ({
name: toolCall.name,
args: JSON.stringify(toolCall.args),
id: toolCall.id,
index,
}));
yield new ChatGenerationChunk({
message: new AIMessageChunk({
content: result.content,
additional_kwargs: result.additional_kwargs,
tool_call_chunks: toolCallChunks,
}),
text: generations[0].text,
});
}
else {
const stream = await this.createStreamWithRetry({
...params,
...formattedMessages,
stream: true,
});
let usageData = { input_tokens: 0, output_tokens: 0 };
for await (const data of stream) {
if (options.signal?.aborted) {
stream.controller.abort();
throw new Error("AbortError: User aborted the request.");
}
if (data.type === "message_start") {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { content, usage, ...additionalKwargs } = data.message;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const filteredAdditionalKwargs = {};
for (const [key, value] of Object.entries(additionalKwargs)) {
if (value !== undefined && value !== null) {
filteredAdditionalKwargs[key] = value;
}
}
usageData = usage;
let usageMetadata;
if (this.streamUsage || options.streamUsage) {
usageMetadata = {
input_tokens: usage.input_tokens,
output_tokens: usage.output_tokens,
total_tokens: usage.input_tokens + usage.output_tokens,
};
}
yield new ChatGenerationChunk({
message: new AIMessageChunk({
content: "",
additional_kwargs: filteredAdditionalKwargs,
usage_metadata: usageMetadata,
}),
text: "",
});
}
else if (data.type === "message_delta") {
let usageMetadata;
if (this.streamUsage || options.streamUsage) {
usageMetadata = {
input_tokens: data.usage.output_tokens,
output_tokens: 0,
total_tokens: data.usage.output_tokens,
};
}
yield new ChatGenerationChunk({
message: new AIMessageChunk({
content: "",
additional_kwargs: { ...data.delta },
usage_metadata: usageMetadata,
}),
text: "",
});
if (data?.usage !== undefined) {
usageData.output_tokens += data.usage.output_tokens;
}
}
else if (data.type === "content_block_delta" &&
data.delta.type === "text_delta") {
const content = data.delta?.text;
if (content !== undefined) {
yield new ChatGenerationChunk({
message: new AIMessageChunk({
content,
additional_kwargs: {},
}),
text: content,
});
await runManager?.handleLLMNewToken(content);
}
}
if (!result)
continue;
const { chunk, usageData: updatedUsageData } = result;
usageData = updatedUsageData;
const newToolCallChunk = extractToolCallChunk(chunk);
// Maintain concatenatedChunks for accessing the complete `tool_use` content block.
concatenatedChunks = concatenatedChunks
? concat(concatenatedChunks, chunk)
: chunk;
let toolUseContent;
const extractedContent = extractToolUseContent(chunk, concatenatedChunks);
if (extractedContent) {
toolUseContent = extractedContent.toolUseContent;
concatenatedChunks = extractedContent.concatenatedChunks;
}
let usageMetadata;
if (this.streamUsage || options.streamUsage) {
usageMetadata = {
input_tokens: usageData.input_tokens,
output_tokens: usageData.output_tokens,
total_tokens: usageData.input_tokens + usageData.output_tokens,
};
// Filter partial `tool_use` content, and only add `tool_use` chunks if complete JSON available.
const chunkContent = Array.isArray(chunk.content)
? chunk.content.filter((c) => c.type !== "tool_use")
: chunk.content;
if (Array.isArray(chunkContent) && toolUseContent) {
chunkContent.push(toolUseContent);
}
// Extract the text content token for text field and runManager.
const token = extractToken(chunk);
yield new ChatGenerationChunk({
message: new AIMessageChunk({
content: "",
additional_kwargs: { usage: usageData },
usage_metadata: usageMetadata,
content: chunkContent,
additional_kwargs: chunk.additional_kwargs,
tool_call_chunks: newToolCallChunk ? [newToolCallChunk] : undefined,
usage_metadata: chunk.usage_metadata,
response_metadata: chunk.response_metadata,
}),
text: "",
text: token ?? "",
});
if (token) {
await runManager?.handleLLMNewToken(token);
}
}
let usageMetadata;
if (this.streamUsage || options.streamUsage) {
usageMetadata = {
input_tokens: usageData.input_tokens,
output_tokens: usageData.output_tokens,
total_tokens: usageData.input_tokens + usageData.output_tokens,
};
}
yield new ChatGenerationChunk({
message: new AIMessageChunk({
content: coerceContentToString ? "" : [],
additional_kwargs: { usage: usageData },
usage_metadata: usageMetadata,
}),
text: "",
});
}

@@ -635,0 +797,0 @@ /** @ignore */

@@ -78,3 +78,8 @@ import { BaseLLMOutputParser, OutputParserException, } from "@langchain/core/output_parsers";

if (block.type === "tool_use") {
toolCalls.push({ name: block.name, args: block.input, id: block.id });
toolCalls.push({
name: block.name,
args: block.input,
id: block.id,
type: "tool_call",
});
}

@@ -81,0 +86,0 @@ }

{
"name": "@langchain/anthropic",
"version": "0.2.3",
"version": "0.2.4",
"description": "Anthropic integrations for LangChain.js",

@@ -39,3 +39,3 @@ "type": "module",

"@anthropic-ai/sdk": "^0.22.0",
"@langchain/core": ">=0.2.9 <0.3.0",
"@langchain/core": ">=0.2.16 <0.3.0",
"fast-xml-parser": "^4.3.5",

@@ -42,0 +42,0 @@ "zod": "^3.22.4",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc