Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@langchain/anthropic

Package Overview
Dependencies
Maintainers
10
Versions
66
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/anthropic - npm Package Compare versions

Comparing version 0.2.14 to 0.2.15

53

dist/chat_models.d.ts

@@ -69,8 +69,8 @@ import { Anthropic, type ClientOptions } from "@anthropic-ai/sdk";

anthropicApiUrl?: string;
/** @deprecated Use "model" instead */
modelName?: string;
/** Model name to use */
modelName: string;
/** Model name to use */
model: string;
model?: string;
/** Overridable Anthropic ClientOptions */
clientOptions: ClientOptions;
clientOptions?: ClientOptions;
/** Holds any additional parameters that are valid to pass to {@link

@@ -96,3 +96,3 @@ * https://console.anthropic.com/docs/api/reference |

* Setup:
* Install `@langchain/anthropic` and set environment variable `ANTHROPIC_API_KEY`.
* Install `@langchain/anthropic` and set an environment variable named `ANTHROPIC_API_KEY`.
*

@@ -104,5 +104,5 @@ * ```bash

*
* ## [Constructor args](/classes/langchain_anthropic.ChatAnthropic.html#constructor)
* ## [Constructor args](https://api.js.langchain.com/classes/langchain_anthropic.ChatAnthropic.html#constructor)
*
* ## [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
* ## [Runtime args](https://api.js.langchain.com/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
*

@@ -154,13 +154,6 @@ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc.

* ```typescript
* const messages = [
* {
* type: "system" as const,
* content: "You are a helpful translator. Translate the user sentence to French.",
* },
* {
* type: "human" as const,
* content: "I love programming.",
* },
* ];
* const result = await llm.invoke(messages);
* const input = `Translate "I love programming" into French.`;
*
* // Models also accept a list of chat messages or a formatted prompt
* const result = await llm.invoke(input);
* console.log(result);

@@ -200,3 +193,3 @@ * ```

* ```typescript
* for await (const chunk of await llm.stream(messages)) {
* for await (const chunk of await llm.stream(input)) {
* console.log(chunk);

@@ -267,3 +260,3 @@ * }

*
* const stream = await llm.stream(messages);
* const stream = await llm.stream(input);
* let full: AIMessageChunk | undefined;

@@ -372,3 +365,3 @@ * for await (const chunk of stream) {

*
* const structuredLlm = llm.withStructuredOutput(Joke);
* const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
* const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");

@@ -424,3 +417,3 @@ * console.log(jokeResult);

* ```typescript
* const aiMsgForMetadata = await llm.invoke(messages);
* const aiMsgForMetadata = await llm.invoke(input);
* console.log(aiMsgForMetadata.usage_metadata);

@@ -441,3 +434,3 @@ * ```

* const streamForMetadata = await llm.stream(
* messages,
* input,
* {

@@ -465,3 +458,3 @@ * streamUsage: true

* ```typescript
* const aiMsgForResponseMetadata = await llm.invoke(messages);
* const aiMsgForResponseMetadata = await llm.invoke(input);
* console.log(aiMsgForResponseMetadata.response_metadata);

@@ -508,3 +501,3 @@ * ```

streamUsage: boolean;
constructor(fields?: Partial<AnthropicInput> & BaseChatModelParams);
constructor(fields?: AnthropicInput & BaseChatModelParams);
getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;

@@ -526,4 +519,4 @@ /**

_identifyingParams(): {
system?: string | undefined;
model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
system?: string | Anthropic.Messages.TextBlockParam[] | undefined;
model: Anthropic.Messages.Model;
max_tokens: number;

@@ -544,4 +537,4 @@ tools?: Anthropic.Messages.Tool[] | undefined;

identifyingParams(): {
system?: string | undefined;
model: "claude-2.1" | (string & {}) | "claude-3-opus-20240229" | "claude-3-sonnet-20240229" | "claude-3-haiku-20240307" | "claude-2.0" | "claude-instant-1.2";
system?: string | Anthropic.Messages.TextBlockParam[] | undefined;
model: Anthropic.Messages.Model;
max_tokens: number;

@@ -564,3 +557,3 @@ tools?: Anthropic.Messages.Tool[] | undefined;

id: string;
model: string;
model: Anthropic.Messages.Model;
stop_reason: "tool_use" | "stop_sequence" | "end_turn" | "max_tokens" | null;

@@ -567,0 +560,0 @@ stop_sequence: string | null;

@@ -44,3 +44,3 @@ import { Anthropic } from "@anthropic-ai/sdk";

* Setup:
* Install `@langchain/anthropic` and set environment variable `ANTHROPIC_API_KEY`.
* Install `@langchain/anthropic` and set an environment variable named `ANTHROPIC_API_KEY`.
*

@@ -52,5 +52,5 @@ * ```bash

*
* ## [Constructor args](/classes/langchain_anthropic.ChatAnthropic.html#constructor)
* ## [Constructor args](https://api.js.langchain.com/classes/langchain_anthropic.ChatAnthropic.html#constructor)
*
* ## [Runtime args](/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
* ## [Runtime args](https://api.js.langchain.com/interfaces/langchain_anthropic.ChatAnthropicCallOptions.html)
*

@@ -102,13 +102,6 @@ * Runtime args can be passed as the second argument to any of the base runnable methods `.invoke`. `.stream`, `.batch`, etc.

* ```typescript
* const messages = [
* {
* type: "system" as const,
* content: "You are a helpful translator. Translate the user sentence to French.",
* },
* {
* type: "human" as const,
* content: "I love programming.",
* },
* ];
* const result = await llm.invoke(messages);
* const input = `Translate "I love programming" into French.`;
*
* // Models also accept a list of chat messages or a formatted prompt
* const result = await llm.invoke(input);
* console.log(result);

@@ -148,3 +141,3 @@ * ```

* ```typescript
* for await (const chunk of await llm.stream(messages)) {
* for await (const chunk of await llm.stream(input)) {
* console.log(chunk);

@@ -215,3 +208,3 @@ * }

*
* const stream = await llm.stream(messages);
* const stream = await llm.stream(input);
* let full: AIMessageChunk | undefined;

@@ -320,3 +313,3 @@ * for await (const chunk of stream) {

*
* const structuredLlm = llm.withStructuredOutput(Joke);
* const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
* const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");

@@ -372,3 +365,3 @@ * console.log(jokeResult);

* ```typescript
* const aiMsgForMetadata = await llm.invoke(messages);
* const aiMsgForMetadata = await llm.invoke(input);
* console.log(aiMsgForMetadata.usage_metadata);

@@ -389,3 +382,3 @@ * ```

* const streamForMetadata = await llm.stream(
* messages,
* input,
* {

@@ -413,3 +406,3 @@ * streamUsage: true

* ```typescript
* const aiMsgForResponseMetadata = await llm.invoke(messages);
* const aiMsgForResponseMetadata = await llm.invoke(input);
* console.log(aiMsgForResponseMetadata.response_metadata);

@@ -416,0 +409,0 @@ * ```

@@ -6,3 +6,3 @@ /**

import { ToolCall } from "@langchain/core/messages/tool";
import { AnthropicMessageParam, AnthropicToolResponse } from "../types.js";
import { AnthropicMessageCreateParams, AnthropicToolResponse } from "../types.js";
export declare function _convertLangChainToolCallToAnthropic(toolCall: ToolCall): AnthropicToolResponse;

@@ -14,5 +14,2 @@ /**

*/
export declare function _formatMessagesForAnthropic(messages: BaseMessage[]): {
system?: string;
messages: AnthropicMessageParam[];
};
export declare function _formatMessagesForAnthropic(messages: BaseMessage[]): AnthropicMessageCreateParams;

@@ -165,5 +165,2 @@ /**

if (mergedMessages.length > 0 && mergedMessages[0]._getType() === "system") {
if (typeof messages[0].content !== "string") {
throw new Error("System message content must be a string.");
}
system = messages[0].content;

@@ -170,0 +167,0 @@ }

{
"name": "@langchain/anthropic",
"version": "0.2.14",
"version": "0.2.15",
"description": "Anthropic integrations for LangChain.js",

@@ -38,3 +38,3 @@ "type": "module",

"dependencies": {
"@anthropic-ai/sdk": "^0.22.0",
"@anthropic-ai/sdk": "^0.25.2",
"@langchain/core": ">=0.2.21 <0.3.0",

@@ -41,0 +41,0 @@ "fast-xml-parser": "^4.4.1",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc