You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo | Sign in
Socket

@langchain/openai

Package Overview
Dependencies
Maintainers
11
Versions
152
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version
0.6.3
to
0.6.4
+3
-2
dist/chat_models.d.ts

@@ -274,3 +274,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

_generate(messages: BaseMessage[], options: this["ParsedCallOptions"]): Promise<ChatResult>;
_streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"]): AsyncGenerator<ChatGenerationChunk>;
_streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
/**

@@ -876,2 +876,3 @@ * Calls the Responses API with retry logic in case of failures.

export declare class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends BaseChatOpenAI<CallOptions> {
protected fields?: ChatOpenAIFields | undefined;
/**

@@ -885,3 +886,3 @@ * Whether to use the responses API for all requests. If `false` the responses API will be used

get lc_serializable_keys(): string[];
constructor(fields?: ChatOpenAIFields);
constructor(fields?: ChatOpenAIFields | undefined);
protected _useResponsesApi(options: this["ParsedCallOptions"] | undefined): boolean;

@@ -888,0 +889,0 @@ getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;

+1
-1
{
"name": "@langchain/openai",
"version": "0.6.3",
"version": "0.6.4",
"description": "OpenAI integrations for LangChain.js",

@@ -5,0 +5,0 @@ "type": "module",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display