You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo · Sign in
Socket

@langchain/openai

Package Overview
Dependencies
Maintainers
10
Versions
152
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai — npm package: compare versions

Comparing version
0.6.1
to
0.6.2
+1
-0
dist/azure/chat_models.cjs

@@ -534,2 +534,3 @@ "use strict";

}
/** @internal */
_getClientOptions(options) {

@@ -536,0 +537,0 @@ if (!this.client) {

+2
-1

@@ -445,3 +445,4 @@ import { type ClientOptions } from "openai";

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
/** @internal */
_getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
toJSON(): any;

@@ -448,0 +449,0 @@ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;

@@ -531,2 +531,3 @@ import { AzureOpenAI as AzureOpenAIClient } from "openai";

}
/** @internal */
_getClientOptions(options) {

@@ -533,0 +534,0 @@ if (!this.client) {

@@ -171,2 +171,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

service_tier?: OpenAIClient.Chat.ChatCompletionCreateParams["service_tier"];
protected defaultOptions: CallOptions;
_llmType(): string;

@@ -203,3 +204,5 @@ static lc_name(): string;

protected _getResponseFormat(resFormat?: CallOptions["response_format"]): ResponseFormatText | ResponseFormatJSONObject | ResponseFormatJSONSchema | undefined;
protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
protected _combineCallOptions(additionalOptions?: this["ParsedCallOptions"]): this["ParsedCallOptions"];
/** @internal */
_getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
protected _convertChatOpenAIToolToCompletionsTool(tool: ChatOpenAIToolType, fields?: {

@@ -209,2 +212,4 @@ strict?: boolean;

bindTools(tools: ChatOpenAIToolType[], kwargs?: Partial<CallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
stream(input: BaseLanguageModelInput, options?: CallOptions): Promise<import("@langchain/core/utils/stream").IterableReadableStream<AIMessageChunk>>;
invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<AIMessageChunk>;
/** @ignore */

@@ -869,2 +874,4 @@ _combineLLMOutput(...llmOutputs: OpenAILLMOutput[]): OpenAILLMOutput;

protected _useResponsesApi(options: this["ParsedCallOptions"] | undefined): boolean;
getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
invocationParams(options?: this["ParsedCallOptions"]): ChatResponsesInvocationParams | ChatCompletionsInvocationParams;
/** @ignore */

@@ -871,0 +878,0 @@ _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;

{
"name": "@langchain/openai",
"version": "0.6.1",
"version": "0.6.2",
"description": "OpenAI integrations for LangChain.js",

@@ -5,0 +5,0 @@ "type": "module",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display