Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a Demo | Install | Sign in
Socket

@langchain/openai

Package Overview
Dependencies
Maintainers
12
Versions
150
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version
1.2.8
to
1.2.9
+9
-0
CHANGELOG.md
# @langchain/openai
## 1.2.9
### Patch Changes
- [#10080](https://github.com/langchain-ai/langchainjs/pull/10080) [`b583729`](https://github.com/langchain-ai/langchainjs/commit/b583729e99cf0c035630f6b311c4d069a1980cca) Thanks [@hntrl](https://github.com/hntrl)! - Add string-model constructor overloads for chat models (with supporting tests where applicable).
- Updated dependencies [[`fb2226e`](https://github.com/langchain-ai/langchainjs/commit/fb2226e6decdaba21e78b3f01877b45fa1eed6d3)]:
- @langchain/core@1.1.27
## 1.2.8

@@ -4,0 +13,0 @@

@@ -28,2 +28,12 @@ const require_azure = require('../../utils/azure.cjs');

];
function getAzureChatOpenAIParams(modelOrFields, fieldsArg) {
	// Normalizes the two constructor call shapes into a single fields object.
	//
	// String overload: the first argument is the model name. Mirror it into
	// every Azure deployment-name alias, then layer any explicit fields on
	// top so caller-supplied values take precedence.
	if (typeof modelOrFields === "string") {
		const explicitFields = fieldsArg ?? {};
		return {
			model: modelOrFields,
			deploymentName: modelOrFields,
			azureOpenAIApiDeploymentName: modelOrFields,
			...explicitFields
		};
	}
	// Object overload: the first argument (when present) already is the
	// fields object; otherwise fall back to the trailing argument, which
	// may itself be undefined.
	return modelOrFields ?? fieldsArg;
}
function _constructAzureFields(fields) {

@@ -125,2 +135,3 @@ this.azureOpenAIApiKey = fields?.azureOpenAIApiKey ?? (typeof fields?.openAIApiKey === "string" ? fields?.openAIApiKey : void 0) ?? (typeof fields?.apiKey === "string" ? fields?.apiKey : void 0) ?? (0, _langchain_core_utils_env.getEnvironmentVariable)("AZURE_OPENAI_API_KEY");

exports._serializeAzureChat = _serializeAzureChat;
exports.getAzureChatOpenAIParams = getAzureChatOpenAIParams;
//# sourceMappingURL=common.cjs.map
+11
-1

@@ -28,2 +28,12 @@ import { getEndpoint, getHeadersWithUserAgent } from "../../utils/azure.js";

];
function getAzureChatOpenAIParams(modelOrFields, fieldsArg) {
	// Resolve the (model, fields?) / (fields?) constructor overloads to one
	// plain fields object.
	if (typeof modelOrFields !== "string") {
		// No model string given: use whichever fields object was supplied
		// (either may be undefined).
		return modelOrFields ?? fieldsArg;
	}
	// Model string given: seed every Azure deployment-name alias with it and
	// let explicit fields (spread last) override the seeded values.
	return Object.assign(
		{
			model: modelOrFields,
			deploymentName: modelOrFields,
			azureOpenAIApiDeploymentName: modelOrFields
		},
		fieldsArg ?? {}
	);
}
function _constructAzureFields(fields) {

@@ -119,3 +129,3 @@ this.azureOpenAIApiKey = fields?.azureOpenAIApiKey ?? (typeof fields?.openAIApiKey === "string" ? fields?.openAIApiKey : void 0) ?? (typeof fields?.apiKey === "string" ? fields?.apiKey : void 0) ?? getEnvironmentVariable("AZURE_OPENAI_API_KEY");

//#endregion
export { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _getAzureClientOptions, _serializeAzureChat };
export { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _getAzureClientOptions, _serializeAzureChat, getAzureChatOpenAIParams };
//# sourceMappingURL=common.js.map
+2
-1

@@ -36,3 +36,4 @@ const require_completions = require('../../chat_models/completions.cjs');

}
constructor(fields) {
constructor(deploymentOrFields, fieldsArg) {
const fields = require_common.getAzureChatOpenAIParams(deploymentOrFields, fieldsArg);
super(fields);

@@ -39,0 +40,0 @@ require_common._constructAzureFields.call(this, fields);

@@ -23,2 +23,3 @@ import { AzureOpenAIChatInput, OpenAICoreRequestOptions } from "../../types.cjs";

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
constructor(deploymentName: string, fields?: Omit<AzureChatOpenAIFields, "deploymentName" | "azureOpenAIApiDeploymentName" | "model">);
constructor(fields?: AzureChatOpenAIFields);

@@ -25,0 +26,0 @@ _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;

@@ -23,2 +23,3 @@ import { AzureOpenAIChatInput, OpenAICoreRequestOptions } from "../../types.js";

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
constructor(deploymentName: string, fields?: Omit<AzureChatOpenAIFields, "deploymentName" | "azureOpenAIApiDeploymentName" | "model">);
constructor(fields?: AzureChatOpenAIFields);

@@ -25,0 +26,0 @@ _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;

import { ChatOpenAICompletions } from "../../chat_models/completions.js";
import { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _getAzureClientOptions, _serializeAzureChat } from "./common.js";
import { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _getAzureClientOptions, _serializeAzureChat, getAzureChatOpenAIParams } from "./common.js";

@@ -36,3 +36,4 @@ //#region src/azure/chat_models/completions.ts

}
constructor(fields) {
constructor(deploymentOrFields, fieldsArg) {
const fields = getAzureChatOpenAIParams(deploymentOrFields, fieldsArg);
super(fields);

@@ -39,0 +40,0 @@ _constructAzureFields.call(this, fields);

@@ -450,3 +450,4 @@ const require_common = require('./common.cjs');

}
constructor(fields) {
constructor(deploymentOrFields, fieldsArg) {
const fields = require_common.getAzureChatOpenAIParams(deploymentOrFields, fieldsArg);
super({

@@ -453,0 +454,0 @@ ...fields,

@@ -436,2 +436,3 @@ import { AzureOpenAIChatInput } from "../../types.cjs";

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
constructor(deploymentName: string, fields?: Omit<AzureChatOpenAIFields, "deploymentName" | "azureOpenAIApiDeploymentName" | "model">);
constructor(fields?: AzureChatOpenAIFields);

@@ -438,0 +439,0 @@ /** @internal */

@@ -436,2 +436,3 @@ import { AzureOpenAIChatInput } from "../../types.js";

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
constructor(deploymentName: string, fields?: Omit<AzureChatOpenAIFields, "deploymentName" | "azureOpenAIApiDeploymentName" | "model">);
constructor(fields?: AzureChatOpenAIFields);

@@ -438,0 +439,0 @@ /** @internal */

@@ -1,2 +0,2 @@

import { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _serializeAzureChat } from "./common.js";
import { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _serializeAzureChat, getAzureChatOpenAIParams } from "./common.js";
import { AzureChatOpenAICompletions } from "./completions.js";

@@ -450,3 +450,4 @@ import { AzureChatOpenAIResponses } from "./responses.js";

}
constructor(fields) {
constructor(deploymentOrFields, fieldsArg) {
const fields = getAzureChatOpenAIParams(deploymentOrFields, fieldsArg);
super({

@@ -453,0 +454,0 @@ ...fields,

@@ -36,3 +36,4 @@ const require_common = require('./common.cjs');

}
constructor(fields) {
constructor(deploymentOrFields, fieldsArg) {
const fields = require_common.getAzureChatOpenAIParams(deploymentOrFields, fieldsArg);
super(fields);

@@ -39,0 +40,0 @@ require_common._constructAzureFields.call(this, fields);

@@ -23,2 +23,3 @@ import { AzureOpenAIChatInput, OpenAICoreRequestOptions } from "../../types.cjs";

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
constructor(deploymentName: string, fields?: Omit<AzureChatOpenAIFields, "deploymentName" | "azureOpenAIApiDeploymentName" | "model">);
constructor(fields?: AzureChatOpenAIFields);

@@ -25,0 +26,0 @@ _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;

@@ -23,2 +23,3 @@ import { AzureOpenAIChatInput, OpenAICoreRequestOptions } from "../../types.js";

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
constructor(deploymentName: string, fields?: Omit<AzureChatOpenAIFields, "deploymentName" | "azureOpenAIApiDeploymentName" | "model">);
constructor(fields?: AzureChatOpenAIFields);

@@ -25,0 +26,0 @@ _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;

@@ -1,2 +0,2 @@

import { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _getAzureClientOptions, _serializeAzureChat } from "./common.js";
import { AZURE_ALIASES, AZURE_SECRETS, AZURE_SERIALIZABLE_KEYS, _constructAzureFields, _getAzureClientOptions, _serializeAzureChat, getAzureChatOpenAIParams } from "./common.js";
import { ChatOpenAIResponses } from "../../chat_models/responses.js";

@@ -36,3 +36,4 @@

}
constructor(fields) {
constructor(deploymentOrFields, fieldsArg) {
const fields = getAzureChatOpenAIParams(deploymentOrFields, fieldsArg);
super(fields);

@@ -39,0 +40,0 @@ _constructAzureFields.call(this, fields);

@@ -18,2 +18,10 @@ const require_client = require('../utils/client.cjs');

//#region src/chat_models/base.ts
function getChatOpenAIModelParams(modelOrParams, paramsArg) {
	// Collapse the string-model and object-params constructor overloads into
	// a single params object.
	if (typeof modelOrParams === "string") {
		// Model-name overload: start from the model and overlay any explicit
		// params so caller-supplied keys (including `model`) win.
		const extraParams = paramsArg ?? {};
		return { model: modelOrParams, ...extraParams };
	}
	// Params-object overload; when absent, fall through to the trailing
	// argument (which may be undefined).
	return modelOrParams ?? paramsArg;
}
/** @internal */

@@ -619,2 +627,3 @@ var BaseChatOpenAI = class extends _langchain_core_language_models_chat_models.BaseChatModel {

exports.BaseChatOpenAI = BaseChatOpenAI;
exports.getChatOpenAIModelParams = getChatOpenAIModelParams;
//# sourceMappingURL=base.cjs.map

@@ -18,2 +18,10 @@ import { wrapOpenAIClientError } from "../utils/client.js";

//#region src/chat_models/base.ts
function getChatOpenAIModelParams(modelOrParams, paramsArg) {
	// Accepts either (modelName, params?) or (params?) and returns the
	// unified params object.
	const isModelName = typeof modelOrParams === "string";
	if (!isModelName) {
		// Object form: prefer the first argument, otherwise the second;
		// both may legitimately be undefined.
		return modelOrParams ?? paramsArg;
	}
	// String form: seed `model` from the name, then spread explicit params
	// last so they take precedence over the seeded value.
	return Object.assign({ model: modelOrParams }, paramsArg ?? {});
}
/** @internal */

@@ -618,3 +626,3 @@ var BaseChatOpenAI = class extends BaseChatModel {

//#endregion
export { BaseChatOpenAI };
export { BaseChatOpenAI, getChatOpenAIModelParams };
//# sourceMappingURL=base.js.map

@@ -15,2 +15,5 @@ const require_client = require('../utils/client.cjs');

var ChatOpenAICompletions = class extends require_base.BaseChatOpenAI {
constructor(modelOrFields, fieldsArg) {
super(require_base.getChatOpenAIModelParams(modelOrFields, fieldsArg));
}
/** @internal */

@@ -17,0 +20,0 @@ invocationParams(options, extra) {

@@ -1,2 +0,2 @@

import { BaseChatOpenAI, BaseChatOpenAICallOptions } from "./base.cjs";
import { BaseChatOpenAI, BaseChatOpenAICallOptions, BaseChatOpenAIFields } from "./base.cjs";
import { OpenAI as OpenAI$1 } from "openai";

@@ -15,2 +15,4 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";

declare class ChatOpenAICompletions<CallOptions extends ChatOpenAICompletionsCallOptions = ChatOpenAICompletionsCallOptions> extends BaseChatOpenAI<CallOptions> {
constructor(model: string, fields?: Omit<BaseChatOpenAIFields, "model">);
constructor(fields?: BaseChatOpenAIFields);
/** @internal */

@@ -17,0 +19,0 @@ invocationParams(options?: this["ParsedCallOptions"], extra?: {

@@ -1,2 +0,2 @@

import { BaseChatOpenAI, BaseChatOpenAICallOptions } from "./base.js";
import { BaseChatOpenAI, BaseChatOpenAICallOptions, BaseChatOpenAIFields } from "./base.js";
import { OpenAI as OpenAI$1 } from "openai";

@@ -15,2 +15,4 @@ import { BaseMessage, BaseMessageChunk } from "@langchain/core/messages";

declare class ChatOpenAICompletions<CallOptions extends ChatOpenAICompletionsCallOptions = ChatOpenAICompletionsCallOptions> extends BaseChatOpenAI<CallOptions> {
constructor(model: string, fields?: Omit<BaseChatOpenAIFields, "model">);
constructor(fields?: BaseChatOpenAIFields);
/** @internal */

@@ -17,0 +19,0 @@ invocationParams(options?: this["ParsedCallOptions"], extra?: {

import { wrapOpenAIClientError } from "../utils/client.js";
import { formatToOpenAIToolChoice } from "../utils/tools.js";
import { isReasoningModel } from "../utils/misc.js";
import { BaseChatOpenAI } from "./base.js";
import { BaseChatOpenAI, getChatOpenAIModelParams } from "./base.js";
import { convertCompletionsDeltaToBaseMessageChunk, convertCompletionsMessageToBaseMessage, convertMessagesToCompletionsMessageParams } from "../converters/completions.js";

@@ -15,2 +15,5 @@ import { AIMessage, AIMessageChunk, isAIMessage } from "@langchain/core/messages";

var ChatOpenAICompletions = class extends BaseChatOpenAI {
constructor(modelOrFields, fieldsArg) {
super(getChatOpenAIModelParams(modelOrFields, fieldsArg));
}
/** @internal */

@@ -17,0 +20,0 @@ invocationParams(options, extra) {

@@ -562,3 +562,5 @@ const require_tools = require('../utils/tools.cjs');

}
constructor(fields) {
fields;
constructor(modelOrFields, fieldsArg) {
const fields = require_base.getChatOpenAIModelParams(modelOrFields, fieldsArg);
super(fields);

@@ -565,0 +567,0 @@ this.fields = fields;

@@ -573,3 +573,2 @@ import { OpenAICallOptions, OpenAIChatInput } from "../types.cjs";

declare class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends BaseChatOpenAI<CallOptions> {
protected fields?: ChatOpenAIFields | undefined;
/**

@@ -584,3 +583,5 @@ * Whether to use the responses API for all requests. If `false` the responses API will be used

get callKeys(): string[];
constructor(fields?: ChatOpenAIFields | undefined);
protected fields?: ChatOpenAIFields;
constructor(model: string, fields?: Omit<ChatOpenAIFields, "model">);
constructor(fields?: ChatOpenAIFields);
protected _useResponsesApi(options: this["ParsedCallOptions"] | undefined): boolean;

@@ -587,0 +588,0 @@ getLsParams(options: this["ParsedCallOptions"]): _langchain_core_language_models_chat_models0.LangSmithParams;

@@ -573,3 +573,2 @@ import { OpenAICallOptions, OpenAIChatInput } from "../types.js";

declare class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends BaseChatOpenAI<CallOptions> {
protected fields?: ChatOpenAIFields | undefined;
/**

@@ -584,3 +583,5 @@ * Whether to use the responses API for all requests. If `false` the responses API will be used

get callKeys(): string[];
constructor(fields?: ChatOpenAIFields | undefined);
protected fields?: ChatOpenAIFields;
constructor(model: string, fields?: Omit<ChatOpenAIFields, "model">);
constructor(fields?: ChatOpenAIFields);
protected _useResponsesApi(options: this["ParsedCallOptions"] | undefined): boolean;

@@ -587,0 +588,0 @@ getLsParams(options: this["ParsedCallOptions"]): _langchain_core_language_models_chat_models0.LangSmithParams;

import { isBuiltInTool, isCustomTool, isOpenAICustomTool } from "../utils/tools.js";
import { _modelPrefersResponsesAPI } from "../utils/misc.js";
import { BaseChatOpenAI } from "./base.js";
import { BaseChatOpenAI, getChatOpenAIModelParams } from "./base.js";
import { ChatOpenAICompletions } from "./completions.js";

@@ -562,3 +562,5 @@ import { ChatOpenAIResponses } from "./responses.js";

}
constructor(fields) {
fields;
constructor(modelOrFields, fieldsArg) {
const fields = getChatOpenAIModelParams(modelOrFields, fieldsArg);
super(fields);

@@ -565,0 +567,0 @@ this.fields = fields;

@@ -16,2 +16,5 @@ const require_client = require('../utils/client.cjs');

var ChatOpenAIResponses = class extends require_base.BaseChatOpenAI {
constructor(modelOrFields, fieldsArg) {
super(require_base.getChatOpenAIModelParams(modelOrFields, fieldsArg));
}
invocationParams(options) {

@@ -18,0 +21,0 @@ let strict;

import { ChatOpenAIToolType, ResponsesTool } from "../utils/tools.cjs";
import { OpenAIVerbosityParam } from "../types.cjs";
import { BaseChatOpenAI, BaseChatOpenAICallOptions } from "./base.cjs";
import { BaseChatOpenAI, BaseChatOpenAICallOptions, BaseChatOpenAIFields } from "./base.cjs";
import { OpenAI as OpenAI$1 } from "openai";

@@ -43,2 +43,4 @@ import { CallbackManagerForLLMRun } from "@langchain/core/callbacks/manager";

declare class ChatOpenAIResponses<CallOptions extends ChatOpenAIResponsesCallOptions = ChatOpenAIResponsesCallOptions> extends BaseChatOpenAI<CallOptions> {
constructor(model: string, fields?: Omit<BaseChatOpenAIFields, "model">);
constructor(fields?: BaseChatOpenAIFields);
invocationParams(options?: this["ParsedCallOptions"]): ChatResponsesInvocationParams;

@@ -45,0 +47,0 @@ _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;

import { ChatOpenAIToolType, ResponsesTool } from "../utils/tools.js";
import { OpenAIVerbosityParam } from "../types.js";
import { BaseChatOpenAI, BaseChatOpenAICallOptions } from "./base.js";
import { BaseChatOpenAI, BaseChatOpenAICallOptions, BaseChatOpenAIFields } from "./base.js";
import { OpenAI as OpenAI$1 } from "openai";

@@ -43,2 +43,4 @@ import { BaseMessage } from "@langchain/core/messages";

declare class ChatOpenAIResponses<CallOptions extends ChatOpenAIResponsesCallOptions = ChatOpenAIResponsesCallOptions> extends BaseChatOpenAI<CallOptions> {
constructor(model: string, fields?: Omit<BaseChatOpenAIFields, "model">);
constructor(fields?: BaseChatOpenAIFields);
invocationParams(options?: this["ParsedCallOptions"]): ChatResponsesInvocationParams;

@@ -45,0 +47,0 @@ _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;

import { wrapOpenAIClientError } from "../utils/client.js";
import { convertCompletionsCustomTool, formatToOpenAIToolChoice, isBuiltInTool, isBuiltInToolChoice, isCustomTool, isOpenAICustomTool } from "../utils/tools.js";
import { BaseChatOpenAI } from "./base.js";
import { BaseChatOpenAI, getChatOpenAIModelParams } from "./base.js";
import { convertMessagesToResponsesInput, convertResponsesDeltaToChatGenerationChunk, convertResponsesMessageToAIMessage } from "../converters/responses.js";

@@ -16,2 +16,5 @@ import { isOpenAITool } from "@langchain/core/language_models/base";

var ChatOpenAIResponses = class extends BaseChatOpenAI {
constructor(modelOrFields, fieldsArg) {
super(getChatOpenAIModelParams(modelOrFields, fieldsArg));
}
invocationParams(options) {

@@ -18,0 +21,0 @@ let strict;

{
"name": "@langchain/openai",
"version": "1.2.8",
"version": "1.2.9",
"description": "OpenAI integrations for LangChain.js",

@@ -22,3 +22,3 @@ "author": "LangChain",

"peerDependencies": {
"@langchain/core": "^1.0.0"
"@langchain/core": "^1.1.27"
},

@@ -40,5 +40,5 @@ "devDependencies": {

"zod-to-json-schema": "^3.24.6",
"@langchain/core": "^1.1.25",
"@langchain/core": "^1.1.27",
"@langchain/eslint": "0.1.1",
"@langchain/standard-tests": "0.0.23",
"@langchain/eslint": "0.1.1",
"@langchain/tsconfig": "0.0.1"

@@ -45,0 +45,0 @@ },

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet