You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a Demo · Sign in
Socket

@langchain/openai

Package Overview
Dependencies
Maintainers
10
Versions
152
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version
0.6.2
to
0.6.3
+330
-161
dist/azure/chat_models.cjs

@@ -9,2 +9,312 @@ "use strict";

const headers_js_1 = require("../utils/headers.cjs");
// Serialization aliases: constructor field name -> snake_case key used in the
// serialized form. Legacy `openAI*` names and their `azureOpenAI*`
// counterparts intentionally map to the same serialized keys.
const AZURE_ALIASES = {
openAIApiKey: "openai_api_key",
openAIApiVersion: "openai_api_version",
openAIBasePath: "openai_api_base",
deploymentName: "deployment_name",
azureOpenAIEndpoint: "azure_endpoint",
azureOpenAIApiVersion: "openai_api_version",
azureOpenAIBasePath: "openai_api_base",
azureOpenAIApiDeploymentName: "deployment_name",
};
// Secret fields: field name -> environment variable that can restore the
// value when a serialized model is loaded.
const AZURE_SECRETS = {
azureOpenAIApiKey: "AZURE_OPENAI_API_KEY",
};
// Constructor fields included in serialization, in addition to the base
// class's own serializable keys (spread into `lc_serializable_keys` below).
const AZURE_SERIALIZABLE_KEYS = [
"azureOpenAIApiKey",
"azureOpenAIApiVersion",
"azureOpenAIBasePath",
"azureOpenAIEndpoint",
"azureOpenAIApiInstanceName",
"azureOpenAIApiDeploymentName",
"deploymentName",
"openAIApiKey",
"openAIApiVersion",
];
function _constructAzureFields(fields) {
this.azureOpenAIApiKey =
fields?.azureOpenAIApiKey ??
fields?.openAIApiKey ??
fields?.apiKey ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_KEY");
this.azureOpenAIApiInstanceName =
fields?.azureOpenAIApiInstanceName ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_INSTANCE_NAME");
this.azureOpenAIApiDeploymentName =
fields?.azureOpenAIApiDeploymentName ??
fields?.deploymentName ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_DEPLOYMENT_NAME");
this.azureOpenAIApiVersion =
fields?.azureOpenAIApiVersion ??
fields?.openAIApiVersion ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_VERSION");
this.azureOpenAIBasePath =
fields?.azureOpenAIBasePath ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_BASE_PATH");
this.azureOpenAIEndpoint =
fields?.azureOpenAIEndpoint ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_ENDPOINT");
this.azureADTokenProvider = fields?.azureADTokenProvider;
if (!this.azureOpenAIApiKey && !this.apiKey && !this.azureADTokenProvider) {
throw new Error("Azure OpenAI API key or Token Provider not found");
}
}
// Build the per-request options handed to the OpenAI SDK, creating and
// caching the AzureOpenAI client on first call.
// NOTE(review): assumes `this` carries the fields set by
// `_constructAzureFields` plus `clientConfig`/`timeout`/`client` — it is
// `.call()`ed from the Azure chat model classes below.
function _getAzureClientOptions(options) {
if (!this.client) {
// Lazily construct the client once; subsequent calls reuse `this.client`.
const openAIEndpointConfig = {
azureOpenAIApiDeploymentName: this.azureOpenAIApiDeploymentName,
azureOpenAIApiInstanceName: this.azureOpenAIApiInstanceName,
azureOpenAIApiKey: this.azureOpenAIApiKey,
azureOpenAIBasePath: this.azureOpenAIBasePath,
azureADTokenProvider: this.azureADTokenProvider,
baseURL: this.clientConfig.baseURL,
azureOpenAIEndpoint: this.azureOpenAIEndpoint,
};
const endpoint = (0, azure_js_1.getEndpoint)(openAIEndpointConfig);
const params = {
...this.clientConfig,
// SDK-level retries are disabled here.
// NOTE(review): presumably retries are handled by the base chat model —
// confirm before changing.
baseURL: endpoint,
timeout: this.timeout,
maxRetries: 0,
};
if (!this.azureADTokenProvider) {
// Only pass an API key when not authenticating via Azure AD tokens.
params.apiKey = openAIEndpointConfig.azureOpenAIApiKey;
}
if (!params.baseURL) {
// Let the SDK apply its own default URL resolution.
delete params.baseURL;
}
let env = (0, env_1.getEnv)();
if (env === "node" || env === "deno") {
// Expand runtime info for the User-Agent, e.g. "(node/v20.0.0; linux; x64)".
env = `(${env}/${process.version}; ${process.platform}; ${process.arch})`;
}
const defaultHeaders = (0, headers_js_1.normalizeHeaders)(params.defaultHeaders);
params.defaultHeaders = {
...params.defaultHeaders,
// Prefix the langchain marker onto any caller-supplied User-Agent.
// NOTE(review): there is no separator before the caller's UA string —
// confirm the direct concatenation is intentional.
"User-Agent": defaultHeaders["User-Agent"]
? `langchainjs-azure-openai/2.0.0 (${env})${defaultHeaders["User-Agent"]}`
: `langchainjs-azure-openai/2.0.0 (${env})`,
};
this.client = new openai_1.AzureOpenAI({
apiVersion: this.azureOpenAIApiVersion,
azureADTokenProvider: this.azureADTokenProvider,
deployment: this.azureOpenAIApiDeploymentName,
...params,
});
}
const requestOptions = {
...this.clientConfig,
...options,
};
if (this.azureOpenAIApiKey) {
// Azure key auth travels as the `api-key` header plus an `api-version`
// query parameter; explicit caller-provided values take precedence.
requestOptions.headers = {
"api-key": this.azureOpenAIApiKey,
...requestOptions.headers,
};
requestOptions.query = {
"api-version": this.azureOpenAIApiVersion,
...requestOptions.query,
};
}
return requestOptions;
}
// Post-process a serialized chat model so Azure credentials never leak and
// legacy Azure fields are normalized onto `azure_endpoint`/`deployment_name`.
// Must be `.call()`ed with the model instance providing the azure* fields.
function _serializeAzureChat(input) {
    const json = input;
    const isRecord = (obj) => typeof obj === "object" && obj != null;
    if (!isRecord(json) || !isRecord(json.kwargs)) {
        return json;
    }
    const { kwargs } = json;
    // Strip raw Azure connection details (including the API key).
    for (const secretKey of [
        "azure_openai_base_path",
        "azure_openai_api_deployment_name",
        "azure_openai_api_key",
        "azure_openai_api_version",
        "azure_open_ai_base_path",
    ]) {
        delete kwargs[secretKey];
    }
    // Derive `azure_endpoint`: explicit endpoint wins, then the base path's
    // origin, then the instance-name convention URL.
    if (!kwargs.azure_endpoint && this.azureOpenAIEndpoint) {
        kwargs.azure_endpoint = this.azureOpenAIEndpoint;
    }
    if (!kwargs.azure_endpoint && this.azureOpenAIBasePath) {
        const segments = this.azureOpenAIBasePath.split("/openai/deployments/");
        if (segments.length === 2 && segments[0].startsWith("http")) {
            kwargs.azure_endpoint = segments[0];
        }
    }
    if (!kwargs.azure_endpoint && this.azureOpenAIApiInstanceName) {
        kwargs.azure_endpoint = `https://${this.azureOpenAIApiInstanceName}.openai.azure.com/`;
    }
    // Derive `deployment_name` from the explicit field or the base path suffix.
    if (!kwargs.deployment_name && this.azureOpenAIApiDeploymentName) {
        kwargs.deployment_name = this.azureOpenAIApiDeploymentName;
    }
    if (!kwargs.deployment_name && this.azureOpenAIBasePath) {
        const segments = this.azureOpenAIBasePath.split("/openai/deployments/");
        if (segments.length === 2) {
            kwargs.deployment_name = segments[1];
        }
    }
    // `openai_api_base` is redundant once both Azure fields are present.
    if (kwargs.azure_endpoint && kwargs.deployment_name && kwargs.openai_api_base) {
        delete kwargs.openai_api_base;
    }
    if (kwargs.azure_openai_api_instance_name && kwargs.azure_endpoint) {
        delete kwargs.azure_openai_api_instance_name;
    }
    return json;
}
// Azure-flavored variant of ChatOpenAIResponses: swaps in Azure credential
// handling, serialization aliases/secrets, and the Azure SDK client.
class AzureChatOpenAIResponses extends chat_models_js_1.ChatOpenAIResponses {
    _llmType() {
        return "azure_openai";
    }
    get lc_aliases() {
        return { ...super.lc_aliases, ...AZURE_ALIASES };
    }
    get lc_secrets() {
        return { ...super.lc_secrets, ...AZURE_SECRETS };
    }
    get lc_serializable_keys() {
        return [...super.lc_serializable_keys, ...AZURE_SERIALIZABLE_KEYS];
    }
    getLsParams(options) {
        const lsParams = super.getLsParams(options);
        lsParams.ls_provider = "azure";
        return lsParams;
    }
    constructor(fields) {
        super(fields);
        // Define each Azure field with the same descriptor the compiler emits
        // for a class field, then populate them from `fields`/env vars.
        for (const field of [
            "azureOpenAIApiVersion",
            "azureOpenAIApiKey",
            "azureADTokenProvider",
            "azureOpenAIApiInstanceName",
            "azureOpenAIApiDeploymentName",
            "azureOpenAIBasePath",
            "azureOpenAIEndpoint",
        ]) {
            Object.defineProperty(this, field, {
                enumerable: true,
                configurable: true,
                writable: true,
                value: void 0,
            });
        }
        _constructAzureFields.call(this, fields);
    }
    _getClientOptions(options) {
        return _getAzureClientOptions.call(this, options);
    }
    toJSON() {
        return _serializeAzureChat.call(this, super.toJSON());
    }
}
// Azure-flavored variant of ChatOpenAICompletions; mirrors
// AzureChatOpenAIResponses but targets the completions API surface.
class AzureChatOpenAICompletions extends chat_models_js_1.ChatOpenAICompletions {
    _llmType() {
        return "azure_openai";
    }
    get lc_aliases() {
        return { ...super.lc_aliases, ...AZURE_ALIASES };
    }
    get lc_secrets() {
        return { ...super.lc_secrets, ...AZURE_SECRETS };
    }
    get lc_serializable_keys() {
        return [...super.lc_serializable_keys, ...AZURE_SERIALIZABLE_KEYS];
    }
    getLsParams(options) {
        const lsParams = super.getLsParams(options);
        lsParams.ls_provider = "azure";
        return lsParams;
    }
    constructor(fields) {
        super(fields);
        // Define each Azure field with the same descriptor the compiler emits
        // for a class field, then populate them from `fields`/env vars.
        for (const field of [
            "azureOpenAIApiVersion",
            "azureOpenAIApiKey",
            "azureADTokenProvider",
            "azureOpenAIApiInstanceName",
            "azureOpenAIApiDeploymentName",
            "azureOpenAIBasePath",
            "azureOpenAIEndpoint",
        ]) {
            Object.defineProperty(this, field, {
                enumerable: true,
                configurable: true,
                writable: true,
                value: void 0,
            });
        }
        _constructAzureFields.call(this, fields);
    }
    _getClientOptions(options) {
        return _getAzureClientOptions.call(this, options);
    }
    toJSON() {
        return _serializeAzureChat.call(this, super.toJSON());
    }
}
/**

@@ -429,10 +739,3 @@ * Azure OpenAI chat model integration.

...super.lc_aliases,
openAIApiKey: "openai_api_key",
openAIApiVersion: "openai_api_version",
openAIBasePath: "openai_api_base",
deploymentName: "deployment_name",
azureOpenAIEndpoint: "azure_endpoint",
azureOpenAIApiVersion: "openai_api_version",
azureOpenAIBasePath: "openai_api_base",
azureOpenAIApiDeploymentName: "deployment_name",
...AZURE_ALIASES,
};

@@ -443,21 +746,19 @@ }

...super.lc_secrets,
azureOpenAIApiKey: "AZURE_OPENAI_API_KEY",
...AZURE_SECRETS,
};
}
get lc_serializable_keys() {
return [
...super.lc_serializable_keys,
"azureOpenAIApiKey",
"azureOpenAIApiVersion",
"azureOpenAIBasePath",
"azureOpenAIEndpoint",
"azureOpenAIApiInstanceName",
"azureOpenAIApiDeploymentName",
"deploymentName",
"openAIApiKey",
"openAIApiVersion",
];
return [...super.lc_serializable_keys, ...AZURE_SERIALIZABLE_KEYS];
}
getLsParams(options) {
const params = super.getLsParams(options);
params.ls_provider = "azure";
return params;
}
constructor(fields) {
super(fields);
super({
...fields,
completions: new AzureChatOpenAICompletions(fields),
responses: new AzureChatOpenAIResponses(fields),
});
Object.defineProperty(this, "azureOpenAIApiVersion", {

@@ -505,141 +806,6 @@ enumerable: true,

});
this.azureOpenAIApiKey =
fields?.azureOpenAIApiKey ??
fields?.openAIApiKey ??
fields?.apiKey ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_KEY");
this.azureOpenAIApiInstanceName =
fields?.azureOpenAIApiInstanceName ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_INSTANCE_NAME");
this.azureOpenAIApiDeploymentName =
fields?.azureOpenAIApiDeploymentName ??
fields?.deploymentName ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_DEPLOYMENT_NAME");
this.azureOpenAIApiVersion =
fields?.azureOpenAIApiVersion ??
fields?.openAIApiVersion ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_API_VERSION");
this.azureOpenAIBasePath =
fields?.azureOpenAIBasePath ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_BASE_PATH");
this.azureOpenAIEndpoint =
fields?.azureOpenAIEndpoint ??
(0, env_1.getEnvironmentVariable)("AZURE_OPENAI_ENDPOINT");
this.azureADTokenProvider = fields?.azureADTokenProvider;
if (!this.azureOpenAIApiKey && !this.apiKey && !this.azureADTokenProvider) {
throw new Error("Azure OpenAI API key or Token Provider not found");
}
_constructAzureFields.call(this, fields);
}
getLsParams(options) {
const params = super.getLsParams(options);
params.ls_provider = "azure";
return params;
}
/** @internal */
_getClientOptions(options) {
if (!this.client) {
const openAIEndpointConfig = {
azureOpenAIApiDeploymentName: this.azureOpenAIApiDeploymentName,
azureOpenAIApiInstanceName: this.azureOpenAIApiInstanceName,
azureOpenAIApiKey: this.azureOpenAIApiKey,
azureOpenAIBasePath: this.azureOpenAIBasePath,
azureADTokenProvider: this.azureADTokenProvider,
baseURL: this.clientConfig.baseURL,
azureOpenAIEndpoint: this.azureOpenAIEndpoint,
};
const endpoint = (0, azure_js_1.getEndpoint)(openAIEndpointConfig);
const params = {
...this.clientConfig,
baseURL: endpoint,
timeout: this.timeout,
maxRetries: 0,
};
if (!this.azureADTokenProvider) {
params.apiKey = openAIEndpointConfig.azureOpenAIApiKey;
}
if (!params.baseURL) {
delete params.baseURL;
}
let env = (0, env_1.getEnv)();
if (env === "node" || env === "deno") {
env = `(${env}/${process.version}; ${process.platform}; ${process.arch})`;
}
const defaultHeaders = (0, headers_js_1.normalizeHeaders)(params.defaultHeaders);
params.defaultHeaders = {
...params.defaultHeaders,
"User-Agent": defaultHeaders["User-Agent"]
? `langchainjs-azure-openai/2.0.0 (${env})${defaultHeaders["User-Agent"]}`
: `langchainjs-azure-openai/2.0.0 (${env})`,
};
this.client = new openai_1.AzureOpenAI({
apiVersion: this.azureOpenAIApiVersion,
azureADTokenProvider: this.azureADTokenProvider,
deployment: this.azureOpenAIApiDeploymentName,
...params,
});
}
const requestOptions = {
...this.clientConfig,
...options,
};
if (this.azureOpenAIApiKey) {
requestOptions.headers = {
"api-key": this.azureOpenAIApiKey,
...requestOptions.headers,
};
requestOptions.query = {
"api-version": this.azureOpenAIApiVersion,
...requestOptions.query,
};
}
return requestOptions;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
toJSON() {
const json = super.toJSON();
function isRecord(obj) {
return typeof obj === "object" && obj != null;
}
if (isRecord(json) && isRecord(json.kwargs)) {
delete json.kwargs.azure_openai_base_path;
delete json.kwargs.azure_openai_api_deployment_name;
delete json.kwargs.azure_openai_api_key;
delete json.kwargs.azure_openai_api_version;
delete json.kwargs.azure_open_ai_base_path;
if (!json.kwargs.azure_endpoint && this.azureOpenAIEndpoint) {
json.kwargs.azure_endpoint = this.azureOpenAIEndpoint;
}
if (!json.kwargs.azure_endpoint && this.azureOpenAIBasePath) {
const parts = this.azureOpenAIBasePath.split("/openai/deployments/");
if (parts.length === 2 && parts[0].startsWith("http")) {
const [endpoint] = parts;
json.kwargs.azure_endpoint = endpoint;
}
}
if (!json.kwargs.azure_endpoint && this.azureOpenAIApiInstanceName) {
json.kwargs.azure_endpoint = `https://${this.azureOpenAIApiInstanceName}.openai.azure.com/`;
}
if (!json.kwargs.deployment_name && this.azureOpenAIApiDeploymentName) {
json.kwargs.deployment_name = this.azureOpenAIApiDeploymentName;
}
if (!json.kwargs.deployment_name && this.azureOpenAIBasePath) {
const parts = this.azureOpenAIBasePath.split("/openai/deployments/");
if (parts.length === 2) {
const [, deployment] = parts;
json.kwargs.deployment_name = deployment;
}
}
if (json.kwargs.azure_endpoint &&
json.kwargs.deployment_name &&
json.kwargs.openai_api_base) {
delete json.kwargs.openai_api_base;
}
if (json.kwargs.azure_openai_api_instance_name &&
json.kwargs.azure_endpoint) {
delete json.kwargs.azure_openai_api_instance_name;
}
}
return json;
}
withStructuredOutput(outputSchema, config) {
_getStructuredOutputMethod(config) {
const ensuredConfig = { ...config };

@@ -649,8 +815,11 @@ // Not all Azure gpt-4o deployments models support jsonSchema yet

if (ensuredConfig?.method === undefined) {
ensuredConfig.method = "functionCalling";
return "functionCalling";
}
}
return super.withStructuredOutput(outputSchema, ensuredConfig);
return super._getStructuredOutputMethod(ensuredConfig);
}
toJSON() {
return _serializeAzureChat.call(this, super.toJSON());
}
}
exports.AzureChatOpenAI = AzureChatOpenAI;

@@ -1,10 +0,14 @@

import { type ClientOptions } from "openai";
import { LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import { BaseLanguageModelInput, StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
import { BaseMessage } from "@langchain/core/messages";
import { Runnable } from "@langchain/core/runnables";
import { InteropZodType } from "@langchain/core/utils/types";
import { ChatOpenAI } from "../chat_models.js";
import { AzureOpenAIInput, OpenAIChatInput, OpenAICoreRequestOptions } from "../types.js";
import { Serialized } from "@langchain/core/load/serializable";
import { LangSmithParams } from "@langchain/core/language_models/chat_models";
import { StructuredOutputMethodOptions } from "@langchain/core/language_models/base";
import { BaseChatOpenAIFields, ChatOpenAI, ChatOpenAICallOptions } from "../chat_models.js";
import { AzureOpenAIChatInput, AzureOpenAIInput } from "../types.js";
export type { AzureOpenAIInput };
interface AzureChatOpenAIFields extends BaseChatOpenAIFields, Partial<AzureOpenAIChatInput> {
/**
* Whether to use the responses API for all requests. If `false` the responses API will be used
* only when required in order to fulfill the request.
*/
useResponsesApi?: boolean;
}
/**

@@ -422,3 +426,3 @@ * Azure OpenAI chat model integration.

*/
export declare class AzureChatOpenAI extends ChatOpenAI {
export declare class AzureChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends ChatOpenAI<CallOptions> implements Partial<AzureOpenAIChatInput> {
azureOpenAIApiVersion?: string;

@@ -437,23 +441,7 @@ azureOpenAIApiKey?: string;

get lc_serializable_keys(): string[];
constructor(fields?: Partial<OpenAIChatInput> & Partial<AzureOpenAIInput> & {
openAIApiKey?: string;
openAIApiVersion?: string;
openAIBasePath?: string;
deploymentName?: string;
} & BaseChatModelParams & {
configuration?: ClientOptions;
});
getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
constructor(fields?: AzureChatOpenAIFields);
/** @internal */
_getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
toJSON(): any;
withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
raw: BaseMessage;
parsed: RunOutput;
}>;
withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
raw: BaseMessage;
parsed: RunOutput;
}>;
_getStructuredOutputMethod(config: StructuredOutputMethodOptions<boolean>): string | undefined;
toJSON(): Serialized;
}
import { AzureOpenAI as AzureOpenAIClient } from "openai";
import { getEnv, getEnvironmentVariable } from "@langchain/core/utils/env";
import { ChatOpenAI } from "../chat_models.js";
import { ChatOpenAI, ChatOpenAICompletions, ChatOpenAIResponses, } from "../chat_models.js";
import { getEndpoint } from "../utils/azure.js";
import { normalizeHeaders } from "../utils/headers.js";
// Serialization aliases (ESM build): constructor field name -> snake_case
// key used in the serialized form. Legacy `openAI*` names and their
// `azureOpenAI*` counterparts intentionally map to the same serialized keys.
const AZURE_ALIASES = {
openAIApiKey: "openai_api_key",
openAIApiVersion: "openai_api_version",
openAIBasePath: "openai_api_base",
deploymentName: "deployment_name",
azureOpenAIEndpoint: "azure_endpoint",
azureOpenAIApiVersion: "openai_api_version",
azureOpenAIBasePath: "openai_api_base",
azureOpenAIApiDeploymentName: "deployment_name",
};
// Secret fields: field name -> environment variable that can restore the
// value when a serialized model is loaded.
const AZURE_SECRETS = {
azureOpenAIApiKey: "AZURE_OPENAI_API_KEY",
};
// Constructor fields included in serialization, in addition to the base
// class's own serializable keys (spread into `lc_serializable_keys` below).
const AZURE_SERIALIZABLE_KEYS = [
"azureOpenAIApiKey",
"azureOpenAIApiVersion",
"azureOpenAIBasePath",
"azureOpenAIEndpoint",
"azureOpenAIApiInstanceName",
"azureOpenAIApiDeploymentName",
"deploymentName",
"openAIApiKey",
"openAIApiVersion",
];
function _constructAzureFields(fields) {
this.azureOpenAIApiKey =
fields?.azureOpenAIApiKey ??
fields?.openAIApiKey ??
fields?.apiKey ??
getEnvironmentVariable("AZURE_OPENAI_API_KEY");
this.azureOpenAIApiInstanceName =
fields?.azureOpenAIApiInstanceName ??
getEnvironmentVariable("AZURE_OPENAI_API_INSTANCE_NAME");
this.azureOpenAIApiDeploymentName =
fields?.azureOpenAIApiDeploymentName ??
fields?.deploymentName ??
getEnvironmentVariable("AZURE_OPENAI_API_DEPLOYMENT_NAME");
this.azureOpenAIApiVersion =
fields?.azureOpenAIApiVersion ??
fields?.openAIApiVersion ??
getEnvironmentVariable("AZURE_OPENAI_API_VERSION");
this.azureOpenAIBasePath =
fields?.azureOpenAIBasePath ??
getEnvironmentVariable("AZURE_OPENAI_BASE_PATH");
this.azureOpenAIEndpoint =
fields?.azureOpenAIEndpoint ??
getEnvironmentVariable("AZURE_OPENAI_ENDPOINT");
this.azureADTokenProvider = fields?.azureADTokenProvider;
if (!this.azureOpenAIApiKey && !this.apiKey && !this.azureADTokenProvider) {
throw new Error("Azure OpenAI API key or Token Provider not found");
}
}
// Build the per-request options handed to the OpenAI SDK, creating and
// caching the AzureOpenAI client on first call.
// NOTE(review): assumes `this` carries the fields set by
// `_constructAzureFields` plus `clientConfig`/`timeout`/`client` — it is
// `.call()`ed from the Azure chat model classes below.
function _getAzureClientOptions(options) {
if (!this.client) {
// Lazily construct the client once; subsequent calls reuse `this.client`.
const openAIEndpointConfig = {
azureOpenAIApiDeploymentName: this.azureOpenAIApiDeploymentName,
azureOpenAIApiInstanceName: this.azureOpenAIApiInstanceName,
azureOpenAIApiKey: this.azureOpenAIApiKey,
azureOpenAIBasePath: this.azureOpenAIBasePath,
azureADTokenProvider: this.azureADTokenProvider,
baseURL: this.clientConfig.baseURL,
azureOpenAIEndpoint: this.azureOpenAIEndpoint,
};
const endpoint = getEndpoint(openAIEndpointConfig);
const params = {
...this.clientConfig,
// SDK-level retries are disabled here.
// NOTE(review): presumably retries are handled by the base chat model —
// confirm before changing.
baseURL: endpoint,
timeout: this.timeout,
maxRetries: 0,
};
if (!this.azureADTokenProvider) {
// Only pass an API key when not authenticating via Azure AD tokens.
params.apiKey = openAIEndpointConfig.azureOpenAIApiKey;
}
if (!params.baseURL) {
// Let the SDK apply its own default URL resolution.
delete params.baseURL;
}
let env = getEnv();
if (env === "node" || env === "deno") {
// Expand runtime info for the User-Agent, e.g. "(node/v20.0.0; linux; x64)".
env = `(${env}/${process.version}; ${process.platform}; ${process.arch})`;
}
const defaultHeaders = normalizeHeaders(params.defaultHeaders);
params.defaultHeaders = {
...params.defaultHeaders,
// Prefix the langchain marker onto any caller-supplied User-Agent.
// NOTE(review): there is no separator before the caller's UA string —
// confirm the direct concatenation is intentional.
"User-Agent": defaultHeaders["User-Agent"]
? `langchainjs-azure-openai/2.0.0 (${env})${defaultHeaders["User-Agent"]}`
: `langchainjs-azure-openai/2.0.0 (${env})`,
};
this.client = new AzureOpenAIClient({
apiVersion: this.azureOpenAIApiVersion,
azureADTokenProvider: this.azureADTokenProvider,
deployment: this.azureOpenAIApiDeploymentName,
...params,
});
}
const requestOptions = {
...this.clientConfig,
...options,
};
if (this.azureOpenAIApiKey) {
// Azure key auth travels as the `api-key` header plus an `api-version`
// query parameter; explicit caller-provided values take precedence.
requestOptions.headers = {
"api-key": this.azureOpenAIApiKey,
...requestOptions.headers,
};
requestOptions.query = {
"api-version": this.azureOpenAIApiVersion,
...requestOptions.query,
};
}
return requestOptions;
}
// Post-process a serialized chat model so Azure credentials never leak and
// legacy Azure fields are normalized onto `azure_endpoint`/`deployment_name`.
// Must be `.call()`ed with the model instance providing the azure* fields.
function _serializeAzureChat(input) {
    const json = input;
    const isRecord = (obj) => typeof obj === "object" && obj != null;
    if (!isRecord(json) || !isRecord(json.kwargs)) {
        return json;
    }
    const { kwargs } = json;
    // Strip raw Azure connection details (including the API key).
    for (const secretKey of [
        "azure_openai_base_path",
        "azure_openai_api_deployment_name",
        "azure_openai_api_key",
        "azure_openai_api_version",
        "azure_open_ai_base_path",
    ]) {
        delete kwargs[secretKey];
    }
    // Derive `azure_endpoint`: explicit endpoint wins, then the base path's
    // origin, then the instance-name convention URL.
    if (!kwargs.azure_endpoint && this.azureOpenAIEndpoint) {
        kwargs.azure_endpoint = this.azureOpenAIEndpoint;
    }
    if (!kwargs.azure_endpoint && this.azureOpenAIBasePath) {
        const segments = this.azureOpenAIBasePath.split("/openai/deployments/");
        if (segments.length === 2 && segments[0].startsWith("http")) {
            kwargs.azure_endpoint = segments[0];
        }
    }
    if (!kwargs.azure_endpoint && this.azureOpenAIApiInstanceName) {
        kwargs.azure_endpoint = `https://${this.azureOpenAIApiInstanceName}.openai.azure.com/`;
    }
    // Derive `deployment_name` from the explicit field or the base path suffix.
    if (!kwargs.deployment_name && this.azureOpenAIApiDeploymentName) {
        kwargs.deployment_name = this.azureOpenAIApiDeploymentName;
    }
    if (!kwargs.deployment_name && this.azureOpenAIBasePath) {
        const segments = this.azureOpenAIBasePath.split("/openai/deployments/");
        if (segments.length === 2) {
            kwargs.deployment_name = segments[1];
        }
    }
    // `openai_api_base` is redundant once both Azure fields are present.
    if (kwargs.azure_endpoint && kwargs.deployment_name && kwargs.openai_api_base) {
        delete kwargs.openai_api_base;
    }
    if (kwargs.azure_openai_api_instance_name && kwargs.azure_endpoint) {
        delete kwargs.azure_openai_api_instance_name;
    }
    return json;
}
// Azure-flavored variant of ChatOpenAIResponses: swaps in Azure credential
// handling, serialization aliases/secrets, and the Azure SDK client.
class AzureChatOpenAIResponses extends ChatOpenAIResponses {
    _llmType() {
        return "azure_openai";
    }
    get lc_aliases() {
        return { ...super.lc_aliases, ...AZURE_ALIASES };
    }
    get lc_secrets() {
        return { ...super.lc_secrets, ...AZURE_SECRETS };
    }
    get lc_serializable_keys() {
        return [...super.lc_serializable_keys, ...AZURE_SERIALIZABLE_KEYS];
    }
    getLsParams(options) {
        const lsParams = super.getLsParams(options);
        lsParams.ls_provider = "azure";
        return lsParams;
    }
    constructor(fields) {
        super(fields);
        // Define each Azure field with the same descriptor the compiler emits
        // for a class field, then populate them from `fields`/env vars.
        for (const field of [
            "azureOpenAIApiVersion",
            "azureOpenAIApiKey",
            "azureADTokenProvider",
            "azureOpenAIApiInstanceName",
            "azureOpenAIApiDeploymentName",
            "azureOpenAIBasePath",
            "azureOpenAIEndpoint",
        ]) {
            Object.defineProperty(this, field, {
                enumerable: true,
                configurable: true,
                writable: true,
                value: void 0,
            });
        }
        _constructAzureFields.call(this, fields);
    }
    _getClientOptions(options) {
        return _getAzureClientOptions.call(this, options);
    }
    toJSON() {
        return _serializeAzureChat.call(this, super.toJSON());
    }
}
// Azure-flavored variant of ChatOpenAICompletions; mirrors
// AzureChatOpenAIResponses but targets the completions API surface.
class AzureChatOpenAICompletions extends ChatOpenAICompletions {
    _llmType() {
        return "azure_openai";
    }
    get lc_aliases() {
        return { ...super.lc_aliases, ...AZURE_ALIASES };
    }
    get lc_secrets() {
        return { ...super.lc_secrets, ...AZURE_SECRETS };
    }
    get lc_serializable_keys() {
        return [...super.lc_serializable_keys, ...AZURE_SERIALIZABLE_KEYS];
    }
    getLsParams(options) {
        const lsParams = super.getLsParams(options);
        lsParams.ls_provider = "azure";
        return lsParams;
    }
    constructor(fields) {
        super(fields);
        // Define each Azure field with the same descriptor the compiler emits
        // for a class field, then populate them from `fields`/env vars.
        for (const field of [
            "azureOpenAIApiVersion",
            "azureOpenAIApiKey",
            "azureADTokenProvider",
            "azureOpenAIApiInstanceName",
            "azureOpenAIApiDeploymentName",
            "azureOpenAIBasePath",
            "azureOpenAIEndpoint",
        ]) {
            Object.defineProperty(this, field, {
                enumerable: true,
                configurable: true,
                writable: true,
                value: void 0,
            });
        }
        _constructAzureFields.call(this, fields);
    }
    _getClientOptions(options) {
        return _getAzureClientOptions.call(this, options);
    }
    toJSON() {
        return _serializeAzureChat.call(this, super.toJSON());
    }
}
/**

@@ -425,10 +735,3 @@ * Azure OpenAI chat model integration.

...super.lc_aliases,
openAIApiKey: "openai_api_key",
openAIApiVersion: "openai_api_version",
openAIBasePath: "openai_api_base",
deploymentName: "deployment_name",
azureOpenAIEndpoint: "azure_endpoint",
azureOpenAIApiVersion: "openai_api_version",
azureOpenAIBasePath: "openai_api_base",
azureOpenAIApiDeploymentName: "deployment_name",
...AZURE_ALIASES,
};

@@ -439,21 +742,19 @@ }

...super.lc_secrets,
azureOpenAIApiKey: "AZURE_OPENAI_API_KEY",
...AZURE_SECRETS,
};
}
get lc_serializable_keys() {
return [
...super.lc_serializable_keys,
"azureOpenAIApiKey",
"azureOpenAIApiVersion",
"azureOpenAIBasePath",
"azureOpenAIEndpoint",
"azureOpenAIApiInstanceName",
"azureOpenAIApiDeploymentName",
"deploymentName",
"openAIApiKey",
"openAIApiVersion",
];
return [...super.lc_serializable_keys, ...AZURE_SERIALIZABLE_KEYS];
}
getLsParams(options) {
const params = super.getLsParams(options);
params.ls_provider = "azure";
return params;
}
constructor(fields) {
super(fields);
super({
...fields,
completions: new AzureChatOpenAICompletions(fields),
responses: new AzureChatOpenAIResponses(fields),
});
Object.defineProperty(this, "azureOpenAIApiVersion", {

@@ -501,141 +802,6 @@ enumerable: true,

});
this.azureOpenAIApiKey =
fields?.azureOpenAIApiKey ??
fields?.openAIApiKey ??
fields?.apiKey ??
getEnvironmentVariable("AZURE_OPENAI_API_KEY");
this.azureOpenAIApiInstanceName =
fields?.azureOpenAIApiInstanceName ??
getEnvironmentVariable("AZURE_OPENAI_API_INSTANCE_NAME");
this.azureOpenAIApiDeploymentName =
fields?.azureOpenAIApiDeploymentName ??
fields?.deploymentName ??
getEnvironmentVariable("AZURE_OPENAI_API_DEPLOYMENT_NAME");
this.azureOpenAIApiVersion =
fields?.azureOpenAIApiVersion ??
fields?.openAIApiVersion ??
getEnvironmentVariable("AZURE_OPENAI_API_VERSION");
this.azureOpenAIBasePath =
fields?.azureOpenAIBasePath ??
getEnvironmentVariable("AZURE_OPENAI_BASE_PATH");
this.azureOpenAIEndpoint =
fields?.azureOpenAIEndpoint ??
getEnvironmentVariable("AZURE_OPENAI_ENDPOINT");
this.azureADTokenProvider = fields?.azureADTokenProvider;
if (!this.azureOpenAIApiKey && !this.apiKey && !this.azureADTokenProvider) {
throw new Error("Azure OpenAI API key or Token Provider not found");
}
_constructAzureFields.call(this, fields);
}
getLsParams(options) {
const params = super.getLsParams(options);
params.ls_provider = "azure";
return params;
}
/** @internal */
_getClientOptions(options) {
if (!this.client) {
const openAIEndpointConfig = {
azureOpenAIApiDeploymentName: this.azureOpenAIApiDeploymentName,
azureOpenAIApiInstanceName: this.azureOpenAIApiInstanceName,
azureOpenAIApiKey: this.azureOpenAIApiKey,
azureOpenAIBasePath: this.azureOpenAIBasePath,
azureADTokenProvider: this.azureADTokenProvider,
baseURL: this.clientConfig.baseURL,
azureOpenAIEndpoint: this.azureOpenAIEndpoint,
};
const endpoint = getEndpoint(openAIEndpointConfig);
const params = {
...this.clientConfig,
baseURL: endpoint,
timeout: this.timeout,
maxRetries: 0,
};
if (!this.azureADTokenProvider) {
params.apiKey = openAIEndpointConfig.azureOpenAIApiKey;
}
if (!params.baseURL) {
delete params.baseURL;
}
let env = getEnv();
if (env === "node" || env === "deno") {
env = `(${env}/${process.version}; ${process.platform}; ${process.arch})`;
}
const defaultHeaders = normalizeHeaders(params.defaultHeaders);
params.defaultHeaders = {
...params.defaultHeaders,
"User-Agent": defaultHeaders["User-Agent"]
? `langchainjs-azure-openai/2.0.0 (${env})${defaultHeaders["User-Agent"]}`
: `langchainjs-azure-openai/2.0.0 (${env})`,
};
this.client = new AzureOpenAIClient({
apiVersion: this.azureOpenAIApiVersion,
azureADTokenProvider: this.azureADTokenProvider,
deployment: this.azureOpenAIApiDeploymentName,
...params,
});
}
const requestOptions = {
...this.clientConfig,
...options,
};
if (this.azureOpenAIApiKey) {
requestOptions.headers = {
"api-key": this.azureOpenAIApiKey,
...requestOptions.headers,
};
requestOptions.query = {
"api-version": this.azureOpenAIApiVersion,
...requestOptions.query,
};
}
return requestOptions;
}
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    /**
     * Serialize this model, normalizing the Azure-specific kwargs so the
     * serialized form uses the canonical `azure_endpoint` / `deployment_name`
     * keys instead of the assorted legacy aliases.
     *
     * Order matters throughout: the legacy keys are deleted first, then each
     * canonical key is filled from the highest-priority source that is set,
     * and finally redundant keys are removed. Do not reorder these steps.
     *
     * @returns The serialized representation from `super.toJSON()`, with
     *   `kwargs` rewritten in place when it is a plain object.
     */
    toJSON() {
        const json = super.toJSON();
        // Narrow to a non-null object so the kwargs rewriting below is safe.
        function isRecord(obj) {
            return typeof obj === "object" && obj != null;
        }
        if (isRecord(json) && isRecord(json.kwargs)) {
            // Drop legacy/alias snake_case keys; their values are re-derived
            // below from the instance fields under the canonical key names.
            delete json.kwargs.azure_openai_base_path;
            delete json.kwargs.azure_openai_api_deployment_name;
            delete json.kwargs.azure_openai_api_key;
            delete json.kwargs.azure_openai_api_version;
            delete json.kwargs.azure_open_ai_base_path;
            // Fill `azure_endpoint` from the first available source, in
            // priority order: explicit endpoint, base path, instance name.
            if (!json.kwargs.azure_endpoint && this.azureOpenAIEndpoint) {
                json.kwargs.azure_endpoint = this.azureOpenAIEndpoint;
            }
            if (!json.kwargs.azure_endpoint && this.azureOpenAIBasePath) {
                // A base path like "https://host/openai/deployments/<name>"
                // splits into [endpoint, deployment]; only accept the endpoint
                // half when it looks like an absolute URL.
                const parts = this.azureOpenAIBasePath.split("/openai/deployments/");
                if (parts.length === 2 && parts[0].startsWith("http")) {
                    const [endpoint] = parts;
                    json.kwargs.azure_endpoint = endpoint;
                }
            }
            if (!json.kwargs.azure_endpoint && this.azureOpenAIApiInstanceName) {
                // Last resort: derive the endpoint from the instance name using
                // the standard Azure OpenAI host pattern.
                json.kwargs.azure_endpoint = `https://${this.azureOpenAIApiInstanceName}.openai.azure.com/`;
            }
            // Fill `deployment_name`: prefer the explicit field, otherwise
            // recover it from the deployments segment of the base path.
            if (!json.kwargs.deployment_name && this.azureOpenAIApiDeploymentName) {
                json.kwargs.deployment_name = this.azureOpenAIApiDeploymentName;
            }
            if (!json.kwargs.deployment_name && this.azureOpenAIBasePath) {
                const parts = this.azureOpenAIBasePath.split("/openai/deployments/");
                if (parts.length === 2) {
                    const [, deployment] = parts;
                    json.kwargs.deployment_name = deployment;
                }
            }
            // Once the canonical endpoint + deployment are present, the
            // OpenAI-style base URL is redundant — remove it.
            if (json.kwargs.azure_endpoint &&
                json.kwargs.deployment_name &&
                json.kwargs.openai_api_base) {
                delete json.kwargs.openai_api_base;
            }
            // Likewise, the instance name is redundant when an explicit
            // endpoint is serialized.
            if (json.kwargs.azure_openai_api_instance_name &&
                json.kwargs.azure_endpoint) {
                delete json.kwargs.azure_openai_api_instance_name;
            }
        }
        return json;
    }
withStructuredOutput(outputSchema, config) {
_getStructuredOutputMethod(config) {
const ensuredConfig = { ...config };

@@ -645,7 +811,10 @@ // Not all Azure gpt-4o deployments models support jsonSchema yet

if (ensuredConfig?.method === undefined) {
ensuredConfig.method = "functionCalling";
return "functionCalling";
}
}
return super.withStructuredOutput(outputSchema, ensuredConfig);
return super._getStructuredOutputMethod(ensuredConfig);
}
toJSON() {
return _serializeAzureChat.call(this, super.toJSON());
}
}

@@ -25,3 +25,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

export declare function _convertMessagesToOpenAIParams(messages: BaseMessage[], model?: string): OpenAIClient.Chat.Completions.ChatCompletionMessageParam[];
interface BaseChatOpenAICallOptions extends OpenAICallOptions, BaseFunctionCallOptions {
export interface BaseChatOpenAICallOptions extends OpenAICallOptions, BaseFunctionCallOptions {
/**

@@ -124,3 +124,3 @@ * A list of tools that the model may use to generate responses.

/** @internal */
declare abstract class BaseChatOpenAI<CallOptions extends BaseChatOpenAICallOptions> extends BaseChatModel<CallOptions, AIMessageChunk> implements Partial<OpenAIChatInput> {
export declare abstract class BaseChatOpenAI<CallOptions extends BaseChatOpenAICallOptions> extends BaseChatModel<CallOptions, AIMessageChunk> implements Partial<OpenAIChatInput> {
temperature?: number;

@@ -146,4 +146,6 @@ topP?: number;

__includeRawResponse?: boolean;
protected client: OpenAIClient;
protected clientConfig: ClientOptions;
/** @internal */
client: OpenAIClient;
/** @internal */
clientConfig: ClientOptions;
/**

@@ -225,2 +227,4 @@ * Whether the model supports the `strict` argument when passing in tools.

protected _getEstimatedTokenCountFromPrompt(messages: BaseMessage[], functions?: OpenAIClient.Chat.ChatCompletionCreateParams.Function[], function_call?: "none" | "auto" | OpenAIClient.Chat.ChatCompletionFunctionCallOption): Promise<number>;
/** @internal */
protected _getStructuredOutputMethod(config: StructuredOutputMethodOptions<boolean>): string | undefined;
withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;

@@ -243,3 +247,3 @@ withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: InteropZodType<RunOutput> | Record<string, any>, config?: StructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {

type ResponsesParseInvoke = ExcludeController<Awaited<ReturnType<ResponsesParse>>>;
interface ChatOpenAIResponsesCallOptions extends BaseChatOpenAICallOptions {
export interface ChatOpenAIResponsesCallOptions extends BaseChatOpenAICallOptions {
/**

@@ -298,3 +302,3 @@ * Configuration options for a text response from the model. Can be plain text or

}
interface ChatOpenAICompletionsCallOptions extends BaseChatOpenAICallOptions {
export interface ChatOpenAICompletionsCallOptions extends BaseChatOpenAICallOptions {
}

@@ -327,2 +331,12 @@ type ChatCompletionsInvocationParams = Omit<OpenAIClient.Chat.Completions.ChatCompletionCreateParams, "messages">;

useResponsesApi?: boolean;
/**
* The completions chat instance
* @internal
*/
completions?: ChatOpenAICompletions;
/**
* The responses chat instance
* @internal
*/
responses?: ChatOpenAIResponses;
}

@@ -867,3 +881,3 @@ /**

*/
export declare class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends BaseChatOpenAI<CallOptions> implements Partial<OpenAIChatInput> {
export declare class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends BaseChatOpenAI<CallOptions> {
/**

@@ -874,4 +888,4 @@ * Whether to use the responses API for all requests. If `false` the responses API will be used

useResponsesApi: boolean;
private responses;
private completions;
protected responses: ChatOpenAIResponses;
protected completions: ChatOpenAICompletions;
get lc_serializable_keys(): string[];

@@ -878,0 +892,0 @@ constructor(fields?: ChatOpenAIFields);

@@ -158,3 +158,3 @@ import type { OpenAI as OpenAIClient } from "openai";

}
export declare interface AzureOpenAIInput {
export interface AzureOpenAIInput {
/**

@@ -217,2 +217,8 @@ * API version to use when making requests to Azure OpenAI.

}
export interface AzureOpenAIChatInput extends OpenAIChatInput, AzureOpenAIInput {
openAIApiKey?: string;
openAIApiVersion?: string;
openAIBasePath?: string;
deploymentName?: string;
}
type ChatOpenAIResponseFormatJSONSchema = Omit<ResponseFormatJSONSchema, "json_schema"> & {

@@ -219,0 +225,0 @@ json_schema: Omit<ResponseFormatJSONSchema["json_schema"], "schema"> & {

{
"name": "@langchain/openai",
"version": "0.6.2",
"version": "0.6.3",
"description": "OpenAI integrations for LangChain.js",

@@ -5,0 +5,0 @@ "type": "module",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display