
@langchain/openai

@langchain/openai npm package: compare versions

Comparing version 0.0.28 to 0.0.29


dist/azure/chat_models.d.ts
import { type ClientOptions } from "openai";
import { type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
import { ChatOpenAI } from "../chat_models.js";
import { AzureOpenAIInput, LegacyOpenAIInput, OpenAIChatInput } from "../types.js";
import { AzureOpenAIInput, LegacyOpenAIInput, OpenAIChatInput, OpenAICoreRequestOptions } from "../types.js";
export declare class AzureChatOpenAI extends ChatOpenAI {

@@ -16,3 +16,4 @@ _llmType(): string;

});
protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
toJSON(): any;
}

@@ -0,2 +1,4 @@

import { AzureOpenAI as AzureOpenAIClient } from "openai";
import { ChatOpenAI } from "../chat_models.js";
import { getEndpoint } from "../utils/azure.js";
export class AzureChatOpenAI extends ChatOpenAI {

@@ -14,11 +16,4 @@ _llmType() {

constructor(fields) {
// assume the base URL does not contain "openai" nor "deployments" prefix
let basePath = fields?.openAIBasePath ?? "";
if (!basePath.endsWith("/"))
basePath += "/";
if (!basePath.endsWith("openai/deployments"))
basePath += "openai/deployments";
const newFields = fields ? { ...fields } : fields;
if (newFields) {
newFields.azureOpenAIBasePath = basePath;
newFields.azureOpenAIApiDeploymentName = newFields.deploymentName;

@@ -30,2 +25,47 @@ newFields.azureOpenAIApiKey = newFields.openAIApiKey;

}
_getClientOptions(options) {
if (!this.client) {
const openAIEndpointConfig = {
azureOpenAIApiDeploymentName: this.azureOpenAIApiDeploymentName,
azureOpenAIApiInstanceName: this.azureOpenAIApiInstanceName,
azureOpenAIApiKey: this.azureOpenAIApiKey,
azureOpenAIBasePath: this.azureOpenAIBasePath,
baseURL: this.clientConfig.baseURL,
};
const endpoint = getEndpoint(openAIEndpointConfig);
const params = {
...this.clientConfig,
baseURL: endpoint,
timeout: this.timeout,
maxRetries: 0,
};
if (!this.azureADTokenProvider) {
params.apiKey = openAIEndpointConfig.azureOpenAIApiKey;
}
if (!params.baseURL) {
delete params.baseURL;
}
this.client = new AzureOpenAIClient({
apiVersion: this.azureOpenAIApiVersion,
azureADTokenProvider: this.azureADTokenProvider,
deployment: this.azureOpenAIApiDeploymentName,
...params,
});
}
const requestOptions = {
...this.clientConfig,
...options,
};
if (this.azureOpenAIApiKey) {
requestOptions.headers = {
"api-key": this.azureOpenAIApiKey,
...requestOptions.headers,
};
requestOptions.query = {
"api-version": this.azureOpenAIApiVersion,
...requestOptions.query,
};
}
return requestOptions;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any

@@ -32,0 +72,0 @@ toJSON() {
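For orientation, here is a minimal sketch of how the reworked AzureChatOpenAI above might be configured with key-based auth. The field names come from the _getClientOptions body in this hunk; the import path assumes the class is re-exported from the package root, and every concrete value is a placeholder.

import { AzureChatOpenAI } from "@langchain/openai"; // assumes a root re-export

const chat = new AzureChatOpenAI({
  // azureOpenAIApiKey can also be supplied via the AZURE_OPENAI_API_KEY environment
  // variable (see the getEnvironmentVariable call further down in this diff).
  azureOpenAIApiInstanceName: "my-instance",          // placeholder resource name
  azureOpenAIApiDeploymentName: "my-chat-deployment", // placeholder deployment
  azureOpenAIApiVersion: "2024-02-01",                // placeholder API version
});

const response = await chat.invoke("Say hello");
console.log(response.content);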

import { type ClientOptions } from "openai";
import { type BaseLLMParams } from "@langchain/core/language_models/llms";
import { OpenAI } from "../llms.js";
import type { OpenAIInput, AzureOpenAIInput, LegacyOpenAIInput } from "../types.js";
import type { OpenAIInput, AzureOpenAIInput, OpenAICoreRequestOptions, LegacyOpenAIInput } from "../types.js";
export declare class AzureOpenAI extends OpenAI {

@@ -15,3 +15,4 @@ get lc_aliases(): Record<string, string>;

});
protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
toJSON(): any;
}

@@ -0,2 +1,4 @@

import { AzureOpenAI as AzureOpenAIClient } from "openai";
import { OpenAI } from "../llms.js";
import { getEndpoint } from "../utils/azure.js";
export class AzureOpenAI extends OpenAI {

@@ -11,11 +13,4 @@ get lc_aliases() {

constructor(fields) {
// assume the base URL does not contain "openai" nor "deployments" prefix
let basePath = fields?.openAIBasePath ?? "";
if (!basePath.endsWith("/"))
basePath += "/";
if (!basePath.endsWith("openai/deployments"))
basePath += "openai/deployments";
const newFields = fields ? { ...fields } : fields;
if (newFields) {
newFields.azureOpenAIBasePath = basePath;
newFields.azureOpenAIApiDeploymentName = newFields.deploymentName;

@@ -27,2 +22,46 @@ newFields.azureOpenAIApiKey = newFields.openAIApiKey;

}
_getClientOptions(options) {
if (!this.client) {
const openAIEndpointConfig = {
azureOpenAIApiDeploymentName: this.azureOpenAIApiDeploymentName,
azureOpenAIApiInstanceName: this.azureOpenAIApiInstanceName,
azureOpenAIApiKey: this.azureOpenAIApiKey,
azureOpenAIBasePath: this.azureOpenAIBasePath,
baseURL: this.clientConfig.baseURL,
};
const endpoint = getEndpoint(openAIEndpointConfig);
const params = {
...this.clientConfig,
baseURL: endpoint,
timeout: this.timeout,
maxRetries: 0,
};
if (!this.azureADTokenProvider) {
params.apiKey = openAIEndpointConfig.azureOpenAIApiKey;
}
if (!params.baseURL) {
delete params.baseURL;
}
this.client = new AzureOpenAIClient({
apiVersion: this.azureOpenAIApiVersion,
azureADTokenProvider: this.azureADTokenProvider,
...params,
});
}
const requestOptions = {
...this.clientConfig,
...options,
};
if (this.azureOpenAIApiKey) {
requestOptions.headers = {
"api-key": this.azureOpenAIApiKey,
...requestOptions.headers,
};
requestOptions.query = {
"api-version": this.azureOpenAIApiVersion,
...requestOptions.query,
};
}
return requestOptions;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any

@@ -29,0 +68,0 @@ toJSON() {
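The !this.azureADTokenProvider branch above is the key-based fallback; when a token provider is supplied, it is handed straight to the AzureOpenAIClient instead of an api-key. A minimal sketch of that Entra ID path, assuming @azure/identity (which this release adds to devDependencies, see the package.json hunk below) is available; resource names are placeholders.

import { DefaultAzureCredential, getBearerTokenProvider } from "@azure/identity";
import { AzureOpenAI } from "@langchain/openai";

// getBearerTokenProvider returns a () => Promise<string>, which is exactly the
// shape the new azureADTokenProvider field expects.
const azureADTokenProvider = getBearerTokenProvider(
  new DefaultAzureCredential(),
  "https://cognitiveservices.azure.com/.default"
);

const llm = new AzureOpenAI({
  azureADTokenProvider,                                       // no API key needed on this path
  azureOpenAIApiInstanceName: "my-instance",                  // placeholder
  azureOpenAIApiDeploymentName: "my-completions-deployment",  // placeholder
  azureOpenAIApiVersion: "2024-02-01",                        // placeholder
});

const text = await llm.invoke("Write one sentence about diffs");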

@@ -94,2 +94,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

azureOpenAIApiKey?: string;
azureADTokenProvider?: () => Promise<string>;
azureOpenAIApiInstanceName?: string;

@@ -99,4 +100,4 @@ azureOpenAIApiDeploymentName?: string;

organization?: string;
private client;
private clientConfig;
protected client: OpenAIClient;
protected clientConfig: ClientOptions;
constructor(fields?: Partial<OpenAIChatInput> & Partial<AzureOpenAIInput> & BaseChatModelParams & {

@@ -147,3 +148,3 @@ configuration?: ClientOptions & LegacyOpenAIInput;

completionWithRetry(request: OpenAIClient.Chat.ChatCompletionCreateParamsNonStreaming, options?: OpenAICoreRequestOptions): Promise<OpenAIClient.Chat.Completions.ChatCompletion>;
private _getClientOptions;
protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
_llmType(): string;

@@ -150,0 +151,0 @@ /** @ignore */
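The private-to-protected changes above (client, clientConfig, _getClientOptions) are what allow the new Azure classes, and in principle any subclass, to reuse and extend the base request plumbing. A hedged sketch of a hypothetical subclass, not taken from the package; it assumes OpenAICoreRequestOptions is re-exported from the root via ./types.js.

import { ChatOpenAI } from "@langchain/openai";
import type { OpenAICoreRequestOptions } from "@langchain/openai"; // assumed re-export

// Hypothetical subclass: decorate every request with an extra header by overriding
// the now-protected _getClientOptions (signature copied from the d.ts hunk above).
// "x-example-tag" is a made-up header for illustration.
class TaggedChatOpenAI extends ChatOpenAI {
  protected _getClientOptions(
    options: OpenAICoreRequestOptions | undefined
  ): OpenAICoreRequestOptions {
    const requestOptions = super._getClientOptions(options);
    requestOptions.headers = { "x-example-tag": "demo", ...requestOptions.headers };
    return requestOptions;
  }
}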

@@ -370,2 +370,8 @@ import { OpenAI as OpenAIClient } from "openai";

});
Object.defineProperty(this, "azureADTokenProvider", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "azureOpenAIApiInstanceName", {

@@ -415,4 +421,5 @@ enumerable: true,

getEnvironmentVariable("AZURE_OPENAI_API_KEY");
if (!this.azureOpenAIApiKey && !this.apiKey) {
throw new Error("OpenAI or Azure OpenAI API key not found");
this.azureADTokenProvider = fields?.azureADTokenProvider ?? undefined;
if (!this.azureOpenAIApiKey && !this.apiKey && !this.azureADTokenProvider) {
throw new Error("OpenAI or Azure OpenAI API key or Token Provider not found");
}

@@ -451,3 +458,3 @@ this.azureOpenAIApiInstanceName =

this.streaming = fields?.streaming ?? false;
if (this.azureOpenAIApiKey) {
if (this.azureOpenAIApiKey || this.azureADTokenProvider) {
if (!this.azureOpenAIApiInstanceName && !this.azureOpenAIBasePath) {

@@ -454,0 +461,0 @@ throw new Error("Azure OpenAI API instance name not found");

@@ -1,2 +0,2 @@

import { type ClientOptions } from "openai";
import { type ClientOptions, OpenAI as OpenAIClient } from "openai";
import { Embeddings, type EmbeddingsParams } from "@langchain/core/embeddings";

@@ -65,2 +65,3 @@ import { AzureOpenAIInput, LegacyOpenAIInput } from "./types.js";

azureOpenAIApiKey?: string;
azureADTokenProvider?: () => Promise<string>;
azureOpenAIApiInstanceName?: string;

@@ -70,4 +71,4 @@ azureOpenAIApiDeploymentName?: string;

organization?: string;
private client;
private clientConfig;
protected client: OpenAIClient;
protected clientConfig: ClientOptions;
constructor(fields?: Partial<OpenAIEmbeddingsParams> & Partial<AzureOpenAIInput> & {

@@ -106,3 +107,3 @@ verbose?: boolean;

*/
private embeddingWithRetry;
protected embeddingWithRetry(request: OpenAIClient.EmbeddingCreateParams): Promise<OpenAIClient.Embeddings.CreateEmbeddingResponse>;
}

@@ -79,2 +79,8 @@ import { OpenAI as OpenAIClient } from "openai";

});
Object.defineProperty(this, "azureADTokenProvider", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "azureOpenAIApiInstanceName", {

@@ -121,4 +127,5 @@ enumerable: true,

getEnvironmentVariable("AZURE_OPENAI_API_KEY");
if (!azureApiKey && !apiKey) {
throw new Error("OpenAI or Azure OpenAI API key not found");
this.azureADTokenProvider = fields?.azureADTokenProvider ?? undefined;
if (!azureApiKey && !apiKey && !this.azureADTokenProvider) {
throw new Error("OpenAI or Azure OpenAI API key or Token Provider not found");
}

@@ -152,3 +159,3 @@ const azureApiInstanceName = fieldsWithDefaults?.azureOpenAIApiInstanceName ??

this.azureOpenAIApiDeploymentName = azureApiDeploymentName;
if (this.azureOpenAIApiKey) {
if (this.azureOpenAIApiKey || this.azureADTokenProvider) {
if (!this.azureOpenAIApiInstanceName && !this.azureOpenAIBasePath) {

@@ -155,0 +162,0 @@ throw new Error("Azure OpenAI API instance name not found");

@@ -6,2 +6,3 @@ export { OpenAI as OpenAIClient, type ClientOptions, toFile } from "openai";

export * from "./azure/llms.js";
export * from "./azure/embeddings.js";
export * from "./embeddings.js";

@@ -8,0 +9,0 @@ export * from "./types.js";

@@ -6,2 +6,3 @@ export { OpenAI as OpenAIClient, toFile } from "openai";

export * from "./azure/llms.js";
export * from "./azure/embeddings.js";
export * from "./embeddings.js";

@@ -8,0 +9,0 @@ export * from "./types.js";
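These two index hunks (the .d.ts and .js entry points) widen the root export list with an additional azure module, so the Azure-specific classes can be imported straight from the package root rather than from deep paths. A small sketch; AzureOpenAI is the class declared in the azure hunks above, while AzureOpenAIEmbeddings is an assumed export name, since the azure embeddings diff itself is not rendered on this page.

// AzureOpenAI comes from the azure LLM module shown earlier in this diff;
// AzureOpenAIEmbeddings is an assumed name for the azure embeddings export.
import { AzureOpenAI, AzureOpenAIEmbeddings } from "@langchain/openai";

const embeddings = new AzureOpenAIEmbeddings({
  azureOpenAIApiInstanceName: "my-instance",                // placeholder
  azureOpenAIApiDeploymentName: "my-embedding-deployment",  // placeholder
  azureOpenAIApiVersion: "2024-02-01",                      // placeholder
});
const vector = await embeddings.embedQuery("hello world");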

@@ -70,2 +70,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

azureOpenAIApiKey?: string;
azureADTokenProvider?: () => Promise<string>;
azureOpenAIApiInstanceName?: string;

@@ -75,4 +76,4 @@ azureOpenAIApiDeploymentName?: string;

organization?: string;
private client;
private clientConfig;
protected client: OpenAIClient;
protected clientConfig: ClientOptions;
constructor(fields?: Partial<OpenAIInput> & Partial<AzureOpenAIInput> & BaseLLMParams & {

@@ -129,4 +130,4 @@ configuration?: ClientOptions & LegacyOpenAIInput;

*/
private _getClientOptions;
protected _getClientOptions(options: OpenAICoreRequestOptions | undefined): OpenAICoreRequestOptions;
_llmType(): string;
}

@@ -211,2 +211,8 @@ import { OpenAI as OpenAIClient } from "openai";

});
Object.defineProperty(this, "azureADTokenProvider", {
enumerable: true,
configurable: true,
writable: true,
value: void 0
});
Object.defineProperty(this, "azureOpenAIApiInstanceName", {

@@ -257,4 +263,5 @@ enumerable: true,

getEnvironmentVariable("AZURE_OPENAI_API_KEY");
if (!this.azureOpenAIApiKey && !this.apiKey) {
throw new Error("OpenAI or Azure OpenAI API key not found");
this.azureADTokenProvider = fields?.azureADTokenProvider ?? undefined;
if (!this.azureOpenAIApiKey && !this.apiKey && !this.azureADTokenProvider) {
throw new Error("OpenAI or Azure OpenAI API key or Token Provider not found");
}

@@ -298,3 +305,3 @@ this.azureOpenAIApiInstanceName =

}
if (this.azureOpenAIApiKey) {
if (this.azureOpenAIApiKey || this.azureADTokenProvider) {
if (!this.azureOpenAIApiInstanceName && !this.azureOpenAIBasePath) {

@@ -301,0 +308,0 @@ throw new Error("Azure OpenAI API instance name not found");

@@ -161,2 +161,7 @@ import type { OpenAI as OpenAIClient } from "openai";

azureOpenAIBasePath?: string;
/**
* A function that returns an access token for Microsoft Entra (formerly known as Azure Active Directory),
* which will be invoked on every request.
*/
azureADTokenProvider?: () => Promise<string>;
}
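Per the comment above, the provider is invoked on every request, so anything matching () => Promise<string> works. A minimal sketch using DefaultAzureCredential directly; pairing it with @azure/identity is an assumption here, though that package does appear in the devDependencies hunk below.

import { DefaultAzureCredential } from "@azure/identity";

const credential = new DefaultAzureCredential();

// Matches the azureADTokenProvider field: a zero-argument async function that
// resolves to a bearer token for the Azure OpenAI (Cognitive Services) scope.
const azureADTokenProvider: () => Promise<string> = async () => {
  const accessToken = await credential.getToken("https://cognitiveservices.azure.com/.default");
  return accessToken.token;
};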
{
"name": "@langchain/openai",
"version": "0.0.28",
"version": "0.0.29",
"description": "OpenAI integrations for LangChain.js",

@@ -44,3 +44,3 @@ "type": "module",

"js-tiktoken": "^1.0.7",
"openai": "^4.32.1",
"openai": "^4.41.1",
"zod": "^3.22.4",

@@ -50,2 +50,3 @@ "zod-to-json-schema": "^3.22.3"

"devDependencies": {
"@azure/identity": "^4.2.0",
"@jest/globals": "^29.5.0",

@@ -52,0 +53,0 @@ "@langchain/scripts": "~0.0",

