Socket
Socket
Sign in · Demo · Install

@langchain/openai

Package Overview
Dependencies
Maintainers
0
Versions
62
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version 0.1.3 to 0.2.0

9

dist/chat_models.d.ts

@@ -32,2 +32,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

* Additional options to pass to streamed completions.
* If provided takes precedence over "streamUsage" set at initialization time.
*/

@@ -51,10 +52,9 @@ stream_options?: {

*
* To use you should have the `openai` package installed, with the
* `OPENAI_API_KEY` environment variable set.
* To use you should have the `OPENAI_API_KEY` environment variable set.
*
* To use with Azure you should have the `openai` package installed, with the
* To use with Azure you should have the:
* `AZURE_OPENAI_API_KEY`,
* `AZURE_OPENAI_API_INSTANCE_NAME`,
* `AZURE_OPENAI_API_DEPLOYMENT_NAME`
* and `AZURE_OPENAI_API_VERSION` environment variable set.
* and `AZURE_OPENAI_API_VERSION` environment variables set.
* `AZURE_OPENAI_BASE_PATH` is optional and will override `AZURE_OPENAI_API_INSTANCE_NAME` if you need to use a custom endpoint.

@@ -105,2 +105,3 @@ *

streaming: boolean;
streamUsage: boolean;
maxTokens?: number;

@@ -107,0 +108,0 @@ logprobs?: boolean;

@@ -46,3 +46,3 @@ import { OpenAI as OpenAIClient } from "openai";

}
function openAIResponseToChatMessage(message) {
function openAIResponseToChatMessage(message, messageId) {
const rawToolCalls = message.tool_calls;

@@ -70,2 +70,3 @@ switch (message.role) {

},
id: messageId,
});

@@ -79,3 +80,3 @@ }

// eslint-disable-next-line @typescript-eslint/no-explicit-any
delta, defaultRole) {
delta, messageId, defaultRole) {
const role = delta.role ?? defaultRole;

@@ -116,2 +117,3 @@ const content = delta.content ?? "";

additional_kwargs,
id: messageId,
});

@@ -153,5 +155,7 @@ }

completionParam.function_call = message.additional_kwargs.function_call;
completionParam.content = null;
}
if (isAIMessage(message) && !!message.tool_calls?.length) {
completionParam.tool_calls = message.tool_calls.map(convertLangChainToolCallToOpenAI);
completionParam.content = null;
}

@@ -172,10 +176,9 @@ else {

*
* To use you should have the `openai` package installed, with the
* `OPENAI_API_KEY` environment variable set.
* To use you should have the `OPENAI_API_KEY` environment variable set.
*
* To use with Azure you should have the `openai` package installed, with the
* To use with Azure you should have the:
* `AZURE_OPENAI_API_KEY`,
* `AZURE_OPENAI_API_INSTANCE_NAME`,
* `AZURE_OPENAI_API_DEPLOYMENT_NAME`
* and `AZURE_OPENAI_API_VERSION` environment variable set.
* and `AZURE_OPENAI_API_VERSION` environment variables set.
* `AZURE_OPENAI_BASE_PATH` is optional and will override `AZURE_OPENAI_API_INSTANCE_NAME` if you need to use a custom endpoint.

@@ -334,2 +337,8 @@ *

});
Object.defineProperty(this, "streamUsage", {
enumerable: true,
configurable: true,
writable: true,
value: true
});
Object.defineProperty(this, "maxTokens", {

@@ -463,2 +472,3 @@ enumerable: true,

this.streaming = fields?.streaming ?? false;
this.streamUsage = fields?.streamUsage ?? this.streamUsage;
if (this.azureOpenAIApiKey || this.azureADTokenProvider) {

@@ -514,2 +524,9 @@ if (!this.azureOpenAIApiInstanceName && !this.azureOpenAIBasePath) {

}
let streamOptionsConfig = {};
if (options?.stream_options !== undefined) {
streamOptionsConfig = { stream_options: options.stream_options };
}
else if (this.streamUsage && this.streaming) {
streamOptionsConfig = { stream_options: { include_usage: true } };
}
const params = {

@@ -528,2 +545,3 @@ model: this.model,

user: this.user,
// if include_usage is set or streamUsage then stream must be set to true.
stream: this.streaming,

@@ -538,5 +556,3 @@ functions: options?.functions,

seed: options?.seed,
...(options?.stream_options !== undefined
? { stream_options: options.stream_options }
: {}),
...streamOptionsConfig,
parallel_tool_calls: options?.parallel_tool_calls,

@@ -577,3 +593,3 @@ ...this.modelKwargs,

}
const chunk = _convertDeltaToMessageChunk(delta, defaultRole);
const chunk = _convertDeltaToMessageChunk(delta, data.id, defaultRole);
defaultRole = delta.role ?? defaultRole;

@@ -689,3 +705,3 @@ const newTokenIndices = {

text,
message: openAIResponseToChatMessage(part.message ?? { role: "assistant" }),
message: openAIResponseToChatMessage(part.message ?? { role: "assistant" }, data.id),
};

@@ -692,0 +708,0 @@ generation.generationInfo = {

@@ -28,2 +28,7 @@ import type { OpenAI as OpenAIClient } from "openai";

/**
* Whether or not to include token usage data in streamed chunks.
* @default true
*/
streamUsage?: boolean;
/**
* Model name to use

@@ -30,0 +35,0 @@ * Alias for `model`

{
"name": "@langchain/openai",
"version": "0.1.3",
"version": "0.2.0",
"description": "OpenAI integrations for LangChain.js",

@@ -38,3 +38,3 @@ "type": "module",

"dependencies": {
"@langchain/core": ">=0.2.5 <0.3.0",
"@langchain/core": ">=0.2.8 <0.3.0",
"js-tiktoken": "^1.0.12",

@@ -41,0 +41,0 @@ "openai": "^4.49.1",

Sorry, the diff of this file is not supported yet

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc