Socket
Socket
Sign in · Demo · Install

@langchain/openai

Package Overview
Dependencies
Maintainers
8
Versions
69
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version 0.2.1 to 0.2.2

8

dist/chat_models.d.ts

@@ -9,3 +9,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

import { z } from "zod";
import { Runnable } from "@langchain/core/runnables";
import { Runnable, RunnableToolLike } from "@langchain/core/runnables";
import type { AzureOpenAIInput, OpenAICallOptions, OpenAIChatInput, OpenAICoreRequestOptions, LegacyOpenAIInput } from "./types.js";

@@ -125,7 +125,9 @@ export type { AzureOpenAIInput, OpenAICallOptions, OpenAIChatInput };

getLsParams(options: this["ParsedCallOptions"]): LangSmithParams;
bindTools(tools: (Record<string, unknown> | StructuredToolInterface)[], kwargs?: Partial<CallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
bindTools(tools: (Record<string, unknown> | StructuredToolInterface | RunnableToolLike)[], kwargs?: Partial<CallOptions>): Runnable<BaseLanguageModelInput, AIMessageChunk, CallOptions>;
/**
* Get the parameters used to invoke the model
*/
invocationParams(options?: this["ParsedCallOptions"]): Omit<OpenAIClient.Chat.ChatCompletionCreateParams, "messages">;
invocationParams(options?: this["ParsedCallOptions"], extra?: {
streaming?: boolean;
}): Omit<OpenAIClient.Chat.ChatCompletionCreateParams, "messages">;
/** @ignore */

@@ -132,0 +134,0 @@ _identifyingParams(): Omit<OpenAIClient.Chat.ChatCompletionCreateParams, "messages"> & {

@@ -107,2 +107,3 @@ import { OpenAI as OpenAIClient } from "openai";

index: rawToolCall.index,
type: "tool_call_chunk",
});

@@ -514,3 +515,3 @@ }

*/
invocationParams(options) {
invocationParams(options, extra) {
function isStructuredToolArray(tools) {

@@ -524,3 +525,3 @@ return (tools !== undefined &&

}
else if (this.streamUsage && this.streaming) {
else if (this.streamUsage && (this.streaming || extra?.streaming)) {
streamOptionsConfig = { stream_options: { include_usage: true } };

@@ -568,3 +569,5 @@ }

const params = {
...this.invocationParams(options),
...this.invocationParams(options, {
streaming: true,
}),
messages: messagesMapped,

@@ -612,4 +615,3 @@ stream: true,

yield generationChunk;
// eslint-disable-next-line no-void
void runManager?.handleLLMNewToken(generationChunk.text ?? "", newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
await runManager?.handleLLMNewToken(generationChunk.text ?? "", newTokenIndices, undefined, undefined, undefined, { chunk: generationChunk });
}

@@ -616,0 +618,0 @@ if (usage) {

@@ -61,4 +61,8 @@ import { OpenAI as OpenAIClient } from "openai";

*/
responseFormat?: "url" | "b64_json";
dallEResponseFormat?: "url" | "b64_json";
/**
* @deprecated Use dallEResponseFormat instead for the Dall-E response type.
*/
responseFormat?: any;
/**
* A unique identifier representing your end-user, which will help

@@ -87,3 +91,3 @@ * OpenAI to monitor and detect abuse.

private size;
private responseFormat;
private dallEResponseFormat;
private user?;

@@ -90,0 +94,0 @@ constructor(fields?: DallEAPIWrapperParams);

@@ -0,1 +1,2 @@

/* eslint-disable no-param-reassign */
import { getEnvironmentVariable } from "@langchain/core/utils/env";

@@ -12,2 +13,9 @@ import { OpenAI as OpenAIClient } from "openai";

constructor(fields) {
// Shim for new base tool param name
if (fields?.responseFormat !== undefined &&
["url", "b64_json"].includes(fields.responseFormat)) {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
fields.dallEResponseFormat = fields.responseFormat;
fields.responseFormat = "content";
}
super(fields);

@@ -62,3 +70,3 @@ Object.defineProperty(this, "name", {

});
Object.defineProperty(this, "responseFormat", {
Object.defineProperty(this, "dallEResponseFormat", {
enumerable: true,

@@ -90,3 +98,4 @@ configurable: true,

this.size = fields?.size ?? this.size;
this.responseFormat = fields?.responseFormat ?? this.responseFormat;
this.dallEResponseFormat =
fields?.dallEResponseFormat ?? this.dallEResponseFormat;
this.user = fields?.user;

@@ -101,3 +110,3 @@ }

size: this.size,
response_format: this.responseFormat,
response_format: this.dallEResponseFormat,
style: this.style,

@@ -108,3 +117,3 @@ quality: this.quality,

let data = "";
if (this.responseFormat === "url") {
if (this.dallEResponseFormat === "url") {
[data] = response.data

@@ -111,0 +120,0 @@ .map((item) => item.url)

{
"name": "@langchain/openai",
"version": "0.2.1",
"version": "0.2.2",
"description": "OpenAI integrations for LangChain.js",

@@ -38,3 +38,3 @@ "type": "module",

"dependencies": {
"@langchain/core": ">=0.2.8 <0.3.0",
"@langchain/core": ">=0.2.16 <0.3.0",
"js-tiktoken": "^1.0.12",

@@ -41,0 +41,0 @@ "openai": "^4.49.1",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc