Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a Demo · Install · Sign in
Socket

@langchain/openai

Package Overview
Dependencies
Maintainers
12
Versions
152
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version
1.2.3-dev-1768440391024
to
1.2.3
+10
-0
CHANGELOG.md
# @langchain/openai
## 1.2.3
### Patch Changes
- [#9679](https://github.com/langchain-ai/langchainjs/pull/9679) [`a7c6ec5`](https://github.com/langchain-ai/langchainjs/commit/a7c6ec51ab9baa186ab5ebf815599c08f5c7e8ab) Thanks [@christian-bromann](https://github.com/christian-bromann)! - feat(openai): elevate OpenAI image generation outputs to proper image content blocks
- [#9810](https://github.com/langchain-ai/langchainjs/pull/9810) [`04923f9`](https://github.com/langchain-ai/langchainjs/commit/04923f9835e5b3677c180b601ae8f3e7d8be0236) Thanks [@christian-bromann](https://github.com/christian-bromann)! - Cb/OpenAI reasoning fix
- [#9827](https://github.com/langchain-ai/langchainjs/pull/9827) [`e16c218`](https://github.com/langchain-ai/langchainjs/commit/e16c218b81980a1c576af5192342019975bb95b9) Thanks [@sanjaiyan-dev](https://github.com/sanjaiyan-dev)! - optimize stream chunk aggregation and remove redundant sorting
## 1.2.2

@@ -4,0 +14,0 @@

+1
-2

@@ -340,4 +340,3 @@ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');

const countPerMessage = await Promise.all(messages.map(async (message) => {
const textCount = await this.getNumTokens(message.content);
const roleCount = await this.getNumTokens(require_misc.messageToOpenAIRole(message));
const [textCount, roleCount] = await Promise.all([this.getNumTokens(message.content), this.getNumTokens(require_misc.messageToOpenAIRole(message))]);
const nameCount = message.name !== void 0 ? tokensPerName + await this.getNumTokens(message.name) : 0;

@@ -344,0 +343,0 @@ let count = textCount + tokensPerMessage + roleCount + nameCount;

@@ -339,4 +339,3 @@ import { wrapOpenAIClientError } from "../utils/client.js";

const countPerMessage = await Promise.all(messages.map(async (message) => {
const textCount = await this.getNumTokens(message.content);
const roleCount = await this.getNumTokens(messageToOpenAIRole(message));
const [textCount, roleCount] = await Promise.all([this.getNumTokens(message.content), this.getNumTokens(messageToOpenAIRole(message))]);
const nameCount = message.name !== void 0 ? tokensPerName + await this.getNumTokens(message.name) : 0;

@@ -343,0 +342,0 @@ let count = textCount + tokensPerMessage + roleCount + nameCount;

@@ -12,2 +12,49 @@ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');

/**
* Converts an OpenAI annotation to a LangChain Citation or BaseContentBlock.
*
* OpenAI has several annotation types:
* - `url_citation`: Web citations with url, title, start_index, end_index
* - `file_citation`: File citations with file_id, filename, index
* - `container_file_citation`: Container file citations with container_id, file_id, filename, start_index, end_index
* - `file_path`: File paths with file_id, index
*
* This function maps them to LangChain's Citation format or preserves them as non-standard blocks.
*/
/**
 * Maps one OpenAI annotation object onto LangChain's citation content-block
 * shape. Recognized annotation types (`url_citation`, `file_citation`,
 * `container_file_citation`, `file_path`) become `type: "citation"` blocks
 * with their snake_case index fields renamed to `startIndex`/`endIndex`;
 * anything else is preserved untouched inside a `non_standard` wrapper so no
 * provider data is dropped.
 *
 * @param {object} annotation - Raw annotation from the OpenAI Responses API.
 * @returns {object} A LangChain citation block or a `non_standard` block.
 */
function convertOpenAIAnnotationToLangChain(annotation) {
	switch (annotation.type) {
		case "url_citation":
			return {
				type: "citation",
				source: "url_citation",
				url: annotation.url,
				title: annotation.title,
				startIndex: annotation.start_index,
				endIndex: annotation.end_index
			};
		case "file_citation":
			// File citations carry a single `index` rather than a start/end pair.
			return {
				type: "citation",
				source: "file_citation",
				title: annotation.filename,
				startIndex: annotation.index,
				file_id: annotation.file_id
			};
		case "container_file_citation":
			return {
				type: "citation",
				source: "container_file_citation",
				title: annotation.filename,
				startIndex: annotation.start_index,
				endIndex: annotation.end_index,
				file_id: annotation.file_id,
				container_id: annotation.container_id
			};
		case "file_path":
			return {
				type: "citation",
				source: "file_path",
				startIndex: annotation.index,
				file_id: annotation.file_id
			};
		default:
			// Unknown annotation kind: keep the raw payload for downstream consumers.
			return {
				type: "non_standard",
				value: annotation
			};
	}
}
/**
* Converts OpenAI Responses API usage statistics to LangChain's UsageMetadata format.

@@ -154,3 +201,3 @@ *

text: part.text,
annotations: part.annotations
annotations: part.annotations.map(convertOpenAIAnnotationToLangChain)
};

@@ -196,2 +243,12 @@ }

else invalid_tool_calls.push((0, __langchain_core_output_parsers_openai_tools.makeInvalidToolCall)(item, "Malformed computer call"));
} else if (item.type === "image_generation_call") {
if (item.result) content.push({
type: "image",
mimeType: "image/png",
data: item.result,
id: item.id,
metadata: { status: item.status }
});
additional_kwargs.tool_outputs ??= [];
additional_kwargs.tool_outputs.push(item);
} else {

@@ -353,3 +410,3 @@ additional_kwargs.tool_outputs ??= [];

text: "",
annotations: [event.annotation],
annotations: [convertOpenAIAnnotationToLangChain(event.annotation)],
index: event.content_index

@@ -376,2 +433,11 @@ });

additional_kwargs.tool_outputs = [event.item];
} else if (event.type === "response.output_item.done" && event.item.type === "image_generation_call") {
if (event.item.result) content.push({
type: "image",
mimeType: "image/png",
data: event.item.result,
id: event.item.id,
metadata: { status: event.item.status }
});
additional_kwargs.tool_outputs = [event.item];
} else if (event.type === "response.output_item.done" && [

@@ -384,3 +450,2 @@ "web_search_call",

"mcp_approval_request",
"image_generation_call",
"custom_tool_call"

@@ -387,0 +452,0 @@ ].includes(event.item.type)) additional_kwargs.tool_outputs = [event.item];

@@ -11,2 +11,49 @@ import { isComputerToolCall, isCustomToolCall, parseComputerCall, parseCustomToolCall } from "../utils/tools.js";

/**
* Converts an OpenAI annotation to a LangChain Citation or BaseContentBlock.
*
* OpenAI has several annotation types:
* - `url_citation`: Web citations with url, title, start_index, end_index
* - `file_citation`: File citations with file_id, filename, index
* - `container_file_citation`: Container file citations with container_id, file_id, filename, start_index, end_index
* - `file_path`: File paths with file_id, index
*
* This function maps them to LangChain's Citation format or preserves them as non-standard blocks.
*/
/**
 * Converts an OpenAI annotation into LangChain's content-block format.
 *
 * Each recognized OpenAI annotation type is rewritten as a `citation` block
 * whose `source` records the original type and whose snake_case position
 * fields become `startIndex`/`endIndex`. Unrecognized annotation types are
 * passed through unchanged inside a `non_standard` block so nothing is lost.
 *
 * @param {object} annotation - Raw annotation from the OpenAI Responses API.
 * @returns {object} A LangChain citation block or a `non_standard` block.
 */
function convertOpenAIAnnotationToLangChain(annotation) {
	// One builder per known annotation type; each maps the provider's field
	// names onto LangChain's citation shape.
	const builders = {
		url_citation: (a) => ({
			type: "citation",
			source: "url_citation",
			url: a.url,
			title: a.title,
			startIndex: a.start_index,
			endIndex: a.end_index
		}),
		file_citation: (a) => ({
			type: "citation",
			source: "file_citation",
			title: a.filename,
			startIndex: a.index,
			file_id: a.file_id
		}),
		container_file_citation: (a) => ({
			type: "citation",
			source: "container_file_citation",
			title: a.filename,
			startIndex: a.start_index,
			endIndex: a.end_index,
			file_id: a.file_id,
			container_id: a.container_id
		}),
		file_path: (a) => ({
			type: "citation",
			source: "file_path",
			startIndex: a.index,
			file_id: a.file_id
		})
	};
	// Object.hasOwn guards against prototype keys (e.g. "constructor") being
	// mistaken for a builder, keeping the non_standard fallback exact.
	if (Object.hasOwn(builders, annotation.type)) return builders[annotation.type](annotation);
	return {
		type: "non_standard",
		value: annotation
	};
}
/**
* Converts OpenAI Responses API usage statistics to LangChain's UsageMetadata format.

@@ -153,3 +200,3 @@ *

text: part.text,
annotations: part.annotations
annotations: part.annotations.map(convertOpenAIAnnotationToLangChain)
};

@@ -195,2 +242,12 @@ }

else invalid_tool_calls.push(makeInvalidToolCall(item, "Malformed computer call"));
} else if (item.type === "image_generation_call") {
if (item.result) content.push({
type: "image",
mimeType: "image/png",
data: item.result,
id: item.id,
metadata: { status: item.status }
});
additional_kwargs.tool_outputs ??= [];
additional_kwargs.tool_outputs.push(item);
} else {

@@ -352,3 +409,3 @@ additional_kwargs.tool_outputs ??= [];

text: "",
annotations: [event.annotation],
annotations: [convertOpenAIAnnotationToLangChain(event.annotation)],
index: event.content_index

@@ -375,2 +432,11 @@ });

additional_kwargs.tool_outputs = [event.item];
} else if (event.type === "response.output_item.done" && event.item.type === "image_generation_call") {
if (event.item.result) content.push({
type: "image",
mimeType: "image/png",
data: event.item.result,
id: event.item.id,
metadata: { status: event.item.status }
});
additional_kwargs.tool_outputs = [event.item];
} else if (event.type === "response.output_item.done" && [

@@ -383,3 +449,2 @@ "web_search_call",

"mcp_approval_request",
"image_generation_call",
"custom_tool_call"

@@ -386,0 +451,0 @@ ].includes(event.item.type)) additional_kwargs.tool_outputs = [event.item];

@@ -50,3 +50,3 @@ import { OpenAI as OpenAI$1 } from "openai";

*/
model?: "gpt-image-1" | "gpt-image-1-mini";
model?: "gpt-image-1" | "gpt-image-1-mini" | "gpt-image-1.5";
/**

@@ -53,0 +53,0 @@ * Moderation level for the generated image.

@@ -50,3 +50,3 @@ import { OpenAI as OpenAI$1 } from "openai";

*/
model?: "gpt-image-1" | "gpt-image-1-mini";
model?: "gpt-image-1" | "gpt-image-1-mini" | "gpt-image-1.5";
/**

@@ -53,0 +53,0 @@ * Moderation level for the generated image.

{
"name": "@langchain/openai",
"version": "1.2.3-dev-1768440391024",
"version": "1.2.3",
"description": "OpenAI integrations for LangChain.js",

@@ -18,3 +18,3 @@ "author": "LangChain",

"js-tiktoken": "^1.0.12",
"openai": "^6.10.0",
"openai": "^6.16.0",
"zod": "^3.25.76 || ^4"

@@ -40,5 +40,5 @@ },

"zod-to-json-schema": "^3.24.6",
"@langchain/core": "1.1.15",
"@langchain/core": "1.1.16",
"@langchain/eslint": "0.1.1",
"@langchain/standard-tests": "0.0.18",
"@langchain/standard-tests": "0.0.19",
"@langchain/tsconfig": "0.0.1"

@@ -45,0 +45,0 @@ },

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet