Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a Demo | Install | Sign in
Socket

@langchain/openai

Package Overview
Dependencies
Maintainers
12
Versions
150
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version
1.2.6
to
1.2.7
+8
-0
CHANGELOG.md
# @langchain/openai
## 1.2.7
### Patch Changes
- [#9954](https://github.com/langchain-ai/langchainjs/pull/9954) [`6939dab`](https://github.com/langchain-ai/langchainjs/commit/6939dabc8dc6481942e7e2c19e3dc61bc374d65a) Thanks [@akintunero](https://github.com/akintunero)! - fix(openai): store response.output in response_metadata for reasoning model round-trips
- [#9898](https://github.com/langchain-ai/langchainjs/pull/9898) [`ad581c7`](https://github.com/langchain-ai/langchainjs/commit/ad581c76138ea12ebdaee444c0dcdc4f6a280624) Thanks [@Muhammad-Kamran-Khan](https://github.com/Muhammad-Kamran-Khan)! - fix(openai): pass service_tier to API when using Responses API
## 1.2.6

@@ -4,0 +12,0 @@

+1
-0

@@ -26,2 +26,3 @@ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');

user: this.user,
service_tier: this.service_tier,
stream: this.streaming,

@@ -28,0 +29,0 @@ previous_response_id: options?.previous_response_id,

@@ -25,2 +25,3 @@ import { wrapOpenAIClientError } from "../utils/client.js";

user: this.user,
service_tier: this.service_tier,
stream: this.streaming,

@@ -27,0 +28,0 @@ previous_response_id: options?.previous_response_id,

@@ -219,2 +219,10 @@ const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');

const invalid_tool_calls = [];
const cleanedOutput = response.output.map((item) => {
if (item.type === "function_call" && "parsed_arguments" in item) {
const cleaned = { ...item };
delete cleaned.parsed_arguments;
return cleaned;
}
return item;
});
const response_metadata = {

@@ -228,2 +236,3 @@ model_provider: "openai",

object: response.object,
output: cleanedOutput,
status: response.status,

@@ -500,3 +509,7 @@ user: response.user,

if (event.response.text?.format?.type === "json_schema") additional_kwargs.parsed ??= JSON.parse(msg.text);
for (const [key, value] of Object.entries(event.response)) if (key !== "id") response_metadata[key] = value;
for (const [key, value] of Object.entries(event.response)) {
if (key === "id") continue;
if (key === "output") response_metadata[key] = msg.response_metadata.output;
else response_metadata[key] = value;
}
} else if (event.type === "response.function_call_arguments.delta" || event.type === "response.custom_tool_call_input.delta") tool_call_chunks.push({

@@ -503,0 +516,0 @@ type: "tool_call_chunk",

@@ -218,2 +218,10 @@ import { isComputerToolCall, isCustomToolCall, parseComputerCall, parseCustomToolCall } from "../utils/tools.js";

const invalid_tool_calls = [];
const cleanedOutput = response.output.map((item) => {
if (item.type === "function_call" && "parsed_arguments" in item) {
const cleaned = { ...item };
delete cleaned.parsed_arguments;
return cleaned;
}
return item;
});
const response_metadata = {

@@ -227,2 +235,3 @@ model_provider: "openai",

object: response.object,
output: cleanedOutput,
status: response.status,

@@ -499,3 +508,7 @@ user: response.user,

if (event.response.text?.format?.type === "json_schema") additional_kwargs.parsed ??= JSON.parse(msg.text);
for (const [key, value] of Object.entries(event.response)) if (key !== "id") response_metadata[key] = value;
for (const [key, value] of Object.entries(event.response)) {
if (key === "id") continue;
if (key === "output") response_metadata[key] = msg.response_metadata.output;
else response_metadata[key] = value;
}
} else if (event.type === "response.function_call_arguments.delta" || event.type === "response.custom_tool_call_input.delta") tool_call_chunks.push({

@@ -502,0 +515,0 @@ type: "tool_call_chunk",

+2
-2
{
"name": "@langchain/openai",
"version": "1.2.6",
"version": "1.2.7",
"description": "OpenAI integrations for LangChain.js",

@@ -39,3 +39,3 @@ "author": "LangChain",

"zod-to-json-schema": "^3.24.6",
"@langchain/core": "1.1.20",
"@langchain/core": "^1.1.21",
"@langchain/eslint": "0.1.1",

@@ -42,0 +42,0 @@ "@langchain/standard-tests": "0.0.23",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet