@langchain/openai
Comparing version 0.2.10 to 0.2.11
  import { OpenAI as OpenAIClient } from "openai";
- import { AIMessage, AIMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessageChunk, HumanMessageChunk, SystemMessageChunk, ToolMessageChunk, isAIMessage, } from "@langchain/core/messages";
+ import { AIMessage, AIMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessageChunk, HumanMessageChunk, SystemMessageChunk, ToolMessageChunk, isAIMessage, convertToChunk, } from "@langchain/core/messages";
  import { ChatGenerationChunk, } from "@langchain/core/outputs";
@@ -1065,2 +1065,12 @@ import { getEnvironmentVariable } from "@langchain/core/utils/env";
      async *_streamResponseChunks(messages, options, runManager) {
+         if (this.model.includes("o1-")) {
+             console.warn("[WARNING]: OpenAI o1 models do not yet support token-level streaming. Streaming will yield single chunk.");
+             const result = await this._generate(messages, options, runManager);
+             const messageChunk = convertToChunk(result.generations[0].message);
+             yield new ChatGenerationChunk({
+                 message: messageChunk,
+                 text: typeof messageChunk.content === "string" ? messageChunk.content : "",
+             });
+             return;
+         }
          const messagesMapped = convertMessagesToOpenAIParams(messages);
@@ -1067,0 +1077,0 @@ const params = {
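The new branch in _streamResponseChunks means .stream() still works against o1 models, but the whole completion arrives as a single chunk: the response is produced via _generate, converted with convertToChunk, and yielded once as a ChatGenerationChunk. A minimal consumer-side sketch, assuming an ESM project with top-level await, OPENAI_API_KEY set, and "o1-preview" as an example model name (any model containing "o1-" takes the fallback):

  import { ChatOpenAI } from "@langchain/openai";

  // Any model name containing "o1-" hits the non-streaming fallback added in 0.2.11.
  const model = new ChatOpenAI({ model: "o1-preview" });

  const stream = await model.stream("Summarize streaming in one sentence.");
  for await (const chunk of stream) {
    // With an o1 model this loop runs exactly once, yielding the full response text.
    console.log(chunk.content);
  }

Because the single yielded message is an ordinary AIMessageChunk, downstream code that aggregates chunks (for example with concat) keeps working unchanged.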
  {
      "name": "@langchain/openai",
-     "version": "0.2.10",
+     "version": "0.2.11",
      "description": "OpenAI integrations for LangChain.js",
@@ -5,0 +5,0 @@ "type": "module",