@langchain/openai
Comparing version 0.4.0 to 0.4.1
 import { type ClientOptions } from "openai";
 import { LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
-import { ChatOpenAI } from "../chat_models.js";
+import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
+import { BaseMessage } from "@langchain/core/messages";
+import { Runnable } from "@langchain/core/runnables";
+import { z } from "zod";
+import { ChatOpenAI, ChatOpenAIStructuredOutputMethodOptions } from "../chat_models.js";
 import { AzureOpenAIInput, OpenAIChatInput, OpenAICoreRequestOptions } from "../types.js";
@@ -442,2 +446,11 @@ export type { AzureOpenAIInput };
     toJSON(): any;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
+        raw: BaseMessage;
+        parsed: RunOutput;
+    }>;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
+        raw: BaseMessage;
+        parsed: RunOutput;
+    }>;
 }
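
The added declarations give the Azure chat model the same withStructuredOutput overloads that ChatOpenAI already exposes. A minimal usage sketch, assuming a hypothetical Zod schema and deployment name (neither appears in the diff):

import { AzureChatOpenAI } from "@langchain/openai";
import { z } from "zod";

// Hypothetical schema; any Zod object (or a raw JSON schema object) can be passed as outputSchema.
const Joke = z.object({
  setup: z.string().describe("The setup of the joke"),
  punchline: z.string().describe("The punchline"),
});

const model = new AzureChatOpenAI({
  azureOpenAIApiDeploymentName: "my-gpt-4o-deployment", // assumed deployment name
  azureOpenAIApiVersion: "2024-08-01-preview",          // assumed API version
  temperature: 0,
});

// Without includeRaw, the returned runnable resolves directly to the parsed RunOutput.
const structured = model.withStructuredOutput(Joke);
const joke = await structured.invoke("Tell me a joke about TypeScript.");
// joke is typed as { setup: string; punchline: string }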
 import { AzureOpenAI as AzureOpenAIClient } from "openai";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
-import { ChatOpenAI } from "../chat_models.js";
+import { ChatOpenAI, } from "../chat_models.js";
 import { getEndpoint } from "../utils/azure.js";
@@ -616,2 +616,12 @@ /**
     }
+    withStructuredOutput(outputSchema, config) {
+        const ensuredConfig = { ...config };
+        // Not all Azure gpt-4o deployments models support jsonSchema yet
+        if (this.model.startsWith("gpt-4o")) {
+            if (ensuredConfig?.method === undefined) {
+                ensuredConfig.method = "functionCalling";
+            }
+        }
+        return super.withStructuredOutput(outputSchema, ensuredConfig);
+    }
 }
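
This runtime override only supplies a default: when no method is passed for a gpt-4o model, structured output falls back to function calling, since not every Azure gpt-4o deployment supports the json_schema response format yet. A caller can still opt in explicitly; a hedged sketch (credentials assumed to come from the usual Azure OpenAI environment variables):

import { AzureChatOpenAI } from "@langchain/openai";
import { z } from "zod";

const Answer = z.object({ answer: z.string() });

const model = new AzureChatOpenAI({ model: "gpt-4o", temperature: 0 });

// No method given: the override above defaults gpt-4o deployments to "functionCalling".
const viaFunctionCalling = model.withStructuredOutput(Answer);

// Passing a method explicitly bypasses the default; this assumes the deployment
// actually supports the json_schema response format.
const viaJsonSchema = model.withStructuredOutput(Answer, { method: "jsonSchema" });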
@@ -752,2 +752,6 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";
     }>;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
+        raw: BaseMessage;
+        parsed: RunOutput;
+    }>;
 }
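
The includeRaw variant shown in these signatures returns both the untouched model message and the parsed value, which helps when token usage or tool-call metadata is needed alongside the structured result. A brief sketch against ChatOpenAI, with an assumed schema and model name:

import { ChatOpenAI } from "@langchain/openai";
import { z } from "zod";

const Person = z.object({
  name: z.string(),
  age: z.number(),
});

const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 });

// With includeRaw: true the runnable yields { raw: BaseMessage; parsed: RunOutput }.
const withRaw = model.withStructuredOutput(Person, { includeRaw: true });
const result = await withRaw.invoke("Alice is 30 years old.");
result.parsed; // { name: "Alice", age: 30 } (illustrative output)
result.raw;    // the underlying AIMessage, including usage metadata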
 {
   "name": "@langchain/openai",
-  "version": "0.4.0",
+  "version": "0.4.1",
   "description": "OpenAI integrations for LangChain.js",
@@ -5,0 +5,0 @@ "type": "module",