@langchain/openai - npm Package Compare versions

Comparing version 0.4.0 to 0.4.1

dist/azure/chat_models.d.ts
 import { type ClientOptions } from "openai";
 import { LangSmithParams, type BaseChatModelParams } from "@langchain/core/language_models/chat_models";
-import { ChatOpenAI } from "../chat_models.js";
+import { BaseLanguageModelInput } from "@langchain/core/language_models/base";
+import { BaseMessage } from "@langchain/core/messages";
+import { Runnable } from "@langchain/core/runnables";
+import { z } from "zod";
+import { ChatOpenAI, ChatOpenAIStructuredOutputMethodOptions } from "../chat_models.js";
 import { AzureOpenAIInput, OpenAIChatInput, OpenAICoreRequestOptions } from "../types.js";

@@ -442,2 +446,11 @@ export type { AzureOpenAIInput };

     toJSON(): any;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<false>): Runnable<BaseLanguageModelInput, RunOutput>;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<true>): Runnable<BaseLanguageModelInput, {
+        raw: BaseMessage;
+        parsed: RunOutput;
+    }>;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
+        raw: BaseMessage;
+        parsed: RunOutput;
+    }>;
 }
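
For callers, the practical effect of these overloads is that the return type of withStructuredOutput now tracks the includeRaw option on ChatOpenAIStructuredOutputMethodOptions: omitting it (or passing false) yields a runnable that resolves to the parsed output alone, while includeRaw: true resolves to both the raw message and the parsed value. A minimal type-level sketch, assuming a zod schema and an AzureChatOpenAI instance configured elsewhere (the Answer schema is illustrative, not taken from the diff):

import { z } from "zod";
import { AzureChatOpenAI } from "@langchain/openai";

// Illustrative schema; any zod object schema (or a plain JSON schema object) fits the signature.
const Answer = z.object({ answer: z.string(), confidence: z.number() });

declare const model: AzureChatOpenAI; // assumed: configured elsewhere

// ChatOpenAIStructuredOutputMethodOptions<false> overload (the default):
// invoke() resolves to the parsed RunOutput only.
const parsedOnly = model.withStructuredOutput(Answer);
// await parsedOnly.invoke("...") -> { answer: string; confidence: number }

// ChatOpenAIStructuredOutputMethodOptions<true> overload:
// invoke() resolves to { raw: BaseMessage; parsed: RunOutput }.
const withRaw = model.withStructuredOutput(Answer, { includeRaw: true });
// await withRaw.invoke("...") -> { raw: BaseMessage; parsed: { answer: string; confidence: number } }
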
 import { AzureOpenAI as AzureOpenAIClient } from "openai";
 import { getEnvironmentVariable } from "@langchain/core/utils/env";
-import { ChatOpenAI } from "../chat_models.js";
+import { ChatOpenAI, } from "../chat_models.js";
 import { getEndpoint } from "../utils/azure.js";

@@ -616,2 +616,12 @@ /**

     }
+    withStructuredOutput(outputSchema, config) {
+        const ensuredConfig = { ...config };
+        // Not all Azure gpt-4o deployments models support jsonSchema yet
+        if (this.model.startsWith("gpt-4o")) {
+            if (ensuredConfig?.method === undefined) {
+                ensuredConfig.method = "functionCalling";
+            }
+        }
+        return super.withStructuredOutput(outputSchema, ensuredConfig);
+    }
 }
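
At runtime, this override means AzureChatOpenAI now intercepts withStructuredOutput before delegating to ChatOpenAI: if the configured model name starts with "gpt-4o" and no method was specified, it falls back to "functionCalling" rather than assuming the deployment supports JSON schema. A hedged usage sketch; the instance name, deployment name, API version, and schema below are placeholders, not values from this release:

import { z } from "zod";
import { AzureChatOpenAI } from "@langchain/openai";

const Joke = z.object({ setup: z.string(), punchline: z.string() });

const model = new AzureChatOpenAI({
  model: "gpt-4o",
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY, // placeholder credential source
  azureOpenAIApiInstanceName: "my-instance", // placeholder
  azureOpenAIApiDeploymentName: "my-gpt-4o-deployment", // placeholder
  azureOpenAIApiVersion: "2024-08-01-preview", // placeholder
});

// No method given: per the override above, gpt-4o deployments default to
// "functionCalling" before super.withStructuredOutput is called.
const structured = model.withStructuredOutput(Joke);
const joke = await structured.invoke("Tell me a joke about version bumps.");

// Deployments known to support JSON schema can still opt in explicitly;
// the fallback only applies when method is left undefined.
const viaJsonSchema = model.withStructuredOutput(Joke, { method: "jsonSchema" });
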

@@ -752,2 +752,6 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

     }>;
+    withStructuredOutput<RunOutput extends Record<string, any> = Record<string, any>>(outputSchema: z.ZodType<RunOutput> | Record<string, any>, config?: ChatOpenAIStructuredOutputMethodOptions<boolean>): Runnable<BaseLanguageModelInput, RunOutput> | Runnable<BaseLanguageModelInput, {
+        raw: BaseMessage;
+        parsed: RunOutput;
+    }>;
 }

package.json
 {
   "name": "@langchain/openai",
-  "version": "0.4.0",
+  "version": "0.4.1",
   "description": "OpenAI integrations for LangChain.js",
   "type": "module",

