@langchain/openai - npm Package Compare versions

Comparing version 0.2.7 to 0.2.8


dist/azure/chat_models.d.ts

@@ -260,4 +260,4 @@ import { type ClientOptions } from "openai";

  *
- * const structuredLlm = llm.withStructuredOutput(Joke);
- * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats", { name: "Joke" });
+ * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
  * console.log(jokeResult);

@@ -264,0 +264,0 @@ * ```
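
For context, the documentation fix above moves the `name` option from `invoke` to `withStructuredOutput`. A minimal sketch of the corrected usage, assuming a hypothetical zod `Joke` schema and an illustrative model name (neither is defined in this hunk):

```typescript
import { ChatOpenAI } from "@langchain/openai";
import { z } from "zod";

// Hypothetical schema for illustration; the real docstring defines Joke elsewhere in the file.
const Joke = z.object({
  setup: z.string().describe("The setup of the joke"),
  punchline: z.string().describe("The punchline of the joke"),
});

const llm = new ChatOpenAI({ model: "gpt-4o-mini" }); // model name is illustrative

// The structured-output name is now passed to withStructuredOutput, not to invoke.
const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
console.log(jokeResult); // e.g. { setup: "...", punchline: "..." }
```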

@@ -259,4 +259,4 @@ import { AzureOpenAI as AzureOpenAIClient } from "openai";

  *
- * const structuredLlm = llm.withStructuredOutput(Joke);
- * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats", { name: "Joke" });
+ * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
  * console.log(jokeResult);

@@ -263,0 +263,0 @@ * ```

@@ -337,4 +337,4 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

  *
- * const structuredLlm = llm.withStructuredOutput(Joke);
- * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats", { name: "Joke" });
+ * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
  * console.log(jokeResult);

@@ -496,2 +496,47 @@ * ```

  * <br />
+ *
+ * <details>
+ * <summary><strong>JSON Schema Structured Output</strong></summary>
+ *
+ * ```typescript
+ * const llmForJsonSchema = new ChatOpenAI({
+ *   model: "gpt-4o-2024-08-06",
+ * }).withStructuredOutput(
+ *   z.object({
+ *     command: z.string().describe("The command to execute"),
+ *     expectedOutput: z.string().describe("The expected output of the command"),
+ *     options: z
+ *       .array(z.string())
+ *       .describe("The options you can pass to the command"),
+ *   }),
+ *   {
+ *     method: "jsonSchema",
+ *     strict: true, // Optional when using the `jsonSchema` method
+ *   }
+ * );
+ *
+ * const jsonSchemaRes = await llmForJsonSchema.invoke(
+ *   "What is the command to list files in a directory?"
+ * );
+ * console.log(jsonSchemaRes);
+ * ```
+ *
+ * ```txt
+ * {
+ *   command: 'ls',
+ *   expectedOutput: 'A list of files and subdirectories within the specified directory.',
+ *   options: [
+ *     '-a: include directory entries whose names begin with a dot (.).',
+ *     '-l: use a long listing format.',
+ *     '-h: with -l, print sizes in human readable format (e.g., 1K, 234M, 2G).',
+ *     '-t: sort by time, newest first.',
+ *     '-r: reverse order while sorting.',
+ *     '-S: sort by file size, largest first.',
+ *     '-R: list subdirectories recursively.'
+ *   ]
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
  */

@@ -498,0 +543,0 @@ export declare class ChatOpenAI<CallOptions extends ChatOpenAICallOptions = ChatOpenAICallOptions> extends BaseChatModel<CallOptions, AIMessageChunk> implements OpenAIChatInput, AzureOpenAIInput {

@@ -455,4 +455,4 @@ import { OpenAI as OpenAIClient } from "openai";

  *
- * const structuredLlm = llm.withStructuredOutput(Joke);
- * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats", { name: "Joke" });
+ * const structuredLlm = llm.withStructuredOutput(Joke, { name: "Joke" });
+ * const jokeResult = await structuredLlm.invoke("Tell me a joke about cats");
  * console.log(jokeResult);

@@ -614,2 +614,47 @@ * ```

  * <br />
+ *
+ * <details>
+ * <summary><strong>JSON Schema Structured Output</strong></summary>
+ *
+ * ```typescript
+ * const llmForJsonSchema = new ChatOpenAI({
+ *   model: "gpt-4o-2024-08-06",
+ * }).withStructuredOutput(
+ *   z.object({
+ *     command: z.string().describe("The command to execute"),
+ *     expectedOutput: z.string().describe("The expected output of the command"),
+ *     options: z
+ *       .array(z.string())
+ *       .describe("The options you can pass to the command"),
+ *   }),
+ *   {
+ *     method: "jsonSchema",
+ *     strict: true, // Optional when using the `jsonSchema` method
+ *   }
+ * );
+ *
+ * const jsonSchemaRes = await llmForJsonSchema.invoke(
+ *   "What is the command to list files in a directory?"
+ * );
+ * console.log(jsonSchemaRes);
+ * ```
+ *
+ * ```txt
+ * {
+ *   command: 'ls',
+ *   expectedOutput: 'A list of files and subdirectories within the specified directory.',
+ *   options: [
+ *     '-a: include directory entries whose names begin with a dot (.).',
+ *     '-l: use a long listing format.',
+ *     '-h: with -l, print sizes in human readable format (e.g., 1K, 234M, 2G).',
+ *     '-t: sort by time, newest first.',
+ *     '-r: reverse order while sorting.',
+ *     '-S: sort by file size, largest first.',
+ *     '-R: list subdirectories recursively.'
+ *   ]
+ * }
+ * ```
+ * </details>
+ *
+ * <br />
  */

@@ -1070,3 +1115,3 @@ export class ChatOpenAI extends BaseChatModel {

  const generationInfo = { ...newTokenIndices };
- if (choice.finish_reason !== undefined) {
+ if (choice.finish_reason != null) {
    generationInfo.finish_reason = choice.finish_reason;

@@ -1073,0 +1118,0 @@ // Only include system fingerprint in the last chunk for now
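
The guard change above is behavioral as well as stylistic: `!= null` (loose inequality) rejects both `null` and `undefined`, while the old `!== undefined` check let a literal `null` finish_reason be copied into `generationInfo`. A standalone illustration of the difference, not taken from the package:

```typescript
const finishReasons: (string | null | undefined)[] = ["stop", null, undefined];

// Old check: a null finish_reason slips through.
console.log(finishReasons.filter((r) => r !== undefined)); // [ 'stop', null ]

// New check: both null and undefined are filtered out.
console.log(finishReasons.filter((r) => r != null)); // [ 'stop' ]
```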

  {
    "name": "@langchain/openai",
-   "version": "0.2.7",
+   "version": "0.2.8",
    "description": "OpenAI integrations for LangChain.js",

@@ -18,3 +18,3 @@ "type": "module",

"build": "yarn turbo:command build:internal --filter=@langchain/openai",
"build:internal": "yarn lc_build_v2 --create-entrypoints --pre --tree-shaking",
"build:internal": "yarn lc_build --create-entrypoints --pre --tree-shaking",
"lint:eslint": "NODE_OPTIONS=--max-old-space-size=4096 eslint --cache --ext .ts,.js src/",

@@ -48,3 +48,3 @@ "lint:dpdm": "dpdm --exit-code circular:1 --no-warning --no-tree src/*.ts src/**/*.ts",

"@jest/globals": "^29.5.0",
"@langchain/scripts": "^0.0.21",
"@langchain/scripts": ">=0.1.0 <0.2.0",
"@langchain/standard-tests": "0.0.0",

@@ -51,0 +51,0 @@ "@swc/core": "^1.3.90",
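
The `@langchain/scripts` devDependency moves from `^0.0.21` (which, for a 0.0.x version, resolves to `>=0.0.21 <0.0.22` and effectively pins that release) to the explicit range `>=0.1.0 <0.2.0`. A quick way to check what the new range accepts, using the `semver` package (not part of this diff):

```typescript
import semver from "semver";

const range = ">=0.1.0 <0.2.0";

console.log(semver.satisfies("0.0.21", range)); // false - the previously pinned release is excluded
console.log(semver.satisfies("0.1.5", range));  // true  - any 0.1.x release satisfies the range
console.log(semver.satisfies("0.2.0", range));  // false - the next minor line is excluded
```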

