Socket
Socket
Sign in · Demo · Install

@langchain/openai

Package Overview
Dependencies
Maintainers
5
Versions
69
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@langchain/openai - npm Package Compare versions

Comparing version 0.0.19 to 0.0.20

14

dist/chat_models.js

@@ -469,6 +469,14 @@ import { OpenAI as OpenAIClient } from "openai";

}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const generationInfo = { ...newTokenIndices };
if (choice.finish_reason !== undefined) {
generationInfo.finish_reason = choice.finish_reason;
}
if (this.logprobs) {
generationInfo.logprobs = choice.logprobs;
}
const generationChunk = new ChatGenerationChunk({
message: chunk,
text: chunk.content,
generationInfo: newTokenIndices,
generationInfo,
});

@@ -499,2 +507,6 @@ yield generationChunk;

for await (const chunk of stream) {
chunk.message.response_metadata = {
...chunk.generationInfo,
...chunk.message.response_metadata,
};
const index = chunk.generationInfo?.completion ?? 0;

@@ -501,0 +513,0 @@ if (finalChunks[index] === undefined) {

2

dist/legacy.d.ts

@@ -50,3 +50,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

*
* const res = await model.call(
* const res = await model.invoke(
* "What would be a good company name for a company that makes colorful socks?"

@@ -53,0 +53,0 @@ * );

@@ -42,3 +42,3 @@ import { OpenAI as OpenAIClient } from "openai";

*
* const res = await model.call(
* const res = await model.invoke(
* "What would be a good company name for a company that makes colorful socks?"

@@ -45,0 +45,0 @@ * );

@@ -35,3 +35,3 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai";

*
* const res = await model.call(
* const res = await model.invoke(
* "Question: What would be a good company name for a company that makes colorful socks?\nAnswer:"

@@ -38,0 +38,0 @@ * );

@@ -37,3 +37,3 @@ import { OpenAI as OpenAIClient } from "openai";

*
* const res = await model.call(
* const res = await model.invoke(
* "Question: What would be a good company name for a company that makes colorful socks?\nAnswer:"

@@ -40,0 +40,0 @@ * );

@@ -65,2 +65,13 @@ import { test, expect, jest } from "@jest/globals";

});
test("Test ChatOpenAI streaming logprobs", async () => {
const model = new ChatOpenAI({
maxTokens: 50,
modelName: "gpt-3.5-turbo",
streaming: true,
logprobs: true,
});
const res = await model.invoke("Print hello world.");
console.log(res.response_metadata.logprobs.content);
expect(res.response_metadata.logprobs.content.length).toBeGreaterThan(0);
});
test("Test ChatOpenAI tool calling with ToolMessages", async () => {

@@ -67,0 +78,0 @@ function getCurrentWeather(location) {

@@ -6,3 +6,3 @@ import { expect, test } from "@jest/globals";

const model = new OpenAIChat({ modelName: "gpt-3.5-turbo", maxTokens: 10 });
const res = await model.call("Print hello world");
const res = await model.invoke("Print hello world");
console.log({ res });

@@ -18,3 +18,3 @@ });

});
const res = await model.call("What is my name");
const res = await model.invoke("What is my name");
console.log({ res });

@@ -36,3 +36,3 @@ });

});
const res = await model.call("Print hello world");
const res = await model.invoke("Print hello world");
console.log({ res });

@@ -49,3 +49,3 @@ expect(nrNewTokens > 0).toBe(true);

const model = new OpenAIChat({ maxTokens: 5 });
const res = await model.call("Print hello world", { stop: ["world"] });
const res = await model.invoke("Print hello world", { stop: ["world"] });
console.log({ res });

@@ -55,3 +55,3 @@ });

const model = new OpenAIChat({ maxTokens: 5 });
await expect(() => model.call("Print hello world", {
await expect(() => model.invoke("Print hello world", {
timeout: 10,

@@ -62,3 +62,3 @@ })).rejects.toThrow();

const model = new OpenAIChat({ maxTokens: 5 });
await expect(() => model.call("Print hello world", {
await expect(() => model.invoke("Print hello world", {
timeout: 10,

@@ -71,3 +71,3 @@ })).rejects.toThrow();

await expect(() => {
const ret = model.call("Print hello world", {
const ret = model.invoke("Print hello world", {
signal: controller.signal,

@@ -83,3 +83,3 @@ });

await expect(() => {
const ret = model.call("Print hello world", {
const ret = model.invoke("Print hello world", {
signal: controller.signal,

@@ -86,0 +86,0 @@ });

@@ -11,3 +11,3 @@ import { test, expect } from "@jest/globals";

});
const res = await model.call("Print hello world");
const res = await model.invoke("Print hello world");
console.log({ res });

@@ -28,3 +28,3 @@ });

});
const res = await model.call("Print hello world", { stop: ["world"] });
const res = await model.invoke("Print hello world", { stop: ["world"] });
console.log({ res });

@@ -38,3 +38,3 @@ });

});
await expect(() => model.call("Print hello world", {
await expect(() => model.invoke("Print hello world", {
timeout: 10,

@@ -49,3 +49,3 @@ })).rejects.toThrow();

});
await expect(() => model.call("Print hello world", {
await expect(() => model.invoke("Print hello world", {
timeout: 10,

@@ -61,3 +61,3 @@ })).rejects.toThrow();

await expect(() => {
const ret = model.call("Print hello world", {
const ret = model.invoke("Print hello world", {
signal: controller.signal,

@@ -76,3 +76,3 @@ });

await expect(() => {
const ret = model.call("Print hello world", {
const ret = model.invoke("Print hello world", {
signal: controller.signal,

@@ -91,4 +91,4 @@ });

const res = await Promise.all([
model.call("Print hello world"),
model.call("Print hello world"),
model.invoke("Print hello world"),
model.invoke("Print hello world"),
]);

@@ -108,3 +108,3 @@ console.log({ res });

expect(model).toBeInstanceOf(OpenAIChat);
const res = await model.call("Print hello world");
const res = await model.invoke("Print hello world");
console.log({ res });

@@ -116,3 +116,3 @@ expect(typeof res).toBe("string");

expect(model).toBeInstanceOf(OpenAI);
const res = await model.call("Print hello world");
const res = await model.invoke("Print hello world");
console.log({ res });

@@ -124,3 +124,3 @@ expect(typeof res).toBe("string");

expect(model).toBeInstanceOf(OpenAI);
const res = await model.call("Print hello world");
const res = await model.invoke("Print hello world");
console.log({ res });

@@ -144,3 +144,3 @@ expect(typeof res).toBe("string");

});
const res = await model.call("Hello");
const res = await model.invoke("Hello");
console.log({ res });

@@ -163,3 +163,3 @@ expect(tokenUsage.promptTokens).toBe(1);

});
const res = await model.call("Print hello world");
const res = await model.invoke("Print hello world");
console.log({ res });

@@ -166,0 +166,0 @@ expect(nrNewTokens > 0).toBe(true);

{
"name": "@langchain/openai",
"version": "0.0.19",
"version": "0.0.20",
"description": "OpenAI integrations for LangChain.js",

@@ -42,3 +42,3 @@ "type": "module",

"dependencies": {
"@langchain/core": "~0.1.44",
"@langchain/core": "~0.1.45",
"js-tiktoken": "^1.0.7",

@@ -45,0 +45,0 @@ "openai": "^4.26.0",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc