Socket
Socket
Sign in · Demo · Install

langsmith

Package Overview
Dependencies
Maintainers
5
Versions
151
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

langsmith - npm Package Compare versions

Comparing version 0.1.62 to 0.1.63

2

dist/index.d.ts

@@ -5,2 +5,2 @@ export { Client, type ClientConfig } from "./client.js";

export { overrideFetchImplementation } from "./singletons/fetch.js";
export declare const __version__ = "0.1.62";
export declare const __version__ = "0.1.63";

@@ -5,2 +5,2 @@ export { Client } from "./client.js";

// Update using yarn bump-version
export const __version__ = "0.1.62";
export const __version__ = "0.1.63";

@@ -5,2 +5,9 @@ import { OpenAI } from "openai";

type OpenAIType = {
beta?: {
chat?: {
completions?: {
parse?: (...args: any[]) => any;
};
};
};
chat: {

@@ -7,0 +14,0 @@ completions: {

@@ -142,49 +142,99 @@ import { isTraceableFunction, traceable } from "../traceable.js";

}
openai.chat.completions.create = traceable(openai.chat.completions.create.bind(openai.chat.completions), {
name: "ChatOpenAI",
run_type: "llm",
aggregator: chatAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload) => {
if (typeof payload !== "object" || payload == null)
return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload;
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
undefined;
return {
ls_provider: "openai",
ls_model_type: "chat",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
// Some internal OpenAI methods call each other, so we need to preserve original
// OpenAI methods.
const tracedOpenAIClient = { ...openai };
if (openai.beta &&
openai.beta.chat &&
openai.beta.chat.completions &&
typeof openai.beta.chat.completions.parse === "function") {
tracedOpenAIClient.beta = {
...openai.beta,
chat: {
...openai.beta.chat,
completions: {
...openai.beta.chat.completions,
parse: traceable(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), {
name: "ChatOpenAI",
run_type: "llm",
aggregator: chatAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload) => {
if (typeof payload !== "object" || payload == null)
return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload;
const ls_stop = (typeof params.stop === "string"
? [params.stop]
: params.stop) ?? undefined;
return {
ls_provider: "openai",
ls_model_type: "chat",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
},
...options,
}),
},
},
};
}
tracedOpenAIClient.chat = {
...openai.chat,
completions: {
...openai.chat.completions,
create: traceable(openai.chat.completions.create.bind(openai.chat.completions), {
name: "ChatOpenAI",
run_type: "llm",
aggregator: chatAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload) => {
if (typeof payload !== "object" || payload == null)
return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload;
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
undefined;
return {
ls_provider: "openai",
ls_model_type: "chat",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
},
...options,
}),
},
...options,
});
openai.completions.create = traceable(openai.completions.create.bind(openai.completions), {
name: "OpenAI",
run_type: "llm",
aggregator: textAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload) => {
if (typeof payload !== "object" || payload == null)
return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload;
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
undefined;
return {
ls_provider: "openai",
ls_model_type: "llm",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
},
...options,
});
return openai;
};
tracedOpenAIClient.completions = {
...openai.completions,
create: traceable(openai.completions.create.bind(openai.completions), {
name: "OpenAI",
run_type: "llm",
aggregator: textAggregator,
argsConfigPath: [1, "langsmithExtra"],
getInvocationParams: (payload) => {
if (typeof payload !== "object" || payload == null)
return undefined;
// we can safely do so, as the types are not exported in TSC
const params = payload;
const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
undefined;
return {
ls_provider: "openai",
ls_model_type: "llm",
ls_model_name: params.model,
ls_max_tokens: params.max_tokens ?? undefined,
ls_temperature: params.temperature ?? undefined,
ls_stop,
};
},
...options,
}),
};
return tracedOpenAIClient;
};
{
"name": "langsmith",
"version": "0.1.62",
"version": "0.1.63",
"description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",

@@ -130,3 +130,3 @@ "packageManager": "yarn@1.22.19",

"langchain": "^0.3.2",
"openai": "^4.38.5",
"openai": "^4.67.3",
"prettier": "^2.8.8",

@@ -133,0 +133,0 @@ "ts-jest": "^29.1.0",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc