🚀 Big News: Socket Acquires Coana to Bring Reachability Analysis to Every AppSec Team. Learn more
Socket
Book a Demo · Install · Sign in
Socket

langsmith

Package Overview
Dependencies
Maintainers
5
Versions
284
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

langsmith - npm Package Compare versions

Comparing version

to
0.1.66

2

dist/index.d.ts

@@ -5,2 +5,2 @@ export { Client, type ClientConfig } from "./client.js";

export { overrideFetchImplementation } from "./singletons/fetch.js";
export declare const __version__ = "0.1.65";
export declare const __version__ = "0.1.66";

@@ -5,2 +5,2 @@ export { Client } from "./client.js";

// Update using yarn bump-version
export const __version__ = "0.1.65";
export const __version__ = "0.1.66";

@@ -394,2 +394,73 @@ export interface TracerSession {

}
/**
 * Breakdown of input (prompt) token counts.
 *
 * Does *not* need to sum to the full input token count. Does *not* need to have all keys.
 */
export type InputTokenDetails = {
/**
 * Audio input tokens.
 */
audio?: number;
/**
 * Input tokens that were cached and there was a cache hit.
 *
 * Since there was a cache hit, the tokens were read from the cache.
 * More precisely, the model state given these tokens was read from the cache.
 */
cache_read?: number;
/**
 * Input tokens that were cached and there was a cache miss.
 *
 * Since there was a cache miss, the cache was created from these tokens.
 */
cache_creation?: number;
};
/**
 * Breakdown of output (completion) token counts.
 *
 * Does *not* need to sum to full output token count. Does *not* need to have all keys.
 */
export type OutputTokenDetails = {
/**
 * Audio output tokens.
 */
audio?: number;
/**
 * Reasoning output tokens.
 *
 * Tokens generated by the model in a chain of thought process (i.e. by
 * OpenAI's o1 models) that are not returned as part of model output.
 */
reasoning?: number;
};
/**
 * Usage metadata for a message, such as token counts.
 *
 * Mirrors the shape LangChain uses for per-message token accounting so that
 * traced runs can report token usage uniformly across providers.
 */
export type UsageMetadata = {
/**
 * Count of input (or prompt) tokens. Sum of all input token types.
 */
input_tokens: number;
/**
 * Count of output (or completion) tokens. Sum of all output token types.
 */
output_tokens: number;
/**
 * Total token count. Sum of input_tokens + output_tokens.
 */
total_tokens: number;
/**
 * Breakdown of input token counts.
 *
 * Does *not* need to sum to full input token count. Does *not* need to have all keys.
 */
input_token_details?: InputTokenDetails;
/**
 * Breakdown of output token counts.
 *
 * Does *not* need to sum to full output token count. Does *not* need to have all keys.
 */
output_token_details?: OutputTokenDetails;
};
export {};
import { RunTreeConfig } from "./run_trees.js";
import { InvocationParamsSchema } from "./schemas.js";
import { InvocationParamsSchema, KVMap } from "./schemas.js";
import { TraceableFunction } from "./singletons/types.js";

@@ -31,4 +31,22 @@ /**

getInvocationParams?: (...args: Parameters<Func>) => InvocationParamsSchema | undefined;
/**
* Apply transformations to the inputs before logging.
* This function should NOT mutate the inputs.
* `processInputs` is not inherited by nested traceable functions.
*
* @param inputs Key-value map of the function inputs.
* @returns Transformed key-value map
*/
processInputs?: (inputs: Readonly<KVMap>) => KVMap;
/**
* Apply transformations to the outputs before logging.
* This function should NOT mutate the outputs.
* `processOutputs` is not inherited by nested traceable functions.
*
* @param outputs Key-value map of the function outputs
* @returns Transformed key-value map
*/
processOutputs?: (outputs: Readonly<KVMap>) => KVMap;
}): TraceableFunction<Func>;
export { getCurrentRunTree, isTraceableFunction, withRunTree, ROOT, } from "./singletons/traceable.js";
export type { RunTreeLike, TraceableFunction } from "./singletons/types.js";

@@ -8,26 +8,46 @@ import { AsyncLocalStorage } from "node:async_hooks";

AsyncLocalStorageProviderSingleton.initializeGlobalInstance(new AsyncLocalStorage());
// NOTE(review): this span is a version-diff rendering (0.1.65 vs 0.1.66) with
// removed and added lines interleaved and no +/- markers; it is not runnable
// source as-is. The 0.1.66 version normalizes a traceable call's positional
// args into a single KV map, then applies the caller-supplied `processInputs`
// transform before the inputs are attached to the run for logging.
const handleRunInputs = (rawInputs) => { // removed (0.1.65 signature)
const handleRunInputs = (rawInputs, processInputs) => { // added: accepts input transform
const firstInput = rawInputs[0];
let inputs; // added: accumulate instead of early-returning so the transform runs in one place
if (firstInput == null) {
return {}; // removed
inputs = {}; // added
}
if (rawInputs.length > 1) { // removed
return { args: rawInputs }; // removed
else if (rawInputs.length > 1) { // added: multiple args are wrapped as { args }
inputs = { args: rawInputs }; // added
}
if (isKVMap(firstInput)) { // removed
return firstInput; // removed
else if (isKVMap(firstInput)) { // added: a single KV-map arg is used directly
inputs = firstInput; // added
}
return { input: firstInput }; // removed
else { // added: any other single value is wrapped as { input }
inputs = { input: firstInput }; // added
}
try {
return processInputs(inputs); // transform is user-supplied code and may throw
}
catch (e) {
// Best-effort: never fail tracing because the user's transform threw —
// log the error and fall back to the untransformed inputs.
console.error("Error occurred during processInputs. Sending raw inputs:", e);
return inputs;
}
};
// NOTE(review): like handleRunInputs above in this capture, this span is a
// version-diff rendering with removed/added lines interleaved — not runnable
// as-is. The 0.1.66 version wraps a run's raw return value into a KV map and
// then applies the caller-supplied `processOutputs` transform before logging.
const handleRunOutputs = (rawOutputs) => { // removed (0.1.65 signature)
const handleRunOutputs = (rawOutputs, processOutputs) => { // added: accepts output transform
let outputs; // added
if (isKVMap(rawOutputs)) {
return rawOutputs; // removed
outputs = rawOutputs; // added: KV-map results pass through unchanged
}
return { outputs: rawOutputs }; // removed
else { // added: any other value is wrapped as { outputs }
outputs = { outputs: rawOutputs }; // added
}
try {
return processOutputs(outputs); // transform is user-supplied code and may throw
}
catch (e) {
// Best-effort: never fail tracing because the user's transform threw —
// log the error and fall back to the untransformed outputs.
console.error("Error occurred during processOutputs. Sending raw outputs:", e);
return outputs;
}
};
const getTracingRunTree = (runTree, inputs, getInvocationParams) => {
const getTracingRunTree = (runTree, inputs, getInvocationParams, processInputs) => {
if (!isTracingEnabled(runTree.tracingEnabled)) {
return undefined;
}
runTree.inputs = handleRunInputs(inputs);
runTree.inputs = handleRunInputs(inputs, processInputs);
const invocationParams = getInvocationParams?.(...inputs);

@@ -194,3 +214,5 @@ if (invocationParams != null) {

export function traceable(wrappedFunc, config) {
const { aggregator, argsConfigPath, __finalTracedIteratorKey, ...runTreeConfig } = config ?? {};
const { aggregator, argsConfigPath, __finalTracedIteratorKey, processInputs, processOutputs, ...runTreeConfig } = config ?? {};
const processInputsFn = processInputs ?? ((x) => x);
const processOutputsFn = processOutputs ?? ((x) => x);
const traceableFunc = (...args) => {

@@ -253,3 +275,3 @@ let ensuredConfig;

return [
getTracingRunTree(RunTree.fromRunnableConfig(firstArg, ensuredConfig), restArgs, config?.getInvocationParams),
getTracingRunTree(RunTree.fromRunnableConfig(firstArg, ensuredConfig), restArgs, config?.getInvocationParams, processInputsFn),
restArgs,

@@ -270,3 +292,3 @@ ];

? new RunTree(ensuredConfig)
: firstArg.createChild(ensuredConfig), restArgs, config?.getInvocationParams);
: firstArg.createChild(ensuredConfig), restArgs, config?.getInvocationParams, processInputsFn);
return [currentRunTree, [currentRunTree, ...restArgs]];

@@ -279,7 +301,7 @@ }

return [
getTracingRunTree(prevRunFromStore.createChild(ensuredConfig), processedArgs, config?.getInvocationParams),
getTracingRunTree(prevRunFromStore.createChild(ensuredConfig), processedArgs, config?.getInvocationParams, processInputsFn),
processedArgs,
];
}
const currentRunTree = getTracingRunTree(new RunTree(ensuredConfig), processedArgs, config?.getInvocationParams);
const currentRunTree = getTracingRunTree(new RunTree(ensuredConfig), processedArgs, config?.getInvocationParams, processInputsFn);
// If a context var is set by LangChain outside of a traceable,

@@ -322,3 +344,3 @@ // it will be an object with a single property and we should copy

finished = true;
await currentRunTree?.end(handleRunOutputs(await handleChunks(chunks)));
await currentRunTree?.end(handleRunOutputs(await handleChunks(chunks), processOutputsFn));
await handleEnd();

@@ -335,3 +357,3 @@ controller.close();

await currentRunTree?.end(undefined, "Cancelled");
await currentRunTree?.end(handleRunOutputs(await handleChunks(chunks)));
await currentRunTree?.end(handleRunOutputs(await handleChunks(chunks), processOutputsFn));
await handleEnd();

@@ -366,3 +388,3 @@ return reader.cancel(reason);

await currentRunTree?.end(undefined, "Cancelled");
await currentRunTree?.end(handleRunOutputs(await handleChunks(chunks)));
await currentRunTree?.end(handleRunOutputs(await handleChunks(chunks), processOutputsFn));
await handleEnd();

@@ -452,3 +474,3 @@ }

return memo;
}, []))));
}, [])), processOutputsFn));
await handleEnd();

@@ -468,3 +490,3 @@ }

try {
await currentRunTree?.end(handleRunOutputs(rawOutput));
await currentRunTree?.end(handleRunOutputs(rawOutput, processOutputsFn));
await handleEnd();

@@ -471,0 +493,0 @@ }

@@ -113,2 +113,39 @@ import { isTraceableFunction, traceable } from "../traceable.js";

};
/**
 * Normalizes an OpenAI ChatCompletion response for tracing: converts the
 * OpenAI `usage` field into LangSmith's `usage_metadata` shape and strips
 * the original `usage` key from the logged output.
 *
 * @param outputs Raw ChatCompletion response object (treated as a KV map).
 * @returns A shallow copy of `outputs` with `usage` removed and, when token
 *   usage was reported, a `usage_metadata` breakdown attached.
 */
function processChatCompletion(outputs) {
    const chatCompletion = outputs;
    // Shallow-copy so the caller's response object is never mutated.
    const result = { ...chatCompletion };
    const usage = chatCompletion.usage;
    if (usage) {
        // Bug fix: the original guards used `!== null`, but OpenAI omits the
        // `*_tokens_details` fields entirely (undefined) when not applicable.
        // `undefined !== null` is true, so the spreads added keys with
        // `undefined` values, which in turn made the Object.keys(...) checks
        // below attach junk `*_token_details` objects. `!= null` excludes
        // both null and undefined.
        const inputTokenDetails = {
            ...(usage.prompt_tokens_details?.audio_tokens != null && {
                audio: usage.prompt_tokens_details.audio_tokens,
            }),
            ...(usage.prompt_tokens_details?.cached_tokens != null && {
                cache_read: usage.prompt_tokens_details.cached_tokens,
            }),
        };
        const outputTokenDetails = {
            ...(usage.completion_tokens_details?.audio_tokens != null && {
                audio: usage.completion_tokens_details.audio_tokens,
            }),
            ...(usage.completion_tokens_details?.reasoning_tokens != null && {
                reasoning: usage.completion_tokens_details.reasoning_tokens,
            }),
        };
        result.usage_metadata = {
            input_tokens: usage.prompt_tokens ?? 0,
            output_tokens: usage.completion_tokens ?? 0,
            total_tokens: usage.total_tokens ?? 0,
            // Only attach detail breakdowns that actually carry data.
            ...(Object.keys(inputTokenDetails).length > 0 && {
                input_token_details: inputTokenDetails,
            }),
            ...(Object.keys(outputTokenDetails).length > 0 && {
                output_token_details: outputTokenDetails,
            }),
        };
    }
    delete result.usage;
    return result;
}
/**

@@ -209,2 +246,3 @@ * Wraps an OpenAI client's completion methods, enabling automatic LangSmith

},
processOutputs: processChatCompletion,
...options,

@@ -211,0 +249,0 @@ }),

{
"name": "langsmith",
"version": "0.1.65",
"version": "0.1.66",
"description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",

@@ -5,0 +5,0 @@ "packageManager": "yarn@1.22.19",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet