@empiricalrun/llm - npm Package Compare versions

Comparing version 0.17.2 to 0.17.3

CHANGELOG.md
# @empiricalrun/llm
## 0.17.3
### Patch Changes
- 387f475: chore: move more types to shared-types package
- 587532f: feat: move to parallel tool calls for claude and openai models
## 0.17.2

@@ -4,0 +11,0 @@


dist/chat/claude/index.d.ts
  import type { Anthropic } from "@anthropic-ai/sdk";
+ import { PendingToolCall, ToolResult } from "@empiricalrun/shared-types";
  import type OpenAI from "openai";
  import { TraceClient } from "../..";
- import { PendingToolCall, ToolResult } from "../tools";
  import type { IChatModel } from "../types";

@@ -6,0 +6,0 @@ export type AnthropicMessageTypeV2 = {

@@ -45,6 +45,2 @@ "use strict";

  max_tokens: 8000,
- tool_choice: {
-   disable_parallel_tool_use: true,
-   type: "auto",
- },
  tools: allTools,

@@ -51,0 +47,0 @@ });
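For context on the removed `disable_parallel_tool_use` flag above, here is a minimal sketch, not taken from this package, of an Anthropic Messages API call that leaves parallel tool use enabled (the SDK default) and collects every `tool_use` block the model returns. The model id and the `get_weather` tool are placeholder assumptions.

```ts
import Anthropic from "@anthropic-ai/sdk";

const anthropic = new Anthropic(); // reads ANTHROPIC_API_KEY from the environment

// Placeholder tool definition; the package builds its own list (`allTools`).
const tools: Anthropic.Tool[] = [
  {
    name: "get_weather",
    description: "Get the current weather for a city",
    input_schema: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
];

const response = await anthropic.messages.create({
  model: "claude-sonnet-4-20250514", // placeholder model id
  max_tokens: 8000,
  // Omitting `disable_parallel_tool_use` (or setting it to false) lets Claude
  // request several tool calls in a single assistant turn.
  tool_choice: { type: "auto" },
  tools,
  messages: [{ role: "user", content: "What is the weather in Paris and Berlin?" }],
});

// With parallel tool use enabled, the content array can contain multiple
// `tool_use` blocks; each one needs a matching `tool_result` in the next turn.
const toolUses = response.content.filter(
  (block): block is Anthropic.ToolUseBlock => block.type === "tool_use",
);
console.log(toolUses.map((t) => t.name));
```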

@@ -0,4 +1,4 @@

+ import { PendingToolCall, ToolResult } from "@empiricalrun/shared-types";
  import OpenAI from "openai";
  import { TraceClient } from "../..";
- import { PendingToolCall, ToolResult } from "../tools";
  import type { IChatModel } from "../types";

@@ -5,0 +5,0 @@ import { GeminiMessageType } from "./utils";

@@ -1,2 +0,1 @@

export type { Tool, ToolExecute } from "./tools";
export { convertOpenAISchemaToAnthropic, zodToOpenAITool } from "./tools";

@@ -3,0 +2,0 @@ export { getProviderForModel } from "./tools";

@@ -72,3 +72,3 @@ "use strict";

  store: false,
- parallel_tool_calls: false,
+ parallel_tool_calls: true,
  tools: params.tools.map(chatCompletionToolToFunctionTool),

@@ -75,0 +75,0 @@ instructions: systemPrompt,
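The `parallel_tool_calls` flip above is the OpenAI-side half of the same changelog entry. A minimal sketch, using the Chat Completions API for brevity (the compiled code above appears to use the Responses API, given `instructions` and `store`); the model id and tool definition are placeholder assumptions.

```ts
import OpenAI from "openai";

const openai = new OpenAI(); // reads OPENAI_API_KEY from the environment

const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini", // placeholder model id
  parallel_tool_calls: true, // allow several tool calls in one assistant turn
  tools: [
    {
      type: "function",
      function: {
        name: "get_weather",
        description: "Get the current weather for a city",
        parameters: {
          type: "object",
          properties: { city: { type: "string" } },
          required: ["city"],
        },
      },
    },
  ],
  messages: [{ role: "user", content: "What is the weather in Paris and Berlin?" }],
});

// With parallel tool calls enabled, `tool_calls` may hold more than one entry;
// each needs its own `role: "tool"` result message before the next model turn.
for (const call of completion.choices[0].message.tool_calls ?? []) {
  if (call.type === "function") {
    console.log(call.function.name, call.function.arguments);
  }
}
```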

@@ -6,7 +6,5 @@ import Anthropic from "@anthropic-ai/sdk";

export declare function convertOpenAISchemaToAnthropic(openaiSchema: OpenAI.Chat.Completions.ChatCompletionTool): Anthropic.Tool;
export type { Tool, ToolExecute } from "./types";
export { zodToOpenAITool } from "./zod-schema";
export type { PendingToolCall, ToolResult } from "@empiricalrun/shared-types";
export declare function convertOpenAISchemaToGeminiTool(openaiSchema: OpenAI.Chat.Completions.ChatCompletionTool): FunctionDeclaration;
export declare function getProviderForModel(modelId: SupportedChatModels): string | undefined;
//# sourceMappingURL=index.d.ts.map
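The declarations above cover the package's schema helpers. As a hedged illustration of `convertOpenAISchemaToAnthropic`, based only on the declared signature: the tool literal below is hand-written, and the expectation that `function.parameters` ends up as Anthropic's `input_schema` is an assumption, not something this diff documents.

```ts
import type { Anthropic } from "@anthropic-ai/sdk";
import type OpenAI from "openai";

// Signature copied from the declaration above. The concrete import path for the
// helper depends on the package's "exports" map, which this diff does not show.
declare function convertOpenAISchemaToAnthropic(
  openaiSchema: OpenAI.Chat.Completions.ChatCompletionTool,
): Anthropic.Tool;

// A hand-written OpenAI-style (Chat Completions) tool definition.
const openaiTool: OpenAI.Chat.Completions.ChatCompletionTool = {
  type: "function",
  function: {
    name: "get_weather",
    description: "Get the current weather for a city",
    parameters: {
      type: "object",
      properties: { city: { type: "string" } },
      required: ["city"],
    },
  },
};

// Presumably the JSON schema under `function.parameters` becomes `input_schema`
// on the Anthropic side (assumption based on the two type shapes).
const anthropicTool: Anthropic.Tool = convertOpenAISchemaToAnthropic(openaiTool);
```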

@@ -1,2 +0,3 @@

- import { LangfuseGenerationClient, LangfuseSpanClient, LangfuseTraceClient } from "langfuse";
+ import { TraceClient } from "@empiricalrun/shared-types";
+ import { LangfuseGenerationClient } from "langfuse";
  import OpenAI from "openai";

@@ -6,3 +7,2 @@ import { compilePrompt } from "./prompts/lib";

  import { LLMModel, LLMProvider, ModelParameters } from "./types";
- type TraceClient = LangfuseTraceClient | LangfuseSpanClient;
  export declare class LLM {

@@ -9,0 +9,0 @@ private _trace;
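The two hunks above replace the local `TraceClient` alias with the one exported by `@empiricalrun/shared-types`. A tiny sketch of what downstream code looks like after the move; the assumption that the shared alias still resolves to the same Langfuse union is mine, inferred from the removed line.

```ts
// Previously declared locally in dist/index.d.ts:
//   type TraceClient = LangfuseTraceClient | LangfuseSpanClient;
// Now imported from the shared package instead (its exact definition is not
// shown in this diff; assumed to match the removed alias).
import type { TraceClient } from "@empiricalrun/shared-types";

// Hypothetical helper: call sites that only pass the trace client through
// keep compiling unchanged after the move.
function withTrace<T>(trace: TraceClient, fn: (t: TraceClient) => T): T {
  return fn(trace);
}
```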

  {
    "name": "@empiricalrun/llm",
-   "version": "0.17.2",
+   "version": "0.17.3",
    "main": "dist/index.js",

@@ -51,3 +51,3 @@ "exports": {

"@types/async-retry": "^1.4.8",
"@empiricalrun/shared-types": "0.4.0"
"@empiricalrun/shared-types": "0.4.1"
},

@@ -54,0 +54,0 @@ "scripts": {

Sorry, the diffs of the remaining 7 files are not supported yet.