@empiricalrun/llm - npm package version comparison

Comparing version 0.9.5 to 0.9.6

CHANGELOG.md

 # @empiricalrun/llm
+## 0.9.6
+### Patch Changes
+- e20abfb: feat: query now returns structured outputs
 ## 0.9.5

dist/vision/query/index.d.ts

@@ -1,2 +1,5 @@
-export declare function query(base64Image: string, instruction: string): Promise<string>;
+import { z, ZodType } from "zod";
+type ExtractType<T> = T extends ZodType ? z.infer<T> : never;
+export declare function query<T extends z.ZodType>(base64Image: string, instruction: string, responseFormat?: T): Promise<ExtractType<T>>;
+export {};
 //# sourceMappingURL=index.d.ts.map
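
The reshaped declaration makes query generic over an optional zod schema: at runtime the default responseFormat is z.string() (see the compiled index.js below), so existing string-returning calls keep working, while passing a schema types the result as that schema's inferred shape. A minimal usage sketch against this signature; the import subpath, file name, and schema are illustrative assumptions, not taken from the package:

import { readFileSync } from "node:fs";
import { z } from "zod";
// Assumed subpath import based on the dist/ layout; the package's real export map may differ.
import { query } from "@empiricalrun/llm/vision/query";

const base64Image = readFileSync("screenshot.png").toString("base64");

// No schema: the runtime default of z.string() yields a plain string answer.
const caption = await query(base64Image, "Describe this screenshot in one sentence.");

// With a schema: the answer is validated and typed as z.infer<typeof PriceInfo>.
const PriceInfo = z.object({
  currency: z.string(),
  amount: z.number(),
});
const price = await query(base64Image, "Extract the listed price.", PriceInfo);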

dist/vision/query/index.js

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.query = void 0;
+const zod_1 = require("openai/helpers/zod");
+const zod_2 = require("zod");
 const __1 = require("../..");
 const utils_1 = require("../utils");
-async function query(base64Image, instruction) {
+async function query(base64Image, instruction, responseFormat = zod_2.z.string()) {
     const llm = new __1.LLM({

@@ -13,2 +15,16 @@ provider: "openai",

     think it through, and then respond with the "answer".`;
+    const isResponseString = responseFormat instanceof zod_2.z.ZodString;
+    let extendedResponseFormat;
+    if (isResponseString) {
+        extendedResponseFormat = zod_2.z.object({
+            explanation: zod_2.z.string(),
+            answer: zod_2.z.string(),
+        });
+    }
+    else {
+        extendedResponseFormat = zod_2.z.object({
+            explanation: zod_2.z.string(),
+            answer: responseFormat,
+        });
+    }
     const llmResponse = await llm.createChatCompletion({

@@ -39,38 +55,12 @@ messages: [

         temperature: 0.5,
-        tool_choice: {
-            type: "function",
-            function: { name: "send_response" },
-        },
-        },
-        tools: [
-            {
-                type: "function",
-                function: {
-                    name: "send_response",
-                    description: "Use this tool to send your response.",
-                    parameters: {
-                        type: "object",
-                        properties: {
-                            explanation: {
-                                type: "string",
-                                description: "Your explanation to find the answer.",
-                            },
-                            answer: {
-                                type: "string",
-                                description: "A precise and succint answer to the extract text instruction.",
-                            },
-                        },
-                        required: ["explanation", "answer"],
-                    },
-                },
-            },
-        ],
+        responseFormat: (0, zod_1.zodResponseFormat)(extendedResponseFormat, "your_response"),
     });
-    if (!llmResponse) {
-        throw new Error("Failed to extract text from image");
+    if (!llmResponse || !llmResponse.content) {
+        throw new Error("Query failed: no response content from LLM");
     }
-    const response = llmResponse.tool_calls[0];
-    const { answer } = JSON.parse(response.function.arguments);
-    return answer;
+    const response = llmResponse.content;
+    const jsonData = JSON.parse(response);
+    return responseFormat.parse(jsonData.answer);
 }
 exports.query = query;
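
The implementation change behind this diff: 0.9.5 forced a send_response tool call and read the answer out of tool_calls[0].function.arguments, while 0.9.6 wraps the caller's schema in an { explanation, answer } object, sends it via zodResponseFormat from openai/helpers/zod, and validates the returned content with responseFormat.parse. A sketch of that underlying pattern using the OpenAI SDK directly rather than the package's LLM wrapper; the model name, prompt, and answer schema are placeholders:

import OpenAI from "openai";
import { zodResponseFormat } from "openai/helpers/zod";
import { z } from "zod";

// Keep a free-form "explanation" alongside the typed "answer", mirroring the diff above.
const extendedResponseFormat = z.object({
  explanation: z.string(),
  answer: z.object({ currency: z.string(), amount: z.number() }),
});

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini", // placeholder model
  messages: [{ role: "user", content: "Extract the listed price from this text: ..." }],
  response_format: zodResponseFormat(extendedResponseFormat, "your_response"),
});

// The structured output arrives as JSON text in message.content.
const parsed = extendedResponseFormat.parse(
  JSON.parse(completion.choices[0].message.content ?? "{}"),
);
console.log(parsed.answer);
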
package.json

 {
   "name": "@empiricalrun/llm",
-  "version": "0.9.5",
+  "version": "0.9.6",
   "main": "dist/index.js",

@@ -29,3 +29,4 @@ "exports": {

     "openai": "^4.67.0",
-    "portkey-ai": "^1.3.2"
+    "portkey-ai": "^1.3.2",
+    "zod": "^3.23.8"
   },


