New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in | Demo | Install
Socket

openpipe

Package Overview
Dependencies
Maintainers
2
Versions
70
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

openpipe - npm Package Compare versions

Comparing version 0.4.1 to 0.4.2

streaming-1220fb88.d.ts

12

index.d.ts
export { o as openai } from './openai.js';
import 'openai-beta';
import 'openai-beta/core';
import 'openai-beta/resources/chat/completions';
import './streaming-4ac89c3c.js';
import 'openai-beta/resources/chat';
import 'openai-beta/streaming';
import 'openai';
import 'openai/core';
import 'openai/resources/chat/completions';
import './streaming-1220fb88.js';
import 'openai/resources/chat';
import 'openai/streaming';

@@ -30,10 +30,16 @@ var __defProp = Object.defineProperty;

});
import * as openai from "openai-beta";
import { readEnv } from "openai-beta/core";
import * as openai from "openai";
import { readEnv } from "openai/core";
// src/openai/streaming.ts
import { Stream } from "openai-beta/streaming";
import { Stream } from "openai/streaming";
// src/openai/mergeChunks.ts
import { omit } from "lodash-es";
var omit = (obj, ...keys) => {
const ret = { ...obj };
for (const key of keys) {
delete ret[key];
}
return ret;
};
function mergeChunks(base, chunk) {

@@ -52,5 +58,8 @@ if (base === null) {

if (choice.delta?.function_call) {
const fnCall = baseChoice.message.function_call ?? {};
fnCall.name = (fnCall.name ?? "") + (choice.delta.function_call.name ?? "");
fnCall.arguments = (fnCall.arguments ?? "") + (choice.delta.function_call.arguments ?? "");
const fnCall = baseChoice.message.function_call ?? {
name: "",
arguments: ""
};
fnCall.name = fnCall.name + (choice.delta.function_call.name ?? "");
fnCall.arguments = fnCall.arguments + (choice.delta.function_call.arguments ?? "");
}

@@ -558,5 +567,5 @@ } else {

name: "openpipe-dev",
version: "0.4.1",
version: "0.4.2",
type: "module",
description: "Metrics and auto-evaluation for LLM calls",
description: "LLM metrics and inference",
scripts: {

@@ -595,5 +604,4 @@ build: "./build.sh",

"form-data": "^4.0.0",
"lodash-es": "^4.17.21",
"node-fetch": "^2.6.12",
"openai-beta": "npm:openai@4.0.0-beta.7",
openai: "^4.8.0",
"openai-legacy": "npm:openai@3.3.0"

@@ -600,0 +608,0 @@ },

@@ -1,7 +0,7 @@

import * as openai$1 from 'openai-beta';
import * as Core from 'openai-beta/core';
import { CompletionCreateParams, ChatCompletion, ChatCompletionChunk } from 'openai-beta/resources/chat/completions';
import { O as OpenPipeConfig, a as OpenPipeArgs, b as OpenPipeMeta, W as WrappedStream } from './streaming-4ac89c3c.js';
import { Stream } from 'openai-beta/streaming';
import 'openai-beta/resources/chat';
import * as openai$1 from 'openai';
import * as Core from 'openai/core';
import { ChatCompletionCreateParamsNonStreaming, ChatCompletion, ChatCompletionCreateParamsStreaming, ChatCompletionChunk, ChatCompletionCreateParamsBase } from 'openai/resources/chat/completions';
import { O as OpenPipeConfig, a as OpenPipeArgs, b as OpenPipeMeta, W as WrappedStream } from './streaming-1220fb88.js';
import { Stream } from 'openai/streaming';
import 'openai/resources/chat';

@@ -179,8 +179,9 @@ type ApiRequestOptions = {

_report(args: Parameters<DefaultService["report"]>[0]): Promise<void>;
_create(body: CompletionCreateParams.CreateChatCompletionRequestNonStreaming, options?: Core.RequestOptions): Promise<Core.APIResponse<ChatCompletion>>;
_create(body: CompletionCreateParams.CreateChatCompletionRequestStreaming, options?: Core.RequestOptions): Promise<Core.APIResponse<Stream<ChatCompletionChunk>>>;
create(body: CompletionCreateParams.CreateChatCompletionRequestNonStreaming & OpenPipeArgs, options?: Core.RequestOptions): Promise<Core.APIResponse<ChatCompletion & {
_create(body: ChatCompletionCreateParamsNonStreaming, options?: Core.RequestOptions): Core.APIPromise<ChatCompletion>;
_create(body: ChatCompletionCreateParamsStreaming, options?: Core.RequestOptions): Core.APIPromise<Stream<ChatCompletionChunk>>;
create(body: ChatCompletionCreateParamsNonStreaming & OpenPipeArgs, options?: Core.RequestOptions): Core.APIPromise<ChatCompletion & {
openpipe: OpenPipeMeta;
}>>;
create(body: CompletionCreateParams.CreateChatCompletionRequestStreaming & OpenPipeArgs, options?: Core.RequestOptions): Promise<Core.APIResponse<WrappedStream>>;
}>;
create(body: ChatCompletionCreateParamsStreaming & OpenPipeArgs, options?: Core.RequestOptions): Core.APIPromise<WrappedStream>;
create(body: ChatCompletionCreateParamsBase & OpenPipeArgs, options?: Core.RequestOptions): Core.APIPromise<Stream<ChatCompletionChunk> | ChatCompletion>;
}

@@ -187,0 +188,0 @@

@@ -21,10 +21,16 @@ var __accessCheck = (obj, member, msg) => {

// src/openai.ts
import * as openai from "openai-beta";
import { readEnv } from "openai-beta/core";
import * as openai from "openai";
import { readEnv } from "openai/core";
// src/openai/streaming.ts
import { Stream } from "openai-beta/streaming";
import { Stream } from "openai/streaming";
// src/openai/mergeChunks.ts
import { omit } from "lodash-es";
var omit = (obj, ...keys) => {
const ret = { ...obj };
for (const key of keys) {
delete ret[key];
}
return ret;
};
function mergeChunks(base, chunk) {

@@ -43,5 +49,8 @@ if (base === null) {

if (choice.delta?.function_call) {
const fnCall = baseChoice.message.function_call ?? {};
fnCall.name = (fnCall.name ?? "") + (choice.delta.function_call.name ?? "");
fnCall.arguments = (fnCall.arguments ?? "") + (choice.delta.function_call.arguments ?? "");
const fnCall = baseChoice.message.function_call ?? {
name: "",
arguments: ""
};
fnCall.name = fnCall.name + (choice.delta.function_call.name ?? "");
fnCall.arguments = fnCall.arguments + (choice.delta.function_call.arguments ?? "");
}

@@ -549,5 +558,5 @@ } else {

name: "openpipe-dev",
version: "0.4.1",
version: "0.4.2",
type: "module",
description: "Metrics and auto-evaluation for LLM calls",
description: "LLM metrics and inference",
scripts: {

@@ -586,5 +595,4 @@ build: "./build.sh",

"form-data": "^4.0.0",
"lodash-es": "^4.17.21",
"node-fetch": "^2.6.12",
"openai-beta": "npm:openai@4.0.0-beta.7",
openai: "^4.8.0",
"openai-legacy": "npm:openai@3.3.0"

@@ -591,0 +599,0 @@ },

@@ -1,2 +0,2 @@

import { ChatCompletion, ChatCompletionChunk } from 'openai-beta/resources/chat';
import { ChatCompletion, ChatCompletionChunk } from 'openai/resources/chat';

@@ -3,0 +3,0 @@ declare function mergeChunks(base: ChatCompletion | null, chunk: ChatCompletionChunk): ChatCompletion;

// src/openai/mergeChunks.ts
import { omit } from "lodash-es";
var omit = (obj, ...keys) => {
const ret = { ...obj };
for (const key of keys) {
delete ret[key];
}
return ret;
};
function mergeChunks(base, chunk) {

@@ -16,5 +22,8 @@ if (base === null) {

if (choice.delta?.function_call) {
const fnCall = baseChoice.message.function_call ?? {};
fnCall.name = (fnCall.name ?? "") + (choice.delta.function_call.name ?? "");
fnCall.arguments = (fnCall.arguments ?? "") + (choice.delta.function_call.arguments ?? "");
const fnCall = baseChoice.message.function_call ?? {
name: "",
arguments: ""
};
fnCall.name = fnCall.name + (choice.delta.function_call.name ?? "");
fnCall.arguments = fnCall.arguments + (choice.delta.function_call.arguments ?? "");
}

@@ -21,0 +30,0 @@ } else {

@@ -1,3 +0,3 @@

import 'openai-beta/resources/chat';
import 'openai-beta/streaming';
export { W as WrappedStream } from '../streaming-4ac89c3c.js';
import 'openai/resources/chat';
import 'openai/streaming';
export { W as WrappedStream } from '../streaming-1220fb88.js';
// src/openai/streaming.ts
import { Stream } from "openai-beta/streaming";
import { Stream } from "openai/streaming";
// src/openai/mergeChunks.ts
import { omit } from "lodash-es";
var omit = (obj, ...keys) => {
const ret = { ...obj };
for (const key of keys) {
delete ret[key];
}
return ret;
};
function mergeChunks(base, chunk) {

@@ -19,5 +25,8 @@ if (base === null) {

if (choice.delta?.function_call) {
const fnCall = baseChoice.message.function_call ?? {};
fnCall.name = (fnCall.name ?? "") + (choice.delta.function_call.name ?? "");
fnCall.arguments = (fnCall.arguments ?? "") + (choice.delta.function_call.arguments ?? "");
const fnCall = baseChoice.message.function_call ?? {
name: "",
arguments: ""
};
fnCall.name = fnCall.name + (choice.delta.function_call.name ?? "");
fnCall.arguments = fnCall.arguments + (choice.delta.function_call.arguments ?? "");
}

@@ -24,0 +33,0 @@ } else {

{
"name": "openpipe",
"version": "0.4.1",
"version": "0.4.2",
"type": "module",
"description": "Metrics and auto-evaluation for LLM calls",
"description": "LLM metrics and inference",
"main": "./index.cjs",

@@ -17,5 +17,4 @@ "publishConfig": {

"form-data": "^4.0.0",
"lodash-es": "^4.17.21",
"node-fetch": "^2.6.12",
"openai-beta": "npm:openai@4.0.0-beta.7",
"openai": "^4.8.0",
"openai-legacy": "npm:openai@3.3.0"

@@ -38,7 +37,3 @@ },

},
"scripts": {
"build": "./build.sh",
"build-update": "./build.sh && ./update-app.sh",
"test": "vitest"
},
"access": "public",
"module": "./index.js",

@@ -59,3 +54,8 @@ "types": "./index.d.ts",

}
},
"scripts": {
"build": "./build.sh",
"build-update": "./build.sh && ./update-app.sh",
"test": "vitest"
}
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket | Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc