New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

portkey-ai

Package Overview
Dependencies
Maintainers
1
Versions
47
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

portkey-ai - npm Package Compare versions

Comparing version 1.1.2 to 1.1.3

2

dist/package.json
{
"name": "portkey-ai",
"version": "1.1.2",
"version": "1.1.3",
"description": "Node client library for the Portkey API",

@@ -5,0 +5,0 @@ "types": "./src/index.d.ts",

@@ -5,18 +5,2 @@ export interface RetrySettings {

}
export interface Constructs {
provider?: string;
api_key?: string;
virtual_key?: string;
cache?: boolean;
cache_age?: number;
cache_status?: string;
cache_force_refresh?: boolean;
trace_id?: string;
metadata?: Record<string, any>;
weight?: number;
retry?: RetrySettings;
deployment_id?: string;
resource_name?: string;
api_version?: string;
}
export interface Function {

@@ -49,6 +33,2 @@ name: string;

}
export interface ConversationInput {
prompt?: string;
messages?: Array<Message>;
}
export interface Message {

@@ -58,5 +38,1 @@ role: string;

}
export interface LLMOptions extends Constructs, ConversationInput, ModelParams {
override_params?: Record<string, any>;
}
export declare const ModelParamsList: string[];
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.ModelParamsList = void 0;
exports.ModelParamsList = [
"model", "suffix", "max_tokens", "temperature", "top_k", "top_p", "n", "stop_sequences", "timeout", "functions", "function_call", "logprobs", "echo", "stop", "presence_penalty", "frequency_penalty", "best_of", "logit_bias", "user", "organization"
];
//# sourceMappingURL=portkeyConstructs.js.map

@@ -1,4 +0,1 @@

export interface RetrySettings {

@@ -9,20 +6,2 @@ attempts: number;

export interface Constructs {
provider?: string;
api_key?: string;
virtual_key?: string;
cache?: boolean;
cache_age?: number;
cache_status?: string;
cache_force_refresh?: boolean;
trace_id?: string;
metadata?: Record<string, any>;
weight?: number;
retry?: RetrySettings;
deployment_id?: string;
resource_name?: string;
api_version?: string;
}
export interface Function {

@@ -34,3 +13,2 @@ name: string;

export interface ModelParams {

@@ -59,7 +37,2 @@ model?: string;

export interface ConversationInput {
prompt?: string;
messages?: Array<Message>
}
export interface Message {

@@ -69,10 +42,1 @@ role: string

}
export interface LLMOptions extends Constructs, ConversationInput, ModelParams {
override_params?: Record<string, any>
}
export const ModelParamsList = [
"model", "suffix", "max_tokens", "temperature", "top_k", "top_p", "n", "stop_sequences", "timeout", "functions", "function_call", "logprobs", "echo", "stop", "presence_penalty", "frequency_penalty", "best_of", "logit_bias", "user", "organization"
]

@@ -1,2 +0,1 @@

import * as Types from "./_types/portkeyConstructs";
import * as apis from "./apis";

@@ -6,5 +5,4 @@ import * as client from "./client";

export import Portkey = client.Portkey;
export import LLMOptions = Types.LLMOptions;
export import PORTKEY_GATEWAY_URL = consts.PORTKEY_GATEWAY_URL;
export import createHeaders = apis.createHeaders;
export default Portkey;

@@ -1,2 +0,1 @@

import * as Types from "./_types/portkeyConstructs";
import * as apis from "./apis";

@@ -7,3 +6,2 @@ import * as client from "./client";

export import Portkey = client.Portkey;
export import LLMOptions = Types.LLMOptions;
export import PORTKEY_GATEWAY_URL = consts.PORTKEY_GATEWAY_URL

@@ -10,0 +8,0 @@ export import createHeaders = apis.createHeaders

{
"name": "portkey-ai",
"version": "1.1.2",
"version": "1.1.3",
"description": "Node client library for the Portkey API",

@@ -5,0 +5,0 @@ "types": "dist/src/index.d.ts",

@@ -1,4 +0,1 @@

export interface RetrySettings {

@@ -9,20 +6,2 @@ attempts: number;

export interface Constructs {
provider?: string;
api_key?: string;
virtual_key?: string;
cache?: boolean;
cache_age?: number;
cache_status?: string;
cache_force_refresh?: boolean;
trace_id?: string;
metadata?: Record<string, any>;
weight?: number;
retry?: RetrySettings;
deployment_id?: string;
resource_name?: string;
api_version?: string;
}
export interface Function {

@@ -34,3 +13,2 @@ name: string;

export interface ModelParams {

@@ -59,7 +37,2 @@ model?: string;

export interface ConversationInput {
prompt?: string;
messages?: Array<Message>
}
export interface Message {

@@ -69,10 +42,1 @@ role: string

}
export interface LLMOptions extends Constructs, ConversationInput, ModelParams {
override_params?: Record<string, any>
}
export const ModelParamsList = [
"model", "suffix", "max_tokens", "temperature", "top_k", "top_p", "n", "stop_sequences", "timeout", "functions", "function_call", "logprobs", "echo", "stop", "presence_penalty", "frequency_penalty", "best_of", "logit_bias", "user", "organization"
]

@@ -1,2 +0,1 @@

import * as Types from "./_types/portkeyConstructs";
import * as apis from "./apis";

@@ -7,3 +6,2 @@ import * as client from "./client";

export import Portkey = client.Portkey;
export import LLMOptions = Types.LLMOptions;
export import PORTKEY_GATEWAY_URL = consts.PORTKEY_GATEWAY_URL

@@ -10,0 +8,0 @@ export import createHeaders = apis.createHeaders

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket · Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc