New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

portkey-ai

Package Overview
Dependencies
Maintainers
1
Versions
47
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

portkey-ai - npm Package Compare versions

Comparing version 1.4.0-rc.1 to 1.4.0

speech.mp3

2

dist/package.json
{
"name": "portkey-ai",
"version": "1.4.0-rc.1",
"version": "1.4.0",
"description": "Node client library for the Portkey API",

@@ -5,0 +5,0 @@ "types": "./src/index.d.ts",

@@ -31,2 +31,3 @@ export type Headers = Record<string, string | null | undefined>;

strictOpenAiCompliance?: boolean | null | undefined;
anthropicBeta?: string | null | undefined;
}

@@ -33,0 +34,0 @@ export interface APIResponseType {

@@ -32,2 +32,3 @@ export type Headers = Record<string, string | null | undefined>

strictOpenAiCompliance?: boolean | null | undefined;
anthropicBeta?: string | null | undefined;
}

@@ -34,0 +35,0 @@

@@ -8,3 +8,2 @@ import { ChatCompletionStreamParams } from "openai/lib/ChatCompletionStream";

import { ChatCompletionParseParams } from "openai/resources/beta/chat/completions";
import { ExtractParsedContentFromParams } from "openai/lib/parser";
export declare class BetaChat extends ApiResource {

@@ -15,3 +14,3 @@ completions: Completions;

export declare class Completions extends ApiResource {
parse<Params extends ChatCompletionParseParams, ParsedT = ExtractParsedContentFromParams<Params>>(_body: Params, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
parse<Params extends ChatCompletionParseParams>(_body: Params, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
runFunctions<FunctionsArgs extends BaseFunctionsArgs>(body: ChatCompletionFunctionRunnerParams<FunctionsArgs>, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;

@@ -18,0 +17,0 @@ runFunctions<FunctionsArgs extends BaseFunctionsArgs>(body: ChatCompletionStreamingFunctionRunnerParams<FunctionsArgs>, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;

@@ -16,4 +16,4 @@ import { ChatCompletionStreamParams } from "openai/lib/ChatCompletionStream";

import { ChatCompletionParseParams } from "openai/resources/beta/chat/completions";
import { ExtractParsedContentFromParams } from "openai/lib/parser";
export class BetaChat extends ApiResource {

@@ -30,3 +30,3 @@ completions: Completions;

async parse<Params extends ChatCompletionParseParams, ParsedT = ExtractParsedContentFromParams<Params>>
async parse<Params extends ChatCompletionParseParams>
(

@@ -33,0 +33,0 @@ _body: Params,

@@ -74,8 +74,2 @@ import { ChatCompletionMessageToolCall, ChatCompletionStreamOptions, ChatCompletionTokenLogprob } from "openai/resources/chat/completions";

interface FunctionType {
arguments?: string;
name?: string;
[key: string]: any;
}
interface Message {

@@ -82,0 +76,0 @@ role: string;

@@ -1,7 +0,10 @@

import { ModerationCreateParams } from "openai/resources";
import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
export interface ModerationCreateParams {
input: string | Array<string>;
model?: any;
}
export declare class Moderations extends ApiResource {
create(_body: ModerationCreateParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
}

@@ -1,2 +0,1 @@

import { ModerationCreateParams } from "openai/resources";
import { ApiClientInterface } from "../_types/generalTypes";

@@ -8,2 +7,6 @@ import { ApiResource } from "../apiResource";

export interface ModerationCreateParams {
input: string | Array<string>;
model?: any ;
}

@@ -10,0 +13,0 @@ export class Moderations extends ApiResource{

import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { UploadCompleteParams } from "openai/resources";
import { Uploadable } from "openai/uploads";
export interface UploadCompleteParams {
part_ids: Array<string>;
md5?: string;
}
export declare class Uploads extends ApiResource {

@@ -7,0 +10,0 @@ parts: Parts;

@@ -6,5 +6,8 @@ import { ApiClientInterface } from "../_types/generalTypes";

import { createHeaders } from "./createHeaders";
import { UploadCompleteParams } from "openai/resources";
import { Uploadable } from "openai/uploads";
export interface UploadCompleteParams {
part_ids: Array<string>;
md5?: string;
}
export class Uploads extends ApiResource {

@@ -11,0 +14,0 @@ parts: Parts

@@ -43,3 +43,3 @@ /// <reference types="node" />

private fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }: ApiClientInterface);
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, anthropicBeta }: ApiClientInterface);
protected defaultHeaders(): Record<string, string>;

@@ -46,0 +46,0 @@ _post<Rsp extends APIResponseType>(path: string, opts?: RequestOptions): APIPromise<Rsp>;

@@ -82,6 +82,6 @@ "use strict";

class ApiClient {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, anthropicBeta }) {
this.apiKey = apiKey !== null && apiKey !== void 0 ? apiKey : "";
this.baseURL = baseURL !== null && baseURL !== void 0 ? baseURL : "";
this.customHeaders = (0, apis_1.createHeaders)({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance });
this.customHeaders = (0, apis_1.createHeaders)({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance, anthropicBeta });
this.portkeyHeaders = this.defaultHeaders();

@@ -88,0 +88,0 @@ this.fetch = fetch;

@@ -123,6 +123,6 @@ import KeepAliveAgent from "agentkeepalive";

private fetch: Fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }: ApiClientInterface) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, anthropicBeta }: ApiClientInterface) {
this.apiKey = apiKey ?? "";
this.baseURL = baseURL ?? "";
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance })
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance, anthropicBeta })
this.portkeyHeaders = this.defaultHeaders()

@@ -129,0 +129,0 @@ this.fetch = fetch;

@@ -35,3 +35,4 @@ import { ApiClientInterface } from "./_types/generalTypes";

strictOpenAiCompliance?: boolean | null | undefined;
constructor({ apiKey, baseURL, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, }: ApiClientInterface);
anthropicBeta?: string | null | undefined;
constructor({ apiKey, baseURL, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, anthropicBeta, }: ApiClientInterface);
completions: API.Completions;

@@ -38,0 +39,0 @@ chat: API.Chat;

@@ -34,3 +34,3 @@ "use strict";

var _b, _c;
var { apiKey = (_b = (0, utils_1.readEnv)("PORTKEY_API_KEY")) !== null && _b !== void 0 ? _b : null, baseURL = (_c = (0, utils_1.readEnv)("PORTKEY_BASE_URL")) !== null && _c !== void 0 ? _c : null, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, } = _a;
var { apiKey = (_b = (0, utils_1.readEnv)("PORTKEY_API_KEY")) !== null && _b !== void 0 ? _b : null, baseURL = (_c = (0, utils_1.readEnv)("PORTKEY_BASE_URL")) !== null && _c !== void 0 ? _c : null, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, anthropicBeta, } = _a;
super({

@@ -65,2 +65,3 @@ apiKey,

strictOpenAiCompliance,
anthropicBeta,
});

@@ -121,2 +122,3 @@ this.completions = new API.Completions(this);

this.strictOpenAiCompliance = strictOpenAiCompliance;
this.anthropicBeta = anthropicBeta;
}

@@ -123,0 +125,0 @@ }

@@ -38,2 +38,3 @@ import { ApiClientInterface } from "./_types/generalTypes";

strictOpenAiCompliance?: boolean | null | undefined;
anthropicBeta?: string | null | undefined;
constructor({

@@ -68,2 +69,3 @@ apiKey = readEnv("PORTKEY_API_KEY") ?? null,

strictOpenAiCompliance,
anthropicBeta,
}: ApiClientInterface) {

@@ -100,2 +102,3 @@

strictOpenAiCompliance,
anthropicBeta,
});

@@ -133,2 +136,3 @@

this.strictOpenAiCompliance = strictOpenAiCompliance;
this.anthropicBeta = anthropicBeta;
}

@@ -135,0 +139,0 @@

@@ -1,1 +0,1 @@

export declare const VERSION = "1.4.0-rc.1";
export declare const VERSION = "1.4.0";
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VERSION = void 0;
exports.VERSION = "1.4.0-rc.1";
exports.VERSION = "1.4.0";
//# sourceMappingURL=version.js.map

@@ -1,1 +0,1 @@

export const VERSION = "1.4.0-rc.1";
export const VERSION = "1.4.0";
{
"name": "portkey-ai",
"version": "1.4.0-rc.1",
"version": "1.4.0",
"description": "Node client library for the Portkey API",

@@ -5,0 +5,0 @@ "types": "dist/src/index.d.ts",

@@ -8,5 +8,6 @@ # Security Policy

| 0.1.x | :white_check_mark: |
| 1.x.x | :white_check_mark: |
## Reporting a Vulnerability
Please report any security vulnerabilities at `support@portkey.ai`.
Please report any security vulnerabilities at `support@portkey.ai`.

@@ -32,2 +32,3 @@ export type Headers = Record<string, string | null | undefined>

strictOpenAiCompliance?: boolean | null | undefined;
anthropicBeta?: string | null | undefined;
}

@@ -34,0 +35,0 @@

@@ -16,4 +16,4 @@ import { ChatCompletionStreamParams } from "openai/lib/ChatCompletionStream";

import { ChatCompletionParseParams } from "openai/resources/beta/chat/completions";
import { ExtractParsedContentFromParams } from "openai/lib/parser";
export class BetaChat extends ApiResource {

@@ -30,3 +30,3 @@ completions: Completions;

async parse<Params extends ChatCompletionParseParams, ParsedT = ExtractParsedContentFromParams<Params>>
async parse<Params extends ChatCompletionParseParams>
(

@@ -33,0 +33,0 @@ _body: Params,

@@ -74,8 +74,2 @@ import { ChatCompletionMessageToolCall, ChatCompletionStreamOptions, ChatCompletionTokenLogprob } from "openai/resources/chat/completions";

interface FunctionType {
arguments?: string;
name?: string;
[key: string]: any;
}
interface Message {

@@ -82,0 +76,0 @@ role: string;

@@ -1,2 +0,1 @@

import { ModerationCreateParams } from "openai/resources";
import { ApiClientInterface } from "../_types/generalTypes";

@@ -8,2 +7,6 @@ import { ApiResource } from "../apiResource";

export interface ModerationCreateParams {
input: string | Array<string>;
model?: any ;
}

@@ -10,0 +13,0 @@ export class Moderations extends ApiResource{

@@ -6,5 +6,8 @@ import { ApiClientInterface } from "../_types/generalTypes";

import { createHeaders } from "./createHeaders";
import { UploadCompleteParams } from "openai/resources";
import { Uploadable } from "openai/uploads";
export interface UploadCompleteParams {
part_ids: Array<string>;
md5?: string;
}
export class Uploads extends ApiResource {

@@ -11,0 +14,0 @@ parts: Parts

@@ -123,6 +123,6 @@ import KeepAliveAgent from "agentkeepalive";

private fetch: Fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }: ApiClientInterface) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, anthropicBeta }: ApiClientInterface) {
this.apiKey = apiKey ?? "";
this.baseURL = baseURL ?? "";
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance })
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance, anthropicBeta })
this.portkeyHeaders = this.defaultHeaders()

@@ -129,0 +129,0 @@ this.fetch = fetch;

@@ -38,2 +38,3 @@ import { ApiClientInterface } from "./_types/generalTypes";

strictOpenAiCompliance?: boolean | null | undefined;
anthropicBeta?: string | null | undefined;
constructor({

@@ -68,2 +69,3 @@ apiKey = readEnv("PORTKEY_API_KEY") ?? null,

strictOpenAiCompliance,
anthropicBeta,
}: ApiClientInterface) {

@@ -100,2 +102,3 @@

strictOpenAiCompliance,
anthropicBeta,
});

@@ -133,2 +136,3 @@

this.strictOpenAiCompliance = strictOpenAiCompliance;
this.anthropicBeta = anthropicBeta;
}

@@ -135,0 +139,0 @@

@@ -1,1 +0,1 @@

export const VERSION = "1.4.0-rc.1";
export const VERSION = "1.4.0";

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc