New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

portkey-ai

Package Overview
Dependencies
Maintainers
1
Versions
47
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

portkey-ai - npm Package Compare versions

Comparing version 1.3.2 to 1.4.0-rc.1

dist/src/apis/audio.d.ts

4

dist/package.json
{
"name": "portkey-ai",
"version": "1.3.2",
"version": "1.4.0-rc.1",
"description": "Node client library for the Portkey API",

@@ -25,4 +25,4 @@ "types": "./src/index.d.ts",

"dotenv": "^16.3.1",
"openai": "4.36.0"
"openai": "4.55.3"
}
}

@@ -26,5 +26,7 @@ export type Headers = Record<string, string | null | undefined>;

azureApiVersion?: string | null | undefined;
huggingfaceBaseUrl?: string | null | undefined;
forwardHeaders?: Array<string> | null | undefined;
cacheNamespace?: string | null | undefined;
requestTimeout?: number | null | undefined;
strictOpenAiCompliance?: boolean | null | undefined;
}

@@ -31,0 +33,0 @@ export interface APIResponseType {

@@ -27,5 +27,7 @@ export type Headers = Record<string, string | null | undefined>

azureApiVersion?: string | null | undefined;
huggingfaceBaseUrl?: string | null | undefined;
forwardHeaders?: Array<string> | null | undefined;
cacheNamespace?: string | null | undefined;
requestTimeout?: number | null | undefined;
strictOpenAiCompliance?: boolean | null | undefined;
}

@@ -32,0 +34,0 @@

@@ -31,2 +31,9 @@ export interface RetrySettings {

organization?: string;
seed?: number;
response_format?: any;
service_tier?: string;
top_logprobs?: number | null;
parallel_tool_calls?: boolean;
tools?: Array<Tool>;
tool_choice?: any;
}

@@ -37,1 +44,5 @@ export interface Message {

}
/**
 * A tool definition forwarded with a chat-completion request.
 * NOTE(review): both fields are optional and loosely typed — presumably this
 * mirrors the provider's (OpenAI-style) tool schema; confirm upstream.
 */
export interface Tool {
/** Tool kind — presumably "function"; not constrained in this declaration. */
type?: string;
/** Free-form function payload (name/description/parameters); not validated here. */
function?: Record<string, any>;
}

@@ -33,2 +33,9 @@ export interface RetrySettings {

organization?: string;
seed?: number;
response_format?: any;
service_tier?: string;
top_logprobs?: number | null;
parallel_tool_calls?: boolean;
tools?: Array<Tool>;
tool_choice?: any;
}

@@ -40,1 +47,6 @@

}
/**
 * A tool definition forwarded with a chat-completion request.
 * NOTE(review): both fields are optional and loosely typed — presumably this
 * mirrors the provider's (OpenAI-style) tool schema; confirm upstream.
 */
export interface Tool {
/** Tool kind — presumably "function"; not constrained here. */
type?: string;
/** Free-form function payload; terminated with a semicolon for consistency
 * with the rest of the file's interface members. */
function?: Record<string, any>;
}

@@ -7,3 +7,2 @@ import { ApiClientInterface } from "../_types/generalTypes";

description?: string | null;
file_ids?: Array<string>;
instructions?: string | null;

@@ -13,2 +12,6 @@ metadata?: unknown | null;

tools?: Array<any>;
response_format?: any | null;
temperature?: number | null;
tool_resources?: any | null;
top_p?: number | null;
}

@@ -40,4 +43,2 @@ export interface FileCreateParams {

export declare class Assistants extends ApiResource {
files: Files;
constructor(client: any);
create(_body: AssistantCreateParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;

@@ -49,7 +50,1 @@ list(_query?: AssistantListParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;

}
/**
 * Ambient declaration for assistant file attachments (beta assistants API).
 * All methods return Promise<any>; per the implementation elsewhere in this
 * package they proxy to the OpenAI SDK after merging Portkey params.
 */
export declare class Files extends ApiResource {
/** Attach a file to an assistant. */
create(assistantId: string, _body: FileCreateParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
/** List files attached to an assistant; query is optional. */
list(assistantId: string, _query?: FileListParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
/** Retrieve a single attached file by id. */
retrieve(assistantId: string, fileId: string, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
/** Detach (delete) a file from an assistant. */
del(assistantId: string, fileId: string, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
}

@@ -11,17 +11,8 @@ "use strict";

};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Files = exports.Assistants = void 0;
exports.Assistants = void 0;
const apiResource_1 = require("../apiResource.js");
const constants_1 = require("../constants.js");
const utils_1 = require("../utils.js");
const createHeaders_1 = require("./createHeaders.js");
const openai_1 = __importDefault(require("openai"));
class Assistants extends apiResource_1.ApiResource {
constructor(client) {
super(client);
this.files = new Files(client);
}
create(_body, params, opts) {

@@ -34,7 +25,3 @@ return __awaiter(this, void 0, void 0, function* () {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.assistants.create(body, opts).withResponse();

@@ -51,7 +38,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -69,7 +52,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.assistants.retrieve(assistantId, opts).withResponse();

@@ -86,7 +65,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.assistants.update(assistantId, body, opts).withResponse();

@@ -102,7 +77,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.assistants.del(assistantId, opts).withResponse();

@@ -114,69 +85,2 @@ return (0, utils_1.finalResponse)(result);

exports.Assistants = Assistants;
// Compiled output (appears to be tsc-generated: __awaiter/generator down-leveling,
// sourceMappingURL below). Kept byte-identical; comments only.
//
// Assistant file attachments. Each method:
//   1. merges per-call Portkey params/config into this.client.customHeaders,
//   2. builds a fresh OpenAI client pointed at the Portkey gateway baseURL,
//   3. proxies to the OpenAI beta assistants-files endpoint and normalizes
//      the response via finalResponse.
class Files extends apiResource_1.ApiResource {
// Attach a file to an assistant.
create(assistantId, _body, params, opts) {
return __awaiter(this, void 0, void 0, function* () {
const body = _body;
if (params) {
// Per-call config overrides the client-level config for this request.
const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
// NOTE(review): mutates shared client state (customHeaders) rather than
// scoping headers to this call — later calls inherit these headers.
this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
}
// Fresh OpenAI client per call, routed through the Portkey base URL.
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const result = yield OAIclient.beta.assistants.files.create(assistantId, body, opts).withResponse();
return (0, utils_1.finalResponse)(result);
});
}
// List files attached to an assistant (optional query).
list(assistantId, _query, params, opts) {
return __awaiter(this, void 0, void 0, function* () {
const query = _query;
if (params) {
const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const result = yield OAIclient.beta.assistants.files.list(assistantId, query, opts).withResponse();
return (0, utils_1.finalResponse)(result);
});
}
// Retrieve a single attached file by id.
retrieve(assistantId, fileId, params, opts) {
return __awaiter(this, void 0, void 0, function* () {
if (params) {
const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const result = yield OAIclient.beta.assistants.files.retrieve(assistantId, fileId, opts).withResponse();
return (0, utils_1.finalResponse)(result);
});
}
// Detach (delete) a file from an assistant.
del(assistantId, fileId, params, opts) {
return __awaiter(this, void 0, void 0, function* () {
if (params) {
const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const result = yield OAIclient.beta.assistants.files.del(assistantId, fileId, opts).withResponse();
return (0, utils_1.finalResponse)(result);
});
}
}
exports.Files = Files;
//# sourceMappingURL=assistants.js.map
import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -12,3 +10,2 @@ export interface AssistantCreateParams {

description?: string | null;
file_ids?: Array<string>;
instructions?: string | null;

@@ -18,2 +15,7 @@ metadata?: unknown | null;

tools?: Array<any>;
response_format?: any | null;
temperature?: number | null;
tool_resources?: any | null;
top_p?: number | null;
}

@@ -52,10 +54,3 @@

export class Assistants extends ApiResource {
files: Files;
constructor(client:any) {
super(client);
this.files = new Files(client);
}
async create(

@@ -75,7 +70,3 @@ _body: AssistantCreateParams,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -101,7 +92,3 @@ const result = await OAIclient.beta.assistants.create(body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -127,7 +114,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -154,7 +137,3 @@ const result = await OAIclient.beta.assistants.retrieve(assistantId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -179,7 +158,3 @@ const result = await OAIclient.beta.assistants.update(assistantId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -192,109 +167,1 @@ const result = await OAIclient.beta.assistants.del(assistantId, opts).withResponse();

}
/**
 * Assistant file attachments (beta assistants API).
 *
 * Every method merges per-call Portkey params into the client's custom
 * headers, builds an OpenAI client pointed at the Portkey gateway, then
 * proxies to the corresponding assistants-files endpoint and normalizes
 * the response with finalResponse.
 *
 * The original repeated the header-merge and client-construction boilerplate
 * verbatim in all four methods; it is factored into private helpers here.
 * Public method signatures are unchanged.
 */
export class Files extends ApiResource {
  /**
   * Merge per-call Portkey params/config into the client's custom headers.
   * NOTE(review): mutates shared client state rather than scoping headers to
   * this one call — preserved from the original implementation.
   */
  private mergeParams(params?: ApiClientInterface): void {
    if (params) {
      const config = overrideConfig(this.client.config, params.config);
      this.client.customHeaders = {
        ...this.client.customHeaders,
        ...createHeaders({ ...params, config }),
      };
    }
  }

  /** Build a fresh OpenAI client routed through the Portkey base URL. */
  private openAIClient(): OpenAI {
    return new OpenAI({
      apiKey: OPEN_AI_API_KEY,
      baseURL: this.client.baseURL,
      defaultHeaders: defaultHeadersBuilder(this.client),
    });
  }

  /** Attach a file to an assistant. */
  async create(
    assistantId: string,
    _body: FileCreateParams,
    params?: ApiClientInterface,
    opts?: RequestOptions
  ): Promise<any> {
    const body: FileCreateParams = _body;
    this.mergeParams(params);
    const OAIclient = this.openAIClient();
    const result = await OAIclient.beta.assistants.files.create(assistantId, body, opts).withResponse();
    return finalResponse(result);
  }

  /** List files attached to an assistant (optional query). */
  async list(
    assistantId: string,
    _query?: FileListParams,
    params?: ApiClientInterface,
    opts?: RequestOptions
  ): Promise<any> {
    const query: FileListParams | undefined = _query;
    this.mergeParams(params);
    const OAIclient = this.openAIClient();
    // eslint-disable-next-line @typescript-eslint/ban-ts-comment
    // @ts-ignore
    const result = await OAIclient.beta.assistants.files.list(assistantId, query, opts).withResponse();
    return finalResponse(result);
  }

  /** Retrieve a single attached file by id. */
  async retrieve(
    assistantId: string,
    fileId: string,
    params?: ApiClientInterface,
    opts?: RequestOptions
  ): Promise<any> {
    this.mergeParams(params);
    const OAIclient = this.openAIClient();
    const result = await OAIclient.beta.assistants.files.retrieve(assistantId, fileId, opts).withResponse();
    return finalResponse(result);
  }

  /** Detach (delete) a file from an assistant. */
  async del(
    assistantId: string,
    fileId: string,
    params?: ApiClientInterface,
    opts?: RequestOptions
  ): Promise<any> {
    this.mergeParams(params);
    const OAIclient = this.openAIClient();
    const result = await OAIclient.beta.assistants.files.del(assistantId, fileId, opts).withResponse();
    return finalResponse(result);
  }
}

@@ -0,1 +1,2 @@

import { ChatCompletionMessageToolCall, ChatCompletionStreamOptions, ChatCompletionTokenLogprob } from "openai/resources/chat/completions";
import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";

@@ -20,2 +21,3 @@ import { ModelParams } from "../_types/portkeyConstructs";

stream?: true;
stream_options?: ChatCompletionStreamOptions;
}

@@ -30,23 +32,17 @@ export interface ChatCompletionsBodyNonStreaming extends ChatCompletionsBodyBase {

total_tokens?: number;
[key: string]: any;
}
/** Function payload carried inside a tool call (name + JSON-string arguments). */
interface FunctionType {
/** Serialized JSON arguments — presumably provider-encoded; not parsed here. */
arguments?: string;
name?: string;
}
/** A single tool invocation attached to a message. */
interface ToolCall {
/** Position of this call within the message's tool_calls array (streaming deltas). */
index?: number;
id?: string;
function?: FunctionType;
type?: 'function';
}
/** Legacy (pre-tools) function-call payload on a message. */
interface FunctionCall {
arguments?: string;
name?: string;
}
interface Message {
role: string;
content: string | null;
function_call?: FunctionCall;
tool_calls?: Array<ToolCall>;
content: string;
refusal?: string;
function_call?: any;
tool_calls?: Array<ChatCompletionMessageToolCall>;
tool_call_id?: string;
}
/**
 * Token log-probability info for a choice; either array may be null.
 * The index signature permits extra provider-specific fields.
 */
export interface Logprobs {
content: Array<ChatCompletionTokenLogprob> | null;
refusal: Array<ChatCompletionTokenLogprob> | null;
[key: string]: any;
}
interface Choices {

@@ -57,2 +53,4 @@ index?: number;

finish_reason?: string;
logprobs?: Logprobs;
[key: string]: any;
}

@@ -66,3 +64,6 @@ interface ChatCompletion extends APIResponseType {

usage: Usage;
service_tier?: string;
system_fingerprint?: string;
[key: string]: any;
}
export {};

@@ -0,1 +1,2 @@

import { ChatCompletionMessageToolCall, ChatCompletionStreamOptions, ChatCompletionTokenLogprob } from "openai/resources/chat/completions";
import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";

@@ -57,2 +58,3 @@ import { ModelParams } from "../_types/portkeyConstructs";

stream?: true;
stream_options?: ChatCompletionStreamOptions
}

@@ -70,2 +72,3 @@

total_tokens?: number;
[key: string]: any;
}

@@ -76,23 +79,20 @@

name?: string;
[key: string]: any;
}
/** A single tool invocation attached to a message. */
interface ToolCall {
/** Position of this call within the message's tool_calls array (streaming deltas). */
index?: number;
id?: string;
function?: FunctionType;
type?: 'function';
}
/** Legacy (pre-tools) function-call payload on a message. */
interface FunctionCall {
arguments?: string;
name?: string;
}
interface Message {
role: string;
content: string | null;
function_call?: FunctionCall;
tool_calls?: Array<ToolCall>;
content: string;
refusal?: string;
function_call?: any;
tool_calls?: Array<ChatCompletionMessageToolCall>;
tool_call_id?: string;
}
/**
 * Token log-probability info for a choice; either array may be null.
 * The index signature permits extra provider-specific fields.
 */
export interface Logprobs {
content: Array<ChatCompletionTokenLogprob> | null;
refusal: Array<ChatCompletionTokenLogprob> | null;
[key: string]: any;
}
interface Choices {

@@ -103,2 +103,4 @@ index?: number;

finish_reason?: string;
logprobs?: Logprobs
[key: string]: any;
}

@@ -113,2 +115,5 @@

usage: Usage;
service_tier?: string;
system_fingerprint?: string;
[key: string]: any;
}

@@ -26,6 +26,12 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";

}
/**
 * Per-token log-probability detail for a (legacy) text-completion choice.
 * Arrays are parallel: one entry per generated token.
 */
interface Logprobs {
/** Character offset of each token within the completion text. */
text_offset?: Array<number>;
token_logprobs?: Array<number>;
tokens?: Array<string>;
/** For each position, the top alternative tokens mapped to their logprobs. */
top_logprobs?: Array<Record<string, number>>;
}
interface Choices {
index?: number;
text?: string;
logprobs: any;
logprobs: Logprobs;
finish_reason?: string;

@@ -40,3 +46,4 @@ }

usage?: Usage;
system_fingerprint?: string;
}
export {};

@@ -67,6 +67,16 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";

/**
 * Per-token log-probability detail for a (legacy) text-completion choice.
 * Arrays are parallel: one entry per generated token.
 */
interface Logprobs {
/** Character offset of each token within the completion text. */
text_offset?: Array<number>;
token_logprobs?: Array<number>;
tokens?: Array<string>;
/** For each position, the top alternative tokens mapped to their logprobs. */
top_logprobs?: Array<Record<string, number>>;
}
interface Choices {
index?: number;
text?: string;
logprobs: any;
logprobs: Logprobs;
finish_reason?: string;

@@ -82,2 +92,3 @@ }

usage?: Usage;
system_fingerprint?: string;
}

@@ -9,2 +9,3 @@ import { ApiClientInterface } from "../_types/generalTypes";

del(fileId: string, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
content(fileId: string, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
retrieveContent(fileId: string, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;

@@ -11,0 +12,0 @@ }

@@ -11,12 +11,7 @@ "use strict";

};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.MainFiles = void 0;
const apiResource_1 = require("../apiResource.js");
const constants_1 = require("../constants.js");
const utils_1 = require("../utils.js");
const createHeaders_1 = require("./createHeaders.js");
const openai_1 = __importDefault(require("openai"));
class MainFiles extends apiResource_1.ApiResource {

@@ -30,7 +25,3 @@ create(_body, params, opts) {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -49,7 +40,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.files.list(query, opts).withResponse();

@@ -65,7 +52,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.files.retrieve(fileId, opts).withResponse();

@@ -81,7 +64,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.files.del(fileId, opts).withResponse();

@@ -91,2 +70,13 @@ return (0, utils_1.finalResponse)(result);

}
// Download a file's content (compiled tsc output; kept byte-identical).
// Merges per-call Portkey params into the shared client headers, then proxies
// to OpenAI files.content through the gateway-configured client.
content(fileId, params, opts) {
return __awaiter(this, void 0, void 0, function* () {
if (params) {
// Per-call config overrides client-level config for this request.
const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
// NOTE(review): mutates shared client.customHeaders — later calls inherit these.
this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
}
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.files.content(fileId, opts).withResponse();
return (0, utils_1.finalResponse)(result);
});
}
retrieveContent(fileId, params, opts) {

@@ -98,7 +88,3 @@ return __awaiter(this, void 0, void 0, function* () {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.files.content(fileId, opts).withResponse();

@@ -105,0 +91,0 @@ return (0, utils_1.finalResponse)(result);

import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -25,7 +23,3 @@ export class MainFiles extends ApiResource {

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -53,7 +47,3 @@ // eslint-disable-next-line @typescript-eslint/ban-ts-comment

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -78,7 +68,3 @@ const result = await OAIclient.files.list(query, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -103,7 +89,3 @@ const result = await OAIclient.files.retrieve(fileId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -115,2 +97,22 @@ const result = await OAIclient.files.del(fileId, opts).withResponse();

/**
 * Download a file's raw content via the gateway-configured OpenAI client.
 * Per-call Portkey params (if given) are folded into the client's custom
 * headers before the request is proxied.
 */
async content(
  fileId: string,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  if (params) {
    // Per-call config takes precedence over the client-level config.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    const extraHeaders = createHeaders({ ...params, config: mergedConfig });
    this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
  }
  const openaiClient = initOpenAIClient(this.client);
  const response = await openaiClient.files.content(fileId, opts).withResponse();
  return finalResponse(response);
}
async retrieveContent(

@@ -129,7 +131,3 @@ fileId: string,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -136,0 +134,0 @@ const result = await OAIclient.files.content(fileId, opts).withResponse();

@@ -11,12 +11,7 @@ "use strict";

};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Images = void 0;
const apiResource_1 = require("../apiResource.js");
const constants_1 = require("../constants.js");
const utils_1 = require("../utils.js");
const createHeaders_1 = require("./createHeaders.js");
const openai_1 = __importDefault(require("openai"));
class Images extends apiResource_1.ApiResource {

@@ -30,7 +25,3 @@ generate(_body, params, opts) {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -49,7 +40,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -68,7 +55,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -75,0 +58,0 @@ // @ts-ignore

import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -66,7 +64,4 @@ export interface ImagesBody {

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -93,7 +88,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -121,7 +112,3 @@ // eslint-disable-next-line @typescript-eslint/ban-ts-comment

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -128,0 +115,0 @@ // @ts-ignore

@@ -13,1 +13,8 @@ export { Chat } from "./chatCompletions";

export { Models } from "./models";
export { Batches } from "./batches";
export { FineTuning } from "./fineTuning";
export { Moderations } from "./moderations";
export { Audio } from "./audio";
export { VectorStores } from "./vectorStores";
export { BetaChat } from "./betaChat";
export { Uploads } from "./uploads";
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Models = exports.MainFiles = exports.Threads = exports.Assistants = exports.Images = exports.Embeddings = exports.postMethod = exports.Prompt = exports.Generations = exports.Feedback = exports.createHeaders = exports.Completions = exports.Chat = void 0;
exports.Uploads = exports.BetaChat = exports.VectorStores = exports.Audio = exports.Moderations = exports.FineTuning = exports.Batches = exports.Models = exports.MainFiles = exports.Threads = exports.Assistants = exports.Images = exports.Embeddings = exports.postMethod = exports.Prompt = exports.Generations = exports.Feedback = exports.createHeaders = exports.Completions = exports.Chat = void 0;
var chatCompletions_1 = require("./chatCompletions.js");

@@ -29,2 +29,16 @@ Object.defineProperty(exports, "Chat", { enumerable: true, get: function () { return chatCompletions_1.Chat; } });

Object.defineProperty(exports, "Models", { enumerable: true, get: function () { return models_1.Models; } });
var batches_1 = require("./batches.js");
Object.defineProperty(exports, "Batches", { enumerable: true, get: function () { return batches_1.Batches; } });
var fineTuning_1 = require("./fineTuning.js");
Object.defineProperty(exports, "FineTuning", { enumerable: true, get: function () { return fineTuning_1.FineTuning; } });
var moderations_1 = require("./moderations.js");
Object.defineProperty(exports, "Moderations", { enumerable: true, get: function () { return moderations_1.Moderations; } });
var audio_1 = require("./audio.js");
Object.defineProperty(exports, "Audio", { enumerable: true, get: function () { return audio_1.Audio; } });
var vectorStores_1 = require("./vectorStores.js");
Object.defineProperty(exports, "VectorStores", { enumerable: true, get: function () { return vectorStores_1.VectorStores; } });
var betaChat_1 = require("./betaChat.js");
Object.defineProperty(exports, "BetaChat", { enumerable: true, get: function () { return betaChat_1.BetaChat; } });
var uploads_1 = require("./uploads.js");
Object.defineProperty(exports, "Uploads", { enumerable: true, get: function () { return uploads_1.Uploads; } });
//# sourceMappingURL=index.js.map

@@ -13,2 +13,8 @@ export { Chat } from "./chatCompletions";

export { Models } from "./models";
export { Batches } from "./batches";
export { FineTuning } from "./fineTuning"
export { Moderations } from "./moderations"
export { Audio } from "./audio"
export { VectorStores } from "./vectorStores"
export { BetaChat } from "./betaChat"
export { Uploads } from "./uploads"

@@ -11,12 +11,7 @@ "use strict";

};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Models = void 0;
const apiResource_1 = require("../apiResource.js");
const constants_1 = require("../constants.js");
const utils_1 = require("../utils.js");
const createHeaders_1 = require("./createHeaders.js");
const openai_1 = __importDefault(require("openai"));
class Models extends apiResource_1.ApiResource {

@@ -29,7 +24,3 @@ list(params, opts) {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client)
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.models.list(opts).withResponse();

@@ -45,7 +36,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.models.retrieve(model, opts).withResponse();

@@ -61,7 +48,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.models.del(model, opts).withResponse();

@@ -68,0 +51,0 @@ return (0, utils_1.finalResponse)(result);

import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -22,7 +20,3 @@ export class Models extends ApiResource {

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client)
});
const OAIclient = initOpenAIClient(this.client);

@@ -47,7 +41,3 @@ const result = await OAIclient.models.list(opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -72,7 +62,3 @@ const result = await OAIclient.models.retrieve(model, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -79,0 +65,0 @@ const result = await OAIclient.models.del(model, opts).withResponse();

@@ -13,6 +13,8 @@ import { ApiClientInterface } from "../_types/generalTypes";

createAndRun(_body: ThreadCreateAndRunParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
createAndRunPoll(_body: ThreadCreateAndRunParamsNonStreaming, params?: ApiClientInterface, opts?: RequestOptions & {
pollIntervalMs?: number;
}): Promise<any>;
createAndRunStream(_body: ThreadCreateAndRunParamsBaseStream, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
}
export declare class Messages extends ApiResource {
files: Files;
constructor(client: any);
create(threadId: string, _body: MessageCreateParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;

@@ -23,6 +25,2 @@ list(threadId: string, _query?: MessageListParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;

}
/**
 * Legacy (Assistants v1) message-file operations, removed in 1.4.0:
 * list and retrieve files attached to a thread message.
 */
export declare class Files extends ApiResource {
    list(threadId: string, messageId: string, _query?: FileListParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
    retrieve(threadId: string, messageId: string, fileId: string, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
}
export declare class Runs extends ApiResource {

@@ -36,3 +34,15 @@ steps: Steps;

submitToolOutputs(threadId: string, runId: string, _body: RunSubmitToolOutputsParams, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
submitToolOutputsAndPoll(threadId: string, runId: string, _body: RunSubmitToolOutputsParamsNonStreaming, params?: ApiClientInterface, opts?: RequestOptions & {
pollIntervalMs?: number;
}): Promise<any>;
submitToolOutputsStream(threadId: string, runId: string, _body: RunSubmitToolOutputsParamsStreaming, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
cancel(threadId: string, runId: string, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
createAndPoll(threadId: string, _body: RunCreateParamsNonStreaming, params?: ApiClientInterface, opts?: RequestOptions & {
pollIntervalMs?: number;
}): Promise<any>;
createAndStream(threadId: string, _body: RunCreateParamsBaseStream, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
poll(threadId: string, runId: string, params?: ApiClientInterface, opts?: RequestOptions & {
pollIntervalMs?: number;
}): Promise<any>;
stream(threadId: string, _body: RunCreateParamsBaseStream, params?: ApiClientInterface, opts?: RequestOptions): Promise<any>;
}

@@ -84,2 +94,5 @@ export declare class Steps extends ApiResource {

}
/** Run-creation params with streaming explicitly disabled (used by polling helpers). */
export interface RunCreateParamsNonStreaming extends RunCreateParams {
    stream?: false | null;
}
export interface ThreadCreateAndRunParams {

@@ -93,2 +106,8 @@ assistant_id: string;

}
/** Create-and-run params with streaming explicitly disabled (polling variant). */
export interface ThreadCreateAndRunParamsNonStreaming extends ThreadCreateAndRunParams {
    stream?: false | null;
}
/** Create-and-run params with `stream` re-declared as the streaming literal. */
export type ThreadCreateAndRunParamsBaseStream = Omit<ThreadCreateAndRunParams, 'stream'> & {
    stream?: true;
};
export interface RunListParams extends CursorPageParams {

@@ -112,1 +131,10 @@ before?: string;

}
/** Run-creation params with `stream` re-declared as the streaming literal. */
export type RunCreateParamsBaseStream = Omit<RunCreateParams, 'stream'> & {
    stream?: true;
};
/** Tool-output submission params with streaming explicitly disabled (polling variant). */
export interface RunSubmitToolOutputsParamsNonStreaming extends RunSubmitToolOutputsParams {
    stream?: false | null;
}
/** Tool-output submission params that require streaming (`stream: true`). */
export interface RunSubmitToolOutputsParamsStreaming extends RunSubmitToolOutputsParams {
    stream: true;
}

@@ -11,12 +11,7 @@ "use strict";

};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Steps = exports.Runs = exports.Files = exports.Messages = exports.Threads = void 0;
exports.Steps = exports.Runs = exports.Messages = exports.Threads = void 0;
const apiResource_1 = require("../apiResource.js");
const constants_1 = require("../constants.js");
const utils_1 = require("../utils.js");
const createHeaders_1 = require("./createHeaders.js");
const openai_1 = __importDefault(require("openai"));
class Threads extends apiResource_1.ApiResource {

@@ -35,7 +30,3 @@ constructor(client) {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -53,7 +44,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.retrieve(threadId, opts).withResponse();

@@ -70,7 +57,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.update(threadId, body, opts).withResponse();

@@ -86,7 +69,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.del(threadId, opts).withResponse();

@@ -103,7 +82,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.createAndRun(body, opts).withResponse();

@@ -113,9 +88,29 @@ return (0, utils_1.finalResponse)(result);

}
// Creates a thread run and polls until it reaches a terminal state, delegating
// to the OpenAI SDK's beta `createAndRunPoll` helper.
createAndRunPoll(_body, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const body = _body;
        // Merge per-call Portkey settings into the shared client headers.
        // NOTE(review): this mutates this.client.customHeaders, so the merged
        // headers persist for subsequent calls on the same client.
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        // OpenAI SDK client configured from the Portkey client (base URL + headers).
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.createAndRunPoll(body, opts);
        return result;
    });
}
// Creates a thread run in streaming mode via the OpenAI SDK's beta
// `createAndRunStream` helper; returns the SDK stream object unwrapped.
createAndRunStream(_body, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const body = _body;
        // Merge per-call Portkey settings into the shared client headers
        // (mutates this.client.customHeaders for later calls as well).
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.createAndRunStream(body, opts);
        return result;
    });
}
}
exports.Threads = Threads;
class Messages extends apiResource_1.ApiResource {
constructor(client) {
super(client);
this.files = new Files(client);
}
create(threadId, _body, params, opts) {

@@ -128,7 +123,3 @@ return __awaiter(this, void 0, void 0, function* () {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -147,7 +138,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -165,7 +152,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.messages.retrieve(threadId, messageId, opts).withResponse();

@@ -182,7 +165,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.messages.update(threadId, messageId, body, opts).withResponse();

@@ -194,38 +173,2 @@ return (0, utils_1.finalResponse)(result);

exports.Messages = Messages;
// Thread message file operations (legacy Assistants v1 surface removed in
// 1.4.0): lists/retrieves files attached to a message through an OpenAI SDK
// client pointed at the Portkey gateway.
class Files extends apiResource_1.ApiResource {
    // List files attached to a message; `_query` is forwarded as pagination params.
    list(threadId, messageId, _query, params, opts) {
        return __awaiter(this, void 0, void 0, function* () {
            const query = _query;
            // Merge per-call Portkey settings into the shared client headers.
            if (params) {
                const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
                this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
            }
            // Placeholder API key: real auth is carried by the Portkey default headers.
            const OAIclient = new openai_1.default({
                apiKey: constants_1.OPEN_AI_API_KEY,
                baseURL: this.client.baseURL,
                defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
            });
            // eslint-disable-next-line @typescript-eslint/ban-ts-comment
            // @ts-ignore
            const result = yield OAIclient.beta.threads.messages.files.list(threadId, messageId, query, opts).withResponse();
            return (0, utils_1.finalResponse)(result);
        });
    }
    // Retrieve a single file attached to a message.
    retrieve(threadId, messageId, fileId, params, opts) {
        return __awaiter(this, void 0, void 0, function* () {
            if (params) {
                const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
                this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
            }
            const OAIclient = new openai_1.default({
                apiKey: constants_1.OPEN_AI_API_KEY,
                baseURL: this.client.baseURL,
                defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
            });
            const result = yield OAIclient.beta.threads.messages.files.retrieve(threadId, messageId, fileId, opts).withResponse();
            return (0, utils_1.finalResponse)(result);
        });
    }
}
exports.Files = Files;
class Runs extends apiResource_1.ApiResource {

@@ -243,7 +186,3 @@ constructor(client) {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.runs.create(threadId, body, opts).withResponse();

@@ -260,7 +199,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -278,7 +213,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.runs.retrieve(threadId, runId, opts).withResponse();

@@ -295,7 +226,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.runs.update(threadId, runId, body, opts).withResponse();

@@ -312,7 +239,3 @@ return (0, utils_1.finalResponse)(result);

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.runs.submitToolOutputs(threadId, runId, body, opts).withResponse();

@@ -322,2 +245,26 @@ return (0, utils_1.finalResponse)(result);

}
// Submits tool outputs for a run and polls until the run settles, via the
// OpenAI SDK's beta `submitToolOutputsAndPoll` helper.
submitToolOutputsAndPoll(threadId, runId, _body, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const body = _body;
        // Merge per-call Portkey settings into the shared client headers
        // (mutates this.client.customHeaders for later calls as well).
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.runs.submitToolOutputsAndPoll(threadId, runId, body, opts);
        return result;
    });
}
// Submits tool outputs and streams the continuing run via the OpenAI SDK's
// beta `submitToolOutputsStream` helper; returns the SDK stream object.
submitToolOutputsStream(threadId, runId, _body, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const body = _body;
        // Merge per-call Portkey settings into the shared client headers.
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.runs.submitToolOutputsStream(threadId, runId, body, opts);
        return result;
    });
}
cancel(threadId, runId, params, opts) {

@@ -329,11 +276,56 @@ return __awaiter(this, void 0, void 0, function* () {

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const result = yield OAIclient.beta.threads.runs.cancel(threadId, runId, opts).withResponse();
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const body = {};
const options = Object.assign({ body }, opts);
const result = yield OAIclient.beta.threads.runs.cancel(threadId, runId, options).withResponse();
return (0, utils_1.finalResponse)(result);
});
}
// Creates a run on an existing thread and polls until it reaches a terminal
// state, via the OpenAI SDK's beta `createAndPoll` helper.
createAndPoll(threadId, _body, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const body = _body;
        // Merge per-call Portkey settings into the shared client headers.
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.runs.createAndPoll(threadId, body, opts);
        return result;
    });
}
// Creates a run on an existing thread in streaming mode via the OpenAI SDK's
// beta `createAndStream` helper; returns the SDK stream object.
createAndStream(threadId, _body, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const body = _body;
        // Merge per-call Portkey settings into the shared client headers.
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.runs.createAndStream(threadId, body, opts);
        return result;
    });
}
// Polls an existing run until it reaches a terminal state, via the OpenAI
// SDK's beta `poll` helper.
poll(threadId, runId, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        // Merge per-call Portkey settings into the shared client headers.
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.runs.poll(threadId, runId, opts);
        return result;
    });
}
// Creates a run and streams its events via the OpenAI SDK's beta `stream`
// helper; returns the SDK stream object.
stream(threadId, _body, params, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const body = _body;
        // Merge per-call Portkey settings into the shared client headers.
        if (params) {
            const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
            this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
        }
        const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
        const result = yield OAIclient.beta.threads.runs.stream(threadId, body, opts);
        return result;
    });
}
}

@@ -349,7 +341,3 @@ exports.Runs = Runs;

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -367,7 +355,3 @@ // @ts-ignore

}
const OAIclient = new openai_1.default({
apiKey: constants_1.OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: (0, utils_1.defaultHeadersBuilder)(this.client),
});
const OAIclient = (0, utils_1.initOpenAIClient)(this.client);
const result = yield OAIclient.beta.threads.runs.steps.retrieve(threadId, runId, stepId, opts).withResponse();

@@ -374,0 +358,0 @@ return (0, utils_1.finalResponse)(result);

import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -35,7 +34,3 @@

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -61,7 +56,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -88,7 +79,3 @@ const result = await OAIclient.beta.threads.retrieve(threadId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -113,7 +100,3 @@ const result = await OAIclient.beta.threads.update(threadId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -139,7 +122,3 @@ const result = await OAIclient.beta.threads.del(threadId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -151,2 +130,43 @@ const result = await OAIclient.beta.threads.createAndRun(body, opts).withResponse();

/** Creates a thread run and blocks until it settles, via the SDK's poll helper. */
async createAndRunPoll(
  _body: ThreadCreateAndRunParamsNonStreaming,
  params?: ApiClientInterface,
  opts?: RequestOptions & {pollIntervalMs?: number}
): Promise<any> {
  const body: ThreadCreateAndRunParamsNonStreaming = _body;
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.createAndRunPoll(body, opts);
}
/** Creates a thread run in streaming mode; resolves to the SDK stream object. */
async createAndRunStream(
  _body: ThreadCreateAndRunParamsBaseStream,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  const body: ThreadCreateAndRunParamsBaseStream = _body;
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.createAndRunStream(body, opts);
}
}

@@ -157,9 +177,2 @@

files: Files;
constructor(client:any) {
super(client);
this.files = new Files(client);
}
async create(

@@ -180,7 +193,3 @@ threadId: string,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -208,7 +217,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -235,7 +240,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -264,7 +265,3 @@ const result = await OAIclient.beta.threads.messages.retrieve(threadId, messageId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -279,61 +276,3 @@ const result = await OAIclient.beta.threads.messages.update(threadId, messageId, body, opts).withResponse();

// Thread message file operations (legacy Assistants v1 surface removed in
// 1.4.0): lists and retrieves files attached to a message through an OpenAI
// client configured with this Portkey client's base URL and default headers.
export class Files extends ApiResource{
    // List files attached to a message; `_query` carries pagination params.
    async list(
        threadId: string,
        messageId: string,
        _query?: FileListParams,
        params?: ApiClientInterface,
        opts?: RequestOptions
    ): Promise<any> {
        const query: FileListParams | undefined = _query;
        // Merge per-call Portkey settings into the shared client headers.
        if (params) {
            const config = overrideConfig(this.client.config, params.config);
            this.client.customHeaders = {
                ...this.client.customHeaders,
                ...createHeaders({ ...params, config }),
            };
        }
        // Placeholder API key: real auth travels in the Portkey default headers.
        const OAIclient = new OpenAI({
            apiKey: OPEN_AI_API_KEY,
            baseURL: this.client.baseURL,
            defaultHeaders: defaultHeadersBuilder(this.client),
        });
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        const result = await OAIclient.beta.threads.messages.files.list(threadId, messageId, query, opts).withResponse();
        return finalResponse(result);
    }
    // Retrieve one file attached to a message by its id.
    async retrieve(
        threadId: string,
        messageId: string,
        fileId: string,
        params?: ApiClientInterface,
        opts?: RequestOptions
    ): Promise<any> {
        if (params) {
            const config = overrideConfig(this.client.config, params.config);
            this.client.customHeaders = {
                ...this.client.customHeaders,
                ...createHeaders({ ...params, config }),
            };
        }
        const OAIclient = new OpenAI({
            apiKey: OPEN_AI_API_KEY,
            baseURL: this.client.baseURL,
            defaultHeaders: defaultHeadersBuilder(this.client),
        });
        const result = await OAIclient.beta.threads.messages.files.retrieve(threadId, messageId, fileId, opts).withResponse();
        return finalResponse(result);
    }
}
export class Runs extends ApiResource{

@@ -363,7 +302,3 @@

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -390,7 +325,3 @@ const result = await OAIclient.beta.threads.runs.create(threadId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -417,7 +348,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -445,7 +372,3 @@ const result = await OAIclient.beta.threads.runs.retrieve(threadId, runId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -473,7 +396,3 @@ const result = await OAIclient.beta.threads.runs.update(threadId, runId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -485,2 +404,46 @@ const result = await OAIclient.beta.threads.runs.submitToolOutputs(threadId, runId, body, opts).withResponse();

/** Submits tool outputs for a run, then polls until the run settles. */
async submitToolOutputsAndPoll(
  threadId: string,
  runId: string,
  _body: RunSubmitToolOutputsParamsNonStreaming,
  params?: ApiClientInterface,
  opts?: RequestOptions & {pollIntervalMs?: number}
): Promise<any> {
  const body: RunSubmitToolOutputsParamsNonStreaming = _body;
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.runs.submitToolOutputsAndPoll(threadId, runId, body, opts);
}
/** Submits tool outputs and streams the continuing run; resolves to the SDK stream. */
async submitToolOutputsStream(
  threadId: string,
  runId: string,
  _body: RunSubmitToolOutputsParamsStreaming,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  const body: RunSubmitToolOutputsParamsStreaming = _body;
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.runs.submitToolOutputsStream(threadId, runId, body, opts);
}
async cancel(

@@ -500,9 +463,7 @@ threadId: string,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
const body = {}
const options = { body, ...opts }
const result = await OAIclient.beta.threads.runs.cancel(threadId, runId, opts).withResponse();
const result = await OAIclient.beta.threads.runs.cancel(threadId, runId, options).withResponse();

@@ -512,2 +473,85 @@ return finalResponse(result);

/** Creates a run on an existing thread and blocks until it reaches a terminal state. */
async createAndPoll(
  threadId: string,
  _body: RunCreateParamsNonStreaming,
  params?: ApiClientInterface,
  opts?: RequestOptions & {pollIntervalMs?: number},
): Promise<any> {
  const body: RunCreateParamsNonStreaming = _body;
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.runs.createAndPoll(threadId, body, opts);
}
/** Creates a run on an existing thread in streaming mode; resolves to the SDK stream. */
async createAndStream(
  threadId: string,
  _body: RunCreateParamsBaseStream,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  const body: RunCreateParamsBaseStream = _body;
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.runs.createAndStream(threadId, body, opts);
}
/** Polls an existing run until it reaches a terminal state. */
async poll(
  threadId: string,
  runId: string,
  params?: ApiClientInterface,
  opts?: RequestOptions & {pollIntervalMs?: number}
): Promise<any> {
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.runs.poll(threadId, runId, opts);
}
/** Creates a run and streams its events; resolves to the SDK stream object. */
async stream(
  threadId: string,
  _body: RunCreateParamsBaseStream,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  const body: RunCreateParamsBaseStream = _body;
  if (params) {
    // Fold per-call Portkey settings into the shared client headers.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    this.client.customHeaders = Object.assign(
      {},
      this.client.customHeaders,
      createHeaders({ ...params, config: mergedConfig })
    );
  }
  const openaiClient = initOpenAIClient(this.client);
  return openaiClient.beta.threads.runs.stream(threadId, body, opts);
}
}

@@ -533,7 +577,3 @@

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -561,7 +601,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -630,2 +666,5 @@ const result = await OAIclient.beta.threads.runs.steps.retrieve(threadId, runId, stepId, opts).withResponse();

/** Run-creation params with streaming explicitly disabled (used by polling helpers). */
export interface RunCreateParamsNonStreaming extends RunCreateParams {
    stream?: false | null;
}
export interface ThreadCreateAndRunParams {

@@ -641,2 +680,10 @@

/** Create-and-run params with streaming explicitly disabled (polling variant). */
export interface ThreadCreateAndRunParamsNonStreaming extends ThreadCreateAndRunParams{
    stream?: false | null;
}
/** Create-and-run params with `stream` re-declared as the streaming literal. */
export type ThreadCreateAndRunParamsBaseStream = Omit<ThreadCreateAndRunParams, 'stream'> & {
    stream?: true;
};
export interface RunListParams extends CursorPageParams {

@@ -665,1 +712,13 @@ before?: string;

}
/** Run-creation params with `stream` re-declared as the streaming literal. */
export type RunCreateParamsBaseStream = Omit<RunCreateParams, 'stream'> & {
    stream?: true;
};
/** Tool-output submission params with streaming explicitly disabled (polling variant). */
export interface RunSubmitToolOutputsParamsNonStreaming extends RunSubmitToolOutputsParams {
    stream?: false | null;
}
/** Tool-output submission params that require streaming (`stream: true`). */
export interface RunSubmitToolOutputsParamsStreaming extends RunSubmitToolOutputsParams {
    stream: true;
}

@@ -43,3 +43,3 @@ /// <reference types="node" />

private fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, cacheNamespace, requestTimeout }: ApiClientInterface);
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }: ApiClientInterface);
protected defaultHeaders(): Record<string, string>;

@@ -46,0 +46,0 @@ _post<Rsp extends APIResponseType>(path: string, opts?: RequestOptions): APIPromise<Rsp>;

@@ -82,6 +82,6 @@ "use strict";

class ApiClient {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, cacheNamespace, requestTimeout }) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }) {
this.apiKey = apiKey !== null && apiKey !== void 0 ? apiKey : "";
this.baseURL = baseURL !== null && baseURL !== void 0 ? baseURL : "";
this.customHeaders = (0, apis_1.createHeaders)({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, requestTimeout });
this.customHeaders = (0, apis_1.createHeaders)({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance });
this.portkeyHeaders = this.defaultHeaders();

@@ -131,3 +131,3 @@ this.fetch = fetch;

const url = new URL(this.baseURL + opts.path);
const { method, path, query, headers: headers = {}, body } = opts;
const { method, body } = opts;
const reqHeaders = Object.assign(Object.assign({}, this.defaultHeaders()), this.customHeaders);

@@ -134,0 +134,0 @@ const httpAgent = defaultHttpAgent;

@@ -123,6 +123,6 @@ import KeepAliveAgent from "agentkeepalive";

private fetch: Fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, cacheNamespace, requestTimeout }: ApiClientInterface) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }: ApiClientInterface) {
this.apiKey = apiKey ?? "";
this.baseURL = baseURL ?? "";
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, requestTimeout })
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance })
this.portkeyHeaders = this.defaultHeaders()

@@ -191,3 +191,3 @@ this.fetch = fetch;

const url = new URL(this.baseURL + opts.path!)
const { method, path, query, headers: headers = {}, body } = opts;
const { method, body } = opts;
const reqHeaders: Record<string, string> = {

@@ -194,0 +194,0 @@ ...this.defaultHeaders(), ...this.customHeaders,

import { ApiClientInterface } from "./_types/generalTypes";
import * as API from "./apis";
import { PostBodyParams } from "./apis/postMethod";
import { ApiClient, RequestOptions } from "./baseClient";
import { PostBodyParams, PostResponse } from "./apis/postMethod";
import { ApiClient, APIPromise, RequestOptions } from "./baseClient";
import { Stream } from "./streaming";
export declare class Portkey extends ApiClient {

@@ -29,20 +30,29 @@ apiKey: string | null;

azureApiVersion?: string | null | undefined;
huggingfaceBaseUrl?: string | null | undefined;
forwardHeaders?: Array<string> | null | undefined;
requestTimeout?: number | null | undefined;
cacheNamespace?: string | null | undefined;
constructor({ apiKey, baseURL, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, cacheNamespace, requestTimeout, }: ApiClientInterface);
strictOpenAiCompliance?: boolean | null | undefined;
constructor({ apiKey, baseURL, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, }: ApiClientInterface);
completions: API.Completions;
chat: API.Chat;
embeddings: API.Embeddings;
files: API.MainFiles;
images: API.Images;
models: API.Models;
generations: API.Generations;
prompts: API.Prompt;
feedback: API.Feedback;
embeddings: API.Embeddings;
images: API.Images;
files: API.MainFiles;
models: API.Models;
batches: API.Batches;
fineTuning: API.FineTuning;
moderations: API.Moderations;
audio: API.Audio;
uploads: API.Uploads;
beta: {
assistants: API.Assistants;
threads: API.Threads;
vectorStores: API.VectorStores;
chat: API.BetaChat;
};
post: (url: string, _body: PostBodyParams, params?: ApiClientInterface, opts?: RequestOptions) => import("./baseClient").APIPromise<import("./apis/postMethod").PostResponse> | import("./baseClient").APIPromise<import("./streaming").Stream<import("./apis/postMethod").PostResponse>>;
post: (url: string, _body: PostBodyParams, params?: ApiClientInterface, opts?: RequestOptions) => APIPromise<Stream<PostResponse>> | APIPromise<PostResponse>;
}

@@ -34,3 +34,3 @@ "use strict";

var _b, _c;
var { apiKey = (_b = (0, utils_1.readEnv)("PORTKEY_API_KEY")) !== null && _b !== void 0 ? _b : null, baseURL = (_c = (0, utils_1.readEnv)("PORTKEY_BASE_URL")) !== null && _c !== void 0 ? _c : null, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, cacheNamespace, requestTimeout, } = _a;
var { apiKey = (_b = (0, utils_1.readEnv)("PORTKEY_API_KEY")) !== null && _b !== void 0 ? _b : null, baseURL = (_c = (0, utils_1.readEnv)("PORTKEY_BASE_URL")) !== null && _c !== void 0 ? _c : null, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance, } = _a;
super({

@@ -61,17 +61,26 @@ apiKey,

azureApiVersion,
huggingfaceBaseUrl,
forwardHeaders,
requestTimeout,
strictOpenAiCompliance,
});
this.completions = new API.Completions(this);
this.chat = new API.Chat(this);
this.embeddings = new API.Embeddings(this);
this.files = new API.MainFiles(this);
this.images = new API.Images(this);
this.models = new API.Models(this);
this.generations = new API.Generations(this);
this.prompts = new API.Prompt(this);
this.feedback = new API.Feedback(this);
this.embeddings = new API.Embeddings(this);
this.images = new API.Images(this);
this.files = new API.MainFiles(this);
this.models = new API.Models(this);
this.batches = new API.Batches(this);
this.fineTuning = new API.FineTuning(this);
this.moderations = new API.Moderations(this);
this.audio = new API.Audio(this);
this.uploads = new API.Uploads(this);
this.beta = {
assistants: new API.Assistants(this),
threads: new API.Threads(this)
threads: new API.Threads(this),
vectorStores: new API.VectorStores(this),
chat: new API.BetaChat(this),
};

@@ -108,4 +117,6 @@ this.post = (url, _body, params, opts) => {

this.azureApiVersion = azureApiVersion;
this.huggingfaceBaseUrl = huggingfaceBaseUrl;
this.forwardHeaders = forwardHeaders;
this.requestTimeout = requestTimeout;
this.strictOpenAiCompliance = strictOpenAiCompliance;
}

@@ -112,0 +123,0 @@ }

import { ApiClientInterface } from "./_types/generalTypes";
import * as API from "./apis";
import { PostBodyParams } from "./apis/postMethod";
import { ApiClient, RequestOptions } from "./baseClient";
import { PostBodyParams, PostResponse } from "./apis/postMethod";
import { ApiClient, APIPromise, RequestOptions } from "./baseClient";
import { MISSING_API_KEY_ERROR_MESSAGE, PORTKEY_BASE_URL } from "./constants";
import { Stream } from "./streaming";
import { castToError, readEnv } from "./utils";

@@ -32,5 +33,7 @@

azureApiVersion?: string | null | undefined;
huggingfaceBaseUrl?: string | null | undefined;
forwardHeaders?: Array<string> | null | undefined;
requestTimeout?: number | null | undefined;
cacheNamespace?: string | null | undefined;
strictOpenAiCompliance?: boolean | null | undefined;
constructor({

@@ -60,5 +63,7 @@ apiKey = readEnv("PORTKEY_API_KEY") ?? null,

azureApiVersion,
huggingfaceBaseUrl,
forwardHeaders,
cacheNamespace,
requestTimeout,
strictOpenAiCompliance,
}: ApiClientInterface) {

@@ -91,4 +96,6 @@

azureApiVersion,
huggingfaceBaseUrl,
forwardHeaders,
requestTimeout,
strictOpenAiCompliance,
});

@@ -122,4 +129,6 @@

this.azureApiVersion = azureApiVersion;
this.huggingfaceBaseUrl = huggingfaceBaseUrl;
this.forwardHeaders = forwardHeaders;
this.requestTimeout = requestTimeout;
this.strictOpenAiCompliance = strictOpenAiCompliance;
}

@@ -129,12 +138,19 @@

chat = new API.Chat(this);
embeddings = new API.Embeddings(this);
files = new API.MainFiles(this);
images = new API.Images(this);
models = new API.Models(this);
generations = new API.Generations(this);
prompts = new API.Prompt(this);
feedback = new API.Feedback(this);
embeddings = new API.Embeddings(this);
images = new API.Images(this);
files = new API.MainFiles(this);
models = new API.Models(this);
batches = new API.Batches(this);
fineTuning = new API.FineTuning(this);
moderations = new API.Moderations(this);
audio = new API.Audio(this);
uploads = new API.Uploads(this);
beta = {
assistants: new API.Assistants(this),
threads: new API.Threads(this)
threads: new API.Threads(this),
vectorStores: new API.VectorStores(this),
chat: new API.BetaChat(this),
};

@@ -148,3 +164,3 @@

opts?: RequestOptions
) => {
): APIPromise<Stream<PostResponse>> | APIPromise<PostResponse> => {
return new API.postMethod(this).create(url, _body, params, opts)

@@ -151,0 +167,0 @@ };

@@ -96,3 +96,2 @@ "use strict";

var _b, e_2, _c, _d;
let done = false;
try {

@@ -105,3 +104,2 @@ try {

if (sse.data.startsWith('[DONE]')) {
done = true;
continue;

@@ -134,3 +132,2 @@ }

}
done = true;
}

@@ -137,0 +134,0 @@ catch (e) {

@@ -69,7 +69,5 @@ import { Fetch } from "./baseClient";

async *[Symbol.asyncIterator](): AsyncIterator<Item, any, undefined> {
let done = false;
try {
for await (const sse of this.iterMessages()) {
if (sse.data.startsWith('[DONE]')) {
done = true;
continue;

@@ -95,3 +93,2 @@ }

}
done = true;
} catch (e) {

@@ -98,0 +95,0 @@ if (e instanceof Error && e.name === "AbortError") return;

@@ -0,1 +1,3 @@

import OpenAI from "openai";
import type { Portkey } from "./index";
type PlatformProperties = {

@@ -16,2 +18,3 @@ "x-portkey-runtime"?: string;

export declare function defaultHeadersBuilder(client: any): any;
export declare function initOpenAIClient(client: Portkey): OpenAI;
export {};
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.defaultHeadersBuilder = exports.portkeyHeaders = exports.finalResponse = exports.parseBody = exports.overrideConfig = exports.getPortkeyHeader = exports.isEmpty = exports.castToError = exports.readEnv = exports.getPlatformProperties = void 0;
exports.initOpenAIClient = exports.defaultHeadersBuilder = exports.portkeyHeaders = exports.finalResponse = exports.parseBody = exports.overrideConfig = exports.getPortkeyHeader = exports.isEmpty = exports.castToError = exports.readEnv = exports.getPlatformProperties = void 0;
const constants_1 = require("./constants.js");
const streaming_1 = require("./streaming.js");
const openai_1 = __importDefault(require("openai"));
const getPlatformProperties = () => {

@@ -106,2 +110,11 @@ if (Object.prototype.toString.call(typeof process !== "undefined" ? process : 0) === "[object process]") {

exports.defaultHeadersBuilder = defaultHeadersBuilder;
function initOpenAIClient(client) {
return new openai_1.default({
apiKey: client.apiKey || (0, exports.readEnv)("OPENAI_API_KEY") || constants_1.OPEN_AI_API_KEY,
baseURL: client.baseURL,
defaultHeaders: defaultHeadersBuilder(client),
maxRetries: 0
});
}
exports.initOpenAIClient = initOpenAIClient;
//# sourceMappingURL=utils.js.map

@@ -1,3 +0,5 @@

import { PORTKEY_HEADER_PREFIX } from "./constants";
import { OPEN_AI_API_KEY, PORTKEY_HEADER_PREFIX } from "./constants";
import { createResponseHeaders } from "./streaming";
import OpenAI from "openai";
import type { Portkey } from "./index";

@@ -121,2 +123,11 @@ type PlatformProperties = {

return {...customHeaders, ...portkeyHeaders}
}
/**
 * Creates an OpenAI SDK client whose traffic is routed through the Portkey
 * gateway (`client.baseURL`) with Portkey's default headers attached.
 *
 * API key resolution order: the Portkey client's own key, then the
 * OPENAI_API_KEY environment variable, then the OPEN_AI_API_KEY placeholder
 * constant.
 */
export function initOpenAIClient(client: Portkey){
    // Fall through empty/missing keys until one source yields a value.
    const resolvedApiKey =
        client.apiKey || readEnv("OPENAI_API_KEY") || OPEN_AI_API_KEY;

    const clientOptions = {
        apiKey: resolvedApiKey,
        baseURL: client.baseURL,
        defaultHeaders: defaultHeadersBuilder(client),
        // Disable SDK-level retries; retry policy lives in the gateway config.
        maxRetries: 0,
    };

    return new OpenAI(clientOptions);
}

@@ -1,1 +0,1 @@

export declare const VERSION = "1.3.2";
export declare const VERSION = "1.4.0-rc.1";
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.VERSION = void 0;
exports.VERSION = "1.3.2";
exports.VERSION = "1.4.0-rc.1";
//# sourceMappingURL=version.js.map

@@ -1,1 +0,1 @@

export const VERSION = "1.3.2";
export const VERSION = "1.4.0-rc.1";
{
"name": "portkey-ai",
"version": "1.3.2",
"version": "1.4.0-rc.1",
"description": "Node client library for the Portkey API",

@@ -45,4 +45,4 @@ "types": "dist/src/index.d.ts",

"dotenv": "^16.3.1",
"openai": "4.36.0"
"openai": "4.55.3"
}
}

@@ -27,5 +27,7 @@ export type Headers = Record<string, string | null | undefined>

azureApiVersion?: string | null | undefined;
huggingfaceBaseUrl?: string | null | undefined;
forwardHeaders?: Array<string> | null | undefined;
cacheNamespace?: string | null | undefined;
requestTimeout?: number | null | undefined;
strictOpenAiCompliance?: boolean | null | undefined;
}

@@ -32,0 +34,0 @@

@@ -33,2 +33,9 @@ export interface RetrySettings {

organization?: string;
seed?: number;
response_format?: any;
service_tier?: string;
top_logprobs?: number | null;
parallel_tool_calls?: boolean;
tools?: Array<Tool>;
tool_choice?: any;
}

@@ -40,1 +47,6 @@

}
/**
 * A tool definition passed in chat-completion requests.
 * `type` names the tool kind and `function` carries its free-form
 * specification (name, description, JSON-schema parameters, etc.).
 */
export interface Tool {
    type?: string;
    function?: Record<string, any>
}
import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -12,3 +10,2 @@ export interface AssistantCreateParams {

description?: string | null;
file_ids?: Array<string>;
instructions?: string | null;

@@ -18,2 +15,7 @@ metadata?: unknown | null;

tools?: Array<any>;
response_format?: any | null;
temperature?: number | null;
tool_resources?: any | null;
top_p?: number | null;
}

@@ -52,10 +54,3 @@

export class Assistants extends ApiResource {
files: Files;
constructor(client:any) {
super(client);
this.files = new Files(client);
}
async create(

@@ -75,7 +70,3 @@ _body: AssistantCreateParams,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -101,7 +92,3 @@ const result = await OAIclient.beta.assistants.create(body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -127,7 +114,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -154,7 +137,3 @@ const result = await OAIclient.beta.assistants.retrieve(assistantId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -179,7 +158,3 @@ const result = await OAIclient.beta.assistants.update(assistantId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -192,109 +167,1 @@ const result = await OAIclient.beta.assistants.del(assistantId, opts).withResponse();

}
/**
 * Assistant-file operations (attach, list, retrieve, detach), proxied through
 * the Portkey gateway to the OpenAI SDK's `beta.assistants.files` surface.
 *
 * Every method follows the same pattern: optionally merge per-call Portkey
 * params into the client's custom headers (per-call values win), build a
 * temporary OpenAI client pointed at the Portkey base URL, perform the call
 * with `.withResponse()` to keep access to raw response headers, and unwrap
 * it via `finalResponse`.
 *
 * NOTE(review): the OpenAI client is constructed with the OPEN_AI_API_KEY
 * placeholder constant — presumably the gateway injects the real provider
 * credential from the Portkey headers; verify against the gateway behavior.
 */
export class Files extends ApiResource{
    /**
     * Attaches a file to an assistant.
     * @param assistantId - target assistant.
     * @param _body - file-attach parameters forwarded verbatim.
     * @param params - optional per-call Portkey overrides (config, headers).
     * @param opts - request options passed through to the OpenAI SDK.
     */
    async create(
        assistantId: string,
        _body: FileCreateParams,
        params?: ApiClientInterface,
        opts?: RequestOptions
    ): Promise<any> {
        const body: FileCreateParams = _body;
        if (params) {
            // Per-call config overrides the client-level config before
            // the headers are (re)built; later spread entries win.
            const config = overrideConfig(this.client.config, params.config);
            this.client.customHeaders = {
                ...this.client.customHeaders,
                ...createHeaders({ ...params, config }),
            };
        }
        const OAIclient = new OpenAI({
            apiKey: OPEN_AI_API_KEY,
            baseURL: this.client.baseURL,
            defaultHeaders: defaultHeadersBuilder(this.client),
        });
        const result = await OAIclient.beta.assistants.files.create(assistantId, body, opts).withResponse();
        return finalResponse(result);
    }
    /**
     * Lists files attached to an assistant.
     * @param assistantId - target assistant.
     * @param _query - optional pagination/filter query.
     */
    async list(
        assistantId: string,
        _query?: FileListParams,
        params?: ApiClientInterface,
        opts?: RequestOptions
    ): Promise<any> {
        const query: FileListParams | undefined = _query;
        if (params) {
            const config = overrideConfig(this.client.config, params.config);
            this.client.customHeaders = {
                ...this.client.customHeaders,
                ...createHeaders({ ...params, config }),
            };
        }
        const OAIclient = new OpenAI({
            apiKey: OPEN_AI_API_KEY,
            baseURL: this.client.baseURL,
            defaultHeaders: defaultHeadersBuilder(this.client),
        });
        // eslint-disable-next-line @typescript-eslint/ban-ts-comment
        // @ts-ignore
        const result = await OAIclient.beta.assistants.files.list(assistantId, query, opts).withResponse();
        return finalResponse(result);
    }
    /**
     * Retrieves a single assistant-file association.
     * @param assistantId - target assistant.
     * @param fileId - file attached to the assistant.
     */
    async retrieve(
        assistantId: string,
        fileId: string,
        params?: ApiClientInterface,
        opts?: RequestOptions
    ): Promise<any> {
        if (params) {
            const config = overrideConfig(this.client.config, params.config);
            this.client.customHeaders = {
                ...this.client.customHeaders,
                ...createHeaders({ ...params, config }),
            };
        }
        const OAIclient = new OpenAI({
            apiKey: OPEN_AI_API_KEY,
            baseURL: this.client.baseURL,
            defaultHeaders: defaultHeadersBuilder(this.client),
        });
        const result = await OAIclient.beta.assistants.files.retrieve(assistantId, fileId, opts).withResponse();
        return finalResponse(result);
    }
    /**
     * Detaches a file from an assistant.
     * @param assistantId - target assistant.
     * @param fileId - file to detach.
     */
    async del(
        assistantId: string,
        fileId: string,
        params?: ApiClientInterface,
        opts?: RequestOptions
    ): Promise<any> {
        if (params) {
            const config = overrideConfig(this.client.config, params.config);
            this.client.customHeaders = {
                ...this.client.customHeaders,
                ...createHeaders({ ...params, config }),
            };
        }
        const OAIclient = new OpenAI({
            apiKey: OPEN_AI_API_KEY,
            baseURL: this.client.baseURL,
            defaultHeaders: defaultHeadersBuilder(this.client),
        });
        const result = await OAIclient.beta.assistants.files.del(assistantId, fileId, opts).withResponse();
        return finalResponse(result);
    }
}

@@ -0,1 +1,2 @@

import { ChatCompletionMessageToolCall, ChatCompletionStreamOptions, ChatCompletionTokenLogprob } from "openai/resources/chat/completions";
import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";

@@ -57,2 +58,3 @@ import { ModelParams } from "../_types/portkeyConstructs";

stream?: true;
stream_options?: ChatCompletionStreamOptions
}

@@ -70,2 +72,3 @@

total_tokens?: number;
[key: string]: any;
}

@@ -76,23 +79,20 @@

name?: string;
[key: string]: any;
}
// A single tool invocation emitted by the model inside an assistant message.
// `index` identifies the call's position when calls arrive incrementally in
// streamed chunks.
interface ToolCall {
    index?: number;
    id?: string;
    function?: FunctionType;
    type?: 'function';
}
// Legacy (pre-tools) function-call payload: the function name plus its
// arguments serialized as a JSON string.
interface FunctionCall {
    arguments?: string;
    name?: string;
}
interface Message {
role: string;
content: string | null;
function_call?: FunctionCall;
tool_calls?: Array<ToolCall>;
content: string;
refusal?: string;
function_call?: any;
tool_calls?: Array<ChatCompletionMessageToolCall>;
tool_call_id?: string;
}
/**
 * Token log-probability data attached to a chat choice: per-token logprobs
 * for the message content and for a refusal, each nullable. The index
 * signature tolerates additional provider-specific fields.
 */
export interface Logprobs {
    content: Array<ChatCompletionTokenLogprob> | null;
    refusal: Array<ChatCompletionTokenLogprob> | null;
    [key: string]: any;
}
interface Choices {

@@ -103,2 +103,4 @@ index?: number;

finish_reason?: string;
logprobs?: Logprobs
[key: string]: any;
}

@@ -113,2 +115,5 @@

usage: Usage;
service_tier?: string;
system_fingerprint?: string;
[key: string]: any;
}

@@ -67,6 +67,16 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";

// Log-probability payload for (legacy) text completions: parallel arrays of
// character offsets, per-token logprobs, token strings, and, per position,
// the top alternative tokens mapped to their logprobs.
interface Logprobs {
    text_offset?: Array<number>;
    token_logprobs?: Array<number>;
    tokens?: Array<string>;
    top_logprobs?: Array<Record<string, number>>;
}
interface Choices {
index?: number;
text?: string;
logprobs: any;
logprobs: Logprobs;
finish_reason?: string;

@@ -82,2 +92,3 @@ }

usage?: Usage;
system_fingerprint?: string;
}
import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -25,7 +23,3 @@ export class MainFiles extends ApiResource {

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -53,7 +47,3 @@ // eslint-disable-next-line @typescript-eslint/ban-ts-comment

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -78,7 +68,3 @@ const result = await OAIclient.files.list(query, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -103,7 +89,3 @@ const result = await OAIclient.files.retrieve(fileId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -115,2 +97,22 @@ const result = await OAIclient.files.del(fileId, opts).withResponse();

/**
 * Downloads the raw content of an uploaded file via the gateway.
 *
 * @param fileId - identifier of the file whose bytes are requested.
 * @param params - optional per-call Portkey overrides (config, headers);
 *                 when present, they are merged over the client's headers
 *                 with per-call values winning.
 * @param opts - request options forwarded to the OpenAI SDK.
 * @returns the unwrapped response produced by `finalResponse`.
 */
async content(
    fileId: string,
    params?: ApiClientInterface,
    opts?: RequestOptions
): Promise<any> {
    if (params) {
        // Rebuild headers with the per-call config layered on top of the
        // client-level config; spread order makes the overrides win.
        const mergedConfig = overrideConfig(this.client.config, params.config);
        const overrideHeaders = createHeaders({ ...params, config: mergedConfig });
        this.client.customHeaders = {
            ...this.client.customHeaders,
            ...overrideHeaders,
        };
    }

    const openai = initOpenAIClient(this.client);
    const rawResult = await openai.files.content(fileId, opts).withResponse();
    return finalResponse(rawResult);
}
async retrieveContent(

@@ -129,7 +131,3 @@ fileId: string,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -136,0 +134,0 @@ const result = await OAIclient.files.content(fileId, opts).withResponse();

import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -66,7 +64,4 @@ export interface ImagesBody {

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -93,7 +88,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -121,7 +112,3 @@ // eslint-disable-next-line @typescript-eslint/ban-ts-comment

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -128,0 +115,0 @@ // @ts-ignore

@@ -13,2 +13,8 @@ export { Chat } from "./chatCompletions";

export { Models } from "./models";
export { Batches } from "./batches";
export { FineTuning } from "./fineTuning"
export { Moderations } from "./moderations"
export { Audio } from "./audio"
export { VectorStores } from "./vectorStores"
export { BetaChat } from "./betaChat"
export { Uploads } from "./uploads"
import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -22,7 +20,3 @@ export class Models extends ApiResource {

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client)
});
const OAIclient = initOpenAIClient(this.client);

@@ -47,7 +41,3 @@ const result = await OAIclient.models.list(opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -72,7 +62,3 @@ const result = await OAIclient.models.retrieve(model, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -79,0 +65,0 @@ const result = await OAIclient.models.del(model, opts).withResponse();

import { ApiClientInterface } from "../_types/generalTypes";
import { ApiResource } from "../apiResource";
import { RequestOptions } from "../baseClient";
import { OPEN_AI_API_KEY } from "../constants";
import { defaultHeadersBuilder, finalResponse, overrideConfig } from "../utils";
import { finalResponse, initOpenAIClient, overrideConfig } from "../utils";
import { createHeaders } from "./createHeaders";
import OpenAI from "openai";

@@ -35,7 +34,3 @@

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -61,7 +56,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -88,7 +79,3 @@ const result = await OAIclient.beta.threads.retrieve(threadId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -113,7 +100,3 @@ const result = await OAIclient.beta.threads.update(threadId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -139,7 +122,3 @@ const result = await OAIclient.beta.threads.del(threadId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -151,2 +130,43 @@ const result = await OAIclient.beta.threads.createAndRun(body, opts).withResponse();

/**
 * Creates a thread, starts a run on it, and polls until the run reaches a
 * terminal state (delegates to the OpenAI SDK's `createAndRunPoll` helper).
 *
 * @param _body - non-streaming create-and-run parameters.
 * @param params - optional per-call Portkey overrides (config, headers).
 * @param opts - request options; `pollIntervalMs` tunes the polling cadence.
 * @returns the SDK helper's result (the finished run).
 */
async createAndRunPoll(
    _body: ThreadCreateAndRunParamsNonStreaming,
    params?: ApiClientInterface,
    opts?: RequestOptions & {pollIntervalMs?: number}
): Promise<any> {
    const body: ThreadCreateAndRunParamsNonStreaming = _body;
    if (params) {
        // Layer per-call config over the client config before rebuilding
        // headers; later spread entries take precedence.
        const mergedConfig = overrideConfig(this.client.config, params.config);
        this.client.customHeaders = {
            ...this.client.customHeaders,
            ...createHeaders({ ...params, config: mergedConfig }),
        };
    }

    const openai = initOpenAIClient(this.client);
    return openai.beta.threads.createAndRunPoll(body, opts);
}
/**
 * Creates a thread and starts a run on it in streaming mode, returning the
 * SDK's assistant-event stream (delegates to `createAndRunStream`).
 *
 * @param _body - create-and-run parameters with `stream` narrowed to `true`.
 * @param params - optional per-call Portkey overrides (config, headers).
 * @param opts - request options forwarded to the OpenAI SDK.
 * @returns the stream object produced by the SDK helper.
 */
async createAndRunStream(
    _body: ThreadCreateAndRunParamsBaseStream,
    params?: ApiClientInterface,
    opts?: RequestOptions
): Promise<any> {
    const body: ThreadCreateAndRunParamsBaseStream = _body;
    if (params) {
        // Per-call params win over existing client headers via spread order.
        const mergedConfig = overrideConfig(this.client.config, params.config);
        const extraHeaders = createHeaders({ ...params, config: mergedConfig });
        this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
    }

    const openai = initOpenAIClient(this.client);
    return openai.beta.threads.createAndRunStream(body, opts);
}
}

@@ -157,9 +177,2 @@

files: Files;
constructor(client:any) {
super(client);
this.files = new Files(client);
}
async create(

@@ -180,7 +193,3 @@ threadId: string,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -208,7 +217,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -235,7 +240,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -264,7 +265,3 @@ const result = await OAIclient.beta.threads.messages.retrieve(threadId, messageId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -279,61 +276,3 @@ const result = await OAIclient.beta.threads.messages.update(threadId, messageId, body, opts).withResponse();

// Legacy resource for files attached to a thread message.
// NOTE(review): unlike sibling resources, this builds an OpenAI client inline
// instead of calling initOpenAIClient, and hits `beta.threads.messages.files`,
// which needs a @ts-ignore — presumably an older SDK surface; confirm before reuse.
export class Files extends ApiResource{
/**
 * Lists files attached to a message, merging any per-call Portkey
 * overrides from `params` into the client's custom headers first.
 */
async list(
threadId: string,
messageId: string,
_query?: FileListParams,
params?: ApiClientInterface,
opts?: RequestOptions
): Promise<any> {
const query: FileListParams | undefined = _query;
if (params) {
// Merge request-scoped config/headers on top of the client's defaults.
const config = overrideConfig(this.client.config, params.config);
this.client.customHeaders = {
...this.client.customHeaders,
...createHeaders({ ...params, config }),
};
}
// Inline client pointed at the Portkey gateway (client.baseURL) with the
// merged headers; apiKey is the module-level OPEN_AI_API_KEY constant.
const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
const result = await OAIclient.beta.threads.messages.files.list(threadId, messageId, query, opts).withResponse();
return finalResponse(result);
}
/**
 * Retrieves a single file attached to a message, merging any per-call
 * Portkey overrides from `params` into the client's custom headers first.
 */
async retrieve(
threadId: string,
messageId: string,
fileId: string,
params?: ApiClientInterface,
opts?: RequestOptions
): Promise<any> {
if (params) {
// Merge request-scoped config/headers on top of the client's defaults.
const config = overrideConfig(this.client.config, params.config);
this.client.customHeaders = {
...this.client.customHeaders,
...createHeaders({ ...params, config }),
};
}
const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const result = await OAIclient.beta.threads.messages.files.retrieve(threadId, messageId, fileId, opts).withResponse();
return finalResponse(result);
}
}
export class Runs extends ApiResource{

@@ -363,7 +302,3 @@

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -390,7 +325,3 @@ const result = await OAIclient.beta.threads.runs.create(threadId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -417,7 +348,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -445,7 +372,3 @@ const result = await OAIclient.beta.threads.runs.retrieve(threadId, runId, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -473,7 +396,3 @@ const result = await OAIclient.beta.threads.runs.update(threadId, runId, body, opts).withResponse();

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -485,2 +404,46 @@ const result = await OAIclient.beta.threads.runs.submitToolOutputs(threadId, runId, body, opts).withResponse();

/**
 * Submits tool outputs for a run and polls until the run reaches a terminal
 * state. Per-call Portkey overrides in `params` are merged into the client's
 * custom headers before delegating to the OpenAI SDK helper.
 */
async submitToolOutputsAndPoll(
  threadId: string,
  runId: string,
  _body: RunSubmitToolOutputsParamsNonStreaming,
  params?: ApiClientInterface,
  opts?: RequestOptions & { pollIntervalMs?: number }
): Promise<any> {
  const body: RunSubmitToolOutputsParamsNonStreaming = _body;
  if (params) {
    // Merge request-scoped config/headers on top of the client's defaults.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    const extraHeaders = createHeaders({ ...params, config: mergedConfig });
    this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
  }
  const openai = initOpenAIClient(this.client);
  return openai.beta.threads.runs.submitToolOutputsAndPoll(threadId, runId, body, opts);
}
/**
 * Submits tool outputs for a run with streaming enabled and returns the SDK's
 * stream object. Per-call Portkey overrides in `params` are merged into the
 * client's custom headers first.
 */
async submitToolOutputsStream(
  threadId: string,
  runId: string,
  _body: RunSubmitToolOutputsParamsStreaming,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  const body: RunSubmitToolOutputsParamsStreaming = _body;
  if (params) {
    // Merge request-scoped config/headers on top of the client's defaults.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    const extraHeaders = createHeaders({ ...params, config: mergedConfig });
    this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
  }
  const openai = initOpenAIClient(this.client);
  return openai.beta.threads.runs.submitToolOutputsStream(threadId, runId, body, opts);
}
async cancel(

@@ -500,9 +463,7 @@ threadId: string,

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
const body = {}
const options = { body, ...opts }
const result = await OAIclient.beta.threads.runs.cancel(threadId, runId, opts).withResponse();
const result = await OAIclient.beta.threads.runs.cancel(threadId, runId, options).withResponse();

@@ -512,2 +473,85 @@ return finalResponse(result);

/**
 * Starts a run on an existing thread and polls until it reaches a terminal
 * state. Per-call Portkey overrides in `params` are merged into the client's
 * custom headers before delegating to the OpenAI SDK helper.
 */
async createAndPoll(
  threadId: string,
  _body: RunCreateParamsNonStreaming,
  params?: ApiClientInterface,
  opts?: RequestOptions & { pollIntervalMs?: number },
): Promise<any> {
  const body: RunCreateParamsNonStreaming = _body;
  if (params) {
    // Merge request-scoped config/headers on top of the client's defaults.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    const extraHeaders = createHeaders({ ...params, config: mergedConfig });
    this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
  }
  const openai = initOpenAIClient(this.client);
  return openai.beta.threads.runs.createAndPoll(threadId, body, opts);
}
/**
 * Starts a streaming run on an existing thread and returns the SDK's stream
 * object. Per-call Portkey overrides in `params` are merged into the client's
 * custom headers first.
 */
async createAndStream(
  threadId: string,
  _body: RunCreateParamsBaseStream,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  const body: RunCreateParamsBaseStream = _body;
  if (params) {
    // Merge request-scoped config/headers on top of the client's defaults.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    const extraHeaders = createHeaders({ ...params, config: mergedConfig });
    this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
  }
  const openai = initOpenAIClient(this.client);
  return openai.beta.threads.runs.createAndStream(threadId, body, opts);
}
/**
 * Polls an existing run until it reaches a terminal state. Per-call Portkey
 * overrides in `params` are merged into the client's custom headers before
 * delegating to the OpenAI SDK helper.
 */
async poll(
  threadId: string,
  runId: string,
  params?: ApiClientInterface,
  opts?: RequestOptions & { pollIntervalMs?: number }
): Promise<any> {
  if (params) {
    // Merge request-scoped config/headers on top of the client's defaults.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    const extraHeaders = createHeaders({ ...params, config: mergedConfig });
    this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
  }
  const openai = initOpenAIClient(this.client);
  return openai.beta.threads.runs.poll(threadId, runId, opts);
}
/**
 * Starts a streaming run on an existing thread and returns the SDK's stream
 * object. Per-call Portkey overrides in `params` are merged into the client's
 * custom headers first.
 */
async stream(
  threadId: string,
  _body: RunCreateParamsBaseStream,
  params?: ApiClientInterface,
  opts?: RequestOptions
): Promise<any> {
  const body: RunCreateParamsBaseStream = _body;
  if (params) {
    // Merge request-scoped config/headers on top of the client's defaults.
    const mergedConfig = overrideConfig(this.client.config, params.config);
    const extraHeaders = createHeaders({ ...params, config: mergedConfig });
    this.client.customHeaders = { ...this.client.customHeaders, ...extraHeaders };
  }
  const openai = initOpenAIClient(this.client);
  return openai.beta.threads.runs.stream(threadId, body, opts);
}
}

@@ -533,7 +577,3 @@

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);
// eslint-disable-next-line @typescript-eslint/ban-ts-comment

@@ -561,7 +601,3 @@ // @ts-ignore

const OAIclient = new OpenAI({
apiKey: OPEN_AI_API_KEY,
baseURL: this.client.baseURL,
defaultHeaders: defaultHeadersBuilder(this.client),
});
const OAIclient = initOpenAIClient(this.client);

@@ -630,2 +666,5 @@ const result = await OAIclient.beta.threads.runs.steps.retrieve(threadId, runId, stepId, opts).withResponse();

/** Non-streaming variant of run-creation params: `stream` is absent, false, or null. */
export interface RunCreateParamsNonStreaming extends RunCreateParams {
stream?: false | null;
}
export interface ThreadCreateAndRunParams {

@@ -641,2 +680,10 @@

/** Non-streaming variant of thread create-and-run params: `stream` is absent, false, or null. */
export interface ThreadCreateAndRunParamsNonStreaming extends ThreadCreateAndRunParams{
stream?: false | null;
}
/** Streaming variant of thread create-and-run params: `stream`, when present, must be true. */
export type ThreadCreateAndRunParamsBaseStream = Omit<ThreadCreateAndRunParams, 'stream'> & {
stream?: true;
};
export interface RunListParams extends CursorPageParams {

@@ -665,1 +712,13 @@ before?: string;

}
/** Streaming variant of run-creation params: `stream`, when present, must be true. */
export type RunCreateParamsBaseStream = Omit<RunCreateParams, 'stream'> & {
stream?: true;
};
/** Tool-output submission without streaming: `stream` is absent, false, or null. */
export interface RunSubmitToolOutputsParamsNonStreaming extends RunSubmitToolOutputsParams {
stream?: false | null;
}
/** Tool-output submission with streaming: `stream` is required and must be true. */
export interface RunSubmitToolOutputsParamsStreaming extends RunSubmitToolOutputsParams {
stream: true;
}

@@ -123,6 +123,6 @@ import KeepAliveAgent from "agentkeepalive";

private fetch: Fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, cacheNamespace, requestTimeout }: ApiClientInterface) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, cacheNamespace, requestTimeout, strictOpenAiCompliance }: ApiClientInterface) {
this.apiKey = apiKey ?? "";
this.baseURL = baseURL ?? "";
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, forwardHeaders, requestTimeout })
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost, cacheNamespace, openaiProject, openaiOrganization, awsSecretAccessKey, awsAccessKeyId, awsSessionToken, awsRegion, vertexProjectId, vertexRegion, workersAiAccountId, azureResourceName, azureDeploymentId, azureApiVersion, huggingfaceBaseUrl, forwardHeaders, requestTimeout, strictOpenAiCompliance })
this.portkeyHeaders = this.defaultHeaders()

@@ -191,3 +191,3 @@ this.fetch = fetch;

const url = new URL(this.baseURL + opts.path!)
const { method, path, query, headers: headers = {}, body } = opts;
const { method, body } = opts;
const reqHeaders: Record<string, string> = {

@@ -194,0 +194,0 @@ ...this.defaultHeaders(), ...this.customHeaders,

import { ApiClientInterface } from "./_types/generalTypes";
import * as API from "./apis";
import { PostBodyParams } from "./apis/postMethod";
import { ApiClient, RequestOptions } from "./baseClient";
import { PostBodyParams, PostResponse } from "./apis/postMethod";
import { ApiClient, APIPromise, RequestOptions } from "./baseClient";
import { MISSING_API_KEY_ERROR_MESSAGE, PORTKEY_BASE_URL } from "./constants";
import { Stream } from "./streaming";
import { castToError, readEnv } from "./utils";

@@ -32,5 +33,7 @@

azureApiVersion?: string | null | undefined;
huggingfaceBaseUrl?: string | null | undefined;
forwardHeaders?: Array<string> | null | undefined;
requestTimeout?: number | null | undefined;
cacheNamespace?: string | null | undefined;
strictOpenAiCompliance?: boolean | null | undefined;
constructor({

@@ -60,5 +63,7 @@ apiKey = readEnv("PORTKEY_API_KEY") ?? null,

azureApiVersion,
huggingfaceBaseUrl,
forwardHeaders,
cacheNamespace,
requestTimeout,
strictOpenAiCompliance,
}: ApiClientInterface) {

@@ -91,4 +96,6 @@

azureApiVersion,
huggingfaceBaseUrl,
forwardHeaders,
requestTimeout,
strictOpenAiCompliance,
});

@@ -122,4 +129,6 @@

this.azureApiVersion = azureApiVersion;
this.huggingfaceBaseUrl = huggingfaceBaseUrl;
this.forwardHeaders = forwardHeaders;
this.requestTimeout = requestTimeout;
this.strictOpenAiCompliance = strictOpenAiCompliance;
}

@@ -129,12 +138,19 @@

chat = new API.Chat(this);
embeddings = new API.Embeddings(this);
files = new API.MainFiles(this);
images = new API.Images(this);
models = new API.Models(this);
generations = new API.Generations(this);
prompts = new API.Prompt(this);
feedback = new API.Feedback(this);
embeddings = new API.Embeddings(this);
images = new API.Images(this);
files = new API.MainFiles(this);
models = new API.Models(this);
batches = new API.Batches(this);
fineTuning = new API.FineTuning(this);
moderations = new API.Moderations(this);
audio = new API.Audio(this);
uploads = new API.Uploads(this);
beta = {
assistants: new API.Assistants(this),
threads: new API.Threads(this)
threads: new API.Threads(this),
vectorStores: new API.VectorStores(this),
chat: new API.BetaChat(this),
};

@@ -148,3 +164,3 @@

opts?: RequestOptions
) => {
): APIPromise<Stream<PostResponse>> | APIPromise<PostResponse> => {
return new API.postMethod(this).create(url, _body, params, opts)

@@ -151,0 +167,0 @@ };

@@ -69,7 +69,5 @@ import { Fetch } from "./baseClient";

async *[Symbol.asyncIterator](): AsyncIterator<Item, any, undefined> {
let done = false;
try {
for await (const sse of this.iterMessages()) {
if (sse.data.startsWith('[DONE]')) {
done = true;
continue;

@@ -95,3 +93,2 @@ }

}
done = true;
} catch (e) {

@@ -98,0 +95,0 @@ if (e instanceof Error && e.name === "AbortError") return;

@@ -1,3 +0,5 @@

import { PORTKEY_HEADER_PREFIX } from "./constants";
import { OPEN_AI_API_KEY, PORTKEY_HEADER_PREFIX } from "./constants";
import { createResponseHeaders } from "./streaming";
import OpenAI from "openai";
import type { Portkey } from "./index";

@@ -121,2 +123,11 @@ type PlatformProperties = {

return {...customHeaders, ...portkeyHeaders}
}
/**
 * Builds an OpenAI SDK client that routes through the given Portkey client's
 * baseURL with Portkey's default headers, and disables SDK-level retries.
 * The API key falls back from the client, to the OPENAI_API_KEY environment
 * variable, to the module-level OPEN_AI_API_KEY constant.
 */
export function initOpenAIClient(client: Portkey){
  const resolvedApiKey =
    client.apiKey || readEnv("OPENAI_API_KEY") || OPEN_AI_API_KEY;
  return new OpenAI({
    apiKey: resolvedApiKey,
    baseURL: client.baseURL,
    defaultHeaders: defaultHeadersBuilder(client),
    maxRetries: 0, // maxRetries set to 0 so the SDK itself never retries
  });
}

@@ -1,1 +0,1 @@

export const VERSION = "1.3.2";
export const VERSION = "1.4.0-rc.1";

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc