portkey-ai
Comparing version 1.2.1 to 1.3.0
{
"name": "portkey-ai",
"version": "1.2.1",
"version": "1.3.0",
"description": "Node client library for the Portkey API",
@@ -25,4 +25,4 @@ "types": "./src/index.d.ts",
"dotenv": "^16.3.1",
"openai": "^4.28.4"
"openai": "4.36.0"
}
}
@@ -12,2 +12,4 @@ <div align="center">
The Portkey SDK is built on top of the OpenAI SDK, allowing you to seamlessly integrate Portkey's advanced features while retaining full compatibility with OpenAI methods. With Portkey, you can enhance your interactions with OpenAI or any other OpenAI-like provider by leveraging robust monitoring, reliability, prompt management, and more features - without modifying much of your existing code.
### AI Gateway
@@ -14,0 +16,0 @@ <table>
@@ -12,2 +12,4 @@ export type Headers = Record<string, string | null | undefined>;
cacheForceRefresh?: boolean | null | undefined;
debug?: boolean | null | undefined;
customHost?: string | null | undefined;
}
@@ -14,0 +16,0 @@ export interface APIResponseType {
@@ -13,2 +13,4 @@ export type Headers = Record<string, string | null | undefined>
cacheForceRefresh?: boolean | null | undefined;
debug?: boolean | null | undefined;
customHost?: string | null | undefined
}
@@ -15,0 +17,0 @@
@@ -5,3 +5,4 @@ import type { Portkey } from "./index";
protected post: Portkey["_post"];
protected put: Portkey["_put"];
constructor(client: Portkey);
}
@@ -8,2 +8,3 @@ "use strict";
this.post = client._post.bind(client);
this.put = client._put.bind(client);
}
@@ -10,0 +11,0 @@ }
@@ -6,2 +6,3 @@ import type { Portkey } from "./index";
protected post: Portkey["_post"]
protected put: Portkey["_put"]
@@ -11,3 +12,4 @@ constructor(client: Portkey) {
this.post = client._post.bind(client)
this.put = client._put.bind(client)
}
}
@@ -16,2 +16,6 @@ "use strict";
}
// stringify booleans so falsy values (false) are preserved as header values
if (typeof v === "boolean") {
v = v.toString();
}
k = k.replace('ID', 'Id')
@@ -18,0 +22,0 @@ .replace(/[A-Z]/g, letter => `-${letter.toLowerCase()}`);
@@ -8,3 +8,2 @@ import { getPortkeyHeader, isEmpty } from "../utils"
let v = config[k];
if (isEmpty(v)) continue;
@@ -17,2 +16,8 @@
}
// stringify booleans so falsy values (false) are preserved as header values
if (typeof v === "boolean") {
v = v.toString()
}
k = k.replace('ID', 'Id')
@@ -19,0 +24,0 @@ .replace(/[A-Z]/g, letter => `-${letter.toLowerCase()}`)
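Both createHeaders hunks above (compiled and source) add the same guard: boolean config values are stringified before the key is kebab-cased into an x-portkey-* header, so false is sent rather than silently dropped. A standalone sketch of that transform, assuming the x-portkey- prefix and the isEmpty semantics implied by the diff (illustration only, not the library's exact implementation):

```ts
type HeaderConfig = Record<string, string | number | boolean | null | undefined>;

function toPortkeyHeaders(config: HeaderConfig): Record<string, string> {
  const headers: Record<string, string> = {};
  for (let key of Object.keys(config)) {
    let value = config[key];
    if (value === null || value === undefined || value === "") continue; // assumed isEmpty() behaviour
    if (typeof value === "boolean") value = value.toString();            // keep `false` instead of dropping it
    key = key
      .replace("ID", "Id")                                               // e.g. traceID -> traceId
      .replace(/[A-Z]/g, (letter) => `-${letter.toLowerCase()}`);        // camelCase -> kebab-case
    headers[`x-portkey-${key}`] = String(value);                         // assumed header prefix
  }
  return headers;
}

// toPortkeyHeaders({ cacheForceRefresh: true, debug: false, traceID: "req-1" })
// => { "x-portkey-cache-force-refresh": "true", "x-portkey-debug": "false", "x-portkey-trace-id": "req-1" }
```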
@@ -6,2 +6,3 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";
traceID?: string;
feedbackID?: string;
value?: number;
@@ -15,6 +16,8 @@ weight?: number;
message: string;
feedback_id: Array<string>;
}
export declare class Feedback extends ApiResource {
create(_body: FeedbackBody, params?: ApiClientInterface, opts?: RequestOptions): APIPromise<FeedbackResponse>;
update(_body: FeedbackBodyBase, params?: ApiClientInterface, opts?: RequestOptions): APIPromise<FeedbackResponse>;
}
export {};
@@ -18,4 +18,14 @@ "use strict";
}
update(_body, params, opts) {
const body = _body;
const feedbackID = _body.feedbackID;
if (params) {
const config = (0, utils_1.overrideConfig)(this.client.config, params.config);
this.client.customHeaders = Object.assign(Object.assign({}, this.client.customHeaders), (0, createHeaders_1.createHeaders)(Object.assign(Object.assign({}, params), { config })));
}
const response = this.put(constants_1.FEEDBACK_API + '/' + feedbackID, Object.assign({ body }, opts));
return response;
}
}
exports.Feedback = Feedback;
//# sourceMappingURL=feedback.js.map
@@ -10,2 +10,3 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";
traceID?: string;
feedbackID?: string;
value?: number;
@@ -21,2 +22,3 @@ weight?: number;
message: string;
feedback_id: Array<string>;
}
@@ -38,2 +40,17 @@
}
update(
_body: FeedbackBodyBase,
params?: ApiClientInterface,
opts?: RequestOptions
): APIPromise<FeedbackResponse> {
const body = _body
const feedbackID = _body.feedbackID
if (params) {
const config = overrideConfig(this.client.config, params.config)
this.client.customHeaders = { ...this.client.customHeaders, ...createHeaders({ ...params, config }) }
}
const response = this.put<FeedbackResponse>(FEEDBACK_API+'/'+feedbackID , { body, ...opts })
return response
}
}
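The new Feedback.update() above goes through the new _put helper and targets FEEDBACK_API + '/' + feedbackID. A minimal usage sketch, assuming an async context, a valid PORTKEY_API_KEY, and that sending only feedbackID plus a new value is an accepted payload (the diff only shows feedbackID being added to FeedbackBodyBase):

```ts
import { Portkey } from "portkey-ai";

const portkey = new Portkey({ apiKey: process.env.PORTKEY_API_KEY });

async function reviseFeedback() {
  // Record feedback against a trace, then revise it via the new PUT-backed update().
  const created = await portkey.feedback.create({ traceID: "trace-123", value: 1 });
  const feedbackId = created.feedback_id?.[0]; // FeedbackResponse now exposes feedback_id: string[]

  if (feedbackId) {
    await portkey.feedback.update({ feedbackID: feedbackId, value: 0 }); // revise the score
  }
}
```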
@@ -27,7 +27,54 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";
}
export interface Functions {
name?: string;
description?: string;
parameters?: object;
}
export interface Tool {
function?: Functions;
type?: string;
}
export interface Messages {
content?: string;
role?: string;
}
export type PromptsCreateParams = PromptsCreateNonStreaming | PromptsCreateStreaming;
type PromptsResponse = Record<string, any> & APIResponseType;
type PromptRenderResponse = {
success: boolean;
data: {
messages?: Messages[];
prompt?: string;
model?: string;
stream?: boolean;
suffix?: string;
max_tokens?: number;
temperature?: number;
top_k?: number;
top_p?: number;
n?: number;
stop_sequences?: string[];
functions?: Functions[];
function_call?: string | Functions;
logprobs?: boolean;
top_logprobs?: number;
echo?: boolean;
stop?: string | string[];
presence_penalty?: number;
frequency_penalty?: number;
best_of?: number;
logit_bias?: {
[key: string]: number;
};
user?: string;
organization?: string;
tool_choice?: string;
tools?: Tool[];
response_format?: object;
seed?: number;
};
} & APIResponseType;
export declare class Prompt extends ApiResource {
completions: PromptCompletions;
render(_body: PromptsCreateParams, params?: ApiClientInterface, opts?: RequestOptions): APIPromise<PromptsResponse>;
render(_body: PromptsCreateParams, params?: ApiClientInterface, opts?: RequestOptions): APIPromise<PromptRenderResponse>;
}
@@ -34,0 +81,0 @@ export declare class PromptCompletions extends ApiResource {
@@ -44,2 +44,18 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";
export interface Functions {
name?: string;
description?: string;
parameters?: object;
}
export interface Tool{
function?: Functions;
type?: string;
}
export interface Messages {
content?: string;
role?: string;
}
export type PromptsCreateParams = PromptsCreateNonStreaming | PromptsCreateStreaming
@@ -49,2 +65,35 @@
type PromptRenderResponse = {
success: boolean;
data: {
messages?: Messages[];
prompt?: string;
model?: string;
stream?: boolean;
suffix?: string;
max_tokens?: number;
temperature?: number;
top_k?: number;
top_p?: number;
n?: number;
stop_sequences?: string[];
functions?: Functions[];
function_call?: string | Functions;
logprobs?: boolean;
top_logprobs?: number;
echo?: boolean;
stop?: string | string[];
presence_penalty?: number;
frequency_penalty?: number;
best_of?: number;
logit_bias?: { [key: string]: number };
user?: string;
organization?: string;
tool_choice?: string;
tools?: Tool[];
response_format?: object;
seed?: number;
};
} & APIResponseType;
export class Prompt extends ApiResource {
@@ -57,3 +106,3 @@ completions: PromptCompletions = new PromptCompletions(this.client);
opts?: RequestOptions
): APIPromise<PromptsResponse> {
): APIPromise<PromptRenderResponse> {
const body = _body
@@ -65,3 +114,3 @@ const promptId = _body.promptID
}
const response = this.post<PromptsResponse>(`/prompts/${promptId}/render`, { body, ...opts })
const response = this.post<PromptRenderResponse>(`/prompts/${promptId}/render`, { body, ...opts })
return response
@@ -68,0 +117,0 @@ }
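With render() now typed as APIPromise<PromptRenderResponse>, the rendered payload's fields (messages, model, temperature, and so on) are typed rather than a bare Record<string, any>. A usage sketch, assuming the resource is exposed as portkey.prompts and that the render body accepts a promptID plus template variables; the variables shape below is hypothetical:

```ts
import { Portkey } from "portkey-ai";

const portkey = new Portkey({ apiKey: process.env.PORTKEY_API_KEY });

async function renderPrompt() {
  const rendered = await portkey.prompts.render({
    promptID: "pp-example-prompt",        // hypothetical prompt ID
    variables: { customer_name: "Ada" },  // hypothetical template variables
  });

  if (rendered.success) {
    // Typed via PromptRenderResponse instead of Record<string, any>.
    console.log(rendered.data.model);
    console.log(rendered.data.messages?.map((m) => `${m.role}: ${m.content}`));
  }
}
```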
@@ -43,5 +43,6 @@ /// <reference types="node" />
private fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh }: ApiClientInterface);
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost }: ApiClientInterface);
protected defaultHeaders(): Record<string, string>;
_post<Rsp extends APIResponseType>(path: string, opts?: RequestOptions): APIPromise<Rsp>;
_put<Rsp extends APIResponseType>(path: string, opts?: RequestOptions): APIPromise<Rsp>;
protected generateError(status: number | undefined, errorResponse: object | undefined, message: string | undefined, headers: Headers | undefined): APIError;
@@ -48,0 +49,0 @@ request(opts: FinalRequestOptions): Promise<APIResponseProps>;
@@ -82,6 +82,6 @@ "use strict";
class ApiClient {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh }) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost }) {
this.apiKey = apiKey !== null && apiKey !== void 0 ? apiKey : "";
this.baseURL = baseURL !== null && baseURL !== void 0 ? baseURL : "";
this.customHeaders = (0, apis_1.createHeaders)({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh });
this.customHeaders = (0, apis_1.createHeaders)({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost });
this.portkeyHeaders = this.defaultHeaders();
@@ -97,2 +97,5 @@ this.fetch = fetch;
}
_put(path, opts) {
return this.methodRequest("put", path, opts);
}
generateError(status, errorResponse, message, headers) {
@@ -99,0 +102,0 @@ return error_1.APIError.generate(status, errorResponse, message, headers);
@@ -123,6 +123,6 @@ import KeepAliveAgent from "agentkeepalive";
private fetch: Fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh }: ApiClientInterface) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost }: ApiClientInterface) {
this.apiKey = apiKey ?? "";
this.baseURL = baseURL ?? "";
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh })
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost })
this.portkeyHeaders = this.defaultHeaders()
@@ -145,2 +145,6 @@ this.fetch = fetch;
_put<Rsp extends APIResponseType>(path: string, opts?: RequestOptions): APIPromise<Rsp> {
return this.methodRequest("put", path, opts);
}
protected generateError(
@@ -147,0 +151,0 @@ status: number | undefined,
@@ -15,3 +15,5 @@ import { ApiClientInterface } from "./_types/generalTypes";
cacheForceRefresh?: boolean | null | undefined;
constructor({ apiKey, baseURL, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh }: ApiClientInterface);
debug?: boolean | null | undefined;
customHost?: string | null | undefined;
constructor({ apiKey, baseURL, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, }: ApiClientInterface);
completions: API.Completions;
@@ -18,0 +20,0 @@ chat: API.Chat;
@@ -34,3 +34,3 @@ "use strict";
var _b, _c;
var { apiKey = (_b = (0, utils_1.readEnv)("PORTKEY_API_KEY")) !== null && _b !== void 0 ? _b : null, baseURL = (_c = (0, utils_1.readEnv)("PORTKEY_BASE_URL")) !== null && _c !== void 0 ? _c : null, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh } = _a;
var { apiKey = (_b = (0, utils_1.readEnv)("PORTKEY_API_KEY")) !== null && _b !== void 0 ? _b : null, baseURL = (_c = (0, utils_1.readEnv)("PORTKEY_BASE_URL")) !== null && _c !== void 0 ? _c : null, config, virtualKey, provider, traceID, metadata, Authorization, cacheForceRefresh, debug, customHost, } = _a;
super({
@@ -46,2 +46,4 @@ apiKey,
cacheForceRefresh,
debug,
customHost
});
@@ -75,2 +77,4 @@ this.completions = new API.Completions(this);
this.cacheForceRefresh = cacheForceRefresh;
this.debug = debug;
this.customHost = customHost;
}
@@ -77,0 +81,0 @@ }
@@ -18,2 +18,4 @@ import { ApiClientInterface } from "./_types/generalTypes";
cacheForceRefresh?: boolean | null | undefined;
debug?: boolean | null | undefined;
customHost?: string | null | undefined;
constructor({
@@ -28,3 +30,5 @@ apiKey = readEnv("PORTKEY_API_KEY") ?? null,
Authorization,
cacheForceRefresh
cacheForceRefresh,
debug,
customHost,
}: ApiClientInterface) {
@@ -42,2 +46,4 @@
cacheForceRefresh,
debug,
customHost
});
@@ -56,2 +62,4 @@
this.cacheForceRefresh = cacheForceRefresh;
this.debug = debug;
this.customHost = customHost
}
@@ -58,0 +66,0 @@
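The declaration, compiled, and source hunks above all make the same client change: debug and customHost are accepted by the Portkey constructor, stored on the instance, and forwarded to createHeaders. A minimal sketch of a 1.3.0 client using them; the exact semantics of each option (verbose logging, routing through a self-hosted gateway host) are assumptions not spelled out in this diff:

```ts
import { Portkey } from "portkey-ai";

const portkey = new Portkey({
  apiKey: process.env.PORTKEY_API_KEY,
  virtualKey: process.env.PORTKEY_VIRTUAL_KEY,    // optional provider virtual key
  debug: true,                                     // new in 1.3.0; forwarded to createHeaders (assumed log toggle)
  customHost: "https://gateway.internal.example",  // new in 1.3.0; forwarded to createHeaders (assumed gateway host)
});

async function main() {
  const completion = await portkey.chat.completions.create({
    model: "gpt-3.5-turbo",
    messages: [{ role: "user", content: "Hello from portkey-ai 1.3.0" }],
  });
  console.log(completion.choices?.[0]?.message);
}
```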
{
"name": "portkey-ai",
"version": "1.2.1",
"version": "1.3.0",
"description": "Node client library for the Portkey API",
@@ -45,4 +45,4 @@ "types": "dist/src/index.d.ts",
"dotenv": "^16.3.1",
"openai": "^4.28.4"
"openai": "4.36.0"
}
}
@@ -12,2 +12,4 @@ <div align="center">
The Portkey SDK is built on top of the OpenAI SDK, allowing you to seamlessly integrate Portkey's advanced features while retaining full compatibility with OpenAI methods. With Portkey, you can enhance your interactions with OpenAI or any other OpenAI-like provider by leveraging robust monitoring, reliability, prompt management, and more features - without modifying much of your existing code.
### AI Gateway
@@ -14,0 +16,0 @@ <table>
@@ -13,2 +13,4 @@ export type Headers = Record<string, string | null | undefined>
cacheForceRefresh?: boolean | null | undefined;
debug?: boolean | null | undefined;
customHost?: string | null | undefined
}
@@ -15,0 +17,0 @@
@@ -6,2 +6,3 @@ import type { Portkey } from "./index";
protected post: Portkey["_post"]
protected put: Portkey["_put"]
@@ -11,3 +12,4 @@ constructor(client: Portkey) {
this.post = client._post.bind(client)
this.put = client._put.bind(client)
}
}
@@ -8,3 +8,2 @@ import { getPortkeyHeader, isEmpty } from "../utils"
let v = config[k];
if (isEmpty(v)) continue;
@@ -17,2 +16,8 @@
}
// stringify booleans so falsy values (false) are preserved as header values
if (typeof v === "boolean") {
v = v.toString()
}
k = k.replace('ID', 'Id')
@@ -19,0 +24,0 @@ .replace(/[A-Z]/g, letter => `-${letter.toLowerCase()}`)
@@ -10,2 +10,3 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";
traceID?: string;
feedbackID?: string;
value?: number;
@@ -21,2 +22,3 @@ weight?: number;
message: string;
feedback_id: Array<string>;
}
@@ -38,2 +40,17 @@
}
update(
_body: FeedbackBodyBase,
params?: ApiClientInterface,
opts?: RequestOptions
): APIPromise<FeedbackResponse> {
const body = _body
const feedbackID = _body.feedbackID
if (params) {
const config = overrideConfig(this.client.config, params.config)
this.client.customHeaders = { ...this.client.customHeaders, ...createHeaders({ ...params, config }) }
}
const response = this.put<FeedbackResponse>(FEEDBACK_API+'/'+feedbackID , { body, ...opts })
return response
}
}
@@ -44,2 +44,18 @@ import { APIResponseType, ApiClientInterface } from "../_types/generalTypes";
export interface Functions {
name?: string;
description?: string;
parameters?: object;
}
export interface Tool{
function?: Functions;
type?: string;
}
export interface Messages {
content?: string;
role?: string;
}
export type PromptsCreateParams = PromptsCreateNonStreaming | PromptsCreateStreaming
@@ -49,2 +65,35 @@
type PromptRenderResponse = {
success: boolean;
data: {
messages?: Messages[];
prompt?: string;
model?: string;
stream?: boolean;
suffix?: string;
max_tokens?: number;
temperature?: number;
top_k?: number;
top_p?: number;
n?: number;
stop_sequences?: string[];
functions?: Functions[];
function_call?: string | Functions;
logprobs?: boolean;
top_logprobs?: number;
echo?: boolean;
stop?: string | string[];
presence_penalty?: number;
frequency_penalty?: number;
best_of?: number;
logit_bias?: { [key: string]: number };
user?: string;
organization?: string;
tool_choice?: string;
tools?: Tool[];
response_format?: object;
seed?: number;
};
} & APIResponseType;
export class Prompt extends ApiResource {
@@ -57,3 +106,3 @@ completions: PromptCompletions = new PromptCompletions(this.client);
opts?: RequestOptions
): APIPromise<PromptsResponse> {
): APIPromise<PromptRenderResponse> {
const body = _body
@@ -65,3 +114,3 @@ const promptId = _body.promptID
}
const response = this.post<PromptsResponse>(`/prompts/${promptId}/render`, { body, ...opts })
const response = this.post<PromptRenderResponse>(`/prompts/${promptId}/render`, { body, ...opts })
return response
@@ -68,0 +117,0 @@ }
@@ -123,6 +123,6 @@ import KeepAliveAgent from "agentkeepalive";
private fetch: Fetch;
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh }: ApiClientInterface) {
constructor({ apiKey, baseURL, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost }: ApiClientInterface) {
this.apiKey = apiKey ?? "";
this.baseURL = baseURL ?? "";
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh })
this.customHeaders = createHeaders({ apiKey, config, virtualKey, traceID, metadata, provider, Authorization, cacheForceRefresh, debug, customHost })
this.portkeyHeaders = this.defaultHeaders()
@@ -145,2 +145,6 @@ this.fetch = fetch;
_put<Rsp extends APIResponseType>(path: string, opts?: RequestOptions): APIPromise<Rsp> {
return this.methodRequest("put", path, opts);
}
protected generateError(
@@ -147,0 +151,0 @@ status: number | undefined,
@@ -18,2 +18,4 @@ import { ApiClientInterface } from "./_types/generalTypes";
cacheForceRefresh?: boolean | null | undefined;
debug?: boolean | null | undefined;
customHost?: string | null | undefined;
constructor({
@@ -28,3 +30,5 @@ apiKey = readEnv("PORTKEY_API_KEY") ?? null,
Authorization,
cacheForceRefresh
cacheForceRefresh,
debug,
customHost,
}: ApiClientInterface) {
@@ -42,2 +46,4 @@
cacheForceRefresh,
debug,
customHost
});
@@ -56,2 +62,4 @@
this.cacheForceRefresh = cacheForceRefresh;
this.debug = debug;
this.customHost = customHost
}
@@ -58,0 +66,0 @@
+ Added openai@4.36.0 (transitive)
+ Added web-streams-polyfill@3.3.3 (transitive)
- Removed openai@4.86.2 (transitive)
Updated openai@4.36.0