You're Invited: Meet the Socket team at BSidesSF and RSAC - April 27 - May 1. RSVP

@re-ai/openai-like-api

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@re-ai/openai-like-api - npm Package Compare versions

Comparing version
0.1.5
to
0.1.6

@@ -8,3 +8,3 @@ import { ChatCompletionCreateParams, ChatCompletionChunk, ChatCompletion, CreateEmbeddingResponse, EmbeddingCreateParams } from "openai/resources";

endpoint: string;
client?: OpenAIClient;
client: OpenAIClient;
constructor(apiKey: string, endpoint?: string);

@@ -11,0 +11,0 @@ chatComplete(params: ChatCompletionCreateParams): Promise<ChatCompletionStream<ChatCompletionChunk> | ChatCompletion>;

@@ -14,19 +14,8 @@ "use strict";

this.endpoint = endpoint || process.env.AZURE_OPENAI_ENDPOINT || "";
// if (!this.endpoint) {
// throw new Error("Azure OpenAI endpoint is not set")
// }
if (this.endpoint) {
this.client = new openai_1.OpenAIClient(`https://${this.endpoint}.openai.azure.com/`, new openai_1.AzureKeyCredential(this.apiKey));
if (!this.endpoint) {
throw new Error("Azure OpenAI endpoint is not set");
}
else {
this.client = undefined;
}
this.client = new openai_1.OpenAIClient(`https://${this.endpoint}.openai.azure.com/`, new openai_1.AzureKeyCredential(this.apiKey));
}
async chatComplete(params) {
if (params.model) {
this.client = new openai_1.OpenAIClient(`https://${params.model}.openai.azure.com/`, new openai_1.AzureKeyCredential(this.apiKey));
}
if (!this.client) {
return Promise.reject("Azure OpenAI client is not set");
}
const options = {

@@ -46,10 +35,7 @@ functions: params.functions,

logitBias: params.logit_bias || undefined,
user: params.user,
responseFormat: {
type: params.response_format?.type || "json_object",
}
user: params.user
};
if (params.response_format?.type) {
options.responseFormat = {
type: params.response_format.type
type: params.response_format.type || "text"
};

@@ -64,2 +50,5 @@ }

for await (const chunk of response) {
if (!chunk.id) {
continue;
}
finish = chunk.choices[0].finishReason || null;

@@ -83,3 +72,3 @@ let content = chunk.choices[0].delta?.content;

{
index: 0,
index: index,
delta: {

@@ -101,2 +90,3 @@ role: chunk.choices[0].delta?.role,

stream.write(data);
index++;
}

@@ -107,2 +97,4 @@ }

}, 500);
}).catch(err => {
return Promise.reject(err);
});

@@ -139,5 +131,2 @@ return stream;

async embeddings(params) {
if (!this.client) {
return Promise.reject("Azure OpenAI client is not set");
}
const inputs = typeof params.input === 'string' ? [params.input] : params.input;

@@ -144,0 +133,0 @@ const response = await this.client.getEmbeddings(params.model, inputs, {

{
"name": "@re-ai/openai-like-api",
"version": "0.1.5",
"version": "0.1.6",
"description": "reai openai like api",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

@@ -11,32 +11,20 @@ import { ChatCompletionCreateParams, ChatCompletionChunk, ChatCompletion, CreateEmbeddingResponse, EmbeddingCreateParams } from "openai/resources";

endpoint: string
client?: OpenAIClient
client: OpenAIClient
constructor(apiKey: string, endpoint?: string) {
this.apiKey = apiKey
this.endpoint = endpoint || process.env.AZURE_OPENAI_ENDPOINT || ""
// if (!this.endpoint) {
// throw new Error("Azure OpenAI endpoint is not set")
// }
if (this.endpoint) {
this.client = new OpenAIClient(
`https://${this.endpoint}.openai.azure.com/`,
new AzureKeyCredential(this.apiKey)
)
} else {
this.client = undefined
if (!this.endpoint) {
throw new Error("Azure OpenAI endpoint is not set")
}
this.client = new OpenAIClient(
`https://${this.endpoint}.openai.azure.com/`,
new AzureKeyCredential(this.apiKey)
)
}
async chatComplete(params: ChatCompletionCreateParams): Promise<ChatCompletionStream<ChatCompletionChunk> | ChatCompletion> {
if (params.model) {
this.client = new OpenAIClient(
`https://${params.model}.openai.azure.com/`,
new AzureKeyCredential(this.apiKey)
)
}
if (!this.client) {
return Promise.reject("Azure OpenAI client is not set")
}
const options: GetChatCompletionsOptions = {

@@ -53,13 +41,10 @@ functions: params.functions,

toolChoice: params.tool_choice,
stop: ((typeof params.stop === "string" ) ? [params.stop] : params.stop) || undefined,
stop: ((typeof params.stop === "string") ? [params.stop] : params.stop) || undefined,
n: params.n || undefined,
logitBias: params.logit_bias || undefined,
user: params.user,
responseFormat: {
type: params.response_format?.type || "json_object",
}
user: params.user
}
if (params.response_format?.type) {
options.responseFormat = {
type : params.response_format.type
type: params.response_format.type || "text"
}

@@ -70,3 +55,5 @@ }

const stream = new ChatCompletionStream<ChatCompletionChunk>
this.client.streamChatCompletions(params.model, params.messages, options).then(async (response) => {
let contentBuf = ""

@@ -76,13 +63,18 @@ let index = 0

for await (const chunk of response) {
finish = chunk.choices[0].finishReason || null
let content = chunk.choices[0].delta?.content
if (content === null) {
if (!chunk.id) {
continue
}
finish = chunk.choices[0].finishReason || null
let content = chunk.choices[0].delta?.content
if (content === null) {
stream.write(chunk)
return
} else if (content === undefined) {
} else if (content === undefined) {
content = ""
}
contentBuf += content
if (isTextCut(content) || finish !== null) {
}
contentBuf += content
if (isTextCut(content) || finish !== null) {
let data: ChatCompletionChunk = {

@@ -95,3 +87,3 @@ id: chunk.id,

{
index: 0,
index: index,
delta: {

@@ -105,3 +97,3 @@ role: chunk.choices[0].delta?.role as 'system' | 'user' | 'assistant' | 'tool',

logprobs: null,
}

@@ -116,3 +108,4 @@ ]

stream.write(data)
}
index++
}
}

@@ -123,8 +116,11 @@

}, 500)
}).catch(err => {
return Promise.reject(err)
})
return stream
} else {
const response = await this.client.getChatCompletions(params.model, params.messages, options)
const result: ChatCompletion = {

@@ -157,11 +153,8 @@ id: response.id,

async embeddings(params: EmbeddingCreateParams): Promise<CreateEmbeddingResponse> {
if (!this.client) {
return Promise.reject("Azure OpenAI client is not set")
}
const inputs: string[] = typeof params.input === 'string' ? [params.input] : params.input as string[]
const response = await this.client.getEmbeddings(params.model, inputs, {
user: params.user,
dimensions: params.dimensions,
model: params.model
user: params.user,
dimensions: params.dimensions,
model: params.model
})

@@ -185,6 +178,6 @@ const result: CreateEmbeddingResponse = {

private toolCallsFormat(toolCalls: ChatCompletionsToolCallUnion[]): ChatCompletionChunk.Choice.Delta.ToolCall[] {
const result: ChatCompletionChunk.Choice.Delta.ToolCall[] = []
toolCalls.forEach(toolCall => {
const item: ChatCompletionChunk.Choice.Delta.ToolCall = {

@@ -191,0 +184,0 @@ index: toolCall.index || 0,