@ai-sdk/openai-compatible npm package: comparing version 0.0.6 to 0.0.7


CHANGELOG.md
# @ai-sdk/openai-compatible
## 0.0.7
### Patch Changes
- ad2bf11: feat (provider/fireworks): Add Fireworks provider.
## 0.0.6

@@ -4,0 +10,0 @@
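The only changelog entry in 0.0.7 is the new Fireworks provider, which is built on top of this package. Below is a minimal sketch of how such a provider can be assembled from the exported createOpenAICompatible factory; the name and baseURL options are the ones checked in the compiled code further down, while the endpoint URL, the headers option, and the model id are illustrative assumptions, and the returned provider is assumed to be callable with a model id as with other AI SDK providers.

import { createOpenAICompatible } from '@ai-sdk/openai-compatible';

// Sketch of a Fireworks-style provider. `name` and `baseURL` are required by
// createOpenAICompatible; the endpoint URL, `headers` option, and environment
// variable below are assumptions for illustration.
const fireworks = createOpenAICompatible({
  name: 'fireworks',
  baseURL: 'https://api.fireworks.ai/inference/v1',
  headers: { Authorization: `Bearer ${process.env.FIREWORKS_API_KEY ?? ''}` },
});

// Chat model ids are plain strings (OpenAICompatibleChatModelId = string).
const chatModel = fireworks('accounts/fireworks/models/firefunction-v1');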


dist/index.d.ts
import { ProviderV1, LanguageModelV1, EmbeddingModelV1, LanguageModelV1ObjectGenerationMode } from '@ai-sdk/provider';
import { FetchFunction } from '@ai-sdk/provider-utils';
import { ZodSchema } from 'zod';

@@ -90,2 +91,8 @@ type OpenAICompatibleChatModelId = string;

type ProviderErrorStructure<T> = {
errorSchema: ZodSchema<T>;
errorToMessage: (error: T) => string;
isRetryable?: (response: Response, error?: T) => boolean;
};
type OpenAICompatibleChatConfig = {

@@ -99,2 +106,3 @@ provider: string;

fetch?: FetchFunction;
errorStructure?: ProviderErrorStructure<any>;
/**

@@ -117,2 +125,4 @@ Default object generation mode that should be used with this model when

private readonly config;
private readonly failedResponseHandler;
private readonly chunkSchema;
constructor(modelId: OpenAICompatibleChatModelId, settings: OpenAICompatibleChatSettings, config: OpenAICompatibleChatConfig);

@@ -134,2 +144,3 @@ get defaultObjectGenerationMode(): 'json' | 'tool' | undefined;

fetch?: FetchFunction;
errorStructure?: ProviderErrorStructure<any>;
};

@@ -142,2 +153,4 @@ declare class OpenAICompatibleCompletionLanguageModel implements LanguageModelV1 {

private readonly config;
private readonly failedResponseHandler;
private readonly chunkSchema;
constructor(modelId: OpenAICompatibleCompletionModelId, settings: OpenAICompatibleCompletionSettings, config: OpenAICompatibleCompletionConfig);

@@ -166,2 +179,3 @@ get provider(): string;

fetch?: FetchFunction;
errorStructure?: ProviderErrorStructure<any>;
};

@@ -180,2 +194,2 @@ declare class OpenAICompatibleEmbeddingModel implements EmbeddingModelV1<string> {

export { OpenAICompatibleChatLanguageModel, type OpenAICompatibleChatSettings, OpenAICompatibleCompletionLanguageModel, type OpenAICompatibleCompletionSettings, OpenAICompatibleEmbeddingModel, type OpenAICompatibleEmbeddingSettings, type OpenAICompatibleProvider, type OpenAICompatibleProviderSettings, createOpenAICompatible };
export { OpenAICompatibleChatLanguageModel, type OpenAICompatibleChatSettings, OpenAICompatibleCompletionLanguageModel, type OpenAICompatibleCompletionSettings, OpenAICompatibleEmbeddingModel, type OpenAICompatibleEmbeddingSettings, type OpenAICompatibleProvider, type OpenAICompatibleProviderSettings, type ProviderErrorStructure, createOpenAICompatible };
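The main API change in this release is the new ProviderErrorStructure<T> type, the optional errorStructure field on the chat, completion, and embedding configs, and the export of ProviderErrorStructure itself. A minimal sketch of a custom error structure matching the declaration above; the downstream error payload shape and the retry rule are invented for illustration.

import { z } from 'zod';
import type { ProviderErrorStructure } from '@ai-sdk/openai-compatible';

// Hypothetical error payload returned by a downstream OpenAI-compatible API.
const myErrorSchema = z.object({
  error: z.object({
    code: z.number(),
    message: z.string(),
  }),
});

type MyError = z.infer<typeof myErrorSchema>;

// Passed as `errorStructure` in the chat, completion, or embedding config;
// when omitted, the models fall back to defaultOpenAICompatibleErrorStructure.
const myErrorStructure: ProviderErrorStructure<MyError> = {
  errorSchema: myErrorSchema,
  errorToMessage: (error) => `[${error.error.code}] ${error.error.message}`,
  // Optional: treat HTTP 429 as retryable (an example policy, not the
  // library default).
  isRetryable: (response) => response.status === 429,
};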


dist/index.js

@@ -31,7 +31,7 @@ "use strict";

// src/openai-compatible-provider.ts
var import_provider_utils6 = require("@ai-sdk/provider-utils");
var import_provider_utils5 = require("@ai-sdk/provider-utils");
// src/openai-compatible-chat-language-model.ts
var import_provider3 = require("@ai-sdk/provider");
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var import_provider_utils2 = require("@ai-sdk/provider-utils");
var import_zod2 = require("zod");

@@ -146,5 +146,21 @@

// src/map-openai-compatible-finish-reason.ts
function mapOpenAICompatibleFinishReason(finishReason) {
switch (finishReason) {
case "stop":
return "stop";
case "length":
return "length";
case "content_filter":
return "content-filter";
case "function_call":
case "tool_calls":
return "tool-calls";
default:
return "unknown";
}
}
// src/openai-compatible-error.ts
var import_zod = require("zod");
var import_provider_utils2 = require("@ai-sdk/provider-utils");
var openaiCompatibleErrorDataSchema = import_zod.z.object({

@@ -161,6 +177,6 @@ error: import_zod.z.object({

});
var openaiCompatibleFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
var defaultOpenAICompatibleErrorStructure = {
errorSchema: openaiCompatibleErrorDataSchema,
errorToMessage: (data) => data.error.message
});
};

@@ -225,28 +241,17 @@ // src/openai-compatible-prepare-tools.ts

// src/map-openai-compatible-finish-reason.ts
function mapOpenAICompatibleFinishReason(finishReason) {
switch (finishReason) {
case "stop":
return "stop";
case "length":
return "length";
case "content_filter":
return "content-filter";
case "function_call":
case "tool_calls":
return "tool-calls";
default:
return "unknown";
}
}
// src/openai-compatible-chat-language-model.ts
var OpenAICompatibleChatLanguageModel = class {
// type inferred via constructor
constructor(modelId, settings, config) {
this.specificationVersion = "v1";
var _a;
var _a, _b;
this.modelId = modelId;
this.settings = settings;
this.config = config;
this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : false;
const errorStructure = (_a = config.errorStructure) != null ? _a : defaultOpenAICompatibleErrorStructure;
this.chunkSchema = createOpenAICompatibleChatChunkSchema(
errorStructure.errorSchema
);
this.failedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)(errorStructure);
this.supportsStructuredOutputs = (_b = config.supportsStructuredOutputs) != null ? _b : false;
}

@@ -374,3 +379,3 @@ get defaultObjectGenerationMode() {

const body = JSON.stringify(args);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
url: this.config.url({

@@ -380,6 +385,6 @@ path: "/chat/completions",

}),
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
body: args,
failedResponseHandler: openaiCompatibleFailedResponseHandler,
successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
failedResponseHandler: this.failedResponseHandler,
successfulResponseHandler: (0, import_provider_utils2.createJsonResponseHandler)(
OpenAICompatibleChatResponseSchema

@@ -398,3 +403,3 @@ ),

toolCallType: "function",
toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils3.generateId)(),
toolCallId: (_a2 = toolCall.id) != null ? _a2 : (0, import_provider_utils2.generateId)(),
toolName: toolCall.function.name,

@@ -419,3 +424,3 @@ args: toolCall.function.arguments

const body = JSON.stringify({ ...args, stream: true });
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils2.postJsonToApi)({
url: this.config.url({

@@ -425,3 +430,3 @@ path: "/chat/completions",

}),
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
headers: (0, import_provider_utils2.combineHeaders)(this.config.headers(), options.headers),
body: {

@@ -431,5 +436,5 @@ ...args,

},
failedResponseHandler: openaiCompatibleFailedResponseHandler,
successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
OpenAICompatibleChatChunkSchema
failedResponseHandler: this.failedResponseHandler,
successfulResponseHandler: (0, import_provider_utils2.createEventSourceResponseHandler)(
this.chunkSchema
),

@@ -535,7 +540,7 @@ abortSignal: options.abortSignal,

}
if ((0, import_provider_utils3.isParsableJson)(toolCall2.function.arguments)) {
if ((0, import_provider_utils2.isParsableJson)(toolCall2.function.arguments)) {
controller.enqueue({
type: "tool-call",
toolCallType: "function",
toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils2.generateId)(),
toolName: toolCall2.function.name,

@@ -563,7 +568,7 @@ args: toolCall2.function.arguments

});
if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils2.isParsableJson)(toolCall.function.arguments)) {
controller.enqueue({
type: "tool-call",
toolCallType: "function",
toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils2.generateId)(),
toolName: toolCall.function.name,

@@ -626,3 +631,3 @@ args: toolCall.function.arguments

});
var OpenAICompatibleChatChunkSchema = import_zod2.z.union([
var createOpenAICompatibleChatChunkSchema = (errorSchema) => import_zod2.z.union([
import_zod2.z.object({

@@ -657,3 +662,3 @@ id: import_zod2.z.string().nullish(),

}),
openaiCompatibleErrorDataSchema
errorSchema
]);

@@ -663,3 +668,3 @@

var import_provider5 = require("@ai-sdk/provider");
var import_provider_utils4 = require("@ai-sdk/provider-utils");
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var import_zod3 = require("zod");

@@ -753,8 +758,15 @@

var OpenAICompatibleCompletionLanguageModel = class {
// type inferred via constructor
constructor(modelId, settings, config) {
this.specificationVersion = "v1";
this.defaultObjectGenerationMode = void 0;
var _a;
this.modelId = modelId;
this.settings = settings;
this.config = config;
const errorStructure = (_a = config.errorStructure) != null ? _a : defaultOpenAICompatibleErrorStructure;
this.chunkSchema = createOpenAICompatibleCompletionChunkSchema(
errorStructure.errorSchema
);
this.failedResponseHandler = (0, import_provider_utils3.createJsonErrorResponseHandler)(errorStructure);
}

@@ -849,3 +861,3 @@ get provider() {

const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
url: this.config.url({

@@ -855,6 +867,6 @@ path: "/completions",

}),
headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
body: args,
failedResponseHandler: openaiCompatibleFailedResponseHandler,
successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
failedResponseHandler: this.failedResponseHandler,
successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
openaiCompatibleCompletionResponseSchema

@@ -887,3 +899,3 @@ ),

};
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
url: this.config.url({

@@ -893,7 +905,7 @@ path: "/completions",

}),
headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), options.headers),
headers: (0, import_provider_utils3.combineHeaders)(this.config.headers(), options.headers),
body,
failedResponseHandler: openaiCompatibleFailedResponseHandler,
successfulResponseHandler: (0, import_provider_utils4.createEventSourceResponseHandler)(
openaiCompatibleCompletionChunkSchema
failedResponseHandler: this.failedResponseHandler,
successfulResponseHandler: (0, import_provider_utils3.createEventSourceResponseHandler)(
this.chunkSchema
),

@@ -982,3 +994,3 @@ abortSignal: options.abortSignal,

});
var openaiCompatibleCompletionChunkSchema = import_zod3.z.union([
var createOpenAICompatibleCompletionChunkSchema = (errorSchema) => import_zod3.z.union([
import_zod3.z.object({

@@ -1000,3 +1012,3 @@ id: import_zod3.z.string().nullish(),

}),
openaiCompatibleErrorDataSchema
errorSchema
]);

@@ -1006,3 +1018,3 @@

var import_provider6 = require("@ai-sdk/provider");
var import_provider_utils5 = require("@ai-sdk/provider-utils");
var import_provider_utils4 = require("@ai-sdk/provider-utils");
var import_zod4 = require("zod");

@@ -1032,2 +1044,3 @@ var OpenAICompatibleEmbeddingModel = class {

}) {
var _a;
if (values.length > this.maxEmbeddingsPerCall) {

@@ -1041,3 +1054,3 @@ throw new import_provider6.TooManyEmbeddingValuesForCallError({

}
const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
url: this.config.url({

@@ -1047,3 +1060,3 @@ path: "/embeddings",

}),
headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), headers),
headers: (0, import_provider_utils4.combineHeaders)(this.config.headers(), headers),
body: {

@@ -1056,4 +1069,6 @@ model: this.modelId,

},
failedResponseHandler: openaiCompatibleFailedResponseHandler,
successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
failedResponseHandler: (0, import_provider_utils4.createJsonErrorResponseHandler)(
(_a = this.config.errorStructure) != null ? _a : defaultOpenAICompatibleErrorStructure
),
successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
openaiTextEmbeddingResponseSchema

@@ -1081,3 +1096,3 @@ ),

}
const baseURL = (0, import_provider_utils6.withoutTrailingSlash)(options.baseURL);
const baseURL = (0, import_provider_utils5.withoutTrailingSlash)(options.baseURL);
if (!options.name) {

@@ -1084,0 +1099,0 @@ throw new Error("Provider name is required");

package.json
{
"name": "@ai-sdk/openai-compatible",
"version": "0.0.6",
"version": "0.0.7",
"license": "Apache-2.0",

@@ -5,0 +5,0 @@ "sideEffects": false,

