@ai-sdk/openai-compatible - npm Package Compare versions

Comparing version 0.0.1 to 0.0.2

CHANGELOG.md
 # @ai-sdk/openai-compatible
+## 0.0.2
+### Patch Changes
+- fc18132: feat (ai/core): experimental output for generateText
 ## 0.0.1

@@ -4,0 +10,0 @@

dist/index.d.ts

@@ -104,6 +104,10 @@ import { ProviderV1, LanguageModelV1, EmbeddingModelV1, LanguageModelV1ObjectGenerationMode } from '@ai-sdk/provider';

     defaultObjectGenerationMode?: LanguageModelV1ObjectGenerationMode;
+    /**
+     * Whether the model supports structured outputs.
+     */
+    supportsStructuredOutputs?: boolean;
 };
 declare class OpenAICompatibleChatLanguageModel implements LanguageModelV1 {
     readonly specificationVersion = "v1";
-    readonly supportsStructuredOutputs = false;
+    readonly supportsStructuredOutputs: boolean;
     readonly modelId: OpenAICompatibleChatModelId;

@@ -110,0 +114,0 @@ readonly settings: OpenAICompatibleChatSettings;
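
The type-level change adds an optional supportsStructuredOutputs flag to the chat model's config and turns the class field from a hard-coded false into a value read from that config. A minimal sketch of how a provider implementation might opt in when constructing the model; only supportsStructuredOutputs and defaultObjectGenerationMode appear in this diff, so the remaining config fields (provider, url, headers) are assumptions for illustration:

import { OpenAICompatibleChatLanguageModel } from '@ai-sdk/openai-compatible';

// Hypothetical provider setup; the constructor shape (modelId, settings, config)
// matches the compiled code below, but the extra config fields are assumed.
const model = new OpenAICompatibleChatLanguageModel(
  'example-model-id',
  {}, // OpenAICompatibleChatSettings
  {
    provider: 'example.chat',                                // assumed field
    url: ({ path }) => `https://api.example.com/v1${path}`,  // assumed field
    headers: () => ({ Authorization: `Bearer ${process.env.EXAMPLE_API_KEY}` }), // assumed field
    defaultObjectGenerationMode: 'json',
    supportsStructuredOutputs: true, // new in 0.0.2; defaults to false when omitted
  },
);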

dist/index.js

@@ -167,3 +167,4 @@ "use strict";

 function prepareTools({
-  mode
+  mode,
+  structuredOutputs
 }) {

@@ -187,3 +188,4 @@ var _a;

       description: tool.description,
-      parameters: tool.parameters
+      parameters: tool.parameters,
+      strict: structuredOutputs ? true : void 0
     }
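
When structuredOutputs is passed in, each function tool sent to the API now carries strict: true; when it is off, the value is void 0 and the field disappears during JSON serialization. A sketch of one resulting entry in the request's tools array, assuming the standard OpenAI-style tool wrapper and an invented tool definition:

// Hypothetical tool entry as it would appear in the serialized request body
// when supportsStructuredOutputs is true (name, description, and schema are made up).
const toolEntry = {
  type: 'function',
  function: {
    name: 'getWeather',
    description: 'Look up the current weather for a city',
    parameters: {
      type: 'object',
      properties: { city: { type: 'string' } },
      required: ['city'],
    },
    strict: true, // new in 0.0.2; omitted entirely when structured outputs are off
  },
};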

@@ -243,6 +245,7 @@ });

     this.specificationVersion = "v1";
-    this.supportsStructuredOutputs = false;
+    var _a;
     this.modelId = modelId;
     this.settings = settings;
     this.config = config;
+    this.supportsStructuredOutputs = (_a = config.supportsStructuredOutputs) != null ? _a : false;
   }

@@ -266,5 +269,5 @@ get defaultObjectGenerationMode() {

     responseFormat,
-    seed,
-    stream
+    seed
   }) {
+    var _a, _b;
     const type = mode.type;

@@ -278,7 +281,7 @@ const warnings = [];

     }
-    if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
+    if ((responseFormat == null ? void 0 : responseFormat.type) === "json" && responseFormat.schema != null && !this.supportsStructuredOutputs) {
       warnings.push({
         type: "unsupported-setting",
         setting: "responseFormat",
-        details: "JSON response format schema is not supported"
+        details: "JSON response format schema is only supported with structuredOutputs"
       });

@@ -297,6 +300,13 @@ }

       presence_penalty: presencePenalty,
+      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? this.supportsStructuredOutputs === true && responseFormat.schema != null ? {
+        type: "json_schema",
+        json_schema: {
+          schema: responseFormat.schema,
+          strict: true,
+          name: (_a = responseFormat.name) != null ? _a : "response",
+          description: responseFormat.description
+        }
+      } : { type: "json_object" } : void 0,
       stop: stopSequences,
       seed,
-      // response format:
-      response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0,
       // messages:
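
For a caller-supplied responseFormat of type 'json' with a schema, the request's response_format is now upgraded to an OpenAI-style json_schema payload when the model declares structured-output support, and falls back to plain json_object otherwise. A sketch of the two shapes produced by the code above (the schema itself is invented):

// With supportsStructuredOutputs === true and a schema present:
const structured = {
  type: 'json_schema',
  json_schema: {
    schema: { type: 'object', properties: { answer: { type: 'string' } } }, // hypothetical schema
    strict: true,
    name: 'response', // "response" is the fallback when responseFormat.name is not set
    description: undefined, // copied from responseFormat.description when provided
  },
};

// Without structured-output support (or without a schema), behavior is unchanged:
const plainJson = { type: 'json_object' };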

@@ -307,9 +317,8 @@ messages: convertToOpenAICompatibleChatMessages(prompt)

case "regular": {
const { tools, tool_choice, toolWarnings } = prepareTools({ mode });
const { tools, tool_choice, toolWarnings } = prepareTools({
mode,
structuredOutputs: this.supportsStructuredOutputs
});
return {
args: {
...baseArgs,
tools,
tool_choice
},
args: { ...baseArgs, tools, tool_choice },
warnings: [...warnings, ...toolWarnings]

@@ -322,3 +331,11 @@ };

             ...baseArgs,
-            response_format: { type: "json_object" }
+            response_format: this.supportsStructuredOutputs === true && mode.schema != null ? {
+              type: "json_schema",
+              json_schema: {
+                schema: mode.schema,
+                strict: true,
+                name: (_b = mode.name) != null ? _b : "response",
+                description: mode.description
+              }
+            } : { type: "json_object" }
           },
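
The object-json generation mode gets the same upgrade: with structured outputs enabled and mode.schema present, the request asks for json_schema with strict: true instead of plain json_object. A hedged usage sketch, assuming the model instance from the earlier construction example and the standard generateObject helper from the ai package driving object-json mode:

import { generateObject } from 'ai';
import { z } from 'zod';

async function main() {
  // Assumes `model` was created with supportsStructuredOutputs: true and
  // defaultObjectGenerationMode: 'json' (see the construction sketch above).
  const { object } = await generateObject({
    model,
    schema: z.object({ city: z.string(), temperatureC: z.number() }), // hypothetical schema
    prompt: 'What is the weather in Berlin?',
  });
  // Under the hood, the request now carries
  // response_format: { type: "json_schema", json_schema: { ..., strict: true } }.
  console.log(object);
}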

@@ -342,3 +359,4 @@ warnings

       description: mode.tool.description,
-      parameters: mode.tool.parameters
+      parameters: mode.tool.parameters,
+      strict: this.supportsStructuredOutputs ? true : void 0
     }

@@ -359,3 +377,3 @@ }

     var _a, _b, _c, _d, _e, _f;
-    const { args, warnings } = this.getArgs({ ...options, stream: false });
+    const { args, warnings } = this.getArgs({ ...options });
     const body = JSON.stringify(args);

@@ -402,3 +420,3 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

   async doStream(options) {
-    const { args, warnings } = this.getArgs({ ...options, stream: true });
+    const { args, warnings } = this.getArgs({ ...options });
     const body = JSON.stringify({ ...args, stream: true });

@@ -405,0 +423,0 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
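
Finally, the stream flag no longer flows through getArgs: doGenerate serializes the prepared args as-is, and doStream attaches the flag only when building the request body. A minimal sketch of the difference, with args standing in for the prepared request arguments:

// Hypothetical prepared args; the real values come from getArgs(options).
const args = { model: 'example-model-id', messages: [] as unknown[] };

// doGenerate: the non-streaming body is just the prepared args.
const generateBody = JSON.stringify(args);

// doStream: stream: true is added at serialization time rather than inside getArgs.
const streamBody = JSON.stringify({ ...args, stream: true });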

package.json

 {
   "name": "@ai-sdk/openai-compatible",
-  "version": "0.0.1",
+  "version": "0.0.2",
   "license": "Apache-2.0",

@@ -5,0 +5,0 @@ "sideEffects": false,

