@ai-sdk/openai - npm Package Compare versions

Comparing version 0.0.37 to 0.0.38


./dist/index.js

@@ -33,3 +33,3 @@ "use strict";

// src/openai-chat-language-model.ts
var import_provider = require("@ai-sdk/provider");
var import_provider2 = require("@ai-sdk/provider");
var import_provider_utils3 = require("@ai-sdk/provider-utils");

@@ -39,4 +39,8 @@ var import_zod2 = require("zod");

// src/convert-to-openai-chat-messages.ts
var import_provider = require("@ai-sdk/provider");
var import_provider_utils = require("@ai-sdk/provider-utils");
function convertToOpenAIChatMessages(prompt) {
function convertToOpenAIChatMessages({
prompt,
useLegacyFunctionCalling = false
}) {
const messages = [];

@@ -101,7 +105,20 @@ for (const { role, content } of prompt) {

}
messages.push({
role: "assistant",
content: text,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0
});
if (useLegacyFunctionCalling) {
if (toolCalls.length > 1) {
throw new import_provider.UnsupportedFunctionalityError({
functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
});
}
messages.push({
role: "assistant",
content: text,
function_call: toolCalls.length > 0 ? toolCalls[0].function : void 0
});
} else {
messages.push({
role: "assistant",
content: text,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0
});
}
break;

@@ -111,7 +128,15 @@ }

for (const toolResponse of content) {
messages.push({
role: "tool",
tool_call_id: toolResponse.toolCallId,
content: JSON.stringify(toolResponse.result)
});
if (useLegacyFunctionCalling) {
messages.push({
role: "function",
name: toolResponse.toolName,
content: JSON.stringify(toolResponse.result)
});
} else {
messages.push({
role: "tool",
tool_call_id: toolResponse.toolCallId,
content: JSON.stringify(toolResponse.result)
});
}
}

@@ -193,7 +218,30 @@ break;

topP,
topK,
frequencyPenalty,
presencePenalty,
stopSequences,
responseFormat,
seed
}) {
const type = mode.type;
const warnings = [];
if (topK != null) {
warnings.push({
type: "unsupported-setting",
setting: "topK"
});
}
if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
warnings.push({
type: "unsupported-setting",
setting: "responseFormat",
details: "JSON response format schema is not supported"
});
}
const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
throw new import_provider2.UnsupportedFunctionalityError({
functionality: "useLegacyFunctionCalling with parallelToolCalls"
});
}
const baseArgs = {

@@ -214,14 +262,29 @@ // model id:

presence_penalty: presencePenalty,
stop: stopSequences,
seed,
// response format:
response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0,
// messages:
messages: convertToOpenAIChatMessages(prompt)
messages: convertToOpenAIChatMessages({
prompt,
useLegacyFunctionCalling
})
};
switch (type) {
case "regular": {
return { ...baseArgs, ...prepareToolsAndToolChoice(mode) };
return {
args: {
...baseArgs,
...prepareToolsAndToolChoice({ mode, useLegacyFunctionCalling })
},
warnings
};
}
case "object-json": {
return {
...baseArgs,
response_format: { type: "json_object" }
args: {
...baseArgs,
response_format: { type: "json_object" }
},
warnings
};

@@ -231,8 +294,9 @@ }

return {
...baseArgs,
tool_choice: { type: "function", function: { name: mode.tool.name } },
tools: [
{
type: "function",
function: {
args: useLegacyFunctionCalling ? {
...baseArgs,
function_call: {
name: mode.tool.name
},
functions: [
{
name: mode.tool.name,

@@ -242,11 +306,23 @@ description: mode.tool.description,

}
}
]
]
} : {
...baseArgs,
tool_choice: {
type: "function",
function: { name: mode.tool.name }
},
tools: [
{
type: "function",
function: {
name: mode.tool.name,
description: mode.tool.description,
parameters: mode.tool.parameters
}
}
]
},
warnings
};
}
case "object-grammar": {
throw new import_provider.UnsupportedFunctionalityError({
functionality: "object-grammar mode"
});
}
default: {

@@ -260,3 +336,3 @@ const _exhaustiveCheck = type;

var _a, _b;
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

@@ -280,3 +356,10 @@ url: this.config.url({

text: (_a = choice.message.content) != null ? _a : void 0,
toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
{
toolCallType: "function",
toolCallId: (0, import_provider_utils3.generateId)(),
toolName: choice.message.function_call.name,
args: choice.message.function_call.arguments
}
] : (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
var _a2;

@@ -297,3 +380,3 @@ return {

rawResponse: { headers: responseHeaders },
warnings: [],
warnings,
logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs)

@@ -303,3 +386,3 @@ };

async doStream(options) {
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

@@ -332,2 +415,3 @@ url: this.config.url({

let logprobs;
const { useLegacyFunctionCalling } = this.settings;
return {

@@ -377,8 +461,16 @@ stream: response.pipeThrough(

}
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
{
type: "function",
id: (0, import_provider_utils3.generateId)(),
function: delta.function_call,
index: 0
}
] : delta.tool_calls;
if (mappedToolCalls != null) {
for (const toolCallDelta of mappedToolCalls) {
const index = toolCallDelta.index;
if (toolCalls[index] == null) {
if (toolCallDelta.type !== "function") {
throw new import_provider.InvalidResponseDataError({
throw new import_provider2.InvalidResponseDataError({
data: toolCallDelta,

@@ -389,3 +481,3 @@ message: `Expected 'function' type.`

if (toolCallDelta.id == null) {
throw new import_provider.InvalidResponseDataError({
throw new import_provider2.InvalidResponseDataError({
data: toolCallDelta,

@@ -396,3 +488,3 @@ message: `Expected 'id' to be a string.`

if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
throw new import_provider.InvalidResponseDataError({
throw new import_provider2.InvalidResponseDataError({
data: toolCallDelta,

@@ -464,3 +556,3 @@ message: `Expected 'function.name' to be a string.`

rawResponse: { headers: responseHeaders },
warnings: []
warnings
};

@@ -474,6 +566,10 @@ }

role: import_zod2.z.literal("assistant"),
content: import_zod2.z.string().nullable().optional(),
content: import_zod2.z.string().nullish(),
function_call: import_zod2.z.object({
arguments: import_zod2.z.string(),
name: import_zod2.z.string()
}).nullish(),
tool_calls: import_zod2.z.array(
import_zod2.z.object({
id: import_zod2.z.string().optional().nullable(),
id: import_zod2.z.string().nullish(),
type: import_zod2.z.literal("function"),

@@ -485,3 +581,3 @@ function: import_zod2.z.object({

})
).optional()
).nullish()
}),

@@ -502,4 +598,4 @@ index: import_zod2.z.number(),

).nullable()
}).nullable().optional(),
finish_reason: import_zod2.z.string().optional().nullable()
}).nullish(),
finish_reason: import_zod2.z.string().nullish()
})

@@ -519,2 +615,6 @@ ),

content: import_zod2.z.string().nullish(),
function_call: import_zod2.z.object({
name: import_zod2.z.string().optional(),
arguments: import_zod2.z.string().optional()
}).nullish(),
tool_calls: import_zod2.z.array(

@@ -557,3 +657,6 @@ import_zod2.z.object({

]);
function prepareToolsAndToolChoice(mode) {
function prepareToolsAndToolChoice({
mode,
useLegacyFunctionCalling = false
}) {
var _a;

@@ -564,2 +667,32 @@ const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;

}
const toolChoice = mode.toolChoice;
if (useLegacyFunctionCalling) {
const mappedFunctions = tools.map((tool) => ({
name: tool.name,
description: tool.description,
parameters: tool.parameters
}));
if (toolChoice == null) {
return { functions: mappedFunctions, function_call: void 0 };
}
const type2 = toolChoice.type;
switch (type2) {
case "auto":
case "none":
case void 0:
return {
functions: mappedFunctions,
function_call: void 0
};
case "required":
throw new import_provider2.UnsupportedFunctionalityError({
functionality: "useLegacyFunctionCalling and toolChoice: required"
});
default:
return {
functions: mappedFunctions,
function_call: { name: toolChoice.toolName }
};
}
}
const mappedTools = tools.map((tool) => ({

@@ -573,3 +706,2 @@ type: "function",

}));
const toolChoice = mode.toolChoice;
if (toolChoice == null) {

@@ -602,3 +734,3 @@ return { tools: mappedTools, tool_choice: void 0 };

// src/openai-completion-language-model.ts
var import_provider3 = require("@ai-sdk/provider");
var import_provider4 = require("@ai-sdk/provider");
var import_provider_utils4 = require("@ai-sdk/provider-utils");

@@ -608,3 +740,3 @@ var import_zod3 = require("zod");

// src/convert-to-openai-completion-prompt.ts
var import_provider2 = require("@ai-sdk/provider");
var import_provider3 = require("@ai-sdk/provider");
function convertToOpenAICompletionPrompt({

@@ -629,3 +761,3 @@ prompt,

case "system": {
throw new import_provider2.InvalidPromptError({
throw new import_provider3.InvalidPromptError({
message: "Unexpected system message in prompt: ${content}",

@@ -642,3 +774,3 @@ prompt

case "image": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "images"

@@ -662,3 +794,3 @@ });

case "tool-call": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "tool-call messages"

@@ -676,3 +808,3 @@ });

case "tool": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "tool messages"

@@ -729,4 +861,7 @@ });

topP,
topK,
frequencyPenalty,
presencePenalty,
stopSequences: userStopSequences,
responseFormat,
seed

@@ -736,3 +871,18 @@ }) {

const type = mode.type;
const warnings = [];
if (topK != null) {
warnings.push({
type: "unsupported-setting",
setting: "topK"
});
}
if (responseFormat != null && responseFormat.type !== "text") {
warnings.push({
type: "unsupported-setting",
setting: "responseFormat",
details: "JSON response format is not supported."
});
}
const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
const baseArgs = {

@@ -757,3 +907,3 @@ // model id:

// stop sequences:
stop: stopSequences
stop: stop.length > 0 ? stop : void 0
};

@@ -763,3 +913,3 @@ switch (type) {

if ((_a = mode.tools) == null ? void 0 : _a.length) {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "tools"

@@ -769,10 +919,10 @@ });

if (mode.toolChoice) {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "toolChoice"
});
}
return baseArgs;
return { args: baseArgs, warnings };
}
case "object-json": {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "object-json mode"

@@ -782,11 +932,6 @@ });

case "object-tool": {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "object-tool mode"
});
}
case "object-grammar": {
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "object-grammar mode"
});
}
default: {

@@ -799,3 +944,3 @@ const _exhaustiveCheck = type;

async doGenerate(options) {
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({

@@ -827,7 +972,7 @@ url: this.config.url({

rawResponse: { headers: responseHeaders },
warnings: []
warnings
};
}
async doStream(options) {
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({

@@ -840,3 +985,3 @@ url: this.config.url({

body: {
...this.getArgs(options),
...args,
stream: true,

@@ -912,3 +1057,3 @@ // only include stream_options when in strict compatibility mode:

rawResponse: { headers: responseHeaders },
warnings: []
warnings
};

@@ -1007,3 +1152,3 @@ }

// src/openai-embedding-model.ts
var import_provider4 = require("@ai-sdk/provider");
var import_provider5 = require("@ai-sdk/provider");
var import_provider_utils6 = require("@ai-sdk/provider-utils");

@@ -1035,3 +1180,3 @@ var import_zod4 = require("zod");

if (values.length > this.maxEmbeddingsPerCall) {
throw new import_provider4.TooManyEmbeddingValuesForCallError({
throw new import_provider5.TooManyEmbeddingValuesForCallError({
provider: this.provider,

@@ -1038,0 +1183,0 @@ modelId: this.modelId,
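The practical effect of the legacy path added above is a different request payload. As an illustrative sketch (not part of the diff; the tool name, call id, and arguments are made up): legacy mode sends `functions` plus a `function_call` and encodes tool results as `role: "function"` messages, while the default path keeps `tools`, `tool_choice`, `tool_calls`, and `role: "tool"` messages.

// Illustrative request bodies implied by the converter changes above.
// Tool name, call id, and arguments are hypothetical examples.
const legacyBody = {
  model: "gpt-3.5-turbo",
  messages: [
    { role: "user", content: "Weather in Berlin?" },
    {
      role: "assistant",
      content: null,
      function_call: { name: "getWeather", arguments: '{"city":"Berlin"}' },
    },
    { role: "function", name: "getWeather", content: '{"tempC":21}' },
  ],
  functions: [
    { name: "getWeather", description: "Look up weather", parameters: { type: "object" } },
  ],
};

const defaultBody = {
  model: "gpt-3.5-turbo",
  messages: [
    { role: "user", content: "Weather in Berlin?" },
    {
      role: "assistant",
      content: null,
      tool_calls: [
        {
          id: "call_1",
          type: "function",
          function: { name: "getWeather", arguments: '{"city":"Berlin"}' },
        },
      ],
    },
    { role: "tool", tool_call_id: "call_1", content: '{"tempC":21}' },
  ],
  tools: [
    {
      type: "function",
      function: { name: "getWeather", description: "Look up weather", parameters: { type: "object" } },
    },
  ],
};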

@@ -37,2 +37,14 @@ import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';

/**
Whether to use legacy function calling. Defaults to false.
Required by some open source inference engines which do not support the `tools` API. May also
provide a workaround for `parallelToolCalls` resulting in the provider buffering tool calls,
which causes `streamObject` to be non-streaming.
Prefer setting `parallelToolCalls: false` over this option.
@deprecated this API is supported but deprecated by OpenAI.
*/
useLegacyFunctionCalling?: boolean;
/**
A unique identifier representing your end-user, which can help OpenAI to

@@ -39,0 +51,0 @@ monitor and detect abuse. Learn more.
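The setting documented above is opted into per chat model instance. A minimal usage sketch, assuming the provider factory API shipped in this release (`createOpenAI` and `provider.chat(modelId, settings)`); the model id and API key handling are placeholders:

// Minimal sketch: enabling the new 0.0.38 setting on a chat model.
import { createOpenAI } from "@ai-sdk/openai";

const openai = createOpenAI({
  apiKey: process.env.OPENAI_API_KEY, // placeholder key handling
});

const model = openai.chat("gpt-3.5-turbo", {
  // Maps tool calls onto the deprecated functions / function_call fields,
  // for OpenAI-compatible backends that lack the tools API.
  useLegacyFunctionCalling: true,
});

Per the diff, combining this flag with `parallelToolCalls: true` throws an UnsupportedFunctionalityError, and `toolChoice: 'required'` is also rejected in legacy mode, so both should stay off when the flag is set.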

@@ -30,3 +30,3 @@ "use strict";

// src/openai-chat-language-model.ts
var import_provider = require("@ai-sdk/provider");
var import_provider2 = require("@ai-sdk/provider");
var import_provider_utils3 = require("@ai-sdk/provider-utils");

@@ -36,4 +36,8 @@ var import_zod2 = require("zod");

// src/convert-to-openai-chat-messages.ts
var import_provider = require("@ai-sdk/provider");
var import_provider_utils = require("@ai-sdk/provider-utils");
function convertToOpenAIChatMessages(prompt) {
function convertToOpenAIChatMessages({
prompt,
useLegacyFunctionCalling = false
}) {
const messages = [];

@@ -98,7 +102,20 @@ for (const { role, content } of prompt) {

}
messages.push({
role: "assistant",
content: text,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0
});
if (useLegacyFunctionCalling) {
if (toolCalls.length > 1) {
throw new import_provider.UnsupportedFunctionalityError({
functionality: "useLegacyFunctionCalling with multiple tool calls in one message"
});
}
messages.push({
role: "assistant",
content: text,
function_call: toolCalls.length > 0 ? toolCalls[0].function : void 0
});
} else {
messages.push({
role: "assistant",
content: text,
tool_calls: toolCalls.length > 0 ? toolCalls : void 0
});
}
break;

@@ -108,7 +125,15 @@ }

for (const toolResponse of content) {
messages.push({
role: "tool",
tool_call_id: toolResponse.toolCallId,
content: JSON.stringify(toolResponse.result)
});
if (useLegacyFunctionCalling) {
messages.push({
role: "function",
name: toolResponse.toolName,
content: JSON.stringify(toolResponse.result)
});
} else {
messages.push({
role: "tool",
tool_call_id: toolResponse.toolCallId,
content: JSON.stringify(toolResponse.result)
});
}
}

@@ -190,7 +215,30 @@ break;

topP,
topK,
frequencyPenalty,
presencePenalty,
stopSequences,
responseFormat,
seed
}) {
const type = mode.type;
const warnings = [];
if (topK != null) {
warnings.push({
type: "unsupported-setting",
setting: "topK"
});
}
if (responseFormat != null && responseFormat.type === "json" && responseFormat.schema != null) {
warnings.push({
type: "unsupported-setting",
setting: "responseFormat",
details: "JSON response format schema is not supported"
});
}
const useLegacyFunctionCalling = this.settings.useLegacyFunctionCalling;
if (useLegacyFunctionCalling && this.settings.parallelToolCalls === true) {
throw new import_provider2.UnsupportedFunctionalityError({
functionality: "useLegacyFunctionCalling with parallelToolCalls"
});
}
const baseArgs = {

@@ -211,14 +259,29 @@ // model id:

presence_penalty: presencePenalty,
stop: stopSequences,
seed,
// response format:
response_format: (responseFormat == null ? void 0 : responseFormat.type) === "json" ? { type: "json_object" } : void 0,
// messages:
messages: convertToOpenAIChatMessages(prompt)
messages: convertToOpenAIChatMessages({
prompt,
useLegacyFunctionCalling
})
};
switch (type) {
case "regular": {
return { ...baseArgs, ...prepareToolsAndToolChoice(mode) };
return {
args: {
...baseArgs,
...prepareToolsAndToolChoice({ mode, useLegacyFunctionCalling })
},
warnings
};
}
case "object-json": {
return {
...baseArgs,
response_format: { type: "json_object" }
args: {
...baseArgs,
response_format: { type: "json_object" }
},
warnings
};

@@ -228,8 +291,9 @@ }

return {
...baseArgs,
tool_choice: { type: "function", function: { name: mode.tool.name } },
tools: [
{
type: "function",
function: {
args: useLegacyFunctionCalling ? {
...baseArgs,
function_call: {
name: mode.tool.name
},
functions: [
{
name: mode.tool.name,

@@ -239,11 +303,23 @@ description: mode.tool.description,

}
}
]
]
} : {
...baseArgs,
tool_choice: {
type: "function",
function: { name: mode.tool.name }
},
tools: [
{
type: "function",
function: {
name: mode.tool.name,
description: mode.tool.description,
parameters: mode.tool.parameters
}
}
]
},
warnings
};
}
case "object-grammar": {
throw new import_provider.UnsupportedFunctionalityError({
functionality: "object-grammar mode"
});
}
default: {

@@ -257,3 +333,3 @@ const _exhaustiveCheck = type;

var _a, _b;
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

@@ -277,3 +353,10 @@ url: this.config.url({

text: (_a = choice.message.content) != null ? _a : void 0,
toolCalls: (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [
{
toolCallType: "function",
toolCallId: (0, import_provider_utils3.generateId)(),
toolName: choice.message.function_call.name,
args: choice.message.function_call.arguments
}
] : (_b = choice.message.tool_calls) == null ? void 0 : _b.map((toolCall) => {
var _a2;

@@ -294,3 +377,3 @@ return {

rawResponse: { headers: responseHeaders },
warnings: [],
warnings,
logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs)

@@ -300,3 +383,3 @@ };

async doStream(options) {
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

@@ -329,2 +412,3 @@ url: this.config.url({

let logprobs;
const { useLegacyFunctionCalling } = this.settings;
return {

@@ -374,8 +458,16 @@ stream: response.pipeThrough(

}
if (delta.tool_calls != null) {
for (const toolCallDelta of delta.tool_calls) {
const mappedToolCalls = useLegacyFunctionCalling && delta.function_call != null ? [
{
type: "function",
id: (0, import_provider_utils3.generateId)(),
function: delta.function_call,
index: 0
}
] : delta.tool_calls;
if (mappedToolCalls != null) {
for (const toolCallDelta of mappedToolCalls) {
const index = toolCallDelta.index;
if (toolCalls[index] == null) {
if (toolCallDelta.type !== "function") {
throw new import_provider.InvalidResponseDataError({
throw new import_provider2.InvalidResponseDataError({
data: toolCallDelta,

@@ -386,3 +478,3 @@ message: `Expected 'function' type.`

if (toolCallDelta.id == null) {
throw new import_provider.InvalidResponseDataError({
throw new import_provider2.InvalidResponseDataError({
data: toolCallDelta,

@@ -393,3 +485,3 @@ message: `Expected 'id' to be a string.`

if (((_a = toolCallDelta.function) == null ? void 0 : _a.name) == null) {
throw new import_provider.InvalidResponseDataError({
throw new import_provider2.InvalidResponseDataError({
data: toolCallDelta,

@@ -461,3 +553,3 @@ message: `Expected 'function.name' to be a string.`

rawResponse: { headers: responseHeaders },
warnings: []
warnings
};

@@ -471,6 +563,10 @@ }

role: import_zod2.z.literal("assistant"),
content: import_zod2.z.string().nullable().optional(),
content: import_zod2.z.string().nullish(),
function_call: import_zod2.z.object({
arguments: import_zod2.z.string(),
name: import_zod2.z.string()
}).nullish(),
tool_calls: import_zod2.z.array(
import_zod2.z.object({
id: import_zod2.z.string().optional().nullable(),
id: import_zod2.z.string().nullish(),
type: import_zod2.z.literal("function"),

@@ -482,3 +578,3 @@ function: import_zod2.z.object({

})
).optional()
).nullish()
}),

@@ -499,4 +595,4 @@ index: import_zod2.z.number(),

).nullable()
}).nullable().optional(),
finish_reason: import_zod2.z.string().optional().nullable()
}).nullish(),
finish_reason: import_zod2.z.string().nullish()
})

@@ -516,2 +612,6 @@ ),

content: import_zod2.z.string().nullish(),
function_call: import_zod2.z.object({
name: import_zod2.z.string().optional(),
arguments: import_zod2.z.string().optional()
}).nullish(),
tool_calls: import_zod2.z.array(

@@ -554,3 +654,6 @@ import_zod2.z.object({

]);
function prepareToolsAndToolChoice(mode) {
function prepareToolsAndToolChoice({
mode,
useLegacyFunctionCalling = false
}) {
var _a;

@@ -561,2 +664,32 @@ const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;

}
const toolChoice = mode.toolChoice;
if (useLegacyFunctionCalling) {
const mappedFunctions = tools.map((tool) => ({
name: tool.name,
description: tool.description,
parameters: tool.parameters
}));
if (toolChoice == null) {
return { functions: mappedFunctions, function_call: void 0 };
}
const type2 = toolChoice.type;
switch (type2) {
case "auto":
case "none":
case void 0:
return {
functions: mappedFunctions,
function_call: void 0
};
case "required":
throw new import_provider2.UnsupportedFunctionalityError({
functionality: "useLegacyFunctionCalling and toolChoice: required"
});
default:
return {
functions: mappedFunctions,
function_call: { name: toolChoice.toolName }
};
}
}
const mappedTools = tools.map((tool) => ({

@@ -570,3 +703,2 @@ type: "function",

}));
const toolChoice = mode.toolChoice;
if (toolChoice == null) {

@@ -599,3 +731,3 @@ return { tools: mappedTools, tool_choice: void 0 };

// src/openai-completion-language-model.ts
var import_provider3 = require("@ai-sdk/provider");
var import_provider4 = require("@ai-sdk/provider");
var import_provider_utils4 = require("@ai-sdk/provider-utils");

@@ -605,3 +737,3 @@ var import_zod3 = require("zod");

// src/convert-to-openai-completion-prompt.ts
var import_provider2 = require("@ai-sdk/provider");
var import_provider3 = require("@ai-sdk/provider");
function convertToOpenAICompletionPrompt({

@@ -626,3 +758,3 @@ prompt,

case "system": {
throw new import_provider2.InvalidPromptError({
throw new import_provider3.InvalidPromptError({
message: "Unexpected system message in prompt: ${content}",

@@ -639,3 +771,3 @@ prompt

case "image": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "images"

@@ -659,3 +791,3 @@ });

case "tool-call": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "tool-call messages"

@@ -673,3 +805,3 @@ });

case "tool": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "tool messages"

@@ -726,4 +858,7 @@ });

topP,
topK,
frequencyPenalty,
presencePenalty,
stopSequences: userStopSequences,
responseFormat,
seed

@@ -733,3 +868,18 @@ }) {

const type = mode.type;
const warnings = [];
if (topK != null) {
warnings.push({
type: "unsupported-setting",
setting: "topK"
});
}
if (responseFormat != null && responseFormat.type !== "text") {
warnings.push({
type: "unsupported-setting",
setting: "responseFormat",
details: "JSON response format is not supported."
});
}
const { prompt: completionPrompt, stopSequences } = convertToOpenAICompletionPrompt({ prompt, inputFormat });
const stop = [...stopSequences != null ? stopSequences : [], ...userStopSequences != null ? userStopSequences : []];
const baseArgs = {

@@ -754,3 +904,3 @@ // model id:

// stop sequences:
stop: stopSequences
stop: stop.length > 0 ? stop : void 0
};

@@ -760,3 +910,3 @@ switch (type) {

if ((_a = mode.tools) == null ? void 0 : _a.length) {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "tools"

@@ -766,10 +916,10 @@ });

if (mode.toolChoice) {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "toolChoice"
});
}
return baseArgs;
return { args: baseArgs, warnings };
}
case "object-json": {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "object-json mode"

@@ -779,11 +929,6 @@ });

case "object-tool": {
throw new import_provider3.UnsupportedFunctionalityError({
throw new import_provider4.UnsupportedFunctionalityError({
functionality: "object-tool mode"
});
}
case "object-grammar": {
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "object-grammar mode"
});
}
default: {

@@ -796,3 +941,3 @@ const _exhaustiveCheck = type;

async doGenerate(options) {
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({

@@ -824,7 +969,7 @@ url: this.config.url({

rawResponse: { headers: responseHeaders },
warnings: []
warnings
};
}
async doStream(options) {
const args = this.getArgs(options);
const { args, warnings } = this.getArgs(options);
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({

@@ -837,3 +982,3 @@ url: this.config.url({

body: {
...this.getArgs(options),
...args,
stream: true,

@@ -909,3 +1054,3 @@ // only include stream_options when in strict compatibility mode:

rawResponse: { headers: responseHeaders },
warnings: []
warnings
};

@@ -954,3 +1099,3 @@ }

// src/openai-embedding-model.ts
var import_provider4 = require("@ai-sdk/provider");
var import_provider5 = require("@ai-sdk/provider");
var import_provider_utils5 = require("@ai-sdk/provider-utils");

@@ -982,3 +1127,3 @@ var import_zod4 = require("zod");

if (values.length > this.maxEmbeddingsPerCall) {
throw new import_provider4.TooManyEmbeddingValuesForCallError({
throw new import_provider5.TooManyEmbeddingValuesForCallError({
provider: this.provider,

@@ -985,0 +1130,0 @@ modelId: this.modelId,

package.json

{
"name": "@ai-sdk/openai",
"version": "0.0.37",
"version": "0.0.38",
"license": "Apache-2.0",

@@ -28,4 +28,4 @@ "sideEffects": false,

"dependencies": {
"@ai-sdk/provider": "0.0.12",
"@ai-sdk/provider-utils": "1.0.2"
"@ai-sdk/provider": "0.0.13",
"@ai-sdk/provider-utils": "1.0.3"
},

@@ -32,0 +32,0 @@ "devDependencies": {

