@ai-sdk/openai
Comparing version 1.0.0 to 1.0.1
# @ai-sdk/openai
## 1.0.1
### Patch Changes
- 5e6419a: feat (provider/openai): support streaming for reasoning models
## 1.0.0
@@ -4,0 +10,0 @@
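For context, this patch means reasoning models now stream their responses directly instead of falling back to a simulated stream (the removed fallback is visible in the dist diffs below). A minimal usage sketch, assuming the AI SDK 4.x `streamText` API from the `ai` package and a placeholder reasoning model id:

```js
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

// 'o1-mini' stands in for any reasoning model id; before 1.0.1 the provider
// answered such calls by running doGenerate and replaying the result as a
// simulated stream, after 1.0.1 the request is streamed directly.
const result = streamText({
  model: openai('o1-mini'),
  prompt: 'Explain the Monty Hall problem step by step.',
});

for await (const delta of result.textStream) {
  process.stdout.write(delta);
}
```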
@@ -579,38 +579,2 @@ "use strict";
async doStream(options) {
if (isReasoningModel(this.modelId)) {
const result = await this.doGenerate(options);
const simulatedStream = new ReadableStream({
start(controller) {
controller.enqueue({ type: "response-metadata", ...result.response });
if (result.text) {
controller.enqueue({
type: "text-delta",
textDelta: result.text
});
}
if (result.toolCalls) {
for (const toolCall of result.toolCalls) {
controller.enqueue({
type: "tool-call",
...toolCall
});
}
}
controller.enqueue({
type: "finish",
finishReason: result.finishReason,
usage: result.usage,
logprobs: result.logprobs,
providerMetadata: result.providerMetadata
});
controller.close();
}
});
return {
stream: simulatedStream,
rawCall: result.rawCall,
rawResponse: result.rawResponse,
warnings: result.warnings
};
}
const { args, warnings } = this.getArgs(options);
@@ -652,3 +616,3 @@ const body = {
transform(chunk, controller) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
if (!chunk.success) {
@@ -677,8 +641,14 @@ finishReason = "error";
};
if (((_c = value.usage.prompt_tokens_details) == null ? void 0 : _c.cached_tokens) != null) {
providerMetadata = {
openai: {
cachedPromptTokens: (_d = value.usage.prompt_tokens_details) == null ? void 0 : _d.cached_tokens
}
};
const {
completion_tokens_details: completionTokenDetails,
prompt_tokens_details: promptTokenDetails
} = value.usage;
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null || (promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
providerMetadata = { openai: {} };
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
providerMetadata.openai.reasoningTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens;
}
if ((promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
}
}
@@ -731,3 +701,3 @@ }
}
if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
throw new import_provider3.InvalidResponseDataError({
@@ -743,7 +713,7 @@ data: toolCallDelta,
name: toolCallDelta.function.name,
arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
}
};
const toolCall2 = toolCalls[index];
if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
if (toolCall2.function.arguments.length > 0) {
@@ -762,3 +732,3 @@ controller.enqueue({
toolCallType: "function",
toolCallId: (_i = toolCall2.id) != null ? _i : (0, import_provider_utils3.generateId)(),
toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
toolName: toolCall2.function.name,
@@ -772,4 +742,4 @@ args: toolCall2.function.arguments
const toolCall = toolCalls[index];
if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
}
@@ -781,9 +751,9 @@ controller.enqueue({
toolName: toolCall.function.name,
argsTextDelta: (_m = toolCallDelta.function.arguments) != null ? _m : ""
argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
});
if (((_n = toolCall.function) == null ? void 0 : _n.name) != null && ((_o = toolCall.function) == null ? void 0 : _o.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
controller.enqueue({
type: "tool-call",
toolCallType: "function",
toolCallId: (_p = toolCall.id) != null ? _p : (0, import_provider_utils3.generateId)(),
toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
toolName: toolCall.function.name,
@@ -790,0 +760,0 @@ args: toolCall.function.arguments
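The removed hunk above is the old simulated-stream fallback for reasoning models; the added hunk replaces the cached-tokens-only provider metadata with one that also carries reasoning tokens. As a readability aid, the minified usage branch corresponds roughly to the helper below (the function name and wrapper are hypothetical; the property names come straight from the diff):

```js
// Hypothetical, un-minified rendering of the usage-details branch added in
// 1.0.1: builds provider metadata from OpenAI's usage detail fields.
function buildOpenAIProviderMetadata(usage) {
  const {
    completion_tokens_details: completionTokenDetails,
    prompt_tokens_details: promptTokenDetails,
  } = usage ?? {};

  if (
    completionTokenDetails?.reasoning_tokens == null &&
    promptTokenDetails?.cached_tokens == null
  ) {
    return undefined; // nothing to report
  }

  const openai = {};
  if (completionTokenDetails?.reasoning_tokens != null) {
    openai.reasoningTokens = completionTokenDetails.reasoning_tokens;
  }
  if (promptTokenDetails?.cached_tokens != null) {
    openai.cachedPromptTokens = promptTokenDetails.cached_tokens;
  }
  return { openai };
}

// Example input shaped like OpenAI's streaming usage payload.
console.log(
  buildOpenAIProviderMetadata({
    prompt_tokens_details: { cached_tokens: 128 },
    completion_tokens_details: { reasoning_tokens: 512 },
  }),
); // -> { openai: { reasoningTokens: 512, cachedPromptTokens: 128 } }
```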
@@ -577,38 +577,2 @@ "use strict";
async doStream(options) {
if (isReasoningModel(this.modelId)) {
const result = await this.doGenerate(options);
const simulatedStream = new ReadableStream({
start(controller) {
controller.enqueue({ type: "response-metadata", ...result.response });
if (result.text) {
controller.enqueue({
type: "text-delta",
textDelta: result.text
});
}
if (result.toolCalls) {
for (const toolCall of result.toolCalls) {
controller.enqueue({
type: "tool-call",
...toolCall
});
}
}
controller.enqueue({
type: "finish",
finishReason: result.finishReason,
usage: result.usage,
logprobs: result.logprobs,
providerMetadata: result.providerMetadata
});
controller.close();
}
});
return {
stream: simulatedStream,
rawCall: result.rawCall,
rawResponse: result.rawResponse,
warnings: result.warnings
};
}
const { args, warnings } = this.getArgs(options);
@@ -650,3 +614,3 @@ const body = {
transform(chunk, controller) {
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
if (!chunk.success) {
@@ -675,8 +639,14 @@ finishReason = "error";
};
if (((_c = value.usage.prompt_tokens_details) == null ? void 0 : _c.cached_tokens) != null) {
providerMetadata = {
openai: {
cachedPromptTokens: (_d = value.usage.prompt_tokens_details) == null ? void 0 : _d.cached_tokens
}
};
const {
completion_tokens_details: completionTokenDetails,
prompt_tokens_details: promptTokenDetails
} = value.usage;
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null || (promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
providerMetadata = { openai: {} };
if ((completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens) != null) {
providerMetadata.openai.reasoningTokens = completionTokenDetails == null ? void 0 : completionTokenDetails.reasoning_tokens;
}
if ((promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens) != null) {
providerMetadata.openai.cachedPromptTokens = promptTokenDetails == null ? void 0 : promptTokenDetails.cached_tokens;
}
}
@@ -729,3 +699,3 @@ }
}
if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
throw new import_provider3.InvalidResponseDataError({
@@ -741,7 +711,7 @@ data: toolCallDelta,
name: toolCallDelta.function.name,
arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
}
};
const toolCall2 = toolCalls[index];
if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
if (toolCall2.function.arguments.length > 0) {
@@ -760,3 +730,3 @@ controller.enqueue({
toolCallType: "function",
toolCallId: (_i = toolCall2.id) != null ? _i : (0, import_provider_utils3.generateId)(),
toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
toolName: toolCall2.function.name,
@@ -770,4 +740,4 @@ args: toolCall2.function.arguments
const toolCall = toolCalls[index];
if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
}
@@ -779,9 +749,9 @@ controller.enqueue({
toolName: toolCall.function.name,
argsTextDelta: (_m = toolCallDelta.function.arguments) != null ? _m : ""
argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
});
if (((_n = toolCall.function) == null ? void 0 : _n.name) != null && ((_o = toolCall.function) == null ? void 0 : _o.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
controller.enqueue({
type: "tool-call",
toolCallType: "function",
toolCallId: (_p = toolCall.id) != null ? _p : (0, import_provider_utils3.generateId)(),
toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
toolName: toolCall.function.name,
@@ -788,0 +758,0 @@ args: toolCall.function.arguments
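On the consumer side, the new `reasoningTokens` and `cachedPromptTokens` values surface through the provider metadata of a stream. A sketch, assuming the AI SDK 4.x surface where `streamText` results expose an `experimental_providerMetadata` promise (the metadata keys come from the diff; the model id and prompt are illustrative):

```js
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

const result = streamText({
  model: openai('o1-preview'), // placeholder reasoning model id
  prompt: 'Summarize the trade-offs of optimistic concurrency control.',
});

// Drain the stream; the metadata settles once the finish part arrives.
for await (const _ of result.textStream) {
  // consume text deltas
}

const metadata = await result.experimental_providerMetadata;
// Both fields are only set when OpenAI returns the matching usage details.
console.log('reasoning tokens:', metadata?.openai?.reasoningTokens);
console.log('cached prompt tokens:', metadata?.openai?.cachedPromptTokens);
```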
{
"name": "@ai-sdk/openai",
"version": "1.0.0",
"version": "1.0.1",
"license": "Apache-2.0",
@@ -5,0 +5,0 @@ "sideEffects": false,
License Policy Violation
License: this package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package.