@ai-sdk/openai
Comparing version 0.0.61 to 0.0.62
 # @ai-sdk/openai
+## 0.0.62
+### Patch Changes
+- 7efa867: feat (provider/openai): simulated streaming for reasoning models
 ## 0.0.61
@@ -4,0 +10,0 @@
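
The change in this release is simulated streaming for OpenAI reasoning models (o1-*), which at the time did not support streamed responses. Below is a minimal usage sketch, assuming the ai package's streamText helper and the o1-mini model id; the caller-side API is unchanged, but the text arrives non-incrementally once generation finishes.

// Hypothetical usage sketch built on the public AI SDK API; adjust the model id as needed.
import { streamText } from "ai";
import { openai } from "@ai-sdk/openai";

async function main() {
  // With 0.0.62, doStream on an o1-* model runs a full doGenerate under the
  // hood and replays the result as a simulated stream.
  const result = await streamText({
    model: openai("o1-mini"),
    prompt: "Explain the Monty Hall problem step by step.",
  });

  for await (const delta of result.textStream) {
    // For reasoning models this loop typically yields one large chunk,
    // emitted only after the whole completion has been generated.
    process.stdout.write(delta);
  }
}

main();
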
@@ -302,3 +302,3 @@ "use strict";
     };
-    if (this.modelId === "o1-preview" || this.modelId === "o1-mini") {
+    if (isReasoningModel(this.modelId)) {
       baseArgs.temperature = void 0;
@@ -437,2 +437,38 @@ baseArgs.top_p = void 0;
   async doStream(options) {
+    if (isReasoningModel(this.modelId)) {
+      const result = await this.doGenerate(options);
+      const simulatedStream = new ReadableStream({
+        start(controller) {
+          controller.enqueue({ type: "response-metadata", ...result.response });
+          if (result.text) {
+            controller.enqueue({
+              type: "text-delta",
+              textDelta: result.text
+            });
+          }
+          if (result.toolCalls) {
+            for (const toolCall of result.toolCalls) {
+              controller.enqueue({
+                type: "tool-call",
+                ...toolCall
+              });
+            }
+          }
+          controller.enqueue({
+            type: "finish",
+            finishReason: result.finishReason,
+            usage: result.usage,
+            logprobs: result.logprobs,
+            providerMetadata: result.providerMetadata
+          });
+          controller.close();
+        }
+      });
+      return {
+        stream: simulatedStream,
+        rawCall: result.rawCall,
+        rawResponse: result.rawResponse,
+        warnings: result.warnings
+      };
+    }
     const { args, warnings } = this.getArgs(options);
@@ -795,2 +831,5 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
 }
+function isReasoningModel(modelId) {
+  return modelId.startsWith("o1-");
+}
@@ -797,0 +836,0 @@ // src/openai-completion-language-model.ts
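
The core of the hunk above is a reusable pattern: run the non-streaming call, then replay its parts through a ReadableStream. Here is a standalone TypeScript sketch of that pattern, with StreamPart and GenerateResult as simplified, hypothetical stand-ins for the SDK's internal types.

// Simplified, hypothetical shapes; the real SDK types carry more fields.
type StreamPart =
  | { type: "response-metadata"; id?: string; modelId?: string }
  | { type: "text-delta"; textDelta: string }
  | { type: "tool-call"; toolCallId: string; toolName: string; args: string }
  | { type: "finish"; finishReason: string; usage: { promptTokens: number; completionTokens: number } };

interface GenerateResult {
  response?: { id?: string; modelId?: string };
  text?: string;
  toolCalls?: Array<{ toolCallId: string; toolName: string; args: string }>;
  finishReason: string;
  usage: { promptTokens: number; completionTokens: number };
}

// Replay a completed generation as a one-shot stream of parts:
// metadata first, then the full text as a single delta, then tool calls,
// then the finish event.
function simulateStream(result: GenerateResult): ReadableStream<StreamPart> {
  return new ReadableStream<StreamPart>({
    start(controller) {
      controller.enqueue({ type: "response-metadata", ...(result.response ?? {}) });
      if (result.text) {
        controller.enqueue({ type: "text-delta", textDelta: result.text });
      }
      for (const toolCall of result.toolCalls ?? []) {
        controller.enqueue({ type: "tool-call", ...toolCall });
      }
      controller.enqueue({
        type: "finish",
        finishReason: result.finishReason,
        usage: result.usage,
      });
      controller.close();
    },
  });
}
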
@@ -299,3 +299,3 @@ "use strict";
     };
-    if (this.modelId === "o1-preview" || this.modelId === "o1-mini") {
+    if (isReasoningModel(this.modelId)) {
       baseArgs.temperature = void 0;
@@ -434,2 +434,38 @@ baseArgs.top_p = void 0;
   async doStream(options) {
+    if (isReasoningModel(this.modelId)) {
+      const result = await this.doGenerate(options);
+      const simulatedStream = new ReadableStream({
+        start(controller) {
+          controller.enqueue({ type: "response-metadata", ...result.response });
+          if (result.text) {
+            controller.enqueue({
+              type: "text-delta",
+              textDelta: result.text
+            });
+          }
+          if (result.toolCalls) {
+            for (const toolCall of result.toolCalls) {
+              controller.enqueue({
+                type: "tool-call",
+                ...toolCall
+              });
+            }
+          }
+          controller.enqueue({
+            type: "finish",
+            finishReason: result.finishReason,
+            usage: result.usage,
+            logprobs: result.logprobs,
+            providerMetadata: result.providerMetadata
+          });
+          controller.close();
+        }
+      });
+      return {
+        stream: simulatedStream,
+        rawCall: result.rawCall,
+        rawResponse: result.rawResponse,
+        warnings: result.warnings
+      };
+    }
     const { args, warnings } = this.getArgs(options);
@@ -792,2 +828,5 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
 }
+function isReasoningModel(modelId) {
+  return modelId.startsWith("o1-");
+}
@@ -794,0 +833,0 @@ // src/openai-completion-language-model.ts
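
Both bundles gate on the same prefix helper, and the first hunk shows that unsupported sampling settings (temperature, with top_p in the hunk header) are also cleared for these models. A minimal sketch of that guard follows, assuming only the o1- prefix rule visible in the diff; later reasoning-model families are not covered here.

// Mirrors the prefix check added in 0.0.62; anything beyond the o1- family
// is outside what this diff shows.
function isReasoningModel(modelId: string): boolean {
  return modelId.startsWith("o1-");
}

// Reasoning models do not accept these sampling parameters, so the provider
// clears them before building the request (as in the temperature/top_p hunk).
function stripUnsupportedSettings(
  modelId: string,
  args: { temperature?: number; top_p?: number }
) {
  if (isReasoningModel(modelId)) {
    args.temperature = undefined;
    args.top_p = undefined;
  }
  return args;
}

console.log(isReasoningModel("o1-mini"));     // true
console.log(isReasoningModel("gpt-4o-mini")); // false
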
 {
   "name": "@ai-sdk/openai",
-  "version": "0.0.61",
+  "version": "0.0.62",
   "license": "Apache-2.0",
@@ -5,0 +5,0 @@ "sideEffects": false,