Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in | Demo | Install
Socket

@ai-sdk/openai

Package Overview
Dependencies
Maintainers
2
Versions
82
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@ai-sdk/openai - npm Package Compare versions

Comparing version 0.0.61 to 0.0.62

41

./dist/index.js

@@ -302,3 +302,3 @@ "use strict";

};
if (this.modelId === "o1-preview" || this.modelId === "o1-mini") {
if (isReasoningModel(this.modelId)) {
baseArgs.temperature = void 0;

@@ -437,2 +437,38 @@ baseArgs.top_p = void 0;

async doStream(options) {
if (isReasoningModel(this.modelId)) {
const result = await this.doGenerate(options);
const simulatedStream = new ReadableStream({
start(controller) {
controller.enqueue({ type: "response-metadata", ...result.response });
if (result.text) {
controller.enqueue({
type: "text-delta",
textDelta: result.text
});
}
if (result.toolCalls) {
for (const toolCall of result.toolCalls) {
controller.enqueue({
type: "tool-call",
...toolCall
});
}
}
controller.enqueue({
type: "finish",
finishReason: result.finishReason,
usage: result.usage,
logprobs: result.logprobs,
providerMetadata: result.providerMetadata
});
controller.close();
}
});
return {
stream: simulatedStream,
rawCall: result.rawCall,
rawResponse: result.rawResponse,
warnings: result.warnings
};
}
const { args, warnings } = this.getArgs(options);

@@ -795,2 +831,5 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

}
// Reports whether the given model id belongs to the o1 reasoning-model family.
function isReasoningModel(modelId) {
  return /^o1-/.test(modelId);
}

@@ -797,0 +836,0 @@ // src/openai-completion-language-model.ts

# @ai-sdk/openai
## 0.0.62
### Patch Changes
- 7efa867: feat (provider/openai): simulated streaming for reasoning models
## 0.0.61

@@ -4,0 +10,0 @@

@@ -302,3 +302,3 @@ "use strict";

};
if (this.modelId === "o1-preview" || this.modelId === "o1-mini") {
if (isReasoningModel(this.modelId)) {
baseArgs.temperature = void 0;

@@ -437,2 +437,38 @@ baseArgs.top_p = void 0;

async doStream(options) {
if (isReasoningModel(this.modelId)) {
const result = await this.doGenerate(options);
const simulatedStream = new ReadableStream({
start(controller) {
controller.enqueue({ type: "response-metadata", ...result.response });
if (result.text) {
controller.enqueue({
type: "text-delta",
textDelta: result.text
});
}
if (result.toolCalls) {
for (const toolCall of result.toolCalls) {
controller.enqueue({
type: "tool-call",
...toolCall
});
}
}
controller.enqueue({
type: "finish",
finishReason: result.finishReason,
usage: result.usage,
logprobs: result.logprobs,
providerMetadata: result.providerMetadata
});
controller.close();
}
});
return {
stream: simulatedStream,
rawCall: result.rawCall,
rawResponse: result.rawResponse,
warnings: result.warnings
};
}
const { args, warnings } = this.getArgs(options);

@@ -795,2 +831,5 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

}
// True when the model id names an o1-family (reasoning) model.
function isReasoningModel(modelId) {
  const reasoningPrefix = "o1-";
  return modelId.slice(0, reasoningPrefix.length) === reasoningPrefix;
}

@@ -797,0 +836,0 @@ // src/openai-completion-language-model.ts

@@ -299,3 +299,3 @@ "use strict";

};
if (this.modelId === "o1-preview" || this.modelId === "o1-mini") {
if (isReasoningModel(this.modelId)) {
baseArgs.temperature = void 0;

@@ -434,2 +434,38 @@ baseArgs.top_p = void 0;

async doStream(options) {
if (isReasoningModel(this.modelId)) {
const result = await this.doGenerate(options);
const simulatedStream = new ReadableStream({
start(controller) {
controller.enqueue({ type: "response-metadata", ...result.response });
if (result.text) {
controller.enqueue({
type: "text-delta",
textDelta: result.text
});
}
if (result.toolCalls) {
for (const toolCall of result.toolCalls) {
controller.enqueue({
type: "tool-call",
...toolCall
});
}
}
controller.enqueue({
type: "finish",
finishReason: result.finishReason,
usage: result.usage,
logprobs: result.logprobs,
providerMetadata: result.providerMetadata
});
controller.close();
}
});
return {
stream: simulatedStream,
rawCall: result.rawCall,
rawResponse: result.rawResponse,
warnings: result.warnings
};
}
const { args, warnings } = this.getArgs(options);

@@ -792,2 +828,5 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({

}
// Identifies o1-series model ids, which require reasoning-model handling.
function isReasoningModel(modelId) {
  return modelId.indexOf("o1-") === 0;
}

@@ -794,0 +833,0 @@ // src/openai-completion-language-model.ts

2

package.json
{
"name": "@ai-sdk/openai",
"version": "0.0.61",
"version": "0.0.62",
"license": "Apache-2.0",

@@ -5,0 +5,0 @@ "sideEffects": false,

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc