@ai-sdk/openai - npm package version comparison

Comparing version 0.0.64 to 0.0.65

./dist/index.js

@@ -387,3 +387,3 @@ "use strict";
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
     const { args, warnings } = this.getArgs(options);

@@ -406,9 +406,14 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
     const choice = response.choices[0];
-    const providerMetadata = ((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null ? {
-      openai: {
-        reasoningTokens: (_d = (_c = response.usage) == null ? void 0 : _c.completion_tokens_details) == null ? void 0 : _d.reasoning_tokens
-      }
-    } : void 0;
+    let providerMetadata;
+    if (((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null || ((_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null) {
+      providerMetadata = { openai: {} };
+      if (((_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null) {
+        providerMetadata.openai.reasoningTokens = (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens_details) == null ? void 0 : _h.reasoning_tokens;
+      }
+      if (((_j = (_i = response.usage) == null ? void 0 : _i.prompt_tokens_details) == null ? void 0 : _j.cached_tokens) != null) {
+        providerMetadata.openai.cachedPromptTokens = (_l = (_k = response.usage) == null ? void 0 : _k.prompt_tokens_details) == null ? void 0 : _l.cached_tokens;
+      }
+    }
     return {
-      text: (_e = choice.message.content) != null ? _e : void 0,
+      text: (_m = choice.message.content) != null ? _m : void 0,
       toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [

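Stripped of the transpiler temporaries (_a through _r), the generate path now builds the metadata object conditionally instead of via a single ternary. A minimal TypeScript sketch of that logic, paraphrased from the hunk above (the helper name and the types are illustrative, not part of the package):

// Illustrative paraphrase of the doGenerate change: derive OpenAI provider metadata
// from a chat-completions usage object, adding cachedPromptTokens next to
// reasoningTokens when the API reports them.
type OpenAIUsage = {
  prompt_tokens_details?: { cached_tokens?: number | null } | null;
  completion_tokens_details?: { reasoning_tokens?: number | null } | null;
};

function buildOpenAIProviderMetadata(usage: OpenAIUsage | undefined) {
  let providerMetadata: { openai: Record<string, number> } | undefined;
  const reasoning = usage?.completion_tokens_details?.reasoning_tokens;
  const cached = usage?.prompt_tokens_details?.cached_tokens;
  if (reasoning != null || cached != null) {
    providerMetadata = { openai: {} };
    if (reasoning != null) providerMetadata.openai.reasoningTokens = reasoning;
    if (cached != null) providerMetadata.openai.cachedPromptTokens = cached;
  }
  return providerMetadata;
}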
@@ -421,3 +426,3 @@ {
         }
-      ] : (_f = choice.message.tool_calls) == null ? void 0 : _f.map((toolCall) => {
+      ] : (_n = choice.message.tool_calls) == null ? void 0 : _n.map((toolCall) => {
        var _a2;

@@ -433,4 +438,4 @@ return {
       usage: {
-        promptTokens: (_h = (_g = response.usage) == null ? void 0 : _g.prompt_tokens) != null ? _h : NaN,
-        completionTokens: (_j = (_i = response.usage) == null ? void 0 : _i.completion_tokens) != null ? _j : NaN
+        promptTokens: (_p = (_o = response.usage) == null ? void 0 : _o.prompt_tokens) != null ? _p : NaN,
+        completionTokens: (_r = (_q = response.usage) == null ? void 0 : _q.completion_tokens) != null ? _r : NaN
       },

@@ -512,2 +517,3 @@ rawCall: { rawPrompt, rawSettings },
     const { useLegacyFunctionCalling } = this.settings;
+    let providerMetadata;
     return {

@@ -517,3 +523,3 @@ stream: response.pipeThrough(
         transform(chunk, controller) {
-          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
           if (!chunk.success) {

@@ -542,2 +548,9 @@ finishReason = "error";
             };
+            if (((_c = value.usage.prompt_tokens_details) == null ? void 0 : _c.cached_tokens) != null) {
+              providerMetadata = {
+                openai: {
+                  cachedPromptTokens: (_d = value.usage.prompt_tokens_details) == null ? void 0 : _d.cached_tokens
+                }
+              };
+            }
           }

@@ -589,3 +602,3 @@ const choice = value.choices[0];
               }
-              if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
+              if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
                 throw new import_provider2.InvalidResponseDataError({

@@ -601,7 +614,7 @@ data: toolCallDelta,
                   name: toolCallDelta.function.name,
-                  arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
+                  arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
                 }
               };
               const toolCall2 = toolCalls[index];
-              if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
+              if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
                 if (toolCall2.function.arguments.length > 0) {

@@ -620,3 +633,3 @@ controller.enqueue({
                     toolCallType: "function",
-                    toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
+                    toolCallId: (_i = toolCall2.id) != null ? _i : (0, import_provider_utils3.generateId)(),
                     toolName: toolCall2.function.name,

@@ -630,4 +643,4 @@ args: toolCall2.function.arguments
               const toolCall = toolCalls[index];
-              if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
-                toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
+              if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
+                toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
               }

@@ -639,9 +652,9 @@ controller.enqueue({
                 toolName: toolCall.function.name,
-                argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
+                argsTextDelta: (_m = toolCallDelta.function.arguments) != null ? _m : ""
               });
-              if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+              if (((_n = toolCall.function) == null ? void 0 : _n.name) != null && ((_o = toolCall.function) == null ? void 0 : _o.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
+                  toolCallId: (_p = toolCall.id) != null ? _p : (0, import_provider_utils3.generateId)(),
                   toolName: toolCall.function.name,

@@ -663,3 +676,4 @@ args: toolCall.function.arguments
                completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
-              }
+              },
+              ...providerMetadata != null ? { providerMetadata } : {}
             });

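The streaming path mirrors this: providerMetadata is hoisted into the doStream closure, filled in when a usage chunk reports prompt_tokens_details.cached_tokens, and spread into the final finish event only when it was actually set. A small sketch of that pattern, paraphrased from the hunks above (the chunk and event shapes below are simplified stand-ins, not the SDK's real types):

// Illustrative paraphrase of the doStream change.
type StreamUsageChunk = {
  usage?: {
    prompt_tokens_details?: { cached_tokens?: number | null } | null;
  } | null;
};

let providerMetadata: { openai: { cachedPromptTokens: number } } | undefined;

function recordUsage(value: StreamUsageChunk) {
  const cached = value.usage?.prompt_tokens_details?.cached_tokens;
  if (cached != null) {
    providerMetadata = { openai: { cachedPromptTokens: cached } };
  }
}

function buildFinishEvent(usage: { promptTokens: number; completionTokens: number }) {
  // Include the metadata key only when something was captured, matching the
  // ...providerMetadata != null ? { providerMetadata } : {} line in the diff.
  return {
    type: "finish" as const,
    usage,
    ...(providerMetadata != null ? { providerMetadata } : {}),
  };
}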
@@ -678,2 +692,5 @@ }
   completion_tokens: import_zod2.z.number().nullish(),
+  prompt_tokens_details: import_zod2.z.object({
+    cached_tokens: import_zod2.z.number().nullish()
+  }).nullish(),
   completion_tokens_details: import_zod2.z.object({
     reasoning_tokens: import_zod2.z.number().nullish()

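The new field is backed by a schema extension: the usage object in the chat response schema gains a nullable prompt_tokens_details.cached_tokens entry next to the existing completion_tokens_details.reasoning_tokens. Written out in plain zod, the usage portion looks roughly like this (only the fields visible in the diff are shown; the enclosing response schema and the exact optionality of prompt_tokens are assumptions):

import { z } from "zod";

// Sketch of the usage part of the chat-completions response schema after 0.0.65.
const openaiUsageSchema = z.object({
  prompt_tokens: z.number().nullish(),
  completion_tokens: z.number().nullish(),
  prompt_tokens_details: z
    .object({ cached_tokens: z.number().nullish() })
    .nullish(),
  completion_tokens_details: z
    .object({ reasoning_tokens: z.number().nullish() })
    .nullish(),
});

type OpenAICompletionUsage = z.infer<typeof openaiUsageSchema>;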
CHANGELOG.md

# @ai-sdk/openai
## 0.0.65
### Patch Changes
- e8aed44: Add OpenAI cached prompt tokens to experimental_providerMetadata for generateText and streamText
## 0.0.64


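For applications, the practical effect of 0.0.65 is that OpenAI's cached prompt token count (reported under prompt_tokens_details.cached_tokens when prompt caching applies) shows up in the experimental provider metadata of generateText and streamText results. A minimal sketch, assuming an AI SDK 3.x project with the ai and @ai-sdk/openai packages installed and a prompt for which OpenAI actually reports cached tokens (the model id is just an example):

import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await generateText({
  model: openai("gpt-4o-mini"),
  prompt: "Summarize the release notes above.",
});

// New in 0.0.65: cachedPromptTokens appears here when OpenAI reports cached tokens,
// alongside reasoningTokens, which was exposed in an earlier release.
const openaiMetadata = result.experimental_providerMetadata?.openai;
console.log("cached prompt tokens:", openaiMetadata?.cachedPromptTokens);
console.log("reasoning tokens:", openaiMetadata?.reasoningTokens);

With streamText, the same metadata becomes available once the stream finishes (for example via the onFinish callback), since the provider attaches it to the stream's finish event as shown in the diff above.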

The same change lands in a second bundled output file in the package; the hunks are identical except that their offsets are shifted by three lines.

@@ -384,3 +384,3 @@ "use strict";
   async doGenerate(options) {
-    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r;
     const { args, warnings } = this.getArgs(options);

@@ -403,9 +403,14 @@ const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
     const choice = response.choices[0];
-    const providerMetadata = ((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null ? {
-      openai: {
-        reasoningTokens: (_d = (_c = response.usage) == null ? void 0 : _c.completion_tokens_details) == null ? void 0 : _d.reasoning_tokens
-      }
-    } : void 0;
+    let providerMetadata;
+    if (((_b = (_a = response.usage) == null ? void 0 : _a.completion_tokens_details) == null ? void 0 : _b.reasoning_tokens) != null || ((_d = (_c = response.usage) == null ? void 0 : _c.prompt_tokens_details) == null ? void 0 : _d.cached_tokens) != null) {
+      providerMetadata = { openai: {} };
+      if (((_f = (_e = response.usage) == null ? void 0 : _e.completion_tokens_details) == null ? void 0 : _f.reasoning_tokens) != null) {
+        providerMetadata.openai.reasoningTokens = (_h = (_g = response.usage) == null ? void 0 : _g.completion_tokens_details) == null ? void 0 : _h.reasoning_tokens;
+      }
+      if (((_j = (_i = response.usage) == null ? void 0 : _i.prompt_tokens_details) == null ? void 0 : _j.cached_tokens) != null) {
+        providerMetadata.openai.cachedPromptTokens = (_l = (_k = response.usage) == null ? void 0 : _k.prompt_tokens_details) == null ? void 0 : _l.cached_tokens;
+      }
+    }
     return {
-      text: (_e = choice.message.content) != null ? _e : void 0,
+      text: (_m = choice.message.content) != null ? _m : void 0,
       toolCalls: this.settings.useLegacyFunctionCalling && choice.message.function_call ? [

@@ -418,3 +423,3 @@ {
         }
-      ] : (_f = choice.message.tool_calls) == null ? void 0 : _f.map((toolCall) => {
+      ] : (_n = choice.message.tool_calls) == null ? void 0 : _n.map((toolCall) => {
        var _a2;

@@ -430,4 +435,4 @@ return {
       usage: {
-        promptTokens: (_h = (_g = response.usage) == null ? void 0 : _g.prompt_tokens) != null ? _h : NaN,
-        completionTokens: (_j = (_i = response.usage) == null ? void 0 : _i.completion_tokens) != null ? _j : NaN
+        promptTokens: (_p = (_o = response.usage) == null ? void 0 : _o.prompt_tokens) != null ? _p : NaN,
+        completionTokens: (_r = (_q = response.usage) == null ? void 0 : _q.completion_tokens) != null ? _r : NaN
       },

@@ -509,2 +514,3 @@ rawCall: { rawPrompt, rawSettings },
     const { useLegacyFunctionCalling } = this.settings;
+    let providerMetadata;
     return {

@@ -514,3 +520,3 @@ stream: response.pipeThrough(
         transform(chunk, controller) {
-          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n;
+          var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l, _m, _n, _o, _p;
           if (!chunk.success) {

@@ -539,2 +545,9 @@ finishReason = "error";
             };
+            if (((_c = value.usage.prompt_tokens_details) == null ? void 0 : _c.cached_tokens) != null) {
+              providerMetadata = {
+                openai: {
+                  cachedPromptTokens: (_d = value.usage.prompt_tokens_details) == null ? void 0 : _d.cached_tokens
+                }
+              };
+            }
           }

@@ -586,3 +599,3 @@ const choice = value.choices[0];
               }
-              if (((_c = toolCallDelta.function) == null ? void 0 : _c.name) == null) {
+              if (((_e = toolCallDelta.function) == null ? void 0 : _e.name) == null) {
                 throw new import_provider2.InvalidResponseDataError({

@@ -598,7 +611,7 @@ data: toolCallDelta,
                   name: toolCallDelta.function.name,
-                  arguments: (_d = toolCallDelta.function.arguments) != null ? _d : ""
+                  arguments: (_f = toolCallDelta.function.arguments) != null ? _f : ""
                 }
               };
               const toolCall2 = toolCalls[index];
-              if (((_e = toolCall2.function) == null ? void 0 : _e.name) != null && ((_f = toolCall2.function) == null ? void 0 : _f.arguments) != null) {
+              if (((_g = toolCall2.function) == null ? void 0 : _g.name) != null && ((_h = toolCall2.function) == null ? void 0 : _h.arguments) != null) {
                 if (toolCall2.function.arguments.length > 0) {

@@ -617,3 +630,3 @@ controller.enqueue({
                     toolCallType: "function",
-                    toolCallId: (_g = toolCall2.id) != null ? _g : (0, import_provider_utils3.generateId)(),
+                    toolCallId: (_i = toolCall2.id) != null ? _i : (0, import_provider_utils3.generateId)(),
                     toolName: toolCall2.function.name,

@@ -627,4 +640,4 @@ args: toolCall2.function.arguments
               const toolCall = toolCalls[index];
-              if (((_h = toolCallDelta.function) == null ? void 0 : _h.arguments) != null) {
-                toolCall.function.arguments += (_j = (_i = toolCallDelta.function) == null ? void 0 : _i.arguments) != null ? _j : "";
+              if (((_j = toolCallDelta.function) == null ? void 0 : _j.arguments) != null) {
+                toolCall.function.arguments += (_l = (_k = toolCallDelta.function) == null ? void 0 : _k.arguments) != null ? _l : "";
               }

@@ -636,9 +649,9 @@ controller.enqueue({
                 toolName: toolCall.function.name,
-                argsTextDelta: (_k = toolCallDelta.function.arguments) != null ? _k : ""
+                argsTextDelta: (_m = toolCallDelta.function.arguments) != null ? _m : ""
               });
-              if (((_l = toolCall.function) == null ? void 0 : _l.name) != null && ((_m = toolCall.function) == null ? void 0 : _m.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
+              if (((_n = toolCall.function) == null ? void 0 : _n.name) != null && ((_o = toolCall.function) == null ? void 0 : _o.arguments) != null && (0, import_provider_utils3.isParsableJson)(toolCall.function.arguments)) {
                 controller.enqueue({
                   type: "tool-call",
                   toolCallType: "function",
-                  toolCallId: (_n = toolCall.id) != null ? _n : (0, import_provider_utils3.generateId)(),
+                  toolCallId: (_p = toolCall.id) != null ? _p : (0, import_provider_utils3.generateId)(),
                   toolName: toolCall.function.name,

@@ -660,3 +673,4 @@ args: toolCall.function.arguments
                completionTokens: (_b = usage.completionTokens) != null ? _b : NaN
-              }
+              },
+              ...providerMetadata != null ? { providerMetadata } : {}
             });

@@ -675,2 +689,5 @@ }
   completion_tokens: import_zod2.z.number().nullish(),
+  prompt_tokens_details: import_zod2.z.object({
+    cached_tokens: import_zod2.z.number().nullish()
+  }).nullish(),
   completion_tokens_details: import_zod2.z.object({
     reasoning_tokens: import_zod2.z.number().nullish()

package.json

 {
   "name": "@ai-sdk/openai",
-  "version": "0.0.64",
+  "version": "0.0.65",
   "license": "Apache-2.0",
   "sideEffects": false,

