@ai-sdk/openai
Comparing version 0.0.5 to 0.0.6
@@ -154,2 +154,15 @@ "use strict";
// src/map-openai-chat-logprobs.ts
function mapOpenAIChatLogProbsOutput(logprobs) {
  var _a, _b;
  return (_b = (_a = logprobs == null ? void 0 : logprobs.content) == null ? void 0 : _a.map(({ token, logprob, top_logprobs }) => ({
    token,
    logprob,
    topLogprobs: top_logprobs ? top_logprobs.map(({ token: token2, logprob: logprob2 }) => ({
      token: token2,
      logprob: logprob2
    })) : []
  }))) != null ? _b : void 0;
}
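The hunk above adds a small helper that normalizes OpenAI's chat logprobs payload into the SDK's camelCase shape. De-minified into hand-written TypeScript, the logic reads as follows (a sketch; the type names are illustrative, not exports of this package):

```ts
// Illustrative input type; the package's own types may differ.
interface OpenAIChatLogProbs {
  content: Array<{
    token: string;
    logprob: number;
    top_logprobs: Array<{ token: string; logprob: number }> | null;
  }> | null;
}

function mapOpenAIChatLogProbsOutput(
  logprobs: OpenAIChatLogProbs | null | undefined,
) {
  // Returns undefined when the response carried no logprobs; otherwise
  // snake_case top_logprobs is renamed to camelCase topLogprobs, with []
  // as the fallback when no alternatives were requested.
  return logprobs?.content?.map(({ token, logprob, top_logprobs }) => ({
    token,
    logprob,
    topLogprobs:
      top_logprobs?.map(({ token, logprob }) => ({ token, logprob })) ?? [],
  }));
}
```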
// src/openai-chat-language-model.ts
@@ -184,2 +197,4 @@ var OpenAIChatLanguageModel = class {
logit_bias: this.settings.logitBias,
logprobs: this.settings.logprobs === true || typeof this.settings.logprobs === "number",
top_logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
user: this.settings.user,
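These two request fields encode the SDK's single `logprobs` setting (boolean or number) into OpenAI's paired chat parameters. The mapping, restated as a standalone sketch (hedged; it mirrors the compiled expressions above rather than quoting package source):

```ts
// logprobs: true      -> { logprobs: true,  top_logprobs: 0 }  (sampled tokens only)
// logprobs: n         -> { logprobs: true,  top_logprobs: n }  (plus top-n alternatives)
// logprobs: false     -> { logprobs: false, top_logprobs: undefined }
// logprobs: undefined -> { logprobs: false, top_logprobs: undefined }
function toChatLogprobParams(setting: boolean | number | undefined) {
  return {
    logprobs: setting === true || typeof setting === 'number',
    top_logprobs:
      typeof setting === 'number' ? setting : setting === true ? 0 : undefined,
  };
}
```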
@@ -247,3 +262,3 @@ // standardized settings:
const args = this.getArgs(options);
const response = await (0, import_provider_utils3.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
url: `${this.config.baseURL}/chat/completions`,
@@ -274,3 +289,5 @@ headers: this.config.headers(),
rawCall: { rawPrompt, rawSettings },
warnings: []
rawResponse: { headers: responseHeaders },
warnings: [],
logprobs: mapOpenAIChatLogProbsOutput(choice.logprobs)
};
@@ -280,3 +297,3 @@ }
const args = this.getArgs(options);
const response = await (0, import_provider_utils3.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
url: `${this.config.baseURL}/chat/completions`,
@@ -301,2 +318,3 @@ headers: this.config.headers(),
};
let logprobs;
return {
@@ -332,2 +350,10 @@ stream: response.pipeThrough(
}
const mappedLogprobs = mapOpenAIChatLogProbsOutput(
  choice == null ? void 0 : choice.logprobs
);
if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
  if (logprobs === void 0)
    logprobs = [];
  logprobs.push(...mappedLogprobs);
}
if (delta.tool_calls != null) {
@@ -390,3 +416,8 @@ for (const toolCallDelta of delta.tool_calls) {
flush(controller) {
controller.enqueue({ type: "finish", finishReason, usage });
controller.enqueue({
  type: "finish",
  finishReason,
  logprobs,
  usage
});
}
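In the streaming path, per-chunk logprobs are mapped as they arrive, buffered in the enclosing `let logprobs` variable, and emitted once on the final "finish" part in `flush`. The same accumulate-then-flush pattern, reduced to a self-contained sketch (simplified part types; not the SDK's actual stream interfaces):

```ts
// Buffer values seen in each chunk; emit them once when the stream closes.
function collectOnFlush<T>() {
  let collected: T[] | undefined;
  return new TransformStream<T[], { type: 'finish'; collected?: T[] }>({
    transform(chunkValues) {
      if (chunkValues.length > 0) {
        (collected ??= []).push(...chunkValues);
      }
    },
    flush(controller) {
      // Only runs after the upstream is exhausted, mirroring the diff above.
      controller.enqueue({ type: 'finish', collected });
    },
  });
}
```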
@@ -396,2 +427,3 @@ })
rawCall: { rawPrompt, rawSettings },
rawResponse: { headers: responseHeaders },
warnings: []
@@ -419,2 +451,16 @@ };
index: import_zod2.z.number(),
logprobs: import_zod2.z.object({
  content: import_zod2.z.array(
    import_zod2.z.object({
      token: import_zod2.z.string(),
      logprob: import_zod2.z.number(),
      top_logprobs: import_zod2.z.array(
        import_zod2.z.object({
          token: import_zod2.z.string(),
          logprob: import_zod2.z.number()
        })
      )
    })
  ).nullable()
}).nullable().optional(),
finish_reason: import_zod2.z.string().optional().nullable()
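Note that the schema declares `logprobs` as `.nullable().optional()`, so validation passes whether the API omits the field entirely or sends an explicit null. A minimal zod illustration of that combination:

```ts
import { z } from 'zod';

const choice = z.object({
  logprobs: z
    .object({ content: z.array(z.unknown()).nullable() })
    .nullable()
    .optional(),
});

choice.parse({});                 // ok: field omitted (optional)
choice.parse({ logprobs: null }); // ok: field present but null (nullable)
```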
@@ -452,2 +498,16 @@ })
}),
logprobs: import_zod2.z.object({
  content: import_zod2.z.array(
    import_zod2.z.object({
      token: import_zod2.z.string(),
      logprob: import_zod2.z.number(),
      top_logprobs: import_zod2.z.array(
        import_zod2.z.object({
          token: import_zod2.z.string(),
          logprob: import_zod2.z.number()
        })
      )
    })
  ).nullable()
}).nullable().optional(),
finish_reason: import_zod2.z.string().nullable().optional(),
@@ -552,2 +612,16 @@ index: import_zod2.z.number()
// src/map-openai-completion-logprobs.ts
function mapOpenAICompletionLogProbs(logprobs) {
  return logprobs == null ? void 0 : logprobs.tokens.map((token, index) => ({
    token,
    logprob: logprobs.token_logprobs[index],
    topLogprobs: logprobs.top_logprobs ? Object.entries(logprobs.top_logprobs[index]).map(
      ([token2, logprob]) => ({
        token: token2,
        logprob
      })
    ) : []
  }));
}
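The completion-side mapper handles the legacy completions API, which reports logprobs as parallel arrays plus one record of alternatives per token position, rather than nested objects. A typed sketch of the same conversion (type names are illustrative assumptions):

```ts
interface OpenAICompletionLogProbs {
  tokens: string[];                              // token text at each position
  token_logprobs: number[];                      // logprob of the sampled token
  top_logprobs: Record<string, number>[] | null; // alternatives keyed by token
}

function mapOpenAICompletionLogProbs(
  logprobs: OpenAICompletionLogProbs | null | undefined,
) {
  if (logprobs == null) return undefined;
  const { tokens, token_logprobs, top_logprobs } = logprobs;
  // Zip the parallel arrays and flatten each alternatives record into
  // { token, logprob } pairs, matching the chat mapper's output shape.
  return tokens.map((token, index) => ({
    token,
    logprob: token_logprobs[index],
    topLogprobs: top_logprobs
      ? Object.entries(top_logprobs[index]).map(([token, logprob]) => ({
          token,
          logprob,
        }))
      : [],
  }));
}
```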
// src/openai-completion-language-model.ts
@@ -585,2 +659,3 @@ var OpenAICompletionLanguageModel = class {
logit_bias: this.settings.logitBias,
logprobs: typeof this.settings.logprobs === "number" ? this.settings.logprobs : typeof this.settings.logprobs === "boolean" ? this.settings.logprobs ? 0 : void 0 : void 0,
suffix: this.settings.suffix,
@@ -632,3 +707,3 @@ user: this.settings.user,
const args = this.getArgs(options);
const response = await (0, import_provider_utils4.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
url: `${this.config.baseURL}/completions`,
@@ -652,3 +727,5 @@ headers: this.config.headers(),
finishReason: mapOpenAIFinishReason(choice.finish_reason),
logprobs: mapOpenAICompletionLogProbs(choice.logprobs),
rawCall: { rawPrompt, rawSettings },
rawResponse: { headers: responseHeaders },
warnings: []
@@ -659,3 +736,3 @@ };
const args = this.getArgs(options);
const response = await (0, import_provider_utils4.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
url: `${this.config.baseURL}/completions`,
@@ -679,2 +756,3 @@ headers: this.config.headers(),
};
let logprobs;
return {
@@ -705,5 +783,18 @@ stream: response.pipeThrough(
}
const mappedLogprobs = mapOpenAICompletionLogProbs(
  choice == null ? void 0 : choice.logprobs
);
if (mappedLogprobs == null ? void 0 : mappedLogprobs.length) {
  if (logprobs === void 0)
    logprobs = [];
  logprobs.push(...mappedLogprobs);
}
},
flush(controller) {
controller.enqueue({ type: "finish", finishReason, usage });
controller.enqueue({
  type: "finish",
  finishReason,
  logprobs,
  usage
});
}
@@ -713,2 +804,3 @@ })
rawCall: { rawPrompt, rawSettings },
rawResponse: { headers: responseHeaders },
warnings: []
@@ -722,3 +814,8 @@ };
text: import_zod3.z.string(),
finish_reason: import_zod3.z.string()
finish_reason: import_zod3.z.string(),
logprobs: import_zod3.z.object({
  tokens: import_zod3.z.array(import_zod3.z.string()),
  token_logprobs: import_zod3.z.array(import_zod3.z.number()),
  top_logprobs: import_zod3.z.array(import_zod3.z.record(import_zod3.z.string(), import_zod3.z.number())).nullable()
}).nullable().optional()
})
@@ -737,3 +834,8 @@ ),
finish_reason: import_zod3.z.enum(["stop", "length", "content_filter"]).optional().nullable(),
index: import_zod3.z.number()
index: import_zod3.z.number(),
logprobs: import_zod3.z.object({
  tokens: import_zod3.z.array(import_zod3.z.string()),
  token_logprobs: import_zod3.z.array(import_zod3.z.number()),
  top_logprobs: import_zod3.z.array(import_zod3.z.record(import_zod3.z.string(), import_zod3.z.number())).nullable()
}).nullable().optional()
})
@@ -740,0 +842,0 @@ ),
@@ -21,2 +21,14 @@ import { LanguageModelV1 } from '@ai-sdk/provider';
/**
 * The log probabilities of the tokens. Including logprobs will increase
 * the response size and can slow down response times. However, it can
 * be useful for developers to better understand how the model is behaving.
 *
 * Setting to true will return the log probabilities of the tokens that
 * were generated.
 *
 * Setting to a number will return the log probabilities of the top n
 * tokens that were generated.
 */
logprobs?: boolean | number;
/**
 * A unique identifier representing your end-user, which can help OpenAI to
@@ -68,2 +80,10 @@ * monitor and detect abuse. Learn more.
/**
 * The number of logprobs to return.
 *
 * Including logprobs will increase the response size and can slow down
 * response times. However, it can be useful for developers to better
 * understand how the model is behaving.
 */
logprobs?: boolean | number;
/**
 * The suffix that comes after a completion of inserted text.
@@ -70,0 +90,0 @@ */
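Taken together, both the chat and completion settings now accept `logprobs?: boolean | number`. A hedged usage sketch follows — the `OpenAI` facade import, constructor options, and model ids below are assumptions about this 0.0.x-era API, not confirmed against the package:

```ts
import { OpenAI } from '@ai-sdk/openai'; // facade export assumed for this release

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

// true -> logprobs for the sampled tokens only
// 3    -> additionally the top-3 alternatives at each position
const chatModel = openai.chat('gpt-3.5-turbo', { logprobs: 3 });
const completionModel = openai.completion('gpt-3.5-turbo-instruct', {
  logprobs: 2,
});
```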
{
  "name": "@ai-sdk/openai",
  "version": "0.0.5",
  "version": "0.0.6",
  "license": "Apache-2.0",
@@ -21,4 +21,4 @@ "sideEffects": false,
  "dependencies": {
    "@ai-sdk/provider": "0.0.1",
    "@ai-sdk/provider-utils": "0.0.2"
    "@ai-sdk/provider": "0.0.2",
    "@ai-sdk/provider-utils": "0.0.3"
  },
@@ -25,0 +25,0 @@ "devDependencies": {
+ Added @ai-sdk/provider@0.0.2 (transitive)
+ Added @ai-sdk/provider-utils@0.0.3 (transitive)
- Removed @ai-sdk/provider@0.0.1 (transitive)
- Removed @ai-sdk/provider-utils@0.0.2 (transitive)
Updated @ai-sdk/provider@0.0.2
Updated @ai-sdk/provider-utils@0.0.3