@promptbook/azure-openai
Comparing version 0.55.0-1 to 0.55.0-2
@@ -458,2 +458,16 @@ import { OpenAIClient, AzureKeyCredential } from '@azure/openai';
/**
 * Make UncertainNumber
 *
 * @param value - the raw number, or `null`/`undefined`/`NaN` when it is unknown
 *
 * @private utility for initializing UncertainNumber
 */
function uncertainNumber(value) {
    if (value === null || value === undefined || Number.isNaN(value)) {
        return { value: 0, isUncertain: true };
    }
    return { value: value };
}
/**
 * Function computeUsage creates the price per one token based on the string value found on the OpenAI page
@@ -868,5 +882,5 @@ *
usage = {
    price: { value: 0, isUncertain: true /* uncertainNumber */ } /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: { value: ((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: { value: ((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    price: uncertainNumber() /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: uncertainNumber((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: uncertainNumber((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) }, computeUsageCounts(prompt.content)),
};
@@ -944,5 +958,5 @@ if (!resultContent) {
usage = {
    price: { value: 0, isUncertain: true /* uncertainNumber */ } /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: { value: ((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: { value: ((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    price: uncertainNumber() /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: uncertainNumber((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: uncertainNumber((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) }, computeUsageCounts(prompt.content)),
};
@@ -949,0 +963,0 @@ if (!resultContent) {
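
For orientation, the change above replaces the inline `{ value, isUncertain }` literals with the new `uncertainNumber()` helper. Below is a minimal TypeScript sketch of that pattern, assuming the UncertainNumber shape implied by the compiled output above (`{ value: number; isUncertain?: true }`); it is an illustration, not the package's actual source.

type UncertainNumber = {
    value: number;
    isUncertain?: true;
};

// Wrap a raw number; fall back to an explicitly uncertain zero when the
// value is missing or not a number.
function uncertainNumber(value?: number | null): UncertainNumber {
    if (value === null || value === undefined || Number.isNaN(value)) {
        return { value: 0, isUncertain: true };
    }
    return { value };
}

// Token counts reported by the API are wrapped directly, while a missing
// count becomes an explicitly uncertain zero instead of a silent 0.
console.log(uncertainNumber(123));        // { value: 123 }
console.log(uncertainNumber(undefined));  // { value: 0, isUncertain: true }
console.log(uncertainNumber(Number.NaN)); // { value: 0, isUncertain: true }
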
@@ -14,3 +14,2 @@ import { prettifyPromptbookString } from '../conversion/prettify/prettifyPromptbookString';
import { UnexpectedError } from '../errors/UnexpectedError';
import { addPromptResultUsage } from '../execution/addPromptResultUsage';
import { assertsExecutionSuccessful } from '../execution/assertsExecutionSuccessful';
@@ -22,3 +21,5 @@ import { createPromptbookExecutor } from '../execution/createPromptbookExecutor';
import { SimplePromptInterfaceTools } from '../execution/plugins/user-interface-execution-tools/simple-prompt/SimplePromptInterfaceTools';
import { addUsage } from '../execution/utils/addUsage';
import { checkExpectations, isPassingExpectations } from '../execution/utils/checkExpectations';
import { usageToWorktime } from '../execution/utils/usageToWorktime';
import { createPromptbookLibraryFromDirectory } from '../library/constructors/createPromptbookLibraryFromDirectory';
@@ -36,3 +37,3 @@ import { createPromptbookLibraryFromSources } from '../library/constructors/createPromptbookLibraryFromSources';
export { ExecutionTypes, PROMPTBOOK_VERSION };
export { addPromptResultUsage, assertsExecutionSuccessful, checkExpectations, executionReportJsonToString, ExecutionReportStringOptions, ExecutionReportStringOptionsDefaults, isPassingExpectations, prettifyPromptbookString, };
export { addUsage, assertsExecutionSuccessful, checkExpectations, executionReportJsonToString, ExecutionReportStringOptions, ExecutionReportStringOptionsDefaults, isPassingExpectations, prettifyPromptbookString, usageToWorktime, };
export { createPromptbookLibraryFromDirectory, createPromptbookLibraryFromSources, createPromptbookLibraryFromUrl, createPromptbookSublibrary, SimplePromptbookLibrary, };
@@ -39,0 +40,0 @@ export { SimplePromptInterfaceTools };
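
The exports diff above swaps `addPromptResultUsage` for `addUsage` and adds `usageToWorktime` to the public surface; their signatures are not part of this diff. The TypeScript sketch below is only a hypothetical illustration of what field-by-field usage aggregation could look like; `UsageSketch` and `addUsageSketch` are made-up names used for illustration, not the library's API.

type UncertainNumber = { value: number; isUncertain?: true };

// Hypothetical shape, based only on the usage objects visible in this diff.
type UsageSketch = {
    price: UncertainNumber;
    input: { tokensCount: UncertainNumber };
    output: { tokensCount: UncertainNumber };
};

// Hypothetical aggregation: sum each field and propagate uncertainty
// whenever either side is uncertain.
function addUsageSketch(a: UsageSketch, b: UsageSketch): UsageSketch {
    const add = (x: UncertainNumber, y: UncertainNumber): UncertainNumber =>
        x.isUncertain || y.isUncertain
            ? { value: x.value + y.value, isUncertain: true }
            : { value: x.value + y.value };
    return {
        price: add(a.price, b.price),
        input: { tokensCount: add(a.input.tokensCount, b.input.tokensCount) },
        output: { tokensCount: add(a.output.tokensCount, b.output.tokensCount) },
    };
}
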
@@ -5,2 +5,3 @@ import type { Promisable } from 'type-fest';
import type { string_name } from '../types/typeAliases';
import type { PromptResultUsage } from './PromptResult';
/**
@@ -17,11 +18,15 @@ * Executor is a simple async function that takes INPUT PARAMETERs and returns result parameters _(along with all intermediate parameters and INPUT PARAMETERs = it extends input object)_.
/**
 * Whether the execution was successful
 * Whether the execution was successful, details are available in `executionReport`
 */
isSuccessful: boolean;
/**
 * Errors that occurred during the execution
 * Added usage of the whole execution, detailed usage is available in `executionReport`
 */
usage: PromptResultUsage;
/**
 * Errors that occurred during the execution, details are available in `executionReport`
 */
errors: Array<Error>;
/**
 * The report of the execution
 * The report of the execution with all details
 */
@@ -28,0 +33,0 @@ executionReport: ExecutionReportJson;
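
The executor result now carries a `usage` field alongside `isSuccessful`, `errors`, and `executionReport`. As a rough sketch of how a caller might read it, with types reduced to only the fields visible in this diff (`ExecutorResultSketch` and `reportUsage` are illustrative names, not part of the package):

type UncertainNumber = { value: number; isUncertain?: true };

// Only the fields visible in the typing diff above; the real executor
// result and PromptResultUsage carry more detail.
type ExecutorResultSketch = {
    isSuccessful: boolean;
    usage: {
        price: UncertainNumber;
        input: { tokensCount: UncertainNumber };
        output: { tokensCount: UncertainNumber };
    };
    errors: Array<Error>;
};

function reportUsage(result: ExecutorResultSketch): void {
    const { price, input, output } = result.usage;
    const approx = price.isUncertain ? '~' : '';
    console.info(`Tokens in/out: ${input.tokensCount.value}/${output.tokensCount.value}`);
    console.info(`Price: ${approx}${price.value}`);
    if (!result.isSuccessful) {
        console.warn(`Execution finished with ${result.errors.length} error(s); see executionReport for details.`);
    }
}
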
{
    "name": "@promptbook/azure-openai",
    "version": "0.55.0-1",
    "version": "0.55.0-2",
    "description": "Library to supercharge your use of large language models",
@@ -51,3 +51,3 @@ "private": false,
    "peerDependencies": {
        "@promptbook/core": "0.55.0-1"
        "@promptbook/core": "0.55.0-2"
    },
@@ -54,0 +54,0 @@ "main": "./umd/index.umd.js",
@@ -465,2 +465,16 @@ (function (global, factory) {
/**
 * Make UncertainNumber
 *
 * @param value - the raw number, or `null`/`undefined`/`NaN` when it is unknown
 *
 * @private utility for initializing UncertainNumber
 */
function uncertainNumber(value) {
    if (value === null || value === undefined || Number.isNaN(value)) {
        return { value: 0, isUncertain: true };
    }
    return { value: value };
}
/**
 * Function computeUsage creates the price per one token based on the string value found on the OpenAI page
@@ -875,5 +889,5 @@ *
usage = {
    price: { value: 0, isUncertain: true /* uncertainNumber */ } /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: { value: ((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: { value: ((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    price: uncertainNumber() /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: uncertainNumber((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: uncertainNumber((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) }, computeUsageCounts(prompt.content)),
};
@@ -951,5 +965,5 @@ if (!resultContent) {
usage = {
    price: { value: 0, isUncertain: true /* uncertainNumber */ } /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: { value: ((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: { value: ((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) || 0 /* uncertainNumber */ } }, computeUsageCounts(prompt.content)),
    price: uncertainNumber() /* <- TODO: [๐] Compute usage */,
    input: __assign({ tokensCount: uncertainNumber((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.promptTokens) }, computeUsageCounts(prompt.content)),
    output: __assign({ tokensCount: uncertainNumber((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completionTokens) }, computeUsageCounts(prompt.content)),
};
@@ -956,0 +970,0 @@ if (!resultContent) {
@@ -14,3 +14,2 @@ import { prettifyPromptbookString } from '../conversion/prettify/prettifyPromptbookString';
import { UnexpectedError } from '../errors/UnexpectedError';
import { addPromptResultUsage } from '../execution/addPromptResultUsage';
import { assertsExecutionSuccessful } from '../execution/assertsExecutionSuccessful';
@@ -22,3 +21,5 @@ import { createPromptbookExecutor } from '../execution/createPromptbookExecutor';
import { SimplePromptInterfaceTools } from '../execution/plugins/user-interface-execution-tools/simple-prompt/SimplePromptInterfaceTools';
import { addUsage } from '../execution/utils/addUsage';
import { checkExpectations, isPassingExpectations } from '../execution/utils/checkExpectations';
import { usageToWorktime } from '../execution/utils/usageToWorktime';
import { createPromptbookLibraryFromDirectory } from '../library/constructors/createPromptbookLibraryFromDirectory';
@@ -36,3 +37,3 @@ import { createPromptbookLibraryFromSources } from '../library/constructors/createPromptbookLibraryFromSources';
export { ExecutionTypes, PROMPTBOOK_VERSION };
export { addPromptResultUsage, assertsExecutionSuccessful, checkExpectations, executionReportJsonToString, ExecutionReportStringOptions, ExecutionReportStringOptionsDefaults, isPassingExpectations, prettifyPromptbookString, };
export { addUsage, assertsExecutionSuccessful, checkExpectations, executionReportJsonToString, ExecutionReportStringOptions, ExecutionReportStringOptionsDefaults, isPassingExpectations, prettifyPromptbookString, usageToWorktime, };
export { createPromptbookLibraryFromDirectory, createPromptbookLibraryFromSources, createPromptbookLibraryFromUrl, createPromptbookSublibrary, SimplePromptbookLibrary, };
@@ -39,0 +40,0 @@ export { SimplePromptInterfaceTools };
@@ -5,2 +5,3 @@ import type { Promisable } from 'type-fest';
import type { string_name } from '../types/typeAliases';
import type { PromptResultUsage } from './PromptResult';
/**
@@ -17,11 +18,15 @@ * Executor is a simple async function that takes INPUT PARAMETERs and returns result parameters _(along with all intermediate parameters and INPUT PARAMETERs = it extends input object)_.
/**
 * Whether the execution was successful
 * Whether the execution was successful, details are available in `executionReport`
 */
isSuccessful: boolean;
/**
 * Errors that occurred during the execution
 * Added usage of the whole execution, detailed usage is available in `executionReport`
 */
usage: PromptResultUsage;
/**
 * Errors that occurred during the execution, details are available in `executionReport`
 */
errors: Array<Error>;
/**
 * The report of the execution
 * The report of the execution with all details
 */
@@ -28,0 +33,0 @@ executionReport: ExecutionReportJson;
License Policy Violation
This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package.