@promptbook/anthropic-claude
Comparing version 0.61.0-27 to 0.61.0-28
@@ -778,3 +778,3 @@ import Anthropic from '@anthropic-ai/sdk';
return __awaiter(this, void 0, void 0, function () {
var content, parameters, modelRequirements, modelName, rawRequest, start, complete, rawResponse, resultContent, usage;
var content, parameters, modelRequirements, modelName, rawPromptContent, rawRequest, start, complete, rawResponse, resultContent, usage;
return __generator(this, function (_a) {
@@ -792,2 +792,3 @@ switch (_a.label) {
modelName = modelRequirements.modelName || this.getDefaultChatModel().modelName;
rawPromptContent = replaceParameters(content, __assign(__assign({}, parameters), { modelName: modelName }));
rawRequest = {
@@ -804,3 +805,3 @@ model: modelRequirements.modelName || this.getDefaultChatModel().modelName,
role: 'user',
content: replaceParameters(content, __assign(__assign({}, parameters), { modelName: modelName })),
content: rawPromptContent,
},
@@ -842,4 +843,6 @@ ],
usage: usage,
rawPromptContent: rawPromptContent,
rawRequest: rawRequest,
rawResponse: rawResponse,
// <- [๐คนโโ๏ธ]
// <- [๐ฏ]
}];
@@ -877,3 +880,3 @@ }
...modelSettings,
prompt: replaceParameters(content, { ...parameters, modelName }),
prompt: rawPromptContent,
user: this.options.user,
@@ -917,3 +920,3 @@ };
rawResponse,
// <- [๐คนโโ๏ธ]
// <- [๐ฏ]
};
@@ -967,3 +970,3 @@ }
*/
var PROMPTBOOK_VERSION = '0.61.0-26';
var PROMPTBOOK_VERSION = '0.61.0-27';
// TODO: !!!! List here all the versions and annotate + put into script
@@ -970,0 +973,0 @@
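The hunks above carry the substance of this release: the fully substituted prompt text is now computed once, stored in rawPromptContent, reused as the message content sent to Claude, and also returned on the result next to rawRequest and rawResponse. A minimal TypeScript sketch of that pattern follows; the replaceParameters stand-in and the {parameterName} placeholder syntax are assumptions for illustration, not Promptbook's actual implementation.

// Simplified illustration of the pattern introduced above (not Promptbook source).
type Parameters = Record<string, string>;

// Stand-in for Promptbook's replaceParameters utility; assumes `{name}` placeholders.
function replaceParameters(template: string, parameters: Parameters): string {
    return template.replace(/\{(\w+)\}/g, (match, name: string) => parameters[name] ?? match);
}

function buildChatCall(content: string, parameters: Parameters, modelName: string) {
    // Substitute once and keep the exact text that goes to the model
    const rawPromptContent = replaceParameters(content, { ...parameters, modelName });

    const rawRequest = {
        model: modelName,
        messages: [{ role: 'user' as const, content: rawPromptContent }],
    };

    // The substituted text is now also surfaced on the result for debugging/auditing
    return { rawPromptContent, rawRequest };
}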
import type { string_date_iso8601 } from '../types/typeAliases';
import type { string_model_name } from '../types/typeAliases';
import type { string_prompt } from '../types/typeAliases';
import type { TODO_object } from '../utils/organization/TODO_object';
@@ -73,3 +74,17 @@ import type { EmbeddingVector } from './EmbeddingVector';
/**
* Exact text of the prompt (with all replacements)
*
* Note: This contains redundant information
*/
readonly rawPromptContent: string_prompt;
/**
* Raw request to the model
*
* Note: This contains redundant information
*/
readonly rawRequest: TODO_object | null;
/**
* Raw response from the model
*
* Note: This contains redundant information
*/
@@ -76,0 +91,0 @@ readonly rawResponse: TODO_object;
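This is the type-level counterpart of the change: the prompt result declaration gains readonly rawPromptContent and rawRequest fields alongside the existing rawResponse. A hedged consumer-side sketch, assuming the result type is re-exported as PromptResult from @promptbook/types (the field names come from the declaration diff above; everything else is illustrative):

import type { PromptResult } from '@promptbook/types'; // assumed export location of the public types

// Log the new audit fields of a finished LLM call; only the field names
// below are taken from the declaration diff above.
function logPromptAudit(result: PromptResult): void {
    console.log('Exact prompt sent:', result.rawPromptContent);
    console.log('Raw request:', result.rawRequest); // may be null
    console.log('Raw response:', result.rawResponse);
    console.log('Usage:', result.usage);
}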
@@ -53,3 +53,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';
* TODO: [๐] Allow to list compatible models with each variant
* TODO: [๐คนโโ๏ธ] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
* TODO: [๐ฏ] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
*/
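The TODO above (unchanged apart from its marker) plans to make RemoteLlmExecutionTools destroyable so the remote connection can be released explicitly. A rough sketch of that shape; the IDestroyable interface below is a local, hypothetical stand-in, since the exact API of the destroyable package is not shown in this diff:

// Hypothetical stand-in for the `destroyable` package's interface.
interface IDestroyable {
    readonly isDestroyed: boolean;
    destroy(): void | Promise<void>;
}

// Illustrative only - roughly what the TODO asks RemoteLlmExecutionTools to become.
class RemoteLlmExecutionToolsSketch implements IDestroyable {
    private _isDestroyed = false;

    public get isDestroyed(): boolean {
        return this._isDestroyed;
    }

    public async destroy(): Promise<void> {
        // Close the remote connection and free any resources held by the tools here
        this._isDestroyed = true;
    }
}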
@@ -15,6 +15,6 @@ import type { IDestroyable } from 'destroyable';
* TODO: Handle progress - support streaming
* TODO: [๐คนโโ๏ธ] Do not hang up immediately but wait until client closes OR timeout
* TODO: [๐คนโโ๏ธ] Timeout on chat to free up resources
* TODO: [๐ฏ] Do not hang up immediately but wait until client closes OR timeout
* TODO: [๐ฏ] Timeout on chat to free up resources
* TODO: [๐] Pass here some security token to prevent malitious usage and/or DDoS
* TODO: [0] Set unavailable models as undefined in `RemoteLlmExecutionTools` NOT throw error here
*/
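Two of the renamed TODOs above concern timeouts on the remote server: do not hang up immediately, but also do not hold a chat open forever. A generic sketch of that timeout idea, not Promptbook code:

// Wrap any pending LLM call so it rejects after `timeoutMs` instead of hanging.
function withTimeout<T>(work: Promise<T>, timeoutMs: number): Promise<T> {
    return new Promise<T>((resolve, reject) => {
        const timer = setTimeout(
            () => reject(new Error(`LLM call timed out after ${timeoutMs} ms`)),
            timeoutMs,
        );
        work.then(
            (value) => {
                clearTimeout(timer);
                resolve(value);
            },
            (error) => {
                clearTimeout(timer);
                reject(error);
            },
        );
    });
}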
@@ -15,4 +15,4 @@ import type { PipelineJson } from '../types/PipelineJson/PipelineJson';
* TODO: [๐ง] In future one preparation can take data from previous preparation and save tokens and time
* TODO: [๐] !!!!!! Use here countTotalUsage
* TODO: [๐] !!!!! Use here countTotalUsage
* TODO: [๐ ] Actions, instruments (and maybe knowledge) => Functions and tools
*/
{
"name": "@promptbook/anthropic-claude",
"version": "0.61.0-27",
"version": "0.61.0-28",
"description": "Supercharge your use of large language models",
@@ -52,3 +52,3 @@ "private": false,
"peerDependencies": {
"@promptbook/core": "0.61.0-27"
"@promptbook/core": "0.61.0-28"
},
@@ -55,0 +55,0 @@ "main": "./umd/index.umd.js",
@@ -786,3 +786,3 @@ (function (global, factory) {
return __awaiter(this, void 0, void 0, function () {
var content, parameters, modelRequirements, modelName, rawRequest, start, complete, rawResponse, resultContent, usage;
var content, parameters, modelRequirements, modelName, rawPromptContent, rawRequest, start, complete, rawResponse, resultContent, usage;
return __generator(this, function (_a) {
@@ -800,2 +800,3 @@ switch (_a.label) {
modelName = modelRequirements.modelName || this.getDefaultChatModel().modelName;
rawPromptContent = replaceParameters(content, __assign(__assign({}, parameters), { modelName: modelName }));
rawRequest = {
@@ -812,3 +813,3 @@ model: modelRequirements.modelName || this.getDefaultChatModel().modelName,
role: 'user',
content: replaceParameters(content, __assign(__assign({}, parameters), { modelName: modelName })),
content: rawPromptContent,
},
@@ -850,4 +851,6 @@ ],
usage: usage,
rawPromptContent: rawPromptContent,
rawRequest: rawRequest,
rawResponse: rawResponse,
// <- [๐คนโโ๏ธ]
// <- [๐ฏ]
}];
@@ -885,3 +888,3 @@ }
...modelSettings,
prompt: replaceParameters(content, { ...parameters, modelName }),
prompt: rawPromptContent,
user: this.options.user,
@@ -925,3 +928,3 @@ };
rawResponse,
// <- [๐คนโโ๏ธ]
// <- [๐ฏ]
};
@@ -975,3 +978,3 @@ }
*/
var PROMPTBOOK_VERSION = '0.61.0-26';
var PROMPTBOOK_VERSION = '0.61.0-27';
// TODO: !!!! List here all the versions and annotate + put into script
@@ -978,0 +981,0 @@
import type { string_date_iso8601 } from '../types/typeAliases';
import type { string_model_name } from '../types/typeAliases';
import type { string_prompt } from '../types/typeAliases';
import type { TODO_object } from '../utils/organization/TODO_object';
@@ -73,3 +74,17 @@ import type { EmbeddingVector } from './EmbeddingVector';
/**
* Exact text of the prompt (with all replacements)
*
* Note: This contains redundant information
*/
readonly rawPromptContent: string_prompt;
/**
* Raw request to the model
*
* Note: This contains redundant information
*/
readonly rawRequest: TODO_object | null;
/**
* Raw response from the model
*
* Note: This contains redundant information
*/
@@ -76,0 +91,0 @@ readonly rawResponse: TODO_object;
@@ -53,3 +53,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';
* TODO: [๐] Allow to list compatible models with each variant
* TODO: [๐คนโโ๏ธ] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
* TODO: [๐ฏ] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
*/
@@ -15,6 +15,6 @@ import type { IDestroyable } from 'destroyable';
* TODO: Handle progress - support streaming
* TODO: [๐คนโโ๏ธ] Do not hang up immediately but wait until client closes OR timeout
* TODO: [๐คนโโ๏ธ] Timeout on chat to free up resources
* TODO: [๐ฏ] Do not hang up immediately but wait until client closes OR timeout
* TODO: [๐ฏ] Timeout on chat to free up resources
* TODO: [๐] Pass here some security token to prevent malitious usage and/or DDoS
* TODO: [0] Set unavailable models as undefined in `RemoteLlmExecutionTools` NOT throw error here
*/
@@ -15,4 +15,4 @@ import type { PipelineJson } from '../types/PipelineJson/PipelineJson';
* TODO: [๐ง] In future one preparation can take data from previous preparation and save tokens and time
* TODO: [๐] !!!!!! Use here countTotalUsage
* TODO: [๐] !!!!! Use here countTotalUsage
* TODO: [๐ ] Actions, instruments (and maybe knowledge) => Functions and tools
*/
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package