@promptbook/anthropic-claude
Comparing version 0.66.0-6 to 0.66.0-7
@@ -10,3 +10,3 @@ import Anthropic from '@anthropic-ai/sdk';
*/
var PROMPTBOOK_VERSION = '0.66.0-5';
var PROMPTBOOK_VERSION = '0.66.0-6';
// TODO: !!!! List here all the versions and annotate + put into script
@@ -852,8 +852,6 @@
this.options = options;
// Note: Passing only Anthropic Claude relevant options to Anthropic constructor
var anthropicOptions = __assign({}, options);
delete anthropicOptions.isVerbose;
delete anthropicOptions.isProxied;
this.client = new Anthropic(anthropicOptions);
// <- TODO: !!!!!! Lazy-load client
/**
* Anthropic Claude API client.
*/
this.client = null;
}
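
The hunk above removes the eager `new Anthropic(...)` call from the constructor and leaves the client field set to null; the hunks that follow add a lazy getClient() plus a checkConfiguration() that simply forces the client to be created. A rough TypeScript sketch of that source-level pattern, with the options type simplified and the rest of the class omitted:

    import Anthropic from '@anthropic-ai/sdk';

    class AnthropicClaudeExecutionTools {
        /**
         * Anthropic Claude API client, created on first use instead of in the constructor.
         */
        private client: Anthropic | null = null;

        public constructor(
            // Simplified options shape; the real AnthropicClaudeExecutionToolsOptions has more fields.
            private readonly options: { apiKey?: string; isVerbose?: boolean; isProxied?: boolean },
        ) {}

        private async getClient(): Promise<Anthropic> {
            if (this.client === null) {
                // Pass only the Anthropic-relevant options to the Anthropic SDK constructor
                const anthropicOptions = { ...this.options };
                delete anthropicOptions.isVerbose;
                delete anthropicOptions.isProxied;
                this.client = new Anthropic(anthropicOptions);
            }
            return this.client;
        }

        /**
         * Checks the options by forcing the lazy client to be created
         */
        public async checkConfiguration(): Promise<void> {
            await this.getClient();
        }
    }
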
@@ -874,3 +872,38 @@ Object.defineProperty(AnthropicClaudeExecutionTools.prototype, "title", {
});
AnthropicClaudeExecutionTools.prototype.getClient = function () {
return __awaiter(this, void 0, void 0, function () {
var anthropicOptions;
return __generator(this, function (_a) {
if (this.client === null) {
anthropicOptions = __assign({}, this.options);
delete anthropicOptions.isVerbose;
delete anthropicOptions.isProxied;
this.client = new Anthropic(anthropicOptions);
}
return [2 /*return*/, this.client];
});
});
};
/**
* Check the `options` passed to `constructor`
*/
AnthropicClaudeExecutionTools.prototype.checkConfiguration = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, this.getClient()];
case 1:
_a.sent();
return [2 /*return*/];
}
});
});
};
/**
* List all available Anthropic Claude models that can be used
*/
AnthropicClaudeExecutionTools.prototype.listModels = function () {
return ANTHROPIC_CLAUDE_MODELS;
};
/**
* Calls Anthropic Claude API to use a chat model.
@@ -880,3 +913,3 @@ */
return __awaiter(this, void 0, void 0, function () {
var content, parameters, modelRequirements, modelName, rawPromptContent, rawRequest, start, complete, rawResponse, contentBlock, resultContent, usage;
var content, parameters, modelRequirements, client, modelName, rawPromptContent, rawRequest, start, complete, rawResponse, contentBlock, resultContent, usage;
return __generator(this, function (_a) {
@@ -889,2 +922,5 @@ switch (_a.label) {
content = prompt.content, parameters = prompt.parameters, modelRequirements = prompt.modelRequirements;
return [4 /*yield*/, this.getClient()];
case 1:
client = _a.sent();
// TODO: [☂] Use here more modelRequirements
@@ -916,4 +952,4 @@ if (modelRequirements.modelVariant !== 'CHAT') {
}
return [4 /*yield*/, this.client.messages.create(rawRequest)];
case 1:
return [4 /*yield*/, client.messages.create(rawRequest)];
case 2:
rawResponse = _a.sent();
@@ -1049,9 +1085,2 @@ if (this.options.isVerbose) {
};
// <- Note: [🤖] getDefaultXxxModel
/**
* List all available Anthropic Claude models that can be used
*/
AnthropicClaudeExecutionTools.prototype.listModels = function () {
return ANTHROPIC_CLAUDE_MODELS;
};
return AnthropicClaudeExecutionTools;
@@ -1098,2 +1127,25 @@ }());
/**
* Check the configuration of all execution tools
*/
RemoteLlmExecutionTools.prototype.checkConfiguration = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/];
});
});
};
/**
* List all available models that can be used
*/
RemoteLlmExecutionTools.prototype.listModels = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/, (this.options.models ||
[
/* !!!!!! */
])];
});
});
};
/**
* Creates a connection to the remote proxy server.
@@ -1192,15 +1244,2 @@ */
};
/**
* List all available models that can be used
*/
RemoteLlmExecutionTools.prototype.listModels = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/, (this.options.models ||
[
/* !!! */
])];
});
});
};
return RemoteLlmExecutionTools;
@@ -1258,5 +1297,13 @@ }());
Register.prototype.register = function (registered) {
// !!!!!! <- TODO: What to return here
// TODO: !!!!!! Compare if same is not already registered
this.storage.push(registered);
// <- TODO: What to return here
var packageName = registered.packageName, className = registered.className;
var existingRegistrationIndex = this.storage.findIndex(function (item) { return item.packageName === packageName && item.className === className; });
var existingRegistration = this.storage[existingRegistrationIndex];
if (existingRegistration) {
console.warn("!!!!!! Re-registering ".concat(packageName, ".").concat(className, " again"));
this.storage[existingRegistrationIndex] = registered;
}
else {
this.storage.push(registered);
}
};
@@ -1263,0 +1310,0 @@ return Register;
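
The Register.register change above swaps the unconditional storage.push for a lookup keyed on packageName and className, so a repeated registration overwrites the existing entry (with a console warning) instead of adding a duplicate. A minimal TypeScript sketch of that logic, with the Registered shape reduced to the two fields the diff actually compares:

    type Registered = { packageName: string; className: string };

    class Register<TRegistered extends Registered> {
        private readonly storage: Array<TRegistered> = [];

        public register(registered: TRegistered): void {
            const { packageName, className } = registered;
            const existingRegistrationIndex = this.storage.findIndex(
                (item) => item.packageName === packageName && item.className === className,
            );
            if (existingRegistrationIndex !== -1) {
                // Same package + class already registered -> overwrite instead of duplicating
                console.warn(`Re-registering ${packageName}.${className} again`);
                this.storage[existingRegistrationIndex] = registered;
            } else {
                this.storage.push(registered);
            }
        }
    }
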
import { PROMPTBOOK_VERSION } from '../version';
import { __CLI } from '../cli/main';
import { _CLI } from '../cli/main';
import { _AnthropicClaudeMetadataRegistration } from '../llm-providers/anthropic-claude/register-configuration';
import { _OpenAiMetadataRegistration } from '../llm-providers/openai/register-configuration';
export { PROMPTBOOK_VERSION };
export { __CLI };
export { _CLI };
export { _AnthropicClaudeMetadataRegistration };
export { _OpenAiMetadataRegistration };

@@ -7,4 +7,4 @@ import { promptbookCli } from './promptbookCli';
*/
export declare const __CLI: {
__initialize: typeof promptbookCli;
export declare const _CLI: {
_initialize: typeof promptbookCli;
};
@@ -11,0 +11,0 @@ /**

@@ -36,2 +36,3 @@ import type { Promisable } from 'type-fest';
*/
checkConfiguration(): Promisable<void>;
/**
@@ -38,0 +39,0 @@ * List all available models that can be used

@@ -1,1 +0,1 @@
import '../../../_packages/core.index';
export {};

@@ -1,4 +0,4 @@
import '../../../_packages/core.index';
export {};
/**
* TODO: [📓] Maybe test all file in samples (not just 10-simple.md)
*/

@@ -1,1 +0,1 @@
import '../../../_packages/core.index';
export {};

@@ -0,1 +1,2 @@
import '../../_packages/core.index';
import type { CreateLlmToolsFromConfigurationOptions } from './createLlmToolsFromConfiguration';
@@ -2,0 +3,0 @@ import type { LlmExecutionToolsWithTotalUsage } from './utils/count-total-usage/LlmExecutionToolsWithTotalUsage';

@@ -20,3 +20,3 @@ import type { AvailableModel } from '../../execution/AvailableModel';
*/
private readonly client;
private client;
/**
@@ -30,3 +30,12 @@ * Creates Anthropic Claude Execution Tools.
get description(): string_markdown;
private getClient;
/**
* Check the `options` passed to `constructor`
*/
checkConfiguration(): Promise<void>;
/**
* List all available Anthropic Claude models that can be used
*/
listModels(): Array<AvailableModel>;
/**
* Calls Anthropic Claude API to use a chat model.
@@ -43,6 +52,2 @@ */
private getDefaultChatModel;
/**
* List all available Anthropic Claude models that can be used
*/
listModels(): Array<AvailableModel>;
}
@@ -49,0 +54,0 @@ /**
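
The declaration changes above expose the new public surface of AnthropicClaudeExecutionTools: a checkConfiguration() method and a listModels() that now precedes the call methods. A hypothetical consumer-side sketch follows; the import path and constructor options are assumptions (the declaration diff shows neither), only the two method signatures come from the diff:

    import { AnthropicClaudeExecutionTools } from '@promptbook/anthropic-claude';

    async function main(): Promise<void> {
        // Hypothetical options; the constructor signature is not part of this diff
        const tools = new AnthropicClaudeExecutionTools({ apiKey: process.env.ANTHROPIC_CLAUDE_API_KEY });

        // Per the implementation diff above, this only instantiates the lazy Anthropic client;
        // it does not issue an API request.
        await tools.checkConfiguration();

        // Synchronous: returns the static ANTHROPIC_CLAUDE_MODELS list
        const models = tools.listModels();
        console.info(models);
    }

    main().catch(console.error);
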
@@ -20,3 +20,3 @@ import type { AvailableModel } from '../../execution/AvailableModel';
*/
private readonly client;
private client;
/**
@@ -30,3 +30,12 @@ * Creates OpenAI Execution Tools.
get description(): string_markdown;
private getClient;
/**
* Check the `options` passed to `constructor`
*/
checkConfiguration(): Promise<void>;
/**
* List all available Azure OpenAI models that can be used
*/
listModels(): Promise<Array<AvailableModel>>;
/**
* Calls OpenAI API to use a chat model.
@@ -43,6 +52,2 @@ */
private transformAzureError;
/**
* List all available Azure OpenAI models that can be used
*/
listModels(): Promise<Array<AvailableModel>>;
}
@@ -49,0 +54,0 @@ /**

@@ -21,2 +21,10 @@ import type { AvailableModel } from '../../execution/AvailableModel';
/**
* Does nothing, just to implement the interface
*/
checkConfiguration(): void;
/**
* List all available mocked-models that can be used
*/
listModels(): Array<AvailableModel>;
/**
* Mocks chat model
@@ -29,6 +37,2 @@ */
callCompletionModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements'>): Promise<CompletionPromptResult>;
/**
* List all available mocked-models that can be used
*/
listModels(): Array<AvailableModel>;
}
@@ -35,0 +39,0 @@ /**

@@ -22,2 +22,10 @@ import type { AvailableModel } from '../../execution/AvailableModel';
/**
* Does nothing, just to implement the interface
*/
checkConfiguration(): void;
/**
* List all available fake-models that can be used
*/
listModels(): Array<AvailableModel>;
/**
* Fakes chat model
@@ -34,6 +42,2 @@ */
callEmbeddingModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'expectations' | 'postprocessing'>): Promise<EmbeddingPromptResult>;
/**
* List all available fake-models that can be used
*/
listModels(): Array<AvailableModel>;
}
@@ -40,0 +44,0 @@ /**

@@ -32,2 +32,11 @@ import type { AvailableModel } from '../../execution/AvailableModel';
/**
* Check the configuration of all execution tools
*/
checkConfiguration(): Promise<void>;
/**
* List all available models that can be used
* This lists is a combination of all available models from all execution tools
*/
listModels(): Promise<Array<AvailableModel>>;
/**
* Calls the best available chat model
@@ -50,7 +59,2 @@ */
callCommonModel(prompt: Prompt): Promise<PromptResult>;
/**
* List all available models that can be used
* This lists is a combination of all available models from all execution tools
*/
listModels(): Promise<Array<AvailableModel>>;
}
@@ -57,0 +61,0 @@ /**
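
The MultipleLlmExecutionTools declaration above documents listModels() as "a combination of all available models from all execution tools". Its implementation is not part of this diff, but a minimal sketch of such a combination, assuming the class wraps an array of inner execution tools, could look like this (the type shapes below are reduced to what the sketch needs and are not the package's real types):

    // Reduced shapes; the real AvailableModel and LlmExecutionTools types have more members.
    type AvailableModelLike = { modelName: string };
    interface LlmExecutionToolsLike {
        listModels(): Promise<Array<AvailableModelLike>> | Array<AvailableModelLike>;
    }

    // Combine the model lists of all wrapped execution tools into one flat list
    async function listAllModels(llmExecutionTools: Array<LlmExecutionToolsLike>): Promise<Array<AvailableModelLike>> {
        const modelLists = await Promise.all(llmExecutionTools.map((tools) => tools.listModels()));
        return modelLists.flat();
    }
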
@@ -21,3 +21,3 @@ import type { AvailableModel } from '../../execution/AvailableModel';
*/
private readonly client;
private client;
/**
@@ -31,3 +31,12 @@ * Creates OpenAI Execution Tools.
get description(): string_markdown;
private getClient;
/**
* Check the `options` passed to `constructor`
*/
checkConfiguration(): Promise<void>;
/**
* List all available OpenAI models that can be used
*/
listModels(): Array<AvailableModel>;
/**
* Calls OpenAI API to use a chat model.
@@ -60,6 +69,2 @@ */
private getDefaultEmbeddingModel;
/**
* List all available OpenAI models that can be used
*/
listModels(): Array<AvailableModel>;
}
@@ -66,0 +71,0 @@ /**

@@ -28,2 +28,10 @@ import type { AvailableModel } from '../../execution/AvailableModel';
/**
* Check the configuration of all execution tools
*/
checkConfiguration(): Promise<void>;
/**
* List all available models that can be used
*/
listModels(): Promise<Array<AvailableModel>>;
/**
* Creates a connection to the remote proxy server.
@@ -48,6 +56,2 @@ */
private callCommonModel;
/**
* List all available models that can be used
*/
listModels(): Promise<Array<AvailableModel>>;
}
@@ -54,0 +58,0 @@ /**

@@ -1,1 +0,1 @@
import '../../src/_packages/core.index';
export {};

{
"name": "@promptbook/anthropic-claude",
"version": "0.66.0-6",
"version": "0.66.0-7",
"description": "Supercharge your use of large language models",
@@ -50,3 +50,3 @@ "private": false,
"peerDependencies": {
"@promptbook/core": "0.66.0-6"
"@promptbook/core": "0.66.0-7"
},
@@ -53,0 +53,0 @@ "dependencies": {

@@ -17,3 +17,3 @@ (function (global, factory) {
*/
var PROMPTBOOK_VERSION = '0.66.0-5';
var PROMPTBOOK_VERSION = '0.66.0-6';
// TODO: !!!! List here all the versions and annotate + put into script
@@ -859,8 +859,6 @@
this.options = options;
// Note: Passing only Anthropic Claude relevant options to Anthropic constructor
var anthropicOptions = __assign({}, options);
delete anthropicOptions.isVerbose;
delete anthropicOptions.isProxied;
this.client = new Anthropic__default["default"](anthropicOptions);
// <- TODO: !!!!!! Lazy-load client
/**
* Anthropic Claude API client.
*/
this.client = null;
}
@@ -881,3 +879,38 @@ Object.defineProperty(AnthropicClaudeExecutionTools.prototype, "title", {
});
AnthropicClaudeExecutionTools.prototype.getClient = function () {
return __awaiter(this, void 0, void 0, function () {
var anthropicOptions;
return __generator(this, function (_a) {
if (this.client === null) {
anthropicOptions = __assign({}, this.options);
delete anthropicOptions.isVerbose;
delete anthropicOptions.isProxied;
this.client = new Anthropic__default["default"](anthropicOptions);
}
return [2 /*return*/, this.client];
});
});
};
/**
* Check the `options` passed to `constructor`
*/
AnthropicClaudeExecutionTools.prototype.checkConfiguration = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, this.getClient()];
case 1:
_a.sent();
return [2 /*return*/];
}
});
});
};
/**
* List all available Anthropic Claude models that can be used
*/
AnthropicClaudeExecutionTools.prototype.listModels = function () {
return ANTHROPIC_CLAUDE_MODELS;
};
/**
* Calls Anthropic Claude API to use a chat model.
@@ -887,3 +920,3 @@ */
return __awaiter(this, void 0, void 0, function () {
var content, parameters, modelRequirements, modelName, rawPromptContent, rawRequest, start, complete, rawResponse, contentBlock, resultContent, usage;
var content, parameters, modelRequirements, client, modelName, rawPromptContent, rawRequest, start, complete, rawResponse, contentBlock, resultContent, usage;
return __generator(this, function (_a) {
@@ -896,2 +929,5 @@ switch (_a.label) {
content = prompt.content, parameters = prompt.parameters, modelRequirements = prompt.modelRequirements;
return [4 /*yield*/, this.getClient()];
case 1:
client = _a.sent();
// TODO: [☂] Use here more modelRequirements
@@ -923,4 +959,4 @@ if (modelRequirements.modelVariant !== 'CHAT') {
}
return [4 /*yield*/, this.client.messages.create(rawRequest)];
case 1:
return [4 /*yield*/, client.messages.create(rawRequest)];
case 2:
rawResponse = _a.sent();
@@ -1056,9 +1092,2 @@ if (this.options.isVerbose) {
};
// <- Note: [🤖] getDefaultXxxModel
/**
* List all available Anthropic Claude models that can be used
*/
AnthropicClaudeExecutionTools.prototype.listModels = function () {
return ANTHROPIC_CLAUDE_MODELS;
};
return AnthropicClaudeExecutionTools;
@@ -1105,2 +1134,25 @@ }());
/**
* Check the configuration of all execution tools
*/
RemoteLlmExecutionTools.prototype.checkConfiguration = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/];
});
});
};
/**
* List all available models that can be used
*/
RemoteLlmExecutionTools.prototype.listModels = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/, (this.options.models ||
[
/* !!!!!! */
])];
});
});
};
/**
* Creates a connection to the remote proxy server.
@@ -1199,15 +1251,2 @@ */
};
/**
* List all available models that can be used
*/
RemoteLlmExecutionTools.prototype.listModels = function () {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/, (this.options.models ||
[
/* !!! */
])];
});
});
};
return RemoteLlmExecutionTools;
@@ -1265,5 +1304,13 @@ }());
Register.prototype.register = function (registered) {
// !!!!!! <- TODO: What to return here
// TODO: !!!!!! Compare if same is not already registered
this.storage.push(registered);
// <- TODO: What to return here
var packageName = registered.packageName, className = registered.className;
var existingRegistrationIndex = this.storage.findIndex(function (item) { return item.packageName === packageName && item.className === className; });
var existingRegistration = this.storage[existingRegistrationIndex];
if (existingRegistration) {
console.warn("!!!!!! Re-registering ".concat(packageName, ".").concat(className, " again"));
this.storage[existingRegistrationIndex] = registered;
}
else {
this.storage.push(registered);
}
};
@@ -1270,0 +1317,0 @@ return Register;