@promptbook/remote-client
Advanced tools
Comparing version 0.66.0-0 to 0.66.0-1
@@ -7,3 +7,3 @@ import { io } from 'socket.io-client'; | ||
*/ | ||
var PROMPTBOOK_VERSION = '0.65.0'; | ||
var PROMPTBOOK_VERSION = '0.66.0-0'; | ||
// TODO: !!!! List here all the versions and annotate + put into script | ||
@@ -10,0 +10,0 @@ |
@@ -8,2 +8,3 @@ import { PROMPTBOOK_VERSION } from '../version'; | ||
import { createAnthropicClaudeExecutionTools } from '../llm-providers/anthropic-claude/createAnthropicClaudeExecutionTools'; | ||
import { _ } from '../llm-providers/anthropic-claude/register1'; | ||
export { PROMPTBOOK_VERSION }; | ||
@@ -16,1 +17,2 @@ export { ANTHROPIC_CLAUDE_MODELS }; | ||
export { createAnthropicClaudeExecutionTools }; | ||
export { _ }; |
@@ -18,2 +18,5 @@ import { PROMPTBOOK_VERSION } from '../version'; | ||
import { RESERVED_PARAMETER_NAMES } from '../config'; | ||
import { DEFAULT_REMOTE_URL } from '../config'; | ||
import { DEFAULT_REMOTE_URL_PATH } from '../config'; | ||
import { BOILERPLATE_LLM_TOOLS_CONFIGURATION_ } from '../config'; | ||
import { pipelineJsonToString } from '../conversion/pipelineJsonToString'; | ||
@@ -49,3 +52,2 @@ import type { PipelineStringToJsonOptions } from '../conversion/pipelineStringToJson'; | ||
import { prepareKnowledgeFromMarkdown } from '../knowledge/prepare-knowledge/markdown/prepareKnowledgeFromMarkdown'; | ||
import { LLM_CONFIGURATION_BOILERPLATES } from '../llm-providers/_common/config'; | ||
import { createLlmToolsFromConfiguration } from '../llm-providers/_common/createLlmToolsFromConfiguration'; | ||
@@ -85,2 +87,5 @@ import { cacheLlmTools } from '../llm-providers/_common/utils/cache/cacheLlmTools'; | ||
export { RESERVED_PARAMETER_NAMES }; | ||
export { DEFAULT_REMOTE_URL }; | ||
export { DEFAULT_REMOTE_URL_PATH }; | ||
export { BOILERPLATE_LLM_TOOLS_CONFIGURATION_ }; | ||
export { pipelineJsonToString }; | ||
@@ -116,3 +121,2 @@ export type { PipelineStringToJsonOptions }; | ||
export { prepareKnowledgeFromMarkdown }; | ||
export { LLM_CONFIGURATION_BOILERPLATES }; | ||
export { createLlmToolsFromConfiguration }; | ||
@@ -119,0 +123,0 @@ export { cacheLlmTools }; |
@@ -0,1 +1,2 @@ | ||
import type { LlmToolsConfiguration } from './llm-providers/_common/LlmToolsConfiguration'; | ||
/** | ||
@@ -113,4 +114,25 @@ * Warning message for the generated sections and files | ||
* | ||
* @public exported from `@promptbook/core` | ||
*/ | ||
export declare const DEFAULT_REMOTE_URL = "https://api.pavolhejny.com/"; | ||
/** | ||
* @@@ | ||
* | ||
* @public exported from `@promptbook/core` | ||
*/ | ||
export declare const DEFAULT_REMOTE_URL_PATH = "/promptbook/socket.io"; | ||
/** | ||
* @@@ | ||
* | ||
* @public exported from `@promptbook/core` | ||
*/ | ||
export declare const BOILERPLATE_LLM_TOOLS_CONFIGURATION_: LlmToolsConfiguration; | ||
/** | ||
* @@@ | ||
* | ||
* @private within the repository | ||
*/ | ||
export declare const DEBUG_ALLOW_PAYED_TESTING: boolean; | ||
/** | ||
* TODO: [π§ ][π§ββοΈ] Maybe join remoteUrl and path into single value | ||
*/ |
@@ -32,2 +32,12 @@ import type { Promisable } from 'type-fest'; | ||
/** | ||
 * Check configuration | ||
* | ||
* @returns nothing if configuration is correct | ||
* @throws {Error} if configuration is incorrect | ||
*/ | ||
/** | ||
* List all available models that can be used | ||
*/ | ||
listModels(): Promisable<Array<AvailableModel>>; | ||
/** | ||
* Calls a chat model | ||
@@ -44,6 +54,2 @@ */ | ||
callEmbeddingModel?(prompt: Prompt): Promise<EmbeddingPromptResult>; | ||
/** | ||
* List all available models that can be used | ||
*/ | ||
listModels(): Promisable<Array<AvailableModel>>; | ||
}; | ||
@@ -68,4 +74,4 @@ /** | ||
/** | ||
* TODO: Implement destroyable pattern to free resources | ||
* TODO: [π³] Add `callTranslationModel` | ||
 * TODO: Maybe reorder `listModels` and put it before `callChatModel`, `callCompletionModel`, `callEmbeddingModel` | ||
 * TODO: [π§ ] Emulation of one type of model with another one - emulate chat with completion; emulate translation with chat | ||
@@ -72,0 +78,0 @@ * TODO: [π][β] Some heuristic to pick the best model in listed models |
import type { LlmExecutionTools } from '../../execution/LlmExecutionTools'; | ||
import type { TODO_any } from '../../utils/organization/TODO_any'; | ||
import type { LlmToolsConfiguration } from './LlmToolsConfiguration'; | ||
/** | ||
* @public exported from `@promptbook/core` | ||
*/ | ||
export declare const LLM_CONFIGURATION_BOILERPLATES: LlmToolsConfiguration; | ||
/** | ||
* @private internal type for `createLlmToolsFromConfiguration` | ||
@@ -13,4 +8,4 @@ */ | ||
/** | ||
* TODO: [π§ ] Better file name than `config.ts` + maybe move to two separate files | ||
* TODO: !!!!!!! Make global register for this | ||
* TODO: [π§ ][π] Adding this should be responsibility of each provider package NOT this one central place | ||
*/ |
import type OpenAI from 'openai'; | ||
import type { PartialDeep } from 'type-fest'; | ||
import type { PromptResultUsage } from '../../execution/PromptResultUsage'; | ||
@@ -14,2 +15,5 @@ import type { Prompt } from '../../types/Prompt'; | ||
export declare function computeOpenaiUsage(promptContent: Prompt['content'], // <- Note: Intentionally using [] to access type properties to bring jsdoc from Prompt/PromptResult to consumer | ||
resultContent: string, rawResponse: Pick<OpenAI.Chat.Completions.ChatCompletion | OpenAI.Completions.Completion | OpenAI.Embeddings.CreateEmbeddingResponse, 'model' | 'usage'>): PromptResultUsage; | ||
resultContent: string, rawResponse: PartialDeep<Pick<OpenAI.Chat.Completions.ChatCompletion | OpenAI.Completions.Completion | OpenAI.Embeddings.CreateEmbeddingResponse, 'model' | 'usage'>>): PromptResultUsage; | ||
/** | ||
* TODO: [π€] DRY Maybe some common abstraction between `computeOpenaiUsage` and `computeAnthropicClaudeUsage` | ||
*/ |
export {}; | ||
/** | ||
* TODO: [π€] DRY Maybe some common abstraction between `computeOpenaiUsage` and `computeAnthropicClaudeUsage` | ||
*/ |
@@ -28,3 +28,3 @@ import type { CommonExecutionToolsOptions } from '../../../execution/CommonExecutionToolsOptions'; | ||
* | ||
* TODO: [π§ ] !!!! Figure out better solution | ||
* TODO: [π§ ] !!!!!! Figure out better solution | ||
*/ | ||
@@ -55,1 +55,4 @@ readonly models?: Array<AvailableModel>; | ||
}); | ||
/** | ||
* TODO: [π§ ][π§ββοΈ] Maybe join remoteUrl and path into single value | ||
*/ |
@@ -10,1 +10,4 @@ import type { really_any } from '../organization/really_any'; | ||
export declare function $getGlobalScope(): really_any; | ||
/*** | ||
* TODO: !!!!! Make private and promptbook registry from this | ||
*/ |
{ | ||
"name": "@promptbook/remote-client", | ||
"version": "0.66.0-0", | ||
"version": "0.66.0-1", | ||
"description": "Supercharge your use of large language models", | ||
@@ -50,3 +50,3 @@ "private": false, | ||
"peerDependencies": { | ||
"@promptbook/core": "0.66.0-0" | ||
"@promptbook/core": "0.66.0-1" | ||
}, | ||
@@ -53,0 +53,0 @@ "dependencies": { |
@@ -11,3 +11,3 @@ (function (global, factory) { | ||
*/ | ||
var PROMPTBOOK_VERSION = '0.65.0'; | ||
var PROMPTBOOK_VERSION = '0.66.0-0'; | ||
// TODO: !!!! List here all the versions and annotate + put into script | ||
@@ -14,0 +14,0 @@ |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
427450
447
8180