Socket
Socket
Sign in | Demo | Install

@promptbook/remote-client

Package Overview
Dependencies
Maintainers
0
Versions
401
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@promptbook/remote-client - npm Package Compare versions

Comparing version 0.66.0 to 0.67.0-0

esm/typings/src/errors/index.d.ts

194

esm/index.es.js
import { io } from 'socket.io-client';
import { spaceTrim } from 'spacetrim';

@@ -7,3 +8,3 @@ // ⚠️ WARNING: This code has been generated so that any manual changes will be overwritten

*/
/**
 * The version of the Promptbook library this build was generated from
 */
// Note: the stale duplicate declaration ('0.66.0-9') was removed; the later
// assignment won anyway, so '0.66.0' is the effective value either way.
var PROMPTBOOK_VERSION = '0.66.0';
// TODO: !!!! List here all the versions and annotate + put into script

@@ -81,2 +82,98 @@

/**
 * Error raised when the pipeline collection cannot be properly loaded
 *
 * @public exported from `@promptbook/core`
 */
var CollectionError = /** @class */ (function (ParentError) {
    __extends(CollectionError, ParentError);
    function CollectionError(message) {
        // Call the base Error constructor; `|| this` keeps ES5 engines happy
        var self = ParentError.call(this, message) || this;
        self.name = 'CollectionError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, CollectionError.prototype);
        return self;
    }
    return CollectionError;
}(Error));
/**
 * Error raised when a feature is used that is not available in the current environment
 *
 * @public exported from `@promptbook/core`
 */
var EnvironmentMismatchError = /** @class */ (function (ParentError) {
    __extends(EnvironmentMismatchError, ParentError);
    function EnvironmentMismatchError(message) {
        // Call the base Error constructor; `|| this` keeps ES5 engines happy
        var self = ParentError.call(this, message) || this;
        self.name = 'EnvironmentMismatchError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, EnvironmentMismatchError.prototype);
        return self;
    }
    return EnvironmentMismatchError;
}(Error));
/**
 * Error raised when some limit was reached
 *
 * @public exported from `@promptbook/core`
 */
var LimitReachedError = /** @class */ (function (ParentError) {
    __extends(LimitReachedError, ParentError);
    function LimitReachedError(message) {
        // Call the base Error constructor; `|| this` keeps ES5 engines happy
        var self = ParentError.call(this, message) || this;
        self.name = 'LimitReachedError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, LimitReachedError.prototype);
        return self;
    }
    return LimitReachedError;
}(Error));
/**
 * Error raised when a promptbook is not found in the collection
 *
 * @public exported from `@promptbook/core`
 */
var NotFoundError = /** @class */ (function (ParentError) {
    __extends(NotFoundError, ParentError);
    function NotFoundError(message) {
        // Call the base Error constructor; `|| this` keeps ES5 engines happy
        var self = ParentError.call(this, message) || this;
        self.name = 'NotFoundError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, NotFoundError.prototype);
        return self;
    }
    return NotFoundError;
}(Error));
/**
 * Error raised from code paths that are planned but not implemented yet
 *
 * @public exported from `@promptbook/core`
 */
var NotYetImplementedError = /** @class */ (function (ParentError) {
    __extends(NotYetImplementedError, ParentError);
    function NotYetImplementedError(message) {
        // Wrap the caller's message with the standard "not implemented" notice
        var self = ParentError.call(this, spaceTrim(function (block) { return "\n ".concat(block(message), "\n\n Note: This feature is not implemented yet but it will be soon.\n\n If you want speed up the implementation or just read more, look here:\n https://github.com/webgptorg/promptbook\n\n Or contact us on me@pavolhejny.com\n\n "); })) || this;
        self.name = 'NotYetImplementedError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, NotYetImplementedError.prototype);
        return self;
    }
    return NotYetImplementedError;
}(Error));
/**
 * Error raised when a promptbook in markdown format cannot be parsed into a valid promptbook object
 *
 * @public exported from `@promptbook/core`
 */
var ParsingError = /** @class */ (function (ParentError) {
    __extends(ParsingError, ParentError);
    function ParsingError(message) {
        // Call the base Error constructor; `|| this` keeps ES5 engines happy
        var self = ParentError.call(this, message) || this;
        self.name = 'ParsingError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, ParsingError.prototype);
        return self;
    }
    return ParsingError;
}(Error));
/**
* This error indicates errors during the execution of the pipeline

@@ -98,2 +195,82 @@ *

/**
 * Error raised when a promptbook has valid syntax but contains logical errors
 * (such as circular dependencies)
 *
 * @public exported from `@promptbook/core`
 */
var PipelineLogicError = /** @class */ (function (ParentError) {
    __extends(PipelineLogicError, ParentError);
    function PipelineLogicError(message) {
        // Call the base Error constructor; `|| this` keeps ES5 engines happy
        var self = ParentError.call(this, message) || this;
        self.name = 'PipelineLogicError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, PipelineLogicError.prototype);
        return self;
    }
    return PipelineLogicError;
}(Error));
/**
 * Error raised for problems in how promptbooks reference each other
 *
 * @public exported from `@promptbook/core`
 */
var PipelineUrlError = /** @class */ (function (ParentError) {
    __extends(PipelineUrlError, ParentError);
    function PipelineUrlError(message) {
        // Call the base Error constructor; `|| this` keeps ES5 engines happy
        var self = ParentError.call(this, message) || this;
        self.name = 'PipelineUrlError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, PipelineUrlError.prototype);
        return self;
    }
    return PipelineUrlError;
}(Error));
/**
 * Error raised for conditions that should never happen; it is the last check
 * before crashing with some other error
 *
 * @public exported from `@promptbook/core`
 */
var UnexpectedError = /** @class */ (function (ParentError) {
    __extends(UnexpectedError, ParentError);
    function UnexpectedError(message) {
        // Wrap the caller's message with the standard "please report" notice
        var self = ParentError.call(this, spaceTrim(function (block) { return "\n ".concat(block(message), "\n\n Note: This error should not happen.\n It's probbably a bug in the pipeline collection\n\n Please report issue:\n https://github.com/webgptorg/promptbook/issues\n\n Or contact us on me@pavolhejny.com\n\n "); })) || this;
        self.name = 'UnexpectedError';
        // Repair the prototype chain, which subclassing a built-in breaks in ES5
        Object.setPrototypeOf(self, UnexpectedError.prototype);
        return self;
    }
    return UnexpectedError;
}(Error));
/**
 * Registry of every custom Promptbook error class, keyed by its error name
 *
 * Note: `deserializeError` uses this map to reconstruct typed errors from JSON.
 *
 * @public exported from `@promptbook/core`
 */
var ERRORS = {
    CollectionError,
    EnvironmentMismatchError,
    LimitReachedError,
    NotFoundError,
    NotYetImplementedError,
    ParsingError,
    PipelineExecutionError,
    PipelineLogicError,
    PipelineUrlError,
    UnexpectedError,
    // TODO: [🪑]> VersionMismatchError,
};
/**
 * Deserializes a plain error object (e.g. received over the wire) back into a
 * real `Error` instance, using the matching custom error class when known.
 *
 * @param {{name: string, message: string}} error - serialized error data
 * @returns {Error} a new `Error` (or custom error subclass) instance
 * @public exported from `@promptbook/utils`
 */
function deserializeError(error) {
    if (error.name === 'Error') {
        return new Error(error.message);
    }
    var CustomError = ERRORS[error.name];
    if (CustomError === undefined) {
        // Unknown error name (e.g. sent by a peer running a different version):
        // fall back to a generic Error carrying the original name instead of
        // crashing with "CustomError is not a constructor".
        var fallbackError = new Error(error.message);
        fallbackError.name = error.name;
        return fallbackError;
    }
    return new CustomError(error.message);
}
/**
* Remote server is a proxy server that uses its execution tools internally and exposes the executor interface externally.

@@ -158,3 +335,3 @@ *

llmToolsConfiguration: this.options.llmToolsConfiguration,
});
} /* <- TODO: [🤛] */);
}

@@ -165,3 +342,3 @@ else {

clientId: this.options.clientId,
});
} /* <- TODO: [🤛] */);
}

@@ -174,3 +351,3 @@ return [4 /*yield*/, new Promise(function (resolve, reject) {

socket.on('error', function (error) {
reject(new Error(error.errorMessage));
reject(deserializeError(error));
socket.disconnect();

@@ -254,4 +431,3 @@ });

prompt: prompt,
// <- TODO: [🛫] `prompt` is NOT fully serializable as JSON, it contains functions which are not serializable
});
} /* <- TODO: [🤛] */);
}

@@ -263,4 +439,3 @@ else {

prompt: prompt,
// <- TODO: [🛫] `prompt` is NOT fully serializable as JSON, it contains functions which are not serializable
});
} /* <- TODO: [🤛] */);
}

@@ -273,3 +448,3 @@ return [4 /*yield*/, new Promise(function (resolve, reject) {

socket.on('error', function (error) {
reject(new PipelineExecutionError(error.errorMessage));
reject(deserializeError(error));
socket.disconnect();

@@ -289,2 +464,3 @@ });

/**
* TODO: Maybe use `$asDeeplyFrozenSerializableJson`
* TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`

@@ -291,0 +467,0 @@ * TODO: [🍓] Allow to list compatible models with each variant

8

esm/typings/src/_packages/core.index.d.ts

@@ -30,2 +30,3 @@ import { PROMPTBOOK_VERSION } from '../version';

import { EnvironmentMismatchError } from '../errors/EnvironmentMismatchError';
import { ERRORS } from '../errors/index';
import { LimitReachedError } from '../errors/LimitReachedError';

@@ -37,5 +38,4 @@ import { NotFoundError } from '../errors/NotFoundError';

import { PipelineLogicError } from '../errors/PipelineLogicError';
import { ReferenceError } from '../errors/ReferenceError';
import { PipelineUrlError } from '../errors/PipelineUrlError';
import { UnexpectedError } from '../errors/UnexpectedError';
import { VersionMismatchError } from '../errors/VersionMismatchError';
import { assertsExecutionSuccessful } from '../execution/assertsExecutionSuccessful';

@@ -104,2 +104,3 @@ import { createPipelineExecutor } from '../execution/createPipelineExecutor';

export { EnvironmentMismatchError };
export { ERRORS };
export { LimitReachedError };

@@ -111,5 +112,4 @@ export { NotFoundError };

export { PipelineLogicError };
export { ReferenceError };
export { PipelineUrlError };
export { UnexpectedError };
export { VersionMismatchError };
export { assertsExecutionSuccessful };

@@ -116,0 +116,0 @@ export { createPipelineExecutor };

@@ -13,2 +13,3 @@ import type { PipelineCollection } from '../collection/PipelineCollection';

import type { renderPipelineMermaidOptions } from '../conversion/prettify/renderPipelineMermaidOptions';
import type { ErrorJson } from '../errors/utils/ErrorJson';
import type { AvailableModel } from '../execution/AvailableModel';

@@ -21,3 +22,3 @@ import type { CommonExecutionToolsOptions } from '../execution/CommonExecutionToolsOptions';

import type { PipelineExecutor } from '../execution/PipelineExecutor';
import type { PipelineExecutorResult } from '../execution/PipelineExecutor';
import type { PipelineExecutorResult } from '../execution/PipelineExecutorResult';
import type { PromptResult } from '../execution/PromptResult';

@@ -68,2 +69,3 @@ import type { CompletionPromptResult } from '../execution/PromptResult';

import type { FilesStorageOptions } from '../storage/files-storage/FilesStorageOptions';
import type { ExecutionPromptReportJson } from '../types/execution-report/ExecutionPromptReportJson';
import type { ExecutionReportJson } from '../types/execution-report/ExecutionReportJson';

@@ -183,2 +185,3 @@ import type { ExecutionReportString } from '../types/execution-report/ExecutionReportString';

import type { string_javascript_name } from '../types/typeAliases';
import type { string_postprocessing_function_name } from '../types/typeAliases';
import type { string_token } from '../types/typeAliases';

@@ -240,2 +243,3 @@ import type { string_license_token } from '../types/typeAliases';

export type { renderPipelineMermaidOptions };
export type { ErrorJson };
export type { AvailableModel };

@@ -294,2 +298,3 @@ export type { CommonExecutionToolsOptions };

export type { FilesStorageOptions };
export type { ExecutionPromptReportJson };
export type { ExecutionReportJson };

@@ -409,2 +414,3 @@ export type { ExecutionReportString };

export type { string_javascript_name };
export type { string_postprocessing_function_name };
export type { string_token };

@@ -411,0 +417,0 @@ export type { string_license_token };

@@ -7,2 +7,4 @@ import { PROMPTBOOK_VERSION } from '../version';

import { titleToName } from '../conversion/utils/titleToName';
import { deserializeError } from '../errors/utils/deserializeError';
import { serializeError } from '../errors/utils/serializeError';
import { forEachAsync } from '../execution/utils/forEachAsync';

@@ -12,6 +14,2 @@ import { isValidJsonString } from '../formats/json/utils/isValidJsonString';

import { $currentDate } from '../utils/$currentDate';
import { clonePipeline } from '../utils/clonePipeline';
import { deepClone } from '../utils/deepClone';
import { $deepFreeze } from '../utils/deepFreeze';
import { $getGlobalScope } from '../utils/environment/$getGlobalScope';
import { $isRunningInBrowser } from '../utils/environment/$isRunningInBrowser';

@@ -56,2 +54,7 @@ import { $isRunningInNode } from '../utils/environment/$isRunningInNode';

import { replaceParameters } from '../utils/replaceParameters';
import { $deepFreeze } from '../utils/serialization/$deepFreeze';
import { checkSerializableAsJson } from '../utils/serialization/checkSerializableAsJson';
import { clonePipeline } from '../utils/serialization/clonePipeline';
import { deepClone } from '../utils/serialization/deepClone';
import { isSerializableAsJson } from '../utils/serialization/isSerializableAsJson';
import { difference } from '../utils/sets/difference';

@@ -79,2 +82,4 @@ import { intersection } from '../utils/sets/intersection';

export { titleToName };
export { deserializeError };
export { serializeError };
export { forEachAsync };

@@ -84,6 +89,2 @@ export { isValidJsonString };

export { $currentDate };
export { clonePipeline };
export { deepClone };
export { $deepFreeze };
export { $getGlobalScope };
export { $isRunningInBrowser };

@@ -128,2 +129,7 @@ export { $isRunningInNode };

export { replaceParameters };
export { $deepFreeze };
export { checkSerializableAsJson };
export { clonePipeline };
export { deepClone };
export { isSerializableAsJson };
export { difference };

@@ -130,0 +136,0 @@ export { intersection };

/**
* Represents a command that expects a specific format.
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -4,0 +6,0 @@ export type ExpectFormatCommand = {

import type { PipelineExecutor } from './PipelineExecutor';
/**
* Asserts that the execution of a promptnook is successful
* Asserts that the execution of a Promptbook is successful
*
* @param executionResult - The partial result of the promptnook execution
* @param executionResult - The partial result of the Promptbook execution
* @throws {PipelineExecutionError} If the execution is not successful or if multiple errors occurred

@@ -7,0 +7,0 @@ * @public exported from `@promptbook/core`

@@ -14,3 +14,3 @@ import type { Arrayable } from '../types/Arrayable';

*
* Tip: Combine multiple LLM execution tools - use array of LlmExecutionTools instead of single LlmExecutionTools
* Tip: Use `createLlmToolsFromEnv()` to use all available LLM providers you configured
* @see https://github.com/webgptorg/promptbook/?tab=readme-ov-file#llm-execution-tools

@@ -24,3 +24,6 @@ */

* If none of them supports the script, an error is thrown
* Tip: Use here `new JavascriptExecutionTools()`
*
* @see https://github.com/webgptorg/promptbook/?tab=readme-ov-file#script-execution-tools
* @default [] - If not provided, no script execution will be possible
*/

@@ -27,0 +30,0 @@ script?: Arrayable<ScriptExecutionTools>;

import type { Promisable } from 'type-fest';
import type { PipelineJson } from '../types/PipelineJson/PipelineJson';
import { PipelineExecutionError } from '../errors/PipelineExecutionError';
import type { TaskProgress } from '../types/TaskProgress';
import type { ExecutionReportJson } from '../types/execution-report/ExecutionReportJson';
import type { Parameters } from '../types/typeAliases';
import type { PromptResultUsage } from './PromptResultUsage';
import type { PipelineExecutorResult } from './PipelineExecutorResult';
/**

@@ -14,4 +11,2 @@ * Executor is a simple async function that takes INPUT PARAMETERs and returns result parameters _(along with all intermediate parameters and INPUT PARAMETERs = it extends input object)_.

*
* @@@ almost-JSON (what about errors)
*
* @see https://github.com/webgptorg/promptbook#executor

@@ -23,44 +18,3 @@ */

/**
* @@@
*
* @@@ almost-JSON (what about errors)
*/
export type PipelineExecutorResult = {
/**
* Result parameters of the execution
*
* Note: If the execution was not successful, there are only some of the result parameters
*/
readonly outputParameters: Parameters;
/**
* Whether the execution was successful, details are aviable in `executionReport`
*/
readonly isSuccessful: boolean;
/**
* Added usage of whole execution, detailed usage is aviable in `executionReport`
*/
readonly usage: PromptResultUsage;
/**
* Errors that occured during the execution, details are aviable in `executionReport`
*/
readonly errors: Array<PipelineExecutionError | Error>;
/**
* Warnings that occured during the execution, details are aviable in `executionReport`
*/
readonly warnings: Array<PipelineExecutionError | Error>;
/**
* The report of the execution with all details
*/
readonly executionReport: ExecutionReportJson;
/**
* The prepared pipeline that was used for the execution
*
* Note: If you called `createPipelineExecutor` with fully prepared pipeline, this is the same object as this pipeline
* If you passed not fully prepared pipeline, this is same pipeline but fully prepared
*/
readonly preparedPipeline: PipelineJson;
};
/**
* TODO: [🧠] Should this file be in /execution or /types folder?
* TODO: [💷] `assertsExecutionSuccessful` should be the method of `PipelineExecutor` result - BUT maybe NOT?
*/

@@ -18,3 +18,3 @@ import type { string_date_iso8601 } from '../types/typeAliases';

* Note:It contains only the newly generated text NOT the whole completion
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -25,3 +25,3 @@ export type CompletionPromptResult = CommonPromptResult;

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -32,3 +32,3 @@ export type ChatPromptResult = CommonPromptResult & {};

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -44,3 +44,3 @@ export type EmbeddingPromptResult = Omit<CommonPromptResult, 'content'> & {

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -97,2 +97,3 @@ export type CommonPromptResult = {

/**
* TODO: !!!!!! [🚉] Check each provider that rawResponse is fully serializable as JSON
* TODO: [🧠] Maybe timing more accurate then seconds?

@@ -99,0 +100,0 @@ * TODO: [🧠] Should here be link to the prompt?

@@ -6,2 +6,4 @@ import type { KebabCase } from 'type-fest';

* Usage statistics for one or many prompt results
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -26,2 +28,4 @@ export type PromptResultUsage = {

* Record of all possible measurable units
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -28,0 +32,0 @@ export type PromptResultUsageCounts = Record<`${KebabCase<'TOKENS' | ExpectationUnit>}Count`, UncertainNumber>;

@@ -6,2 +6,3 @@ import type { number_positive } from '../types/typeAliases';

*
* Note: [🚉] This is fully serializable as JSON
* Note: If the value is completelly unknown, the value 0 and isUncertain is true

@@ -8,0 +9,0 @@ * Note: Not using NaN or null because it looses the value which is better to be uncertain then not to be at all

@@ -28,3 +28,2 @@ import type { PromptResult } from '../../../../execution/PromptResult';

* TODO: [🧠] Should be this exported alongsite `cacheLlmTools` through `@promptbook/utils` OR through `@promptbook/types`
* TODO: [🛫] `prompt` is NOT fully serializable as JSON, it contains functions which are not serializable, fix it
*/

@@ -1,3 +0,3 @@

import type { PostprocessingFunction } from '../../scripting/javascript/JavascriptExecutionToolsOptions';
import type { Expectations } from '../../types/PipelineJson/Expectations';
import type { string_postprocessing_function_name } from '../../types/typeAliases';
/**

@@ -12,5 +12,5 @@ * Gets the expectations and creates a fake text that meets the expectations

*/
export declare function $fakeTextToExpectations(expectations: Expectations, postprocessing?: Array<PostprocessingFunction>): Promise<string>;
export declare function $fakeTextToExpectations(expectations: Expectations, postprocessingFunctionNames?: Array<string_postprocessing_function_name>): Promise<string>;
/**
* TODO: [💝] Unite object for expecting amount and format - use here also a format
*/

@@ -32,11 +32,11 @@ import type { AvailableModel } from '../../execution/AvailableModel';

*/
callChatModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'expectations' | 'postprocessing'>): Promise<ChatPromptResult & CompletionPromptResult>;
callChatModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'expectations' | 'postprocessingFunctionNames'>): Promise<ChatPromptResult & CompletionPromptResult>;
/**
* Fakes completion model
*/
callCompletionModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'expectations' | 'postprocessing'>): Promise<CompletionPromptResult>;
callCompletionModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'expectations' | 'postprocessingFunctionNames'>): Promise<CompletionPromptResult>;
/**
* Fakes embedding model
*/
callEmbeddingModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'expectations' | 'postprocessing'>): Promise<EmbeddingPromptResult>;
callEmbeddingModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'expectations' | 'postprocessingFunctionNames'>): Promise<EmbeddingPromptResult>;
}

@@ -43,0 +43,0 @@ /**

@@ -0,1 +1,2 @@

import type { ErrorJson } from '../../../errors/utils/ErrorJson';
/**

@@ -6,7 +7,2 @@ * Socket.io error for remote text generation

*/
export type PromptbookServer_Error = {
/**
* The error message which caused the error
*/
readonly errorMessage: string;
};
export type PromptbookServer_Error = ErrorJson;

@@ -57,2 +57,3 @@ import type { AvailableModel } from '../../execution/AvailableModel';

/**
* TODO: Maybe use `$asDeeplyFrozenSerializableJson`
* TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`

@@ -59,0 +60,0 @@ * TODO: [🍓] Allow to list compatible models with each variant

@@ -14,2 +14,3 @@ import type { IDestroyable } from 'destroyable';

/**
* TODO: Maybe use `$asDeeplyFrozenSerializableJson`
* TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`

@@ -16,0 +17,0 @@ * TODO: [⚖] Expose the collection to be able to connect to same collection via createCollectionFromUrl

import type { Promisable } from 'type-fest';
import type { CommonExecutionToolsOptions } from '../../execution/CommonExecutionToolsOptions';
import type { string_javascript_name } from '../../types/typeAliases';
import type { string_postprocessing_function_name } from '../../types/typeAliases';
/**

@@ -18,3 +18,3 @@ * Options for `JavascriptExecutionTools`

*/
functions?: Record<string_javascript_name, PostprocessingFunction>;
functions?: Record<string_postprocessing_function_name, PostprocessingFunction>;
};

@@ -21,0 +21,0 @@ /**

@@ -23,3 +23,3 @@ import { Promisable } from 'type-fest';

/**
* TODO: [🧠][🛫] Constrain `TItem` to JSON-serializable objects only
* TODO: [💺] Constrain `TItem` to [🚉] JSON-serializable objects only in type level
*/

@@ -1,6 +0,5 @@

import type { PromptResult } from '../../execution/PromptResult';
import type { Prompt } from '../Prompt';
import type { string_markdown_text } from '../typeAliases';
import type { string_pipeline_url } from '../typeAliases';
import type { string_semantic_version } from '../typeAliases';
import type { ExecutionPromptReportJson } from './ExecutionPromptReportJson';
/**

@@ -14,2 +13,3 @@ * ExecutionReport is result of executing one promptbook

*
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook#execution-report

@@ -41,20 +41,3 @@ */

*/
readonly promptExecutions: Array<{
/**
* The prompt wich was executed
*/
readonly prompt: Omit<Prompt, 'pipelineUrl'>;
/**
* Result of the prompt execution (if not failed during LLM execution)
*/
readonly result?: PromptResult;
/**
* The error which occured during LLM execution or during postprocessing or expectation checking
*
* Note: It makes sense to have both error and result defined, for example when the result not pass expectations
*/
readonly error?: {
readonly message: string;
};
}>;
readonly promptExecutions: Array<ExecutionPromptReportJson>;
};

@@ -10,3 +10,3 @@ import type { ModelVariant } from './ModelVariant';

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook#model-requirements

@@ -18,3 +18,3 @@ */

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -30,3 +30,3 @@ export type CompletionModelRequirements = CommonModelRequirements & {

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -46,3 +46,3 @@ export type ChatModelRequirements = CommonModelRequirements & {

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -58,3 +58,3 @@ export type EmbeddingModelRequirements = CommonModelRequirements & {

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -61,0 +61,0 @@ export type CommonModelRequirements = {

@@ -10,3 +10,3 @@ import type { TupleToUnion } from 'type-fest';

* Note: Expectations are performed after all postprocessing steps
*
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook/discussions/30

@@ -21,2 +21,3 @@ */

*
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook/discussions/30

@@ -35,2 +36,3 @@ */

*
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook/discussions/30

@@ -37,0 +39,0 @@ */

@@ -14,2 +14,4 @@ import type { EmbeddingVector } from '../../execution/EmbeddingVector';

*
* Note: [🚉] This is fully serializable as JSON
*
* @see https://github.com/webgptorg/promptbook/discussions/41

@@ -16,0 +18,0 @@ */

@@ -8,2 +8,4 @@ import type { number_id } from '../typeAliases';

*
* Note: [🚉] This is fully serializable as JSON
*
* @see https://github.com/webgptorg/promptbook/discussions/41

@@ -24,2 +26,4 @@ */

*
* Note: [🚉] This is fully serializable as JSON
*
* @see https://github.com/webgptorg/promptbook/discussions/41

@@ -26,0 +30,0 @@ */

@@ -6,2 +6,4 @@ import type { ModelRequirements } from '../ModelRequirements';

* Template for prompt to LLM
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -8,0 +10,0 @@ export type LlmTemplateJson = PromptTemplateJsonCommon & {

@@ -8,2 +8,4 @@ import type { ChatModelRequirements } from '../ModelRequirements';

*
* Note: [🚉] This is fully serializable as JSON
*
* @see https://github.com/webgptorg/promptbook/discussions/22

@@ -26,2 +28,4 @@ */

*
* Note: [🚉] This is fully serializable as JSON
*
* @see https://github.com/webgptorg/promptbook/discussions/22

@@ -28,0 +32,0 @@ */

@@ -17,2 +17,4 @@ import type { string_file_path } from '../typeAliases';

*
* Note: [🚉] This is fully serializable as JSON
*
* @see @@@ https://github.com/webgptorg/promptbook#promptbook

@@ -19,0 +21,0 @@ */

@@ -5,2 +5,3 @@ import type { PromptTemplateJsonCommon } from './PromptTemplateJsonCommon';

*
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook/discussions/76

@@ -7,0 +8,0 @@ */

@@ -8,2 +8,4 @@ import type { ___ } from '../../utils/organization/___';

* Describes one prompt template in the promptbook
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -10,0 +12,0 @@ export type PromptTemplateJson = LlmTemplateJson | SimpleTemplateJson | ScriptJson | PromptDialogJson | ___ | ___ | ___ | ___;

import type { BlockType } from '../../commands/BLOCK/BlockTypes';
import type { ExpectFormatCommand } from '../../commands/EXPECT/ExpectFormatCommand';
import type { string_javascript } from '../typeAliases';
import type { string_javascript_name } from '../typeAliases';
import type { string_markdown } from '../typeAliases';

@@ -9,2 +8,3 @@ import type { string_markdown_text } from '../typeAliases';

import type { string_parameter_name } from '../typeAliases';
import type { string_postprocessing_function_name } from '../typeAliases';
import type { string_prompt } from '../typeAliases';

@@ -70,3 +70,3 @@ import type { string_template } from '../typeAliases';

*/
readonly postprocessingFunctionNames?: Array<string_javascript_name>;
readonly postprocessingFunctionNames?: Array<string_postprocessing_function_name>;
/**

@@ -73,0 +73,0 @@ * Expect this amount of each unit in the answer

@@ -6,2 +6,4 @@ import type { string_markdown_text } from '../typeAliases';

* Describes one parameter of the promptbook
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -8,0 +10,0 @@ export type PromptTemplateParameterJson = {

@@ -6,2 +6,3 @@ import type { ScriptLanguage } from '../ScriptLanguage';

*
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook/discussions/77

@@ -8,0 +9,0 @@ */

@@ -5,2 +5,3 @@ import type { PromptTemplateJsonCommon } from './PromptTemplateJsonCommon';

*
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook/discussions/17

@@ -7,0 +8,0 @@ */

import type { ExpectFormatCommand } from '../commands/EXPECT/ExpectFormatCommand';
import type { PostprocessingFunction } from '../scripting/javascript/JavascriptExecutionToolsOptions';
import type { ChatModelRequirements } from './ModelRequirements';

@@ -10,2 +9,3 @@ import type { CompletionModelRequirements } from './ModelRequirements';

import type { string_pipeline_url_with_hashtemplate } from './typeAliases';
import type { string_postprocessing_function_name } from './typeAliases';
import type { string_prompt } from './typeAliases';

@@ -17,3 +17,3 @@ import type { string_template } from './typeAliases';

*
* Note: [🛫] This is NOT fully serializable as JSON, it contains functions which are not serializable
* Note: [🚉] This is fully serializable as JSON
* @see https://github.com/webgptorg/promptbook#prompt

@@ -25,3 +25,3 @@ */

*
* Note: [🛫] This is NOT fully serializable as JSON, it contains functions which are not serializable
* Note: [🚉] This is fully serializable as JSON
*/

@@ -37,3 +37,3 @@ export type CompletionPrompt = CommonPrompt & {

*
* Note: [🛫] This is NOT fully serializable as JSON, it contains functions which are not serializable
* Note: [🚉] This is fully serializable as JSON
*/

@@ -49,3 +49,3 @@ export type ChatPrompt = CommonPrompt & {

*
* Note: [🛫] This is NOT fully serializable as JSON, it contains functions which are not serializable
* Note: [🚉] This is fully serializable as JSON
*/

@@ -61,3 +61,3 @@ export type EmbeddingPrompt = CommonPrompt & {

*
* Note: This is fully serializable as JSON
* Note: [🚉] This is fully serializable as JSON
*/

@@ -84,3 +84,3 @@ export type CommonPrompt = {

*/
readonly postprocessing?: Array<PostprocessingFunction>;
readonly postprocessingFunctionNames?: Array<string_postprocessing_function_name>;
/**

@@ -87,0 +87,0 @@ * Expectations for the answer

import type { TupleToUnion } from 'type-fest';
/**
* Script language
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -5,0 +7,0 @@ export type ScriptLanguage = TupleToUnion<typeof SUPPORTED_SCRIPT_LANGUAGES>;

@@ -106,2 +106,3 @@ import type { TupleToUnion } from 'type-fest';

*
* Note: [🚉] This is fully serializable as JSON
* @see https://ptbk.io/parameters

@@ -119,2 +120,4 @@ */

* @@@
*
* Note: [🚉] This is fully serializable as JSON
*/

@@ -519,2 +522,6 @@ export type ReservedParameters = Record<string_reserved_parameter_name, string_parameter_value>;

export type string_javascript_name = string;
/**
* Semantic helper; For example "unwrapResult" or "spaceTrim"
*/
export type string_postprocessing_function_name = string;
export type string_token = string;

@@ -521,0 +528,0 @@ export type string_license_token = string_token;

@@ -7,7 +7,4 @@ import type { really_any } from '../organization/really_any';

*
* @public exported from `@promptbook/utils`
* @private internal function of `$Register`
*/
export declare function $getGlobalScope(): really_any;
/***
* TODO: !!!!! Make private and promptbook registry from this
*/
{
"name": "@promptbook/remote-client",
"version": "0.66.0",
"version": "0.67.0-0",
"description": "Supercharge your use of large language models",

@@ -50,7 +50,8 @@ "private": false,

"peerDependencies": {
"@promptbook/core": "0.66.0"
"@promptbook/core": "0.67.0-0"
},
"dependencies": {
"socket.io-client": "4.7.2"
"socket.io-client": "4.7.2",
"spacetrim": "0.11.39"
}
}
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('socket.io-client')) :
typeof define === 'function' && define.amd ? define(['exports', 'socket.io-client'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global["promptbook-remote-client"] = {}, global.socket_ioClient));
})(this, (function (exports, socket_ioClient) { 'use strict';
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('socket.io-client'), require('spacetrim')) :
typeof define === 'function' && define.amd ? define(['exports', 'socket.io-client', 'spacetrim'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global["promptbook-remote-client"] = {}, global.socket_ioClient, global.spacetrim));
})(this, (function (exports, socket_ioClient, spacetrim) { 'use strict';

@@ -11,3 +11,3 @@ // ⚠️ WARNING: This code has been generated so that any manual changes will be overwritten

*/
var PROMPTBOOK_VERSION = '0.66.0-9';
var PROMPTBOOK_VERSION = '0.66.0';
// TODO: !!!! List here all the versions and annotate + put into script

@@ -85,2 +85,98 @@

/**
 * This error indicates that the pipeline collection cannot be properly loaded
 *
 * @public exported from `@promptbook/core`
 */
var CollectionError = /** @class */ (function (_super) {
    __extends(CollectionError, _super);
    /**
     * @param message - description of what went wrong while loading the collection
     */
    function CollectionError(message) {
        var self = _super.call(this, message) || this;
        self.name = 'CollectionError';
        // Restore the prototype chain (required when Error is subclassed in down-compiled ES5)
        Object.setPrototypeOf(self, CollectionError.prototype);
        return self;
    }
    return CollectionError;
}(Error));
/**
 * This error type indicates that you try to use a feature that is not available in the current environment
 *
 * @public exported from `@promptbook/core`
 */
var EnvironmentMismatchError = /** @class */ (function (Base) {
    __extends(EnvironmentMismatchError, Base);
    /**
     * @param message - which feature was requested and in which environment it is unavailable
     */
    function EnvironmentMismatchError(message) {
        var instance = Base.call(this, message) || this;
        instance.name = 'EnvironmentMismatchError';
        // Re-attach the subclass prototype so `instanceof` works after the Error super-call
        Object.setPrototypeOf(instance, EnvironmentMismatchError.prototype);
        return instance;
    }
    return EnvironmentMismatchError;
}(Error));
/**
 * This error type indicates that some limit was reached
 *
 * @public exported from `@promptbook/core`
 */
var LimitReachedError = /** @class */ (function (_super) {
    __extends(LimitReachedError, _super);
    /**
     * @param message - which limit was reached
     */
    function LimitReachedError(message) {
        var self = _super.call(this, message) || this;
        // Fix the prototype first, then tag the error with its class name
        Object.setPrototypeOf(self, LimitReachedError.prototype);
        self.name = 'LimitReachedError';
        return self;
    }
    return LimitReachedError;
}(Error));
/**
 * This error indicates that promptbook not found in the collection
 *
 * @public exported from `@promptbook/core`
 */
var NotFoundError = /** @class */ (function (Base) {
    __extends(NotFoundError, Base);
    /**
     * @param message - identification of the promptbook that was not found
     */
    function NotFoundError(message) {
        var self = Base.call(this, message) || this;
        self.name = 'NotFoundError';
        // Keep `instanceof NotFoundError` working in ES5 output
        Object.setPrototypeOf(self, NotFoundError.prototype);
        return self;
    }
    return NotFoundError;
}(Error));
/**
 * This error type indicates that some part of the code is not implemented yet
 *
 * @public exported from `@promptbook/core`
 */
var NotYetImplementedError = /** @class */ (function (_super) {
    __extends(NotYetImplementedError, _super);
    /**
     * @param message - what exactly is not implemented yet
     */
    function NotYetImplementedError(message) {
        // Wrap the caller's message with the standard "not implemented yet" note and contact links
        var wrappedMessage = spacetrim.spaceTrim(function (block) { return "\n ".concat(block(message), "\n\n Note: This feature is not implemented yet but it will be soon.\n\n If you want speed up the implementation or just read more, look here:\n https://github.com/webgptorg/promptbook\n\n Or contact us on me@pavolhejny.com\n\n "); });
        var self = _super.call(this, wrappedMessage) || this;
        self.name = 'NotYetImplementedError';
        Object.setPrototypeOf(self, NotYetImplementedError.prototype);
        return self;
    }
    return NotYetImplementedError;
}(Error));
/**
 * This error indicates that the promptbook in a markdown format cannot be parsed into a valid promptbook object
 *
 * @public exported from `@promptbook/core`
 */
var ParsingError = /** @class */ (function (_super) {
    __extends(ParsingError, _super);
    /**
     * @param message - what in the markdown could not be parsed
     */
    function ParsingError(message) {
        var instance = _super.call(this, message) || this;
        instance.name = 'ParsingError';
        // ES5 Error subclassing: re-point the instance at the subclass prototype
        Object.setPrototypeOf(instance, ParsingError.prototype);
        return instance;
    }
    return ParsingError;
}(Error));
/**
* This error indicates errors during the execution of the pipeline

@@ -102,2 +198,82 @@ *

/**
 * This error indicates that the promptbook object has valid syntax but contains logical errors (like circular dependencies)
 *
 * @public exported from `@promptbook/core`
 */
var PipelineLogicError = /** @class */ (function (Base) {
    __extends(PipelineLogicError, Base);
    /**
     * @param message - description of the logical inconsistency in the pipeline
     */
    function PipelineLogicError(message) {
        var self = Base.call(this, message) || this;
        // Fix prototype before naming so a fully-formed instance is returned
        Object.setPrototypeOf(self, PipelineLogicError.prototype);
        self.name = 'PipelineLogicError';
        return self;
    }
    return PipelineLogicError;
}(Error));
/**
 * This error indicates errors in referencing promptbooks between each other
 *
 * @public exported from `@promptbook/core`
 */
var PipelineUrlError = /** @class */ (function (_super) {
    __extends(PipelineUrlError, _super);
    /**
     * @param message - which pipeline URL reference is broken and why
     */
    function PipelineUrlError(message) {
        var instance = _super.call(this, message) || this;
        instance.name = 'PipelineUrlError';
        // Needed so `instanceof PipelineUrlError` holds in ES5-compiled output
        Object.setPrototypeOf(instance, PipelineUrlError.prototype);
        return instance;
    }
    return PipelineUrlError;
}(Error));
/**
 * This error type indicates that the error should not happen and its last check before crashing with some other error
 *
 * @public exported from `@promptbook/core`
 */
var UnexpectedError = /** @class */ (function (_super) {
    __extends(UnexpectedError, _super);
    /**
     * @param message - description of the impossible state that was reached
     */
    function UnexpectedError(message) {
        // Augment the message with the standard "please report this" note before passing it to Error
        var wrappedMessage = spacetrim.spaceTrim(function (block) { return "\n ".concat(block(message), "\n\n Note: This error should not happen.\n It's probbably a bug in the pipeline collection\n\n Please report issue:\n https://github.com/webgptorg/promptbook/issues\n\n Or contact us on me@pavolhejny.com\n\n "); });
        var self = _super.call(this, wrappedMessage) || this;
        self.name = 'UnexpectedError';
        Object.setPrototypeOf(self, UnexpectedError.prototype);
        return self;
    }
    return UnexpectedError;
}(Error));
/**
 * Index of all custom errors
 *
 * Note: Each key must match the `name` string assigned in that error's constructor,
 *       because `deserializeError` looks the class up by the serialized `name`.
 *
 * @public exported from `@promptbook/core`
 */
var ERRORS = {
    CollectionError: CollectionError,
    EnvironmentMismatchError: EnvironmentMismatchError,
    LimitReachedError: LimitReachedError,
    NotFoundError: NotFoundError,
    NotYetImplementedError: NotYetImplementedError,
    ParsingError: ParsingError,
    PipelineExecutionError: PipelineExecutionError,
    PipelineLogicError: PipelineLogicError,
    PipelineUrlError: PipelineUrlError,
    UnexpectedError: UnexpectedError,
    // TODO: [🪑]> VersionMismatchError,
};
/**
 * Deserializes the error object
 *
 * Rebuilds an `Error` instance from a plain serialized `{ name, message }` object
 * (as received e.g. from the remote server over the socket).
 *
 * @param error - serialized error whose `name` should match a key of `ERRORS` (or plain `'Error'`)
 * @returns an instance of the matching custom error class, or a plain `Error` fallback
 * @public exported from `@promptbook/utils`
 */
function deserializeError(error) {
    if (error.name === 'Error') {
        return new Error(error.message);
    }
    // Note: Guard with an own-property check so an unknown name (e.g. sent by a
    //       mismatched server version) or an inherited one (like 'constructor')
    //       cannot crash with "CustomError is not a constructor"
    var CustomError = Object.prototype.hasOwnProperty.call(ERRORS, error.name)
        ? ERRORS[error.name]
        : undefined;
    if (CustomError === undefined) {
        // Fallback: keep the original error name visible in the message instead of throwing
        return new Error("".concat(error.name, ": ").concat(error.message));
    }
    return new CustomError(error.message);
}
/**
* Remote server is a proxy server that uses its execution tools internally and exposes the executor interface externally.

@@ -162,3 +338,3 @@ *

llmToolsConfiguration: this.options.llmToolsConfiguration,
});
} /* <- TODO: [🤛] */);
}

@@ -169,3 +345,3 @@ else {

clientId: this.options.clientId,
});
} /* <- TODO: [🤛] */);
}

@@ -178,3 +354,3 @@ return [4 /*yield*/, new Promise(function (resolve, reject) {

socket.on('error', function (error) {
reject(new Error(error.errorMessage));
reject(deserializeError(error));
socket.disconnect();

@@ -258,4 +434,3 @@ });

prompt: prompt,
// <- TODO: [🛫] `prompt` is NOT fully serializable as JSON, it contains functions which are not serializable
});
} /* <- TODO: [🤛] */);
}

@@ -267,4 +442,3 @@ else {

prompt: prompt,
// <- TODO: [🛫] `prompt` is NOT fully serializable as JSON, it contains functions which are not serializable
});
} /* <- TODO: [🤛] */);
}

@@ -277,3 +451,3 @@ return [4 /*yield*/, new Promise(function (resolve, reject) {

socket.on('error', function (error) {
reject(new PipelineExecutionError(error.errorMessage));
reject(deserializeError(error));
socket.disconnect();

@@ -293,2 +467,3 @@ });

/**
* TODO: Maybe use `$asDeeplyFrozenSerializableJson`
* TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`

@@ -295,0 +470,0 @@ * TODO: [🍓] Allow to list compatible models with each variant

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc