@promptbook/browser - npm Package Compare versions

Comparing version 0.65.0-2 to 0.65.0-3

esm/typings/src/llm-providers/anthropic-claude/createAnthropicClaudeExecutionTools.d.ts


esm/index.es.js

@@ -8,3 +8,3 @@ import { spaceTrim } from 'spacetrim';

*/
var PROMPTBOOK_VERSION = '0.65.0-1';
var PROMPTBOOK_VERSION = '0.65.0-2';
// TODO: !!!! List here all the versions and annotate + put into script

@@ -11,0 +11,0 @@

@@ -5,2 +5,5 @@ import { PROMPTBOOK_VERSION } from '../version';

import type { AnthropicClaudeExecutionToolsOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
import type { AnthropicClaudeExecutionToolsDirectOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
import type { AnthropicClaudeExecutionToolsProxiedOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
import { createAnthropicClaudeExecutionTools } from '../llm-providers/anthropic-claude/createAnthropicClaudeExecutionTools';
export { PROMPTBOOK_VERSION };

@@ -10,1 +13,4 @@ export { ANTHROPIC_CLAUDE_MODELS };

export type { AnthropicClaudeExecutionToolsOptions };
export type { AnthropicClaudeExecutionToolsDirectOptions };
export type { AnthropicClaudeExecutionToolsProxiedOptions };
export { createAnthropicClaudeExecutionTools };

@@ -48,2 +48,3 @@ import { PROMPTBOOK_VERSION } from '../version';

import { prepareKnowledgeFromMarkdown } from '../knowledge/prepare-knowledge/markdown/prepareKnowledgeFromMarkdown';
import { LLM_CONFIGURATION_BOILERPLATES } from '../llm-providers/_common/config';
import { createLlmToolsFromConfiguration } from '../llm-providers/_common/createLlmToolsFromConfiguration';

@@ -113,2 +114,3 @@ import { cacheLlmTools } from '../llm-providers/_common/utils/cache/cacheLlmTools';

export { prepareKnowledgeFromMarkdown };
export { LLM_CONFIGURATION_BOILERPLATES };
export { createLlmToolsFromConfiguration };

@@ -115,0 +117,0 @@ export { cacheLlmTools };

import { PROMPTBOOK_VERSION } from '../version';
import { createCollectionFromDirectory } from '../collection/constructors/createCollectionFromDirectory';
import { LLM_CONFIGURATION_BOILERPLATES } from '../llm-providers/_common/config';
import { createLlmToolsFromConfigurationFromEnv } from '../llm-providers/_common/createLlmToolsFromConfigurationFromEnv';

@@ -9,5 +8,4 @@ import { createLlmToolsFromEnv } from '../llm-providers/_common/createLlmToolsFromEnv';

export { createCollectionFromDirectory };
export { LLM_CONFIGURATION_BOILERPLATES };
export { createLlmToolsFromConfigurationFromEnv };
export { createLlmToolsFromEnv };
export { FilesStorage };
import { PROMPTBOOK_VERSION } from '../version';
import type { RemoteLlmExecutionToolsOptions } from '../llm-providers/remote/interfaces/RemoteLlmExecutionToolsOptions';
import type { RemoteServerOptions } from '../llm-providers/remote/interfaces/RemoteServerOptions';
import { RemoteLlmExecutionTools } from '../llm-providers/remote/RemoteLlmExecutionTools';
import type { RemoteLlmExecutionToolsOptions } from '../llm-providers/remote/RemoteLlmExecutionToolsOptions';
export { PROMPTBOOK_VERSION };
export type { RemoteLlmExecutionToolsOptions };
export type { RemoteServerOptions };
export { RemoteLlmExecutionTools };
export type { RemoteLlmExecutionToolsOptions };

@@ -38,2 +38,4 @@ import type { PipelineCollection } from '../collection/PipelineCollection';

import type { AnthropicClaudeExecutionToolsOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
import type { AnthropicClaudeExecutionToolsDirectOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
import type { AnthropicClaudeExecutionToolsProxiedOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
import type { AzureOpenAiExecutionToolsOptions } from '../llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions';

@@ -46,5 +48,9 @@ import type { LangtailExecutionToolsOptions } from '../llm-providers/langtail/LangtailExecutionToolsOptions';

import type { Promptbook_Server_Request } from '../llm-providers/remote/interfaces/Promptbook_Server_Request';
import type { Promptbook_Server_CollectionRequest } from '../llm-providers/remote/interfaces/Promptbook_Server_Request';
import type { Promptbook_Server_AnonymousRequest } from '../llm-providers/remote/interfaces/Promptbook_Server_Request';
import type { Promptbook_Server_Response } from '../llm-providers/remote/interfaces/Promptbook_Server_Response';
import type { RemoteLlmExecutionToolsOptions } from '../llm-providers/remote/interfaces/RemoteLlmExecutionToolsOptions';
import type { RemoteServerOptions } from '../llm-providers/remote/interfaces/RemoteServerOptions';
import type { RemoteLlmExecutionToolsOptions } from '../llm-providers/remote/RemoteLlmExecutionToolsOptions';
import type { AnonymousRemoteServerOptions } from '../llm-providers/remote/interfaces/RemoteServerOptions';
import type { CollectionRemoteServerOptions } from '../llm-providers/remote/interfaces/RemoteServerOptions';
import type { PrepareOptions } from '../prepare/PrepareOptions';

@@ -131,2 +137,3 @@ import type { JavascriptExecutionToolsOptions } from '../scripting/javascript/JavascriptExecutionToolsOptions';

import type { string_url } from '../types/typeAliases';
import type { string_base_url } from '../types/typeAliases';
import type { string_pipeline_url } from '../types/typeAliases';

@@ -249,2 +256,4 @@ import type { string_pipeline_url_with_hashtemplate } from '../types/typeAliases';

export type { AnthropicClaudeExecutionToolsOptions };
export type { AnthropicClaudeExecutionToolsDirectOptions };
export type { AnthropicClaudeExecutionToolsProxiedOptions };
export type { AzureOpenAiExecutionToolsOptions };

@@ -257,5 +266,9 @@ export type { LangtailExecutionToolsOptions };

export type { Promptbook_Server_Request };
export type { Promptbook_Server_CollectionRequest };
export type { Promptbook_Server_AnonymousRequest };
export type { Promptbook_Server_Response };
export type { RemoteLlmExecutionToolsOptions };
export type { RemoteServerOptions };
export type { RemoteLlmExecutionToolsOptions };
export type { AnonymousRemoteServerOptions };
export type { CollectionRemoteServerOptions };
export type { PrepareOptions };

@@ -342,2 +355,3 @@ export type { JavascriptExecutionToolsOptions };

export type { string_url };
export type { string_base_url };
export type { string_pipeline_url };

@@ -344,0 +358,0 @@ export type { string_pipeline_url_with_hashtemplate };

@@ -0,6 +1,6 @@

import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
import type { TODO_any } from '../../utils/organization/TODO_any';
import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
import type { LlmToolsConfiguration } from './LlmToolsConfiguration';
/**
* @public exported from `@promptbook/node`
* @public exported from `@promptbook/core`
*/

@@ -11,3 +11,3 @@ export declare const LLM_CONFIGURATION_BOILERPLATES: LlmToolsConfiguration;

*/
export declare const EXECUTION_TOOLS_CLASSES: Record<`get${string}`, (options: TODO_any) => LlmExecutionTools>;
export declare const EXECUTION_TOOLS_CLASSES: Record<`create${string}`, (options: TODO_any) => LlmExecutionTools>;
/**

@@ -14,0 +14,0 @@ * TODO: [🧠] Better file name than `config.ts` + maybe move to two separate files
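
The key type of `EXECUTION_TOOLS_CLASSES` narrows from `get${string}` to `create${string}`, so every registered entry must now be named like a factory. A minimal sketch of what the new constraint admits, assuming `createAnthropicClaudeExecutionTools` (imported elsewhere in this release) returns an `LlmExecutionTools` instance; the import locations are assumptions:

import type { LlmExecutionTools } from '@promptbook/types'; // assumed export location
import { createAnthropicClaudeExecutionTools } from '@promptbook/anthropic-claude'; // assumed export location

// Keys must match the template literal type `create${string}`;
// values are factories that build LlmExecutionTools from provider-specific options.
const toolFactories: Record<`create${string}`, (options: any) => LlmExecutionTools> = {
    createAnthropicClaudeExecutionTools,
    // getAnthropicClaudeExecutionTools, // a `get…` key would no longer satisfy the new key type
};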

@@ -8,3 +8,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';

import type { string_title } from '../../types/typeAliases';
import type { AnthropicClaudeExecutionToolsOptions } from './AnthropicClaudeExecutionToolsOptions';
import type { AnthropicClaudeExecutionToolsDirectOptions } from './AnthropicClaudeExecutionToolsOptions';
/**

@@ -14,2 +14,3 @@ * Execution Tools for calling Anthropic Claude API.

* @public exported from `@promptbook/anthropic-claude`
* @deprecated use `createAnthropicClaudeExecutionTools` instead
*/

@@ -27,3 +28,3 @@ export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools {

*/
constructor(options?: AnthropicClaudeExecutionToolsOptions);
constructor(options?: AnthropicClaudeExecutionToolsDirectOptions);
get title(): string_title & string_markdown_text;

@@ -54,4 +55,5 @@ get description(): string_markdown;

* TODO: [🧠][🈁] Maybe use `isDeterministic` from options
* TODO: [🍜] Auto use anonymous server in browser
* TODO: [🍜] !!!!!! Auto use anonymous server in browser
* TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
* TODO: [πŸ“…] Maybe instead of `RemoteLlmExecutionToolsOptions` use `proxyWithAnonymousRemoteServer` (if implemented)
*/
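
In this release the `AnthropicClaudeExecutionTools` class is marked `@deprecated` and its constructor now takes `AnthropicClaudeExecutionToolsDirectOptions`; new code is pointed at the `createAnthropicClaudeExecutionTools` factory instead. A hedged migration sketch, assuming the factory is re-exported from `@promptbook/anthropic-claude` and accepts the options union introduced below:

import { createAnthropicClaudeExecutionTools } from '@promptbook/anthropic-claude'; // assumed export location

// Before (now deprecated): direct construction with Anthropic client options.
// const llmTools = new AnthropicClaudeExecutionTools({ apiKey: process.env.ANTHROPIC_API_KEY });

// After: the factory chooses the direct or proxied implementation based on the options it receives.
const llmTools = createAnthropicClaudeExecutionTools({
    apiKey: process.env.ANTHROPIC_API_KEY, // from Anthropic's ClientOptions (direct variant)
});
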
import type { ClientOptions } from '@anthropic-ai/sdk';
import type { CommonExecutionToolsOptions } from '../../execution/CommonExecutionToolsOptions';
import type { RemoteLlmExecutionToolsOptions } from '../remote/interfaces/RemoteLlmExecutionToolsOptions';
/**

@@ -9,5 +10,25 @@ * Options for `AnthropicClaudeExecutionTools`

*/
export type AnthropicClaudeExecutionToolsOptions = CommonExecutionToolsOptions & ClientOptions;
export type AnthropicClaudeExecutionToolsOptions = AnthropicClaudeExecutionToolsDirectOptions | AnthropicClaudeExecutionToolsProxiedOptions;
/**
* TODO: [🍜] Auto add WebGPT / Promptbook.studio anonymous server in browser
* Options for directly used `AnthropicClaudeExecutionTools`
*
* This extends Anthropic's `ClientOptions`, which are directly passed to the Anthropic client.
* @public exported from `@promptbook/anthropic-claude`
*/
export type AnthropicClaudeExecutionToolsDirectOptions = CommonExecutionToolsOptions & ClientOptions & {
isProxied?: false;
};
/**
* Options for proxied `AnthropicClaudeExecutionTools`
*
* This extends Anthropic's `ClientOptions`, which are directly passed to the Anthropic client.
* @public exported from `@promptbook/anthropic-claude`
*/
export type AnthropicClaudeExecutionToolsProxiedOptions = CommonExecutionToolsOptions & ClientOptions & {
isProxied: true;
} & Pick<RemoteLlmExecutionToolsOptions, 'remoteUrl' | 'path'>;
/**
* TODO: [🍜] Default remote remoteUrl and path for anonymous server
* TODO: [🍜] !!!!!! Auto add WebGPT / Promptbook.studio anonymous server in browser
* TODO: [🧠][🀺] Detecting `user`
*/
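
`AnthropicClaudeExecutionToolsOptions` is now a union of a direct and a proxied variant, discriminated by `isProxied`. A minimal sketch of the two shapes using only the fields visible in this diff; the import location and the concrete `remoteUrl`/`path` values are placeholders, not real endpoints:

import type {
    AnthropicClaudeExecutionToolsDirectOptions,
    AnthropicClaudeExecutionToolsProxiedOptions,
} from '@promptbook/anthropic-claude'; // assumed export location

// Direct variant: Anthropic's ClientOptions are passed straight to the Anthropic client.
const directOptions: AnthropicClaudeExecutionToolsDirectOptions = {
    isProxied: false, // optional; omitting it also selects the direct variant
    apiKey: process.env.ANTHROPIC_API_KEY,
};

// Proxied variant: calls are routed through a remote Promptbook server
// (see the TODOs above about auto-using the anonymous server in the browser).
const proxiedOptions: AnthropicClaudeExecutionToolsProxiedOptions = {
    isProxied: true,
    remoteUrl: 'https://example.com', // placeholder base URL
    path: '/promptbook/socket.io',    // placeholder path
};
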
#!/usr/bin/env ts-node
export {};
/**
* TODO: [🍜] Playground with WebGPT / Promptbook.studio anonymous server
* TODO: [🍜] !!!!!! Playground with WebGPT / Promptbook.studio anonymous server
* TODO: !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
*/

@@ -6,3 +6,3 @@ /**

*/
export interface Promptbook_Server_Error {
export type Promptbook_Server_Error = {
/**

@@ -12,2 +12,2 @@ * The error message which caused the error

readonly errorMessage: string;
}
};

@@ -7,3 +7,3 @@ import type { TaskProgress } from '../../../types/TaskProgress';

*/
export interface Promptbook_Server_Progress {
export type Promptbook_Server_Progress = {
/**

@@ -13,2 +13,2 @@ * The progress of text generation

readonly taskProgress: TaskProgress;
}
};
import type { Prompt } from '../../../types/Prompt';
import type { client_id } from '../../../types/typeAliases';
import type { LlmToolsConfiguration } from '../../_common/LlmToolsConfiguration';
/**

@@ -8,3 +9,4 @@ * Socket.io progress for remote text generation

*/
export interface Promptbook_Server_Request {
export type Promptbook_Server_Request = Promptbook_Server_CollectionRequest | Promptbook_Server_AnonymousRequest;
export type Promptbook_Server_CollectionRequest = {
/**

@@ -18,2 +20,12 @@ * Client responsible for the requests

readonly prompt: Prompt;
}
};
export type Promptbook_Server_AnonymousRequest = {
/**
* Configuration for the LLM tools
*/
readonly llmToolsConfiguration: LlmToolsConfiguration;
/**
* The Prompt to execute
*/
readonly prompt: Prompt;
};
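
`Promptbook_Server_Request` becomes a union of a collection request and an anonymous request, and only the anonymous shape carries `llmToolsConfiguration`. A rough sketch of how a server handler might tell them apart, assuming the field names shown above and that the types are re-exported from the package entry point:

import type {
    Promptbook_Server_AnonymousRequest,
    Promptbook_Server_Request,
} from '@promptbook/core'; // assumed export location

function isAnonymousRequest(request: Promptbook_Server_Request): request is Promptbook_Server_AnonymousRequest {
    // Only anonymous requests carry the full LlmToolsConfiguration (including API keys).
    return 'llmToolsConfiguration' in request;
}

function handleRequest(request: Promptbook_Server_Request): void {
    if (isAnonymousRequest(request)) {
        // Anonymous mode: build LLM tools from request.llmToolsConfiguration
        // and execute request.prompt; the server's collection is ignored.
    } else {
        // Collection mode: check request.prompt against the server's collection
        // before executing it (the misuse check mentioned further down).
    }
}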

@@ -9,2 +9,10 @@ import type { PipelineCollection } from '../../../collection/PipelineCollection';

*
* There are two modes of remote server:
*
* 1) **Collection mode** Server will receive `collection` and execute prompts only from this collection
* 2) **Anonymous mode** Server will receive full `LlmToolsConfiguration` (with api keys) and just act as a proxy
* In anonymous mode, `collection` will be ignored and any prompt will be executed
*
* You can enable both modes at the same time.
*
* @public exported from `@promptbook/remote-client`

@@ -25,6 +33,18 @@ * @public exported from `@promptbook/remote-server`

readonly path: string_uri;
} & (AnonymousRemoteServerOptions | CollectionRemoteServerOptions | (AnonymousRemoteServerOptions & CollectionRemoteServerOptions));
export type AnonymousRemoteServerOptions = {
/**
* Enable anonymous mode
*/
readonly isAnonymousModeAllowed: true;
};
export type CollectionRemoteServerOptions = {
/**
* Enable collection mode
*/
readonly isCollectionModeAllowed: true;
/**
* Promptbook collection to use
*
* This is used to check validity of the prompt to prevent DDoS
* This is used to check validity of the prompt to prevent misuse
*/

@@ -38,3 +58,4 @@ readonly collection: PipelineCollection;

/**
* TODO: [🍜] Add anonymous option
* TODO: Constrain anonymous mode for specific models / providers
* TODO: [🧠][🀺] Remove `createLlmExecutionTools`, pass just `llmExecutionTools`
*/
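
`RemoteServerOptions` is now the intersection of its common fields with `AnonymousRemoteServerOptions`, `CollectionRemoteServerOptions`, or both, so a server can allow either mode or the two at once. A hedged sketch of the two mode objects using only the fields visible in this diff; import locations and any remaining server fields are assumptions:

import type {
    AnonymousRemoteServerOptions,
    CollectionRemoteServerOptions,
} from '@promptbook/remote-server'; // assumed export location
import type { PipelineCollection } from '@promptbook/types'; // assumed export location

// Anonymous mode: the server acts as a pure proxy; clients send their own
// LlmToolsConfiguration (including API keys) and any prompt may be executed.
const anonymousMode: AnonymousRemoteServerOptions = {
    isAnonymousModeAllowed: true,
};

// Collection mode: only prompts belonging to the server's own collection are
// executed, which is the misuse check described in the comments above.
declare const collection: PipelineCollection; // obtained however the server builds its collection
const collectionMode: CollectionRemoteServerOptions = {
    isCollectionModeAllowed: true,
    collection,
};

// The full RemoteServerOptions combine the common fields (such as `path`)
// with one of these mode objects, or with both to enable the two modes at once.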

@@ -12,3 +12,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';

import type { string_title } from '../../types/typeAliases';
import type { RemoteLlmExecutionToolsOptions } from './RemoteLlmExecutionToolsOptions';
import type { RemoteLlmExecutionToolsOptions } from './interfaces/RemoteLlmExecutionToolsOptions';
/**

@@ -54,6 +54,7 @@ * Remote server is a proxy server that uses its execution tools internally and exposes the executor interface externally.

/**
* TODO: [🍜] !!!!!! Default remote remoteUrl and path for anonymous server
* TODO: [πŸ“] Allow to list compatible models with each variant
* TODO: [πŸ—―] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
* TODO: [🍜] Add anonymous option
* TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
* TODO: [🧠] Maybe remove `@promptbook/remote-client` and just use `@promptbook/core`
*/

@@ -14,3 +14,3 @@ import type { IDestroyable } from 'destroyable';

/**
* TODO: [🍜] Add anonymous option
* TODO: [🍜] !!!!!! Add anonymous option
* TODO: [βš–] Expose the collection to be able to connect to same collection via createCollectionFromUrl

@@ -22,2 +22,3 @@ * TODO: Handle progress - support streaming

* TODO: [0] Set unavailable models as undefined in `RemoteLlmExecutionTools` NOT throw error here
* TODO: Constrain anonymous mode for specific models / providers
*/

@@ -292,2 +292,8 @@ import type { TupleToUnion } from 'type-fest';

*
* For example `"https://collboard.com"`
*/
export type string_base_url = string;
/**
* Semantic helper
*
* For example `"https://promptbook.studio/webgpt/write-website-content-cs.ptbk.md"`

@@ -294,0 +300,0 @@ */

{
"name": "@promptbook/browser",
"version": "0.65.0-2",
"version": "0.65.0-3",
"description": "Supercharge your use of large language models",

@@ -50,3 +50,3 @@ "private": false,

"peerDependencies": {
"@promptbook/core": "0.65.0-2"
"@promptbook/core": "0.65.0-3"
},

@@ -53,0 +53,0 @@ "dependencies": {

@@ -11,3 +11,3 @@ (function (global, factory) {

*/
var PROMPTBOOK_VERSION = '0.65.0-1';
var PROMPTBOOK_VERSION = '0.65.0-2';
// TODO: !!!! List here all the versions and annotate + put into script

@@ -14,0 +14,0 @@
