Socket
Socket
Sign in · Demo · Install

@promptbook/remote-client

Package Overview
Dependencies
Maintainers
1
Versions
401
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@promptbook/remote-client - npm Package Compare versions

Comparing version 0.62.0-0 to 0.62.0-1

esm/typings/src/llm-providers/openai/computeOpenaiUsage.test.d.ts

3

esm/index.es.js

@@ -212,2 +212,3 @@ import { io } from 'socket.io-client';

* TODO: [🗯] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
* TODO: [🍜] Add anonymous option
*/

@@ -218,3 +219,3 @@

*/
var PROMPTBOOK_VERSION = '0.61.0';
var PROMPTBOOK_VERSION = '0.62.0-0';
// TODO: !!!! List here all the versions and annotate + put into script

@@ -221,0 +222,0 @@

@@ -29,3 +29,3 @@ declare const _default: ({

promptbookVersion: string;
modelUsage: {
usage: {
price: {

@@ -117,3 +117,3 @@ value: number;

promptbookVersion: string;
modelUsage: {
usage: {
price: {

@@ -200,3 +200,3 @@ value: number;

promptbookVersion: string;
modelUsage: {
usage: {
price: {

@@ -203,0 +203,0 @@ value: number;

@@ -15,2 +15,4 @@ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';

*
* @@@ .env
*
* It looks for environment variables:

@@ -24,2 +26,3 @@ * - `process.env.OPENAI_API_KEY`

/**
* TODO: [🍜] Use `createLlmToolsFromConfiguration`
* TODO: [🔼] !!! Export via `@promptbook/node`

@@ -26,0 +29,0 @@ * TODO: @@@ write discussion about this - wizzard

import type { LlmExecutionToolsWithTotalUsage } from './utils/count-total-usage/LlmExecutionToolsWithTotalUsage';
type GetLlmToolsForCliOptions = {
/**
* @@@
*
* @default false
*/
isCacheReloaded?: boolean;
};
/**

@@ -7,3 +15,4 @@ * Returns LLM tools for CLI

*/
export declare function getLlmToolsForCli(): LlmExecutionToolsWithTotalUsage;
export declare function getLlmToolsForCli(options?: GetLlmToolsForCliOptions): LlmExecutionToolsWithTotalUsage;
export {};
/**

@@ -10,0 +19,0 @@ * Note: [🟡] This code should never be published outside of `@promptbook/cli`

import type { CreateLlmToolsFromEnvOptions } from './createLlmToolsFromEnv';
import type { LlmExecutionToolsWithTotalUsage } from './utils/count-total-usage/LlmExecutionToolsWithTotalUsage';
type GetLlmToolsForTestingAndScriptsAndPlaygroundOptions = CreateLlmToolsFromEnvOptions & {
/**
* @@@
*
* @default false
*/
isCacheReloaded?: boolean;
};
/**

@@ -8,3 +16,4 @@ * Returns LLM tools for testing purposes

*/
export declare function getLlmToolsForTestingAndScriptsAndPlayground(options?: CreateLlmToolsFromEnvOptions): LlmExecutionToolsWithTotalUsage;
export declare function getLlmToolsForTestingAndScriptsAndPlayground(options?: GetLlmToolsForTestingAndScriptsAndPlaygroundOptions): LlmExecutionToolsWithTotalUsage;
export {};
/**

@@ -11,0 +20,0 @@ * Note: [⚪] This should never be in any released package

@@ -10,2 +10,8 @@ import type { PromptbookStorage } from '../../../../storage/_common/PromptbookStorage';

storage: PromptbookStorage<CacheItem>;
/**
* @@@
*
* @default false
*/
isReloaded?: boolean;
};

@@ -8,8 +8,9 @@ import type { LlmExecutionTools } from '../../../../execution/LlmExecutionTools';

/**
* Total cost of the execution
* Get total cost of the execution up to this point
*/
totalUsage: PromptResultUsage;
getTotalUsage(): PromptResultUsage;
};
/**
* TODO: [👷‍♂️] @@@ Manual about construction of llmTools
* Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
*/

@@ -49,2 +49,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';

* TODO: [🧠][🈁] Maybe use `isDeterministic` from options
* TODO: [🍜] Auto use anonymous server in browser
*/

@@ -9,1 +9,4 @@ import type { ClientOptions } from '@anthropic-ai/sdk';

export type AnthropicClaudeExecutionToolsOptions = CommonExecutionToolsOptions & ClientOptions;
/**
* TODO: [🍜] Auto add WebGPT / Promptbook.studio anonymous server in browser
*/
#!/usr/bin/env ts-node
export {};
/**
* TODO: [🍜] Playground with WebGPT / Promptbook.studio anonymous server
* TODO: !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
*/

@@ -29,1 +29,4 @@ import type { PipelineCollection } from '../../../collection/PipelineCollection';

};
/**
* TODO: [🍜] Add anonymous option
*/

@@ -54,2 +54,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';

* TODO: [🗯] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
*/
* TODO: [🍜] Add anonymous option
*/

@@ -13,2 +13,3 @@ import type { IDestroyable } from 'destroyable';

/**
* TODO: [🍜] Add anonymous option
* TODO: [⚖] Expose the collection to be able to connect to same collection via createCollectionFromUrl

@@ -15,0 +16,0 @@ * TODO: Handle progress - support streaming

@@ -16,3 +16,3 @@ import type { PromptResultUsage } from '../../execution/PromptResultUsage';

*/
readonly modelUsage: PromptResultUsage;
readonly usage: PromptResultUsage;
};

@@ -19,0 +19,0 @@ /**

{
"name": "@promptbook/remote-client",
"version": "0.62.0-0",
"version": "0.62.0-1",
"description": "Supercharge your use of large language models",

@@ -50,3 +50,3 @@ "private": false,

"peerDependencies": {
"@promptbook/core": "0.62.0-0"
"@promptbook/core": "0.62.0-1"
},

@@ -53,0 +53,0 @@ "main": "./umd/index.umd.js",

@@ -216,2 +216,3 @@ (function (global, factory) {

* TODO: [🗯] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
* TODO: [🍜] Add anonymous option
*/

@@ -222,3 +223,3 @@

*/
var PROMPTBOOK_VERSION = '0.61.0';
var PROMPTBOOK_VERSION = '0.62.0-0';
// TODO: !!!! List here all the versions and annotate + put into script

@@ -225,0 +226,0 @@

@@ -29,3 +29,3 @@ declare const _default: ({

promptbookVersion: string;
modelUsage: {
usage: {
price: {

@@ -117,3 +117,3 @@ value: number;

promptbookVersion: string;
modelUsage: {
usage: {
price: {

@@ -200,3 +200,3 @@ value: number;

promptbookVersion: string;
modelUsage: {
usage: {
price: {

@@ -203,0 +203,0 @@ value: number;

@@ -15,2 +15,4 @@ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';

*
* @@@ .env
*
* It looks for environment variables:

@@ -24,2 +26,3 @@ * - `process.env.OPENAI_API_KEY`

/**
* TODO: [🍜] Use `createLlmToolsFromConfiguration`
* TODO: [🔼] !!! Export via `@promptbook/node`

@@ -26,0 +29,0 @@ * TODO: @@@ write discussion about this - wizzard

import type { LlmExecutionToolsWithTotalUsage } from './utils/count-total-usage/LlmExecutionToolsWithTotalUsage';
type GetLlmToolsForCliOptions = {
/**
* @@@
*
* @default false
*/
isCacheReloaded?: boolean;
};
/**

@@ -7,3 +15,4 @@ * Returns LLM tools for CLI

*/
export declare function getLlmToolsForCli(): LlmExecutionToolsWithTotalUsage;
export declare function getLlmToolsForCli(options?: GetLlmToolsForCliOptions): LlmExecutionToolsWithTotalUsage;
export {};
/**

@@ -10,0 +19,0 @@ * Note: [🟡] This code should never be published outside of `@promptbook/cli`

import type { CreateLlmToolsFromEnvOptions } from './createLlmToolsFromEnv';
import type { LlmExecutionToolsWithTotalUsage } from './utils/count-total-usage/LlmExecutionToolsWithTotalUsage';
type GetLlmToolsForTestingAndScriptsAndPlaygroundOptions = CreateLlmToolsFromEnvOptions & {
/**
* @@@
*
* @default false
*/
isCacheReloaded?: boolean;
};
/**

@@ -8,3 +16,4 @@ * Returns LLM tools for testing purposes

*/
export declare function getLlmToolsForTestingAndScriptsAndPlayground(options?: CreateLlmToolsFromEnvOptions): LlmExecutionToolsWithTotalUsage;
export declare function getLlmToolsForTestingAndScriptsAndPlayground(options?: GetLlmToolsForTestingAndScriptsAndPlaygroundOptions): LlmExecutionToolsWithTotalUsage;
export {};
/**

@@ -11,0 +20,0 @@ * Note: [⚪] This should never be in any released package

@@ -10,2 +10,8 @@ import type { PromptbookStorage } from '../../../../storage/_common/PromptbookStorage';

storage: PromptbookStorage<CacheItem>;
/**
* @@@
*
* @default false
*/
isReloaded?: boolean;
};

@@ -8,8 +8,9 @@ import type { LlmExecutionTools } from '../../../../execution/LlmExecutionTools';

/**
* Total cost of the execution
* Get total cost of the execution up to this point
*/
totalUsage: PromptResultUsage;
getTotalUsage(): PromptResultUsage;
};
/**
* TODO: [👷‍♂️] @@@ Manual about construction of llmTools
* Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
*/

@@ -49,2 +49,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';

* TODO: [🧠][🈁] Maybe use `isDeterministic` from options
* TODO: [🍜] Auto use anonymous server in browser
*/

@@ -9,1 +9,4 @@ import type { ClientOptions } from '@anthropic-ai/sdk';

export type AnthropicClaudeExecutionToolsOptions = CommonExecutionToolsOptions & ClientOptions;
/**
* TODO: [🍜] Auto add WebGPT / Promptbook.studio anonymous server in browser
*/
#!/usr/bin/env ts-node
export {};
/**
* TODO: [🍜] Playground with WebGPT / Promptbook.studio anonymous server
* TODO: !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
*/

@@ -29,1 +29,4 @@ import type { PipelineCollection } from '../../../collection/PipelineCollection';

};
/**
* TODO: [🍜] Add anonymous option
*/

@@ -54,2 +54,3 @@ import type { AvailableModel } from '../../execution/LlmExecutionTools';

* TODO: [🗯] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
*/
* TODO: [🍜] Add anonymous option
*/

@@ -13,2 +13,3 @@ import type { IDestroyable } from 'destroyable';

/**
* TODO: [🍜] Add anonymous option
* TODO: [⚖] Expose the collection to be able to connect to same collection via createCollectionFromUrl

@@ -15,0 +16,0 @@ * TODO: Handle progress - support streaming

@@ -16,3 +16,3 @@ import type { PromptResultUsage } from '../../execution/PromptResultUsage';

*/
readonly modelUsage: PromptResultUsage;
readonly usage: PromptResultUsage;
};

@@ -19,0 +19,0 @@ /**

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc