@promptbook/remote-client - npm Package Compare versions

Comparing version 0.68.4 to 0.68.5

esm/index.es.js

@@ -8,4 +8,4 @@ import { io } from 'socket.io-client';
 */
-var PROMPTBOOK_VERSION = '0.68.3';
-// TODO: !!!! List here all the versions and annotate + put into script
+var PROMPTBOOK_VERSION = '0.68.4';
+// TODO:[main] !!!! List here all the versions and annotate + put into script

@@ -279,3 +279,3 @@ /*! *****************************************************************************
 * TODO: [🧠][🛣] More elegant way to tracking than passing `name`
-* TODO: [🧠] !!! In-memory cache of same values to prevent multiple checks
+* TODO: [🧠][main] !!! In-memory cache of same values to prevent multiple checks
 * Note: [🐠] This is how `checkSerializableAsJson` + `isSerializableAsJson` together can just retun true/false or rich error message

@@ -638,3 +638,3 @@ */
 });
-// TODO: !!!! Better timeout handling
+// TODO:[main] !!!! Better timeout handling
 setTimeout(function () {

@@ -641,0 +641,0 @@ reject(new Error("Timeout while connecting to ".concat(_this.options.remoteUrl)));
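
The `Better timeout handling` TODO above sits next to code that races a socket.io connection against a `setTimeout` which rejects with a "Timeout while connecting to ..." error. A minimal sketch of that connect-with-timeout pattern follows; the function name `connectWithTimeout` and the constant `CONNECT_TIMEOUT_MS` are illustrative only and are not identifiers from this package.

import { io, Socket } from 'socket.io-client';

// Illustrative constant, not taken from the package
const CONNECT_TIMEOUT_MS = 60_000;

// Sketch: open a socket.io connection and reject if it does not connect in time
function connectWithTimeout(remoteUrl: string): Promise<Socket> {
    return new Promise((resolve, reject) => {
        const socket = io(remoteUrl);

        // Reject (and close the socket) if the connection does not open in time
        const timer = setTimeout(() => {
            socket.close();
            reject(new Error(`Timeout while connecting to ${remoteUrl}`));
        }, CONNECT_TIMEOUT_MS);

        socket.on('connect', () => {
            clearTimeout(timer);
            resolve(socket);
        });

        socket.on('connect_error', (error) => {
            clearTimeout(timer);
            socket.close();
            reject(error);
        });
    });
}

Clearing the timer on both `connect` and `connect_error` is one way to address the TODO: the timeout then only fires when the connection genuinely hangs, rather than racing a promise that has already settled.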

@@ -9,3 +9,3 @@ import type { Command as Program } from 'commander';
 /**
-* TODO: [🥃] !!! Allow `ptbk make` without configuring any llm tools
+* TODO: [🥃][main] !!! Allow `ptbk make` without configuring any llm tools
 * TODO: Maybe remove this command - "about" command should be enough?

@@ -12,0 +12,0 @@ * TODO: [0] DRY Javascript and typescript - Maybe make ONLY typescript and for javascript just remove types

@@ -28,3 +28,3 @@ import type { string_url } from '../../types/typeAliases';
 /**
-* TODO: !!!! [🧠] Library precompilation and do not mix markdown and json promptbooks
+* TODO:[main] !!!! [🧠] Library precompilation and do not mix markdown and json promptbooks
 */

@@ -71,3 +71,3 @@ /**
 * @@@
-* TODO: [🐝] !!! Use
+* TODO: [🐝][main] !!! Use
 *

@@ -79,3 +79,3 @@ * @public exported from `@promptbook/core`
 * @@@
-* TODO: [🐝] !!! Use
+* TODO: [🐝][main] !!! Use
 *

@@ -82,0 +82,0 @@ * @public exported from `@promptbook/core`

@@ -21,3 +21,3 @@ import type { PipelineJson } from '../types/PipelineJson/PipelineJson';
 /**
-* TODO: !!!! Warn if used only sync version
+* TODO:[main] !!!! Warn if used only sync version
 * TODO: [🚞] Report here line/column of error

@@ -24,0 +24,0 @@ * TODO: Use spaceTrim more effectively

@@ -30,9 +30,9 @@ import type { PipelineJson } from '../../types/PipelineJson/PipelineJson';
 /**
-* TODO: [🐣] !!!! Validate that all samples match expectations
-* TODO: [🐣][🐝] !!!! Validate that knowledge is valid (non-void)
-* TODO: [🐣] !!!! Validate that persona can be used only with CHAT variant
-* TODO: [🐣] !!!! Validate that parameter with reserved name not used RESERVED_PARAMETER_NAMES
-* TODO: [🐣] !!!! Validate that reserved parameter is not used as joker
+* TODO: [🐣][main] !!!! Validate that all samples match expectations
+* TODO: [🐣][🐝][main] !!!! Validate that knowledge is valid (non-void)
+* TODO: [🐣][main] !!!! Validate that persona can be used only with CHAT variant
+* TODO: [🐣][main] !!!! Validate that parameter with reserved name not used RESERVED_PARAMETER_NAMES
+* TODO: [🐣][main] !!!! Validate that reserved parameter is not used as joker
 * TODO: [🧠] Validation not only logic itself but imports around - files and websites and rerefenced pipelines exists
 * TODO: [🛠] Actions, instruments (and maybe knowledge) => Functions and tools
 */

@@ -61,3 +61,3 @@ import type { PipelineJson } from '../types/PipelineJson/PipelineJson';
 /**
-* TODO: !!! Identify not only pipeline BUT exact template ${block(pipelineIdentification)}
+* TODO:[main] !!! Identify not only pipeline BUT exact template ${block(pipelineIdentification)}
 * TODO: Use isVerbose here (not only pass to `preparePipeline`)

@@ -64,0 +64,0 @@ * TODO: [🧠][🌳] Use here `countTotalUsage` and put preparation and prepared pipiline to report

@@ -11,5 +11,5 @@ import type { PrepareOptions } from '../../../prepare/PrepareOptions';
 /**
-* TODO: [🐝][🔼] !!! Export via `@promptbook/markdown`
+* TODO: [🐝][🔼][main] !!! Export via `@promptbook/markdown`
 * TODO: [🪂] Do it in parallel 11:11
 * Note: No need to aggregate usage here, it is done by intercepting the llmTools
 */

@@ -11,3 +11,3 @@ import type { PrepareOptions } from '../../../prepare/PrepareOptions';
 /**
-* TODO: [🐝][🔼] !!! Export via `@promptbook/pdf`
+* TODO: [🐝][🔼][main] !!! Export via `@promptbook/pdf`
 * TODO: [🧺] In future, content can be alse File or Blob BUT for now for wider compatibility its only base64

@@ -14,0 +14,0 @@ * @see https://stackoverflow.com/questions/14653349/node-js-cant-create-blobs

@@ -19,3 +19,3 @@ import type { AvailableModel } from '../../execution/AvailableModel';
 * Note: [🤖] Add models of new variant
-* TODO: [🧠] !!! Add embedding models OR Anthropic has only chat+completion models?
+* TODO: [🧠][main] !!! Add embedding models OR Anthropic has only chat+completion models?
 * TODO: [🧠] Some mechanism to propagate unsureness

@@ -22,0 +22,0 @@ * TODO: [🧠][👮‍♀️] Put here more info like description, isVision, trainingDateCutoff, languages, strengths ( Top-level performance, intelligence, fluency, and understanding), contextWindow,...

@@ -0,1 +1,2 @@
+import Anthropic from '@anthropic-ai/sdk';
 import type { AvailableModel } from '../../execution/AvailableModel';

@@ -29,3 +30,3 @@ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
 get description(): string_markdown;
-private getClient;
+getClient(): Promise<Anthropic>;
 /**

@@ -32,0 +33,0 @@ * Check the `options` passed to `constructor`
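
The second hunk above turns `getClient` from a private member into a public method returning a `Promise` of the raw `@anthropic-ai/sdk` client. Assuming this declaration file belongs to the Anthropic execution tools class referenced elsewhere in this diff, a consumer could now reach the underlying SDK client directly. A hedged sketch follows; the surrounding function and the model name are illustrative, not from the package.

import type Anthropic from '@anthropic-ai/sdk';

// Assumption: `tools` is an instance of the class whose declaration is diffed above;
// only the public `getClient(): Promise<Anthropic>` signature is confirmed by the diff.
async function callAnthropicDirectly(tools: { getClient(): Promise<Anthropic> }): Promise<void> {
    const client: Anthropic = await tools.getClient();

    // Use the raw SDK client; the model name here is only an example
    const message = await client.messages.create({
        model: 'claude-3-haiku-20240307',
        max_tokens: 16,
        messages: [{ role: 'user', content: 'Hello' }],
    });
    console.log(message.content);
}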

@@ -14,4 +14,4 @@ import { RemoteLlmExecutionTools } from '../remote/RemoteLlmExecutionTools';
 /**
-* TODO: [🧠] !!!! Make anonymous this with all LLM providers
-* TODO: [🧠][🧱] !!!! Maybe change all `new AnthropicClaudeExecutionTools` -> `createAnthropicClaudeExecutionTools` in manual
+* TODO: [🧠][main] !!!! Make anonymous this with all LLM providers
+* TODO: [🧠][🧱][main] !!!! Maybe change all `new AnthropicClaudeExecutionTools` -> `createAnthropicClaudeExecutionTools` in manual
 * TODO: [🧠] Maybe auto-detect usage in browser and determine default value of `isProxied`

@@ -18,0 +18,0 @@ * TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
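
The second TODO in the hunk above considers documenting a `createAnthropicClaudeExecutionTools(...)` factory instead of direct `new AnthropicClaudeExecutionTools(...)` calls. A minimal sketch of such a wrapper follows; the import path `@promptbook/anthropic-claude` is an assumption (only the class and factory names appear in this diff), and a real factory would likely also handle the proxied/anonymous mode mentioned in the surrounding TODOs.

// Sketch only: the entry point and the options shape are assumptions, not confirmed by this diff.
import { AnthropicClaudeExecutionTools } from '@promptbook/anthropic-claude';

type AnthropicClaudeOptions = ConstructorParameters<typeof AnthropicClaudeExecutionTools>[0];

export function createAnthropicClaudeExecutionTools(options: AnthropicClaudeOptions): AnthropicClaudeExecutionTools {
    // Delegates to the existing constructor so callers never use `new` directly
    return new AnthropicClaudeExecutionTools(options);
}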

 #!/usr/bin/env ts-node
 export {};
 /**
-* TODO: !!! Playground with WebGPT / Promptbook.studio anonymous server
-* TODO: !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
+* TODO:[main] !!! Playground with WebGPT / Promptbook.studio anonymous server
+* TODO:[main] !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
 */

@@ -0,1 +1,2 @@
+import { OpenAIClient } from '@azure/openai';
 import type { AvailableModel } from '../../execution/AvailableModel';

@@ -29,3 +30,3 @@ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
 get description(): string_markdown;
-private getClient;
+getClient(): Promise<OpenAIClient>;
 /**

@@ -32,0 +33,0 @@ * Check the `options` passed to `constructor`

@@ -0,1 +1,2 @@
+import OpenAI from 'openai';
 import type { AvailableModel } from '../../execution/AvailableModel';

@@ -30,3 +31,3 @@ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
 get description(): string_markdown;
-private getClient;
+getClient(): Promise<OpenAI>;
 /**

@@ -33,0 +34,0 @@ * Check the `options` passed to `constructor`

 #!/usr/bin/env ts-node
 export {};
 /**
-* TODO: !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
+* TODO:[main] !!! Test here that `systemMessage`, `temperature` and `seed` are working correctly
 */

@@ -12,3 +12,3 @@ import type { PrepareOptions } from '../prepare/PrepareOptions';
 /**
-* TODO: [🔃] !!!!! If the persona was prepared with different version or different set of models, prepare it once again
+* TODO: [🔃][main] !!!!! If the persona was prepared with different version or different set of models, prepare it once again
 * TODO: [🏢] !! Check validity of `modelName` in pipeline

@@ -15,0 +15,0 @@ * TODO: [🏢] !! Check validity of `systemMessage` in pipeline

@@ -9,3 +9,3 @@ import type { PipelineJson } from '../types/PipelineJson/PipelineJson';
 /**
-* TODO: [🔃] !!!!! If the pipeline was prepared with different version or different set of models, prepare it once again
+* TODO: [🔃][main] !!!!! If the pipeline was prepared with different version or different set of models, prepare it once again
 * TODO: [🐠] Maybe base this on `makeValidator`

@@ -12,0 +12,0 @@ * TODO: [🧊] Pipeline can be partially prepared, this should return true ONLY if fully prepared

@@ -26,3 +26,3 @@ import type { PipelineJson } from '../types/PipelineJson/PipelineJson';
 * TODO: [🧠] What is better name `prepareTemplate` or `prepareTemplateAndParameters`
-* TODO: [♨] !!! Prepare index the samples and maybe templates
+* TODO: [♨][main] !!! Prepare index the samples and maybe templates
 * TODO: Write tests for `preparePipeline`

@@ -29,0 +29,0 @@ * TODO: [🏏] Leverage the batch API and build queues @see https://platform.openai.com/docs/guides/batch

@@ -595,5 +595,5 @@ import type { TupleToUnion } from 'type-fest';
 /**
-* TODO: !!! Change "For example" to @example
+* TODO:[main] !!! Change "For example" to @example
 * TODO: !! Change to branded types
 * TODO: Delete type aliases that are not exported or used internally
 */

@@ -25,4 +25,4 @@ import type { string_name } from '../../types/typeAliases';
 * TODO: [🧠][🛣] More elegant way to tracking than passing `name`
-* TODO: [🧠] !!! In-memory cache of same values to prevent multiple checks
+* TODO: [🧠][main] !!! In-memory cache of same values to prevent multiple checks
 * Note: [🐠] This is how `checkSerializableAsJson` + `isSerializableAsJson` together can just retun true/false or rich error message
 */

@@ -22,4 +22,4 @@ /**
 /**
-* TODO: [🧠] !!! In-memory cache of same values to prevent multiple checks
+* TODO: [🧠][main] !!! In-memory cache of same values to prevent multiple checks
 * TODO: [🧠][💺] Can be done this on type-level?
 */
 {
 "name": "@promptbook/remote-client",
-"version": "0.68.4",
+"version": "0.68.5",
 "description": "Supercharge your use of large language models",

@@ -50,3 +50,3 @@ "private": false,
 "peerDependencies": {
-"@promptbook/core": "0.68.4"
+"@promptbook/core": "0.68.5"
 },

@@ -53,0 +53,0 @@ "dependencies": {
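
The package.json hunks above bump both the package's own "version" field and the `@promptbook/core` peer dependency to the same exact version, 0.68.5, so the client expects an exactly matching core release. For example, a consumer upgrading to this release would typically install both packages at the matching version:

npm install @promptbook/remote-client@0.68.5 @promptbook/core@0.68.5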

@@ -36,2 +36,3 @@ <!-- ⚠️ WARNING: This code has been generated so that any manual changes will be overwritten -->

---

@@ -38,0 +39,0 @@

@@ -15,4 +15,4 @@ (function (global, factory) {
 */
-var PROMPTBOOK_VERSION = '0.68.3';
-// TODO: !!!! List here all the versions and annotate + put into script
+var PROMPTBOOK_VERSION = '0.68.4';
+// TODO:[main] !!!! List here all the versions and annotate + put into script

@@ -286,3 +286,3 @@ /*! *****************************************************************************
 * TODO: [🧠][🛣] More elegant way to tracking than passing `name`
-* TODO: [🧠] !!! In-memory cache of same values to prevent multiple checks
+* TODO: [🧠][main] !!! In-memory cache of same values to prevent multiple checks
 * Note: [🐠] This is how `checkSerializableAsJson` + `isSerializableAsJson` together can just retun true/false or rich error message

@@ -645,3 +645,3 @@ */
 });
-// TODO: !!!! Better timeout handling
+// TODO:[main] !!!! Better timeout handling
 setTimeout(function () {

@@ -648,0 +648,0 @@ reject(new Error("Timeout while connecting to ".concat(_this.options.remoteUrl)));
