@markprompt/core - npm package version comparison

Comparing version 0.4.1 to 0.4.2

LICENSE

40

dist/index.d.ts
import type { OpenAIModelId } from './types.js';
export type { OpenAIModelId };
-type Options = {
+export type Options = {
/** URL at which to fetch completions */
completionsUrl?: string;
+/** The placeholder for the text input */
+placeholder?: string;
/** Message returned when the model does not have an answer */
iDontKnowMessage?: string;
+/** The heading of the references section */
+referencesHeading?: string;
+/** The loading heading of the references section */
+loadingHeading?: string;
+/** If true, include the branding footer */
+includeBranding?: boolean;
/** The OpenAI model to use */

@@ -12,9 +20,33 @@ model?: OpenAIModelId;

promptTemplate?: string;
+/** The model temperature */
+temperature?: number;
+/** The model top P */
+topP?: number;
+/** The model frequency penalty */
+frequencyPenalty?: number;
+/** The model present penalty */
+presencePenalty?: number;
+/** The max number of tokens to include in the response */
+maxTokens?: number;
+/** The number of sections to include in the prompt context */
+sectionsMatchCount?: number;
+/** The similarity threshold between the input question and selected sections */
+sectionsMatchThreshold?: number;
/** AbortController signal */
signal?: AbortSignal;
};
-export declare const DEFAULT_MODEL: OpenAIModelId;
-export declare const I_DONT_KNOW_MESSAGE = "Sorry, I am not sure how to answer that.";
export declare const MARKPROMPT_COMPLETIONS_URL = "https://api.markprompt.com/v1/completions";
export declare const STREAM_SEPARATOR = "___START_RESPONSE_STREAM___";
+export declare const DEFAULT_MODEL: OpenAIModelId;
+export declare const DEFAULT_I_DONT_KNOW_MESSAGE = "Sorry, I am not sure how to answer that.";
+export declare const DEFAULT_REFERENCES_HEADING = "Answer generated from the following pages:";
+export declare const DEFAULT_LOADING_HEADING = "Fetching relevant pages...";
+export declare const DEFAULT_PROMPT_TEMPLATE = "You are a very enthusiastic company representative who loves to help people! Given the following sections from the documentation (preceded by a section id), answer the question using only that information, outputted in Markdown format. If you are unsure and the answer is not explicitly written in the documentation, say \"{{I_DONT_KNOW}}\".\n\nContext sections:\n---\n{{CONTEXT}}\n\nQuestion: \"{{PROMPT}}\"\n\nAnswer (including related code snippets if available):";
+export declare const DEFAULT_TEMPERATURE = 0.1;
+export declare const DEFAULT_TOP_P = 1;
+export declare const DEFAULT_FREQUENCY_PENALTY = 0;
+export declare const DEFAULT_PRESENCE_PENALTY = 0;
+export declare const DEFAULT_MAX_TOKENS = 500;
+export declare const DEFAULT_SECTIONS_MATCH_COUNT = 10;
+export declare const DEFAULT_SECTIONS_MATCH_THRESHOLD = 0.5;
/**

@@ -28,3 +60,3 @@ * @param {string} prompt - Prompt to submit to the model

*/
-export declare function submitPrompt(prompt: string, projectKey: string, onAnswerChunk: (answerChunk: string) => boolean, onReferences: (references: string[]) => void, onError: (error: Error) => void, options?: Options): Promise<void>;
+export declare function submitPrompt(prompt: string, projectKey: string, onAnswerChunk: (answerChunk: string) => boolean | undefined | void, onReferences: (references: string[]) => void, onError: (error: Error) => void, options?: Options): Promise<void>;
//# sourceMappingURL=index.d.ts.map
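A minimal usage sketch against the 0.4.2 declaration above: the project key is a placeholder and the option values simply mirror the new defaults, so adapt both before running.

import { submitPrompt, type Options } from '@markprompt/core';

// Placeholder project key for illustration only.
const projectKey = 'YOUR_PROJECT_KEY';

// Exercise some of the options introduced in 0.4.2; values mirror the defaults above.
const options: Options = {
  iDontKnowMessage: 'Sorry, I am not sure how to answer that.',
  referencesHeading: 'Answer generated from the following pages:',
  loadingHeading: 'Fetching relevant pages...',
  temperature: 0.1,
  topP: 1,
  maxTokens: 500,
  sectionsMatchCount: 10,
  sectionsMatchThreshold: 0.5,
};

let answer = '';

await submitPrompt(
  'How do I get started?',
  projectKey,
  (chunk) => {
    // Streamed answer chunks arrive here; the 0.4.2 signature no longer requires a boolean return.
    answer += chunk;
  },
  (references) => {
    // References extracted from the response stream.
    console.log('References:', references);
  },
  (error) => {
    console.error(error);
  },
  options,
);

console.log(answer);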

dist/index.js

@@ -1,5 +1,23 @@

-export const DEFAULT_MODEL = 'gpt-3.5-turbo';
-export const I_DONT_KNOW_MESSAGE = 'Sorry, I am not sure how to answer that.';
export const MARKPROMPT_COMPLETIONS_URL = 'https://api.markprompt.com/v1/completions';
export const STREAM_SEPARATOR = '___START_RESPONSE_STREAM___';
+export const DEFAULT_MODEL = 'gpt-3.5-turbo';
+export const DEFAULT_I_DONT_KNOW_MESSAGE = 'Sorry, I am not sure how to answer that.';
+export const DEFAULT_REFERENCES_HEADING = 'Answer generated from the following pages:';
+export const DEFAULT_LOADING_HEADING = 'Fetching relevant pages...';
+export const DEFAULT_PROMPT_TEMPLATE = `You are a very enthusiastic company representative who loves to help people! Given the following sections from the documentation (preceded by a section id), answer the question using only that information, outputted in Markdown format. If you are unsure and the answer is not explicitly written in the documentation, say "{{I_DONT_KNOW}}".
+
+Context sections:
+---
+{{CONTEXT}}
+
+Question: "{{PROMPT}}"
+
+Answer (including related code snippets if available):`;
+export const DEFAULT_TEMPERATURE = 0.1;
+export const DEFAULT_TOP_P = 1;
+export const DEFAULT_FREQUENCY_PENALTY = 0;
+export const DEFAULT_PRESENCE_PENALTY = 0;
+export const DEFAULT_MAX_TOKENS = 500;
+export const DEFAULT_SECTIONS_MATCH_COUNT = 10;
+export const DEFAULT_SECTIONS_MATCH_THRESHOLD = 0.5;
/**

@@ -19,3 +37,3 @@ * @param {string} prompt - Prompt to submit to the model

return;
-const iDontKnowMessage = options.iDontKnowMessage ?? I_DONT_KNOW_MESSAGE;
+const iDontKnowMessage = options.iDontKnowMessage ?? DEFAULT_I_DONT_KNOW_MESSAGE;
try {

@@ -33,2 +51,9 @@ const res = await fetch(options.completionsUrl ?? MARKPROMPT_COMPLETIONS_URL, {

promptTemplate: options.promptTemplate,
+temperature: options.temperature,
+topP: options.topP,
+frequencyPenalty: options.frequencyPenalty,
+presencePenalty: options.presencePenalty,
+maxTokens: options.maxTokens,
+sectionsMatchCount: options.sectionsMatchCount,
+sectionsMatchThreshold: options.sectionsMatchThreshold,
}),

@@ -77,3 +102,2 @@ signal: options.signal,

catch (error) {
-onAnswerChunk(iDontKnowMessage);
onError(error instanceof Error ? error : new Error(`${error}`));

@@ -80,0 +104,0 @@ }
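The new DEFAULT_PROMPT_TEMPLATE relies on {{I_DONT_KNOW}}, {{CONTEXT}} and {{PROMPT}} placeholders, while the client only forwards promptTemplate in the request body, so the substitution presumably happens on the Markprompt API side. The sketch below uses a hypothetical fillTemplate helper, not part of the package's API, purely to illustrate that substitution.

import {
  DEFAULT_PROMPT_TEMPLATE,
  DEFAULT_I_DONT_KNOW_MESSAGE,
} from '@markprompt/core';

// Hypothetical helper: shows how the {{...}} placeholders in a prompt template
// could be filled in. Not part of @markprompt/core's public API.
function fillTemplate(
  template: string,
  values: { context: string; prompt: string; iDontKnowMessage: string },
): string {
  return template
    .replace('{{I_DONT_KNOW}}', values.iDontKnowMessage)
    .replace('{{CONTEXT}}', values.context)
    .replace('{{PROMPT}}', values.prompt);
}

console.log(
  fillTemplate(DEFAULT_PROMPT_TEMPLATE, {
    context: 'Section 1: To get started, install the package with npm.',
    prompt: 'How do I get started?',
    iDontKnowMessage: DEFAULT_I_DONT_KNOW_MESSAGE,
  }),
);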

18

package.json
{
"name": "@markprompt/core",
"version": "0.4.1",
"license": "MIT",
"version": "0.4.2",
"repository": {

@@ -10,2 +9,7 @@ "type": "git",

},
"license": "MIT",
"sideEffects": false,
"type": "module",
"exports": "./dist/index.js",
"main": "./dist/index.js",
"files": [

@@ -15,11 +19,7 @@ "README.md",

],
"type": "module",
"main": "./dist/index.js",
"exports": "./dist/index.js",
"sideEffects": false,
"scripts": {
"prepack": "tsc --build",
"build": "tsc --build",
"dev": "tsc --build --watch"
"build": "tsc --build --clean && tsc --build",
"dev": "tsc --build --clean && tsc --build --watch",
"prepack": "tsc --build --clean && tsc --build"
}
}
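The manifest keeps "type": "module" with "exports" and "main" both pointing at ./dist/index.js, so the package is consumed as an ES module, and the build, dev and prepack scripts now run tsc --build --clean before compiling to clear stale incremental output. A minimal import, assuming Node or a bundler with ESM support:

// ESM import resolved through the "exports" field above.
import { submitPrompt, MARKPROMPT_COMPLETIONS_URL } from '@markprompt/core';

console.log(MARKPROMPT_COMPLETIONS_URL); // https://api.markprompt.com/v1/completions
console.log(typeof submitPrompt); // "function"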

