🚀 Big News: Socket Acquires Coana to Bring Reachability Analysis to Every AppSec Team. Learn more
Socket
Demo · Install · Sign in
Socket

ai-text-processor

Package Overview
Dependencies
Maintainers
1
Versions
19
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

ai-text-processor - npm Package Compare versions

Comparing version 1.0.8 to 1.0.9

127

dist/components/AITextProcessor.js

@@ -33,3 +33,3 @@ "use strict";

const AITextProcessor = (_a) => {
var _b, _c, _d, _e, _f, _g;
var _b, _c, _d, _e, _f, _g, _h;
var props = __rest(_a, []);

@@ -43,2 +43,5 @@ const localSettings = (0, useLocalSettings_1.useLocalSettings)();

const [userPrompt, setUserPrompt] = localSettings[useLocalSettings_1.LocalSettingsKeys.userPrompt];
const variables = [...new Set((_b = `${systemPrompt}\n${userPrompt}`.match(/\{\{([\w_-]+)\}\}/g)) !== null && _b !== void 0 ? _b : [])];
const [variableValues, setVariableValues] = localSettings[useLocalSettings_1.LocalSettingsKeys.variableValues];
const [variableOptions, setVariableOptions] = localSettings[useLocalSettings_1.LocalSettingsKeys.variableOptions];
const [input, setInput] = localSettings[useLocalSettings_1.LocalSettingsKeys.input];

@@ -67,3 +70,9 @@ const [outputs, setOutputs] = localSettings[useLocalSettings_1.LocalSettingsKeys.outputs];

const retryingRef = react_1.default.useRef(false);
const chunks = TextUtils_1.TextUtils.getChunks(`${systemPrompt}`, `${userPrompt}`, `${input}`, (_b = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _b !== void 0 ? _b : 0, {
let preparedUserPrompt = userPrompt !== null && userPrompt !== void 0 ? userPrompt : '';
variables.forEach((variable) => {
var _a;
const replacement = (_a = variableValues[variable]) !== null && _a !== void 0 ? _a : '';
preparedUserPrompt = preparedUserPrompt.split(variable).join(replacement);
});
const chunks = TextUtils_1.TextUtils.getChunks(`${systemPrompt}`, `${preparedUserPrompt}`, `${input}`, (_c = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _c !== void 0 ? _c : 0, {
averageTokenLength: averageTokenLength !== null && averageTokenLength !== void 0 ? averageTokenLength : 4,

@@ -112,3 +121,3 @@ requestMaxTokenRatio: requestMaxTokenRatio !== null && requestMaxTokenRatio !== void 0 ? requestMaxTokenRatio : 0.6,

}
messages.push({ role: 'user', content: `${userPrompt}\n\n${chunk}`.trim() });
messages.push({ role: 'user', content: `${preparedUserPrompt}\n\n${chunk}`.trim() });
// Make the call and store a reference to the XMLHttpRequest

@@ -155,2 +164,25 @@ const xhr = openai_ext_1.OpenAIExt.streamClientChatCompletion({

};
// Stores new input text, optionally auto-shrinking it first; in both
// branches the stored value is clamped to CHAR_LIMIT characters.
const handleSetInput = (text) => {
    const clamp = (value) => value.substring(0, exports.CHAR_LIMIT);
    if (autoShrinkEnabled) {
        setInput(clamp(TextUtils_1.TextUtils.shrinkText(text)));
        return;
    }
    setInput(clamp(text));
};
// Reads text from the system clipboard and loads it as the input text
// (via handleSetInput, which applies auto-shrink and the char limit).
// Fix: the 'clipboard-read' permission query result was previously
// ignored entirely — the code read the clipboard regardless of the
// returned state. Now a denied permission short-circuits instead of
// attempting a read that would reject anyway. Errors are still only
// logged (best-effort paste), matching the original behavior.
const handlePaste = () => {
    navigator.permissions
        .query({
        name: 'clipboard-read',
    })
        .then((permission) => {
        if (permission.state === 'denied') {
            console.error(new Error('Clipboard read permission denied'));
            return;
        }
        navigator.clipboard
            .readText()
            .then((text) => {
            handleSetInput(text);
        })
            .catch((e) => console.error(e));
    })
        .catch((e) => console.error(e));
};
const handleClearInput = () => {

@@ -163,3 +195,3 @@ setInput('');

const handleSelectPreset = (presetName) => {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m;
if (!presetName) {

@@ -176,2 +208,4 @@ setSelectedPresetName('');

setAutoShrinkEnabled(useLocalSettings_1.LocalSettingsDefaults[useLocalSettings_1.LocalSettingsKeys.autoShrinkEnabled]);
setVariableValues(useLocalSettings_1.LocalSettingsDefaults[useLocalSettings_1.LocalSettingsKeys.variableValues]);
setVariableOptions(useLocalSettings_1.LocalSettingsDefaults[useLocalSettings_1.LocalSettingsKeys.variableOptions]);
}

@@ -191,3 +225,5 @@ else {

setAutoShrinkEnabled((_j = selectedPreset.autoShrink) !== null && _j !== void 0 ? _j : useLocalSettings_1.LocalSettingsDefaults[useLocalSettings_1.LocalSettingsKeys.chunkPrefix]);
(_k = inputTextFieldRef.current) === null || _k === void 0 ? void 0 : _k.select();
setVariableValues((_k = selectedPreset.variableValues) !== null && _k !== void 0 ? _k : useLocalSettings_1.LocalSettingsDefaults[useLocalSettings_1.LocalSettingsKeys.variableValues]);
setVariableOptions((_l = selectedPreset.variableOptions) !== null && _l !== void 0 ? _l : useLocalSettings_1.LocalSettingsDefaults[useLocalSettings_1.LocalSettingsKeys.variableOptions]);
(_m = inputTextFieldRef.current) === null || _m === void 0 ? void 0 : _m.select();
}

@@ -207,2 +243,4 @@ }

autoShrink: !!autoShrinkEnabled,
variableValues: variableValues !== null && variableValues !== void 0 ? variableValues : {},
variableOptions: variableOptions !== null && variableOptions !== void 0 ? variableOptions : {},
};

@@ -275,6 +313,6 @@ const newPresets = [...Object.values(mergedPresets !== null && mergedPresets !== void 0 ? mergedPresets : {}), presetToSave];

react_1.default.createElement("div", null,
react_1.default.createElement(react_bootstrap_1.Badge, { pill: true, bg: "secondary" }, TextUtils_1.TextUtils.getEstimatedTokenCount(systemPrompt + userPrompt + chunk, averageTokenLength !== null && averageTokenLength !== void 0 ? averageTokenLength : 0))))),
react_1.default.createElement(react_bootstrap_1.Badge, { pill: true, bg: "secondary" }, TextUtils_1.TextUtils.getEstimatedTokenCount(systemPrompt + preparedUserPrompt + chunk, averageTokenLength !== null && averageTokenLength !== void 0 ? averageTokenLength : 0))))),
react_1.default.createElement("hr", null),
systemPrompt && react_1.default.createElement("p", null, systemPrompt),
userPrompt && react_1.default.createElement("p", null, userPrompt),
preparedUserPrompt && react_1.default.createElement("p", null, preparedUserPrompt),
chunk && react_1.default.createElement("p", null, chunk)));

@@ -286,2 +324,44 @@ });

});
// Sets the value for one {{variable}} without mutating the existing
// variableValues state object: build a shallow copy carrying the new
// entry, then push it into local-settings state.
const handleSetVariableValue = (variable, value) => {
    const updated = { ...variableValues, [variable]: value };
    setVariableValues(updated);
};
// Saves `option` into the dropdown list for `variable`: duplicates are
// collapsed via a Set and the list is kept case-insensitively sorted.
// The options map is copied first so a nullish variableOptions still
// yields an empty map rather than a property access on null.
const handleAddVariableOption = (variable, option) => {
    const updated = { ...variableOptions };
    const existing = Array.isArray(updated[variable]) ? updated[variable] : [];
    const merged = [...new Set([...existing, option])];
    merged.sort((x, y) => x.toLowerCase().localeCompare(y.toLowerCase()));
    updated[variable] = merged;
    setVariableOptions(updated);
};
// Removes `option` from the dropdown list for `variable`, then clears
// the variable's current value (original behavior: the clear happens
// unconditionally, even when the option was not present).
// Fix: Object.assign copies the options map only shallowly, so the
// previous in-place splice mutated the very array still referenced by
// the live `variableOptions` state — a direct state mutation. We now
// copy the array before removing (first occurrence only, matching the
// original splice semantics) so state is replaced, never mutated.
const handleDeleteVariableOption = (variable, option) => {
    const newVariableOptions = Object.assign({}, variableOptions);
    const currentOptions = Array.isArray(newVariableOptions[variable]) ? newVariableOptions[variable] : [];
    if (currentOptions.includes(option)) {
        const newOptions = [...currentOptions];
        newOptions.splice(newOptions.indexOf(option), 1);
        newVariableOptions[variable] = newOptions;
        setVariableOptions(newVariableOptions);
    }
    handleSetVariableValue(variable, '');
};
// One row of controls per {{variable}} found in the prompts: a disabled
// name field, a free-text value input, a dropdown of saved options, and
// buttons to save (+) or delete (trash) the current value as an option.
const variableElements = variables.map((variable, i) => {
var _a;
// Current value for this variable; '' when unset or when variableValues is nullish.
const currVarValue = (_a = (variableValues !== null && variableValues !== void 0 ? variableValues : {})[variable]) !== null && _a !== void 0 ? _a : '';
// Display name: strip the surrounding {{ }} and turn underscores into spaces.
const currVarName = variable
.substring(2, variable.length - 2)
.split('_')
.join(' ');
// NOTE(review): unlike variableValues above, variableOptions is NOT
// null-guarded here — this throws if variableOptions is ever nullish.
// Presumably local settings default it to {}; confirm against useLocalSettings.
const currVarOpts = Array.isArray(variableOptions[variable]) ? variableOptions[variable] : [];
// <option> elements for the saved-values dropdown, keyed per variable/option pair.
const currValueOptionElements = currVarOpts.map((varValue, j) => (react_1.default.createElement("option", { key: `var-${i}-opt-${j}`, value: varValue }, varValue)));
return (react_1.default.createElement("div", { key: `variable-${i}`, className: "d-flex gap-1 mb-1" },
react_1.default.createElement(react_bootstrap_1.Form.Control, { size: "sm", type: "text", disabled: true, value: currVarName, style: { width: 150 } }),
react_1.default.createElement(react_bootstrap_1.Form.Control, { type: "text", size: "sm", placeholder: "Value", value: currVarValue, onChange: (e) => handleSetVariableValue(variable, e.target.value), style: { width: 150 } }),
react_1.default.createElement(react_bootstrap_1.Form.Select, { size: "sm", value: currVarValue, onChange: (e) => handleSetVariableValue(variable, e.target.value), style: { width: 0 } },
react_1.default.createElement("option", { value: "" }),
currValueOptionElements),
react_1.default.createElement(react_bootstrap_1.Button, { variant: "outline-primary", size: "sm", onClick: () => handleAddVariableOption(variable, currVarValue) },
react_1.default.createElement(fa_1.FaPlus, { className: "mb-1" })),
react_1.default.createElement(react_bootstrap_1.Button, { variant: "outline-danger", size: "sm", onClick: () => handleDeleteVariableOption(variable, currVarValue) },
react_1.default.createElement(fa_1.FaTrashAlt, { className: "mb-1" }))));
});
const showProcessingAlert = processingRef.current && ((outputs !== null && outputs !== void 0 ? outputs : []).length < currentChunkIndex + 1 || retryingRef.current);

@@ -298,3 +378,5 @@ const selectedPreset = (mergedPresets !== null && mergedPresets !== void 0 ? mergedPresets : {})[selectedPresetName];

selectedPreset.chunkPrefix !== chunkPrefix ||
!!selectedPreset.autoShrink !== !!autoShrinkEnabled;
!!selectedPreset.autoShrink !== !!autoShrinkEnabled ||
JSON.stringify(selectedPreset.variableValues) !== JSON.stringify(variableValues !== null && variableValues !== void 0 ? variableValues : {}) ||
JSON.stringify(selectedPreset.variableOptions) !== JSON.stringify(variableOptions !== null && variableOptions !== void 0 ? variableOptions : {});
const canSave = !!presetName.trim() && hasChanges;

@@ -350,6 +432,8 @@ const configured = !!openAiModel && !!userPrompt;

react_1.default.createElement("div", { className: "d-flex justify-content-between gap-2" },
react_1.default.createElement(react_bootstrap_1.Form.Text, { className: "text-muted" }, "Provide the prompt used to process the text. The input text will be appended to the end of this prompt."),
react_1.default.createElement(react_bootstrap_1.Form.Text, { className: "text-muted" },
"Provide the prompt used to process the text. The input text will be appended to the end of this prompt. You can optionally include variables in double curly braces, like so: ",
`{{Var_Name}}`),
react_1.default.createElement("div", { className: "d-flex align-items-center gap-1 small" },
react_1.default.createElement(react_bootstrap_1.Form.Text, { className: "text-muted my-0" }, "Tokens:"),
react_1.default.createElement(react_bootstrap_1.Badge, { pill: true, bg: "secondary" }, TextUtils_1.TextUtils.getEstimatedTokenCount(userPrompt, averageTokenLength !== null && averageTokenLength !== void 0 ? averageTokenLength : 0))))),
react_1.default.createElement(react_bootstrap_1.Badge, { pill: true, bg: "secondary" }, TextUtils_1.TextUtils.getEstimatedTokenCount(preparedUserPrompt, averageTokenLength !== null && averageTokenLength !== void 0 ? averageTokenLength : 0))))),
react_1.default.createElement(react_bootstrap_1.Accordion, null,

@@ -378,10 +462,10 @@ react_1.default.createElement(react_bootstrap_1.Accordion.Item, { eventKey: "0" },

"), and the max tokens for the model is",
' ', (_c = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _c !== void 0 ? _c : 4000,
' ', (_d = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _d !== void 0 ? _d : 4000,
", each request (chunk) will have",
' ',
Math.ceil(((_d = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _d !== void 0 ? _d : 4000) * requestMaxTokenRatio),
Math.ceil(((_e = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _e !== void 0 ? _e : 4000) * requestMaxTokenRatio),
" tokens max. This would leave about",
' ',
((_e = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _e !== void 0 ? _e : 4000) -
Math.ceil(((_f = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _f !== void 0 ? _f : 4000) * requestMaxTokenRatio),
((_f = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _f !== void 0 ? _f : 4000) -
Math.ceil(((_g = currentOpenAiModelInfo === null || currentOpenAiModelInfo === void 0 ? void 0 : currentOpenAiModelInfo.maxTokens) !== null && _g !== void 0 ? _g : 4000) * requestMaxTokenRatio),
' ',

@@ -405,16 +489,15 @@ "tokens for a meaningful response, per request. For each chunk, we want to make sure there is still a decent amount of tokens left for the response.")),

react_1.default.createElement(fa_1.FaTrashAlt, { className: "mb-1" })))))),
variableElements.length > 0 && (react_1.default.createElement(react_bootstrap_1.Form.Group, { controlId: "variables-group" },
react_1.default.createElement(react_bootstrap_1.Form.Label, { className: "small fw-bold mb-1" }, "Variables:"),
variableElements)),
react_1.default.createElement(react_bootstrap_1.Form.Group, { controlId: "form-group-input-text" },
react_1.default.createElement(react_bootstrap_1.Form.Label, { className: "small fw-bold mb-1" }, "Input Text"),
react_1.default.createElement(react_bootstrap_1.Form.Control, { ref: inputTextFieldRef, as: "textarea", placeholder: "Enter text to process", rows: 8, value: input, onChange: (e) => {
if (autoShrinkEnabled) {
setInput(TextUtils_1.TextUtils.shrinkText(e.target.value).substring(0, exports.CHAR_LIMIT));
}
else {
setInput(e.target.value.substring(0, exports.CHAR_LIMIT));
}
handleSetInput(e.target.value);
}, onFocus: handleInputTextFieldFocus })),
react_1.default.createElement("div", { className: "d-flex justify-content-between align-items-start gap-2" },
react_1.default.createElement("div", { className: "d-flex align-items-center gap-2" },
react_1.default.createElement(react_bootstrap_1.Button, { variant: "outline-primary", size: "sm", onClick: handlePaste }, "Paste"),
react_1.default.createElement(react_bootstrap_1.Button, { variant: "outline-danger", size: "sm", onClick: handleClearInput, disabled: !hasInput }, "Clear"),
react_1.default.createElement(react_bootstrap_1.Button, { variant: "outline-primary", size: "sm", onClick: handleShrink, disabled: !hasInput },
react_1.default.createElement(react_bootstrap_1.Button, { variant: "outline-secondary", size: "sm", onClick: handleShrink, disabled: !hasInput },
react_1.default.createElement("div", { className: "d-flex align-items-center gap-1" }, "Shrink")),

@@ -482,3 +565,3 @@ react_1.default.createElement(react_bootstrap_1.Form.Check, { inline: true, label: "Auto-shrink", className: "user-select-none small mb-0", id: "auto-shrink-checkbox", checked: !!autoShrinkEnabled, onChange: (e) => setAutoShrinkEnabled(e.target.checked) })),

errorAlertElements,
((_g = (outputs !== null && outputs !== void 0 ? outputs : [])) === null || _g === void 0 ? void 0 : _g.length) > 0 && (react_1.default.createElement("h5", { className: "text-center text-muted" },
((_h = (outputs !== null && outputs !== void 0 ? outputs : [])) === null || _h === void 0 ? void 0 : _h.length) > 0 && (react_1.default.createElement("h5", { className: "text-center text-muted" },
"If this project helped you, please",

@@ -485,0 +568,0 @@ ' ',

@@ -11,2 +11,4 @@ export interface Preset {

autoShrink: boolean;
variableValues: Record<string, string>;
variableOptions: Record<string, string[]>;
}

@@ -13,0 +15,0 @@ export declare const defaultPresets: Preset[];

@@ -15,2 +15,4 @@ "use strict";

autoShrink: false,
variableValues: {},
variableOptions: {},
},

@@ -27,4 +29,79 @@ {

autoShrink: false,
variableValues: {},
variableOptions: {},
},
{
name: 'Translate Text',
aiModel: 'gpt-3.5-turbo',
systemPrompt: 'You are a helpful assistant.',
userPrompt: 'Translate the following from {{Start_Language}} to {{End_Language}}:',
averageTokenLength: 4.5,
requestMaxTokenRatio: 0.4,
chunkOverlapWordCount: 0,
chunkPrefix: '',
autoShrink: true,
variableValues: {
'{{Start_Language}}': 'English',
'{{End_Language}}': 'German',
},
variableOptions: {
'{{Start_Language}}': [
'Arabic',
'Bengali',
'Bulgarian',
'Catalan',
'Czech',
'Danish',
'Dutch',
'English',
'Estonian',
'Finnish',
'French',
'German',
'Greek',
'Hindi',
'Hungarian',
'Indonesian',
'Italian',
'Japanese',
'Korean',
'Mandarin Chinese',
'Polish',
'Portuguese',
'Russian',
'Spanish',
'Tamil',
'Turkish',
],
'{{End_Language}}': [
'Arabic',
'Bengali',
'Bulgarian',
'Catalan',
'Czech',
'Danish',
'Dutch',
'English',
'Estonian',
'Finnish',
'French',
'German',
'Greek',
'Hindi',
'Hungarian',
'Indonesian',
'Italian',
'Japanese',
'Korean',
'Mandarin Chinese',
'Polish',
'Portuguese',
'Russian',
'Spanish',
'Tamil',
'Turkish',
],
},
},
{
name: 'YouTube Transcript Formatter',

@@ -39,2 +116,4 @@ aiModel: 'gpt-3.5-turbo',

autoShrink: true,
variableValues: {},
variableOptions: {},
},

@@ -41,0 +120,0 @@ ];

@@ -16,2 +16,4 @@ import { AIModelInfo } from './AIModelInfo';

userPrompt = "userPrompt",
variableValues = "variableValues",
variableOptions = "variableOptions",
input = "input",

@@ -37,2 +39,4 @@ outputs = "outputs",

userPrompt: string;
variableValues: {};
variableOptions: {};
input: string;

@@ -58,2 +62,4 @@ outputs: never[];

userPrompt: import("react-storage-complete").StorageState<any>;
variableValues: import("react-storage-complete").StorageState<any>;
variableOptions: import("react-storage-complete").StorageState<any>;
input: import("react-storage-complete").StorageState<any>;

@@ -60,0 +66,0 @@ outputs: import("react-storage-complete").StorageState<string[]>;

@@ -25,2 +25,4 @@ "use strict";

LocalSettingsKeys["userPrompt"] = "userPrompt";
LocalSettingsKeys["variableValues"] = "variableValues";
LocalSettingsKeys["variableOptions"] = "variableOptions";
LocalSettingsKeys["input"] = "input";

@@ -46,2 +48,4 @@ LocalSettingsKeys["outputs"] = "outputs";

[LocalSettingsKeys.userPrompt]: '',
[LocalSettingsKeys.variableValues]: {},
[LocalSettingsKeys.variableOptions]: {},
[LocalSettingsKeys.input]: '',

@@ -73,2 +77,4 @@ [LocalSettingsKeys.outputs]: [],

[LocalSettingsKeys.userPrompt]: (0, react_storage_complete_1.useLocalStorage)(LocalSettingsKeys.userPrompt, exports.LocalSettingsDefaults[LocalSettingsKeys.userPrompt], storageOptions),
[LocalSettingsKeys.variableValues]: (0, react_storage_complete_1.useLocalStorage)(LocalSettingsKeys.variableValues, exports.LocalSettingsDefaults[LocalSettingsKeys.variableValues], storageOptions),
[LocalSettingsKeys.variableOptions]: (0, react_storage_complete_1.useLocalStorage)(LocalSettingsKeys.variableOptions, exports.LocalSettingsDefaults[LocalSettingsKeys.variableOptions], storageOptions),
[LocalSettingsKeys.input]: (0, react_storage_complete_1.useLocalStorage)(LocalSettingsKeys.input, exports.LocalSettingsDefaults[LocalSettingsKeys.input], storageOptions),

@@ -75,0 +81,0 @@ [LocalSettingsKeys.outputs]: (0, react_storage_complete_1.useLocalStorage)(LocalSettingsKeys.outputs, exports.LocalSettingsDefaults[LocalSettingsKeys.outputs], storageOptions),

2

package.json
{
"name": "ai-text-processor",
"version": "1.0.8",
"version": "1.0.9",
"author": "Justin Mahar <contact@justinmahar.com>",

@@ -5,0 +5,0 @@ "description": "Utility for processing text using AI. Summarize, analyze, extract, translate, format, and more! ✨",