@nlbridge/core - npm package version comparison

Comparing version 0.2.5 to 0.2.6
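
Summary of the changes in this diff: the Extras type is renamed to ActionExtras, the defaultHandlers export is renamed to defaultActionHandlers, the actionCallbacks parameter of createRuntime is renamed to actionHandlers, and several previously unexported types (ActionExtras, ActionHandlerConfig, ChatResult, and the per-action handler types) are now exported. No behavioral change is visible in the bundles. A migration sketch inferred from these renames, illustrative rather than an official upgrade guide:

// 0.2.5:
// import { createRuntime, defaultHandlers } from '@nlbridge/core';
// const runtime = createRuntime(defaultHandlers, { chatModel: 'gpt-3.5-turbo' });

// 0.2.6: only the export name changes.
import { createRuntime, defaultActionHandlers } from '@nlbridge/core';

const runtime = createRuntime(defaultActionHandlers, { chatModel: 'gpt-3.5-turbo' });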


cjs/nlbridge-core.js

@@ -1,1 +1,1 @@

"use strict";var t=require("openai");const e=["chat","chat-stream","set-context","update-context","clear-context","get-context-data"],o=t=>e.includes(t),n={};const s=t=>{const e=JSON.stringify(t,null,2);return"The conversation is taking as part while user is using a web or mobile application.\nBelow is a JSON object that contains contextual information about user's session.\nThis context data should be taken into account when generating a response.\nYou should not display JSON code from the context, but rather use it to\ngenerate a response that is relevant to the user based on that context.\n\nContext JSON object:\n\n{{context}}\n".replace("{{context}}","\n\n"+e+"\n\n")},c=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"],a="gpt-3.5-turbo",r=t=>{console.warn(""+t+"")},i={chat:async(e,o)=>{const n=new t,c=[];if(o.getContextData){const t=await o.getContextData();t&&c.push({role:"system",content:s(t)})}c.push({role:"user",content:e});const r=await n.chat.completions.create({stream:!1,model:o.config?.chatModel||a,messages:c});return r.choices&&0!==r.choices.length&&r.choices[0].message.content?{success:!0,message:r.choices[0].message.content}:{success:!1,error:"No response from OpenAI."}},"chat-stream":async(e,o,n)=>{const s=new t,c=[];c.push({role:"user",content:e});try{let t=(await s.chat.completions.create({stream:!0,model:n.config?.chatModel||a,messages:c}))[Symbol.asyncIterator](),e=await t.next();for(;!e.done;){const n=e.value;if("stop"===(n.choices?.length>0?n.choices[0].finish_reason:void 0))break;const s=n.choices?.[0].delta.content;"string"==typeof s?o.next(s):r(`Undecodable message - value: ${n}`),e=await t.next()}o.complete()}catch(t){r(`Error: ${t}`)}},"set-context":async(t,e)=>{let o;do{o=Math.random().toString(36).substring(2,14),n[o]&&(o="")}while(!o);return n[o]={...t},{success:!0,contextId:o}},"update-context":async(t,e,o)=>n[t]?(n[t]={...n[t],...e},{success:!0}):{success:!1,error:"Context not found"},"get-context-data":async(t,e,o)=>{if(!t||!n[t])return{success:!1,error:"Context not found"};const s=n[t];return e?s[e]?{success:!0,data:s[e]}:{success:!1,error:"Item not found"}:{success:!0,data:s}},"clear-context":async(t,e)=>t&&n[t]?(n[t]=void 0,delete n[t],{success:!0}):{success:!1,error:"Context not found"}};exports.actionIds=e,exports.asOpenAiChatModel=t=>{if(c.includes(t))return t},exports.asValidActionId=t=>o(t)?t:void 0,exports.createRuntime=(t,e)=>({run:(o,...n)=>{const s=t[o];if(!s)throw new Error("Unsupported action");const c=Array.isArray(n)&&n.length>0?n.slice(0,-1):[],a=Array.isArray(n)&&n.length>0?n[n.length-1]:{},r="object"==typeof a&&null!==a?{...a,config:e}:{config:e};return a.contextId&&!a.getContextData&&(r.getContextData=async e=>{const o=await t["get-context-data"](a.contextId,e,r);if(o&&o.success)return o.data}),s(...c,r)}}),exports.defaultHandlers=i,exports.isValidActionId=o,exports.openAiDefaultChatModel=a,exports.supportedOpenAiChatModels=c;
"use strict";var t=require("openai");const e=["chat","chat-stream","set-context","update-context","clear-context","get-context-data"],o=t=>e.includes(t),n={};const s=t=>{const e=JSON.stringify(t,null,2);return"The conversation is taking as part while user is using a web or mobile application.\nBelow is a JSON object that contains contextual information about user's session.\nThis context data should be taken into account when generating a response.\nYou should not display JSON code from the context, but rather use it to\ngenerate a response that is relevant to the user based on that context.\n\nContext JSON object:\n\n{{context}}\n".replace("{{context}}","\n\n"+e+"\n\n")},c=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"],a="gpt-3.5-turbo",r=t=>{console.warn(""+t+"")},i={chat:async(e,o)=>{const n=new t,c=[];if(o.getContextData){const t=await o.getContextData();t&&c.push({role:"system",content:s(t)})}c.push({role:"user",content:e});const r=await n.chat.completions.create({stream:!1,model:o.config?.chatModel||a,messages:c});return r.choices&&0!==r.choices.length&&r.choices[0].message.content?{success:!0,message:r.choices[0].message.content}:{success:!1,error:"No response from OpenAI."}},"chat-stream":async(e,o,n)=>{const s=new t,c=[];c.push({role:"user",content:e});try{let t=(await s.chat.completions.create({stream:!0,model:n.config?.chatModel||a,messages:c}))[Symbol.asyncIterator](),e=await t.next();for(;!e.done;){const n=e.value;if("stop"===(n.choices?.length>0?n.choices[0].finish_reason:void 0))break;const s=n.choices?.[0].delta.content;"string"==typeof s?o.next(s):r(`Undecodable message - value: ${n}`),e=await t.next()}o.complete()}catch(t){r(`Error: ${t}`)}},"set-context":async(t,e)=>{let o;do{o=Math.random().toString(36).substring(2,14),n[o]&&(o="")}while(!o);return n[o]={...t},{success:!0,contextId:o}},"update-context":async(t,e,o)=>n[t]?(n[t]={...n[t],...e},{success:!0}):{success:!1,error:"Context not found"},"get-context-data":async(t,e,o)=>{if(!t||!n[t])return{success:!1,error:"Context not found"};const s=n[t];return e?s[e]?{success:!0,data:s[e]}:{success:!1,error:"Item not found"}:{success:!0,data:s}},"clear-context":async(t,e)=>t&&n[t]?(n[t]=void 0,delete n[t],{success:!0}):{success:!1,error:"Context not found"}};exports.actionIds=e,exports.asOpenAiChatModel=t=>{if(c.includes(t))return t},exports.asValidActionId=t=>o(t)?t:void 0,exports.createRuntime=(t,e)=>({run:(o,...n)=>{const s=t[o];if(!s)throw new Error("Unsupported action");const c=Array.isArray(n)&&n.length>0?n.slice(0,-1):[],a=Array.isArray(n)&&n.length>0?n[n.length-1]:{},r="object"==typeof a&&null!==a?{...a,config:e}:{config:e};return a.contextId&&!a.getContextData&&(r.getContextData=async e=>{const o=await t["get-context-data"](a.contextId,e,r);if(o&&o.success)return o.data}),s(...c,r)}}),exports.defaultActionHandlers=i,exports.isValidActionId=o,exports.openAiDefaultChatModel=a,exports.supportedOpenAiChatModels=c;

ESM bundle

@@ -1,1 +1,1 @@

import t from"openai";const e=["chat","chat-stream","set-context","update-context","clear-context","get-context-data"],n=t=>e.includes(t),o=t=>n(t)?t:void 0,s={};const c=t=>{const e=JSON.stringify(t,null,2);return"The conversation is taking as part while user is using a web or mobile application.\nBelow is a JSON object that contains contextual information about user's session.\nThis context data should be taken into account when generating a response.\nYou should not display JSON code from the context, but rather use it to\ngenerate a response that is relevant to the user based on that context.\n\nContext JSON object:\n\n{{context}}\n".replace("{{context}}","\n\n"+e+"\n\n")},a=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"],r="gpt-3.5-turbo",i=t=>{if(a.includes(t))return t},u=t=>{console.warn(""+t+"")},g={chat:async(e,n)=>{const o=new t,s=[];if(n.getContextData){const t=await n.getContextData();t&&s.push({role:"system",content:c(t)})}s.push({role:"user",content:e});const a=await o.chat.completions.create({stream:!1,model:n.config?.chatModel||r,messages:s});return a.choices&&0!==a.choices.length&&a.choices[0].message.content?{success:!0,message:a.choices[0].message.content}:{success:!1,error:"No response from OpenAI."}},"chat-stream":async(e,n,o)=>{const s=new t,c=[];c.push({role:"user",content:e});try{let t=(await s.chat.completions.create({stream:!0,model:o.config?.chatModel||r,messages:c}))[Symbol.asyncIterator](),e=await t.next();for(;!e.done;){const o=e.value;if("stop"===(o.choices?.length>0?o.choices[0].finish_reason:void 0))break;const s=o.choices?.[0].delta.content;"string"==typeof s?n.next(s):u(`Undecodable message - value: ${o}`),e=await t.next()}n.complete()}catch(t){u(`Error: ${t}`)}},"set-context":async(t,e)=>{let n;do{n=Math.random().toString(36).substring(2,14),s[n]&&(n="")}while(!n);return s[n]={...t},{success:!0,contextId:n}},"update-context":async(t,e,n)=>s[t]?(s[t]={...s[t],...e},{success:!0}):{success:!1,error:"Context not found"},"get-context-data":async(t,e,n)=>{if(!t||!s[t])return{success:!1,error:"Context not found"};const o=s[t];return e?o[e]?{success:!0,data:o[e]}:{success:!1,error:"Item not found"}:{success:!0,data:o}},"clear-context":async(t,e)=>t&&s[t]?(s[t]=void 0,delete s[t],{success:!0}):{success:!1,error:"Context not found"}},p=(t,e)=>({run:(n,...o)=>{const s=t[n];if(!s)throw new Error("Unsupported action");const c=Array.isArray(o)&&o.length>0?o.slice(0,-1):[],a=Array.isArray(o)&&o.length>0?o[o.length-1]:{},r="object"==typeof a&&null!==a?{...a,config:e}:{config:e};return a.contextId&&!a.getContextData&&(r.getContextData=async e=>{const n=await t["get-context-data"](a.contextId,e,r);if(n&&n.success)return n.data}),s(...c,r)}});export{e as actionIds,i as asOpenAiChatModel,o as asValidActionId,p as createRuntime,g as defaultHandlers,n as isValidActionId,r as openAiDefaultChatModel,a as supportedOpenAiChatModels};
import t from"openai";const e=["chat","chat-stream","set-context","update-context","clear-context","get-context-data"],n=t=>e.includes(t),o=t=>n(t)?t:void 0,s={};const c=t=>{const e=JSON.stringify(t,null,2);return"The conversation is taking as part while user is using a web or mobile application.\nBelow is a JSON object that contains contextual information about user's session.\nThis context data should be taken into account when generating a response.\nYou should not display JSON code from the context, but rather use it to\ngenerate a response that is relevant to the user based on that context.\n\nContext JSON object:\n\n{{context}}\n".replace("{{context}}","\n\n"+e+"\n\n")},a=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"],r="gpt-3.5-turbo",i=t=>{if(a.includes(t))return t},u=t=>{console.warn(""+t+"")},g={chat:async(e,n)=>{const o=new t,s=[];if(n.getContextData){const t=await n.getContextData();t&&s.push({role:"system",content:c(t)})}s.push({role:"user",content:e});const a=await o.chat.completions.create({stream:!1,model:n.config?.chatModel||r,messages:s});return a.choices&&0!==a.choices.length&&a.choices[0].message.content?{success:!0,message:a.choices[0].message.content}:{success:!1,error:"No response from OpenAI."}},"chat-stream":async(e,n,o)=>{const s=new t,c=[];c.push({role:"user",content:e});try{let t=(await s.chat.completions.create({stream:!0,model:o.config?.chatModel||r,messages:c}))[Symbol.asyncIterator](),e=await t.next();for(;!e.done;){const o=e.value;if("stop"===(o.choices?.length>0?o.choices[0].finish_reason:void 0))break;const s=o.choices?.[0].delta.content;"string"==typeof s?n.next(s):u(`Undecodable message - value: ${o}`),e=await t.next()}n.complete()}catch(t){u(`Error: ${t}`)}},"set-context":async(t,e)=>{let n;do{n=Math.random().toString(36).substring(2,14),s[n]&&(n="")}while(!n);return s[n]={...t},{success:!0,contextId:n}},"update-context":async(t,e,n)=>s[t]?(s[t]={...s[t],...e},{success:!0}):{success:!1,error:"Context not found"},"get-context-data":async(t,e,n)=>{if(!t||!s[t])return{success:!1,error:"Context not found"};const o=s[t];return e?o[e]?{success:!0,data:o[e]}:{success:!1,error:"Item not found"}:{success:!0,data:o}},"clear-context":async(t,e)=>t&&s[t]?(s[t]=void 0,delete s[t],{success:!0}):{success:!1,error:"Context not found"}},p=(t,e)=>({run:(n,...o)=>{const s=t[n];if(!s)throw new Error("Unsupported action");const c=Array.isArray(o)&&o.length>0?o.slice(0,-1):[],a=Array.isArray(o)&&o.length>0?o[o.length-1]:{},r="object"==typeof a&&null!==a?{...a,config:e}:{config:e};return a.contextId&&!a.getContextData&&(r.getContextData=async e=>{const n=await t["get-context-data"](a.contextId,e,r);if(n&&n.success)return n.data}),s(...c,r)}});export{e as actionIds,i as asOpenAiChatModel,o as asValidActionId,p as createRuntime,g as defaultActionHandlers,n as isValidActionId,r as openAiDefaultChatModel,a as supportedOpenAiChatModels};
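
createRuntime, shared by all three bundles, is also easier to follow de-minified. A hand-reconstructed sketch of the logic above, with invented variable names:

// The last positional argument of run() is treated as the extras object:
// the runtime config is merged into it, and a getContextData helper is
// synthesized on top of the 'get-context-data' handler when only a
// contextId is supplied.
const createRuntime = (actionHandlers: Record<string, Function>, config?: unknown) => ({
  run: (actionId: string, ...args: unknown[]) => {
    const handler = actionHandlers[actionId];
    if (!handler) throw new Error('Unsupported action');

    const leadingArgs = args.length > 0 ? args.slice(0, -1) : [];
    const lastArg: any = args.length > 0 ? args[args.length - 1] : {};
    const extras: any =
      typeof lastArg === 'object' && lastArg !== null ? { ...lastArg, config } : { config };

    if (lastArg.contextId && !lastArg.getContextData) {
      extras.getContextData = async (itemId?: string) => {
        const result = await actionHandlers['get-context-data'](lastArg.contextId, itemId, extras);
        if (result && result.success) return result.data;
      };
    }
    return handler(...leadingArgs, extras);
  },
});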

TypeScript declarations

@@ -16,3 +16,3 @@ declare const actionsList: string[];

-type Extras<RuntimeConfig = any> = {
+type ActionExtras<RuntimeConfig = any> = {
contextId?: string;

@@ -31,3 +31,3 @@ getContextData?: (itemId?: string) => Promise<ContextData | undefined>;

};
-type ChatHandler = (prompt: string, extras: Extras) => Promise<ChatResult>;
+type ChatHandler = (prompt: string, extras: ActionExtras) => Promise<ChatResult>;

@@ -39,3 +39,3 @@ type ChatStreamObserver = {

};
-type ChatStreamHandler = (prompt: string, observer: ChatStreamObserver, extras: Extras) => void;
+type ChatStreamHandler = (prompt: string, observer: ChatStreamObserver, extras: ActionExtras) => void;

@@ -48,3 +48,3 @@ type ClearContextResult = {

};
-type ClearContextHandler = (contextId: string, extras: Extras) => Promise<ClearContextResult>;
+type ClearContextHandler = (contextId: string, extras: ActionExtras) => Promise<ClearContextResult>;

@@ -58,3 +58,3 @@ type GetContextDataResult = {

};
-type GetContextDataHandler = (contextId: string, itemId: string | undefined, extras: Extras) => Promise<GetContextDataResult>;
+type GetContextDataHandler = (contextId: string, itemId: string | undefined, extras: ActionExtras) => Promise<GetContextDataResult>;

@@ -68,3 +68,3 @@ type SetContextResult = {

};
-type SetContextHandler = (initialData: Record<string, any> | undefined, extras: Extras) => Promise<SetContextResult>;
+type SetContextHandler = (initialData: Record<string, any> | undefined, extras: ActionExtras) => Promise<SetContextResult>;

@@ -77,3 +77,3 @@ type UpdateContextResult = {

};
-type UpdateContextHandler = (contextId: string, data: ContextData, extras: Extras) => Promise<UpdateContextResult>;
+type UpdateContextHandler = (contextId: string, data: ContextData, extras: ActionExtras) => Promise<UpdateContextResult>;

@@ -90,3 +90,3 @@ type ActionHandlerConfig = {

type RunAction = <ActionId extends keyof ActionHandlerConfig>(action: ActionId, ...parameters: Parameters<ActionHandlerConfig[ActionId]>) => ReturnType<ActionHandlerConfig[ActionId]>;
-declare const createRuntime: <RuntimeConfig = any>(actionCallbacks: ActionHandlerConfig, config?: RuntimeConfig | undefined) => {
+declare const createRuntime: <RuntimeConfig = any>(actionHandlers: ActionHandlerConfig, config?: RuntimeConfig | undefined) => {
run: RunAction;

@@ -107,4 +107,4 @@ };

-declare const defaultHandlers: ActionHandlerConfig;
+declare const defaultActionHandlers: ActionHandlerConfig;
-export { type ActionId, type ChatHandler, type ChatStreamHandler, type ChatStreamObserver, type OpenAiChatModel, type OpenAiRuntimeConfig, type RunAction, actionIds, asOpenAiChatModel, asValidActionId, createRuntime, defaultHandlers, isValidActionId, openAiDefaultChatModel, supportedOpenAiChatModels };
+export { type ActionExtras, type ActionHandlerConfig, type ActionId, type ChatHandler, type ChatResult, type ChatStreamHandler, type ChatStreamObserver, type ClearContextHandler, type GetContextDataHandler, type OpenAiChatModel, type OpenAiRuntimeConfig, type RunAction, type SetContextHandler, type UpdateContextHandler, actionIds, asOpenAiChatModel, asValidActionId, createRuntime, defaultActionHandlers, isValidActionId, openAiDefaultChatModel, supportedOpenAiChatModels };
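
Taken together, the renamed declarations give custom handlers this shape. A minimal sketch of wiring a custom chat handler into the runtime, assuming the types behave as declared above; the echo logic is a placeholder, not part of the package:

import {
  createRuntime,
  defaultActionHandlers,
  type ActionExtras,
  type ChatHandler,
} from '@nlbridge/core';

// A custom handler typed with the renamed ActionExtras.
const echoChat: ChatHandler = async (prompt: string, extras: ActionExtras) => {
  const context = extras.getContextData ? await extras.getContextData() : undefined;
  return { success: true, message: `Echo${context ? ' (with context)' : ''}: ${prompt}` };
};

const runtime = createRuntime({ ...defaultActionHandlers, chat: echoChat });
// runtime.run('chat', 'Hello', {}) resolves to the ChatResult from echoChat.
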
package.json

{
   "name": "@nlbridge/core",
-  "version": "0.2.5",
+  "version": "0.2.6",
   "description": "The core library content for @nlbridge",

@@ -5,0 +5,0 @@ "keywords": [

UMD bundle

@@ -1,1 +1,1 @@

!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("openai")):"function"==typeof define&&define.amd?define(["exports","openai"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self)["@nlbridge/core"]={},t.OpenAI)}(this,(function(t,e){"use strict";const n=["chat","chat-stream","set-context","update-context","clear-context","get-context-data"],o=t=>n.includes(t),s={};const c=t=>{const e=JSON.stringify(t,null,2);return"The conversation is taking as part while user is using a web or mobile application.\nBelow is a JSON object that contains contextual information about user's session.\nThis context data should be taken into account when generating a response.\nYou should not display JSON code from the context, but rather use it to\ngenerate a response that is relevant to the user based on that context.\n\nContext JSON object:\n\n{{context}}\n".replace("{{context}}","\n\n"+e+"\n\n")},a=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"],r="gpt-3.5-turbo",i=t=>{console.warn(""+t+"")},u={chat:async(t,n)=>{const o=new e,s=[];if(n.getContextData){const t=await n.getContextData();t&&s.push({role:"system",content:c(t)})}s.push({role:"user",content:t});const a=await o.chat.completions.create({stream:!1,model:n.config?.chatModel||r,messages:s});return a.choices&&0!==a.choices.length&&a.choices[0].message.content?{success:!0,message:a.choices[0].message.content}:{success:!1,error:"No response from OpenAI."}},"chat-stream":async(t,n,o)=>{const s=new e,c=[];c.push({role:"user",content:t});try{let t=(await s.chat.completions.create({stream:!0,model:o.config?.chatModel||r,messages:c}))[Symbol.asyncIterator](),e=await t.next();for(;!e.done;){const o=e.value;if("stop"===(o.choices?.length>0?o.choices[0].finish_reason:void 0))break;const s=o.choices?.[0].delta.content;"string"==typeof s?n.next(s):i(`Undecodable message - value: ${o}`),e=await t.next()}n.complete()}catch(t){i(`Error: ${t}`)}},"set-context":async(t,e)=>{let n;do{n=Math.random().toString(36).substring(2,14),s[n]&&(n="")}while(!n);return s[n]={...t},{success:!0,contextId:n}},"update-context":async(t,e,n)=>s[t]?(s[t]={...s[t],...e},{success:!0}):{success:!1,error:"Context not found"},"get-context-data":async(t,e,n)=>{if(!t||!s[t])return{success:!1,error:"Context not found"};const o=s[t];return e?o[e]?{success:!0,data:o[e]}:{success:!1,error:"Item not found"}:{success:!0,data:o}},"clear-context":async(t,e)=>t&&s[t]?(s[t]=void 0,delete s[t],{success:!0}):{success:!1,error:"Context not found"}};t.actionIds=n,t.asOpenAiChatModel=t=>{if(a.includes(t))return t},t.asValidActionId=t=>o(t)?t:void 0,t.createRuntime=(t,e)=>({run:(n,...o)=>{const s=t[n];if(!s)throw new Error("Unsupported action");const c=Array.isArray(o)&&o.length>0?o.slice(0,-1):[],a=Array.isArray(o)&&o.length>0?o[o.length-1]:{},r="object"==typeof a&&null!==a?{...a,config:e}:{config:e};return a.contextId&&!a.getContextData&&(r.getContextData=async e=>{const n=await t["get-context-data"](a.contextId,e,r);if(n&&n.success)return n.data}),s(...c,r)}}),t.defaultHandlers=u,t.isValidActionId=o,t.openAiDefaultChatModel=r,t.supportedOpenAiChatModels=a}));
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("openai")):"function"==typeof define&&define.amd?define(["exports","openai"],e):e((t="undefined"!=typeof globalThis?globalThis:t||self)["@nlbridge/core"]={},t.OpenAI)}(this,(function(t,e){"use strict";const n=["chat","chat-stream","set-context","update-context","clear-context","get-context-data"],o=t=>n.includes(t),s={};const c=t=>{const e=JSON.stringify(t,null,2);return"The conversation is taking as part while user is using a web or mobile application.\nBelow is a JSON object that contains contextual information about user's session.\nThis context data should be taken into account when generating a response.\nYou should not display JSON code from the context, but rather use it to\ngenerate a response that is relevant to the user based on that context.\n\nContext JSON object:\n\n{{context}}\n".replace("{{context}}","\n\n"+e+"\n\n")},a=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"],r="gpt-3.5-turbo",i=t=>{console.warn(""+t+"")},u={chat:async(t,n)=>{const o=new e,s=[];if(n.getContextData){const t=await n.getContextData();t&&s.push({role:"system",content:c(t)})}s.push({role:"user",content:t});const a=await o.chat.completions.create({stream:!1,model:n.config?.chatModel||r,messages:s});return a.choices&&0!==a.choices.length&&a.choices[0].message.content?{success:!0,message:a.choices[0].message.content}:{success:!1,error:"No response from OpenAI."}},"chat-stream":async(t,n,o)=>{const s=new e,c=[];c.push({role:"user",content:t});try{let t=(await s.chat.completions.create({stream:!0,model:o.config?.chatModel||r,messages:c}))[Symbol.asyncIterator](),e=await t.next();for(;!e.done;){const o=e.value;if("stop"===(o.choices?.length>0?o.choices[0].finish_reason:void 0))break;const s=o.choices?.[0].delta.content;"string"==typeof s?n.next(s):i(`Undecodable message - value: ${o}`),e=await t.next()}n.complete()}catch(t){i(`Error: ${t}`)}},"set-context":async(t,e)=>{let n;do{n=Math.random().toString(36).substring(2,14),s[n]&&(n="")}while(!n);return s[n]={...t},{success:!0,contextId:n}},"update-context":async(t,e,n)=>s[t]?(s[t]={...s[t],...e},{success:!0}):{success:!1,error:"Context not found"},"get-context-data":async(t,e,n)=>{if(!t||!s[t])return{success:!1,error:"Context not found"};const o=s[t];return e?o[e]?{success:!0,data:o[e]}:{success:!1,error:"Item not found"}:{success:!0,data:o}},"clear-context":async(t,e)=>t&&s[t]?(s[t]=void 0,delete s[t],{success:!0}):{success:!1,error:"Context not found"}};t.actionIds=n,t.asOpenAiChatModel=t=>{if(a.includes(t))return t},t.asValidActionId=t=>o(t)?t:void 0,t.createRuntime=(t,e)=>({run:(n,...o)=>{const s=t[n];if(!s)throw new Error("Unsupported action");const c=Array.isArray(o)&&o.length>0?o.slice(0,-1):[],a=Array.isArray(o)&&o.length>0?o[o.length-1]:{},r="object"==typeof a&&null!==a?{...a,config:e}:{config:e};return a.contextId&&!a.getContextData&&(r.getContextData=async e=>{const n=await t["get-context-data"](a.contextId,e,r);if(n&&n.success)return n.data}),s(...c,r)}}),t.defaultActionHandlers=u,t.isValidActionId=o,t.openAiDefaultChatModel=r,t.supportedOpenAiChatModels=a}));