Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

@nlbridge/core

Package Overview
Dependencies
Maintainers
1
Versions
17
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@nlbridge/core - npm Package Compare versions

Comparing version 0.1.3-beta to 0.2.0-beta

2

cjs/nlbridge-core.js

@@ -1,1 +0,1 @@

"use strict";var e=require("openai");const o=["respond-to-prompt"],t=e=>o.includes(e);exports.actionRunner=(o,t)=>async(o,s)=>{if("respond-to-prompt"===o){const{apiKey:o}=t,r=new e({apiKey:o}),p="gpt-3.5-turbo",n=[];n.push({role:"user",content:s.message});const i=await r.chat.completions.create({stream:!1,model:p,messages:n});if(!i.choices||0===i.choices.length)throw new Error("No response from OpenAI.");return i.choices[0].message.content}},exports.actions=o,exports.asValidAction=e=>t(e)?e:void 0,exports.execute=(e,o)=>"respond-to-prompt"===e?Promise.resolve({type:"respond-to-prompt",success:!0,payload:{message:"Hi! I executed the action respond-to-prompt!"}}):Promise.reject("Invalid action"),exports.isValidAction=t,exports.isValidPayloadForAction=(e,o)=>"respond-to-prompt"===e&&(e=>"object"==typeof e&&null!==e&&"string"==typeof e.message)(o);
// @nlbridge/core v0.2.0-beta — CommonJS bundle (minified; do not edit by hand).
// Exposes two actions, "chat" and "chat-stream", plus validators:
//   - actionRunner(api, config): throws Error("Unsupported API") unless
//     api === "openai"; otherwise builds an OpenAI client from config.apiKey
//     and dispatches on the action id using config.chatModel.
//       * "chat": non-streaming completions.create; resolves
//         {success:true, result:{message}} or {success:false, error:...}.
//       * "chat-stream": streaming completions.create; resolves registrars
//         {onChunkReceived, onComplete, onError} and pumps streamed chunks
//         into the registered callbacks, stopping on finish_reason "stop";
//         malformed chunks and stream errors are logged via console.warn.
//   - actions / isValidAction / asValidAction, isValidPayloadForAction
//     (payload must be a non-null object with a string `message`),
//     asOpenAiChatModel (membership check against a hard-coded model list),
//     and openAiDefaultChatModel = "gpt-3.5-turbo".
// NOTE(review): actionRunner calls the factory as c(0, t); the factory's
// first parameter is unused — presumably reserved for the api id.
"use strict";var e=require("openai");const t=["chat","chat-stream"],o=e=>t.includes(e),s=e=>"object"==typeof e&&null!==e&&"string"==typeof e.message,r=s,n=console.warn,c=(t,o)=>{const{apiKey:s,chatModel:r}=o,c=new e({apiKey:s});return(e,t)=>{if("chat"===e)return((e,t)=>async(o,s)=>{const r=[];r.push({role:"user",content:s.message});const n=await e.chat.completions.create({stream:!1,model:t,messages:r});return n.choices&&0!==n.choices.length&&n.choices[0].message.content?{success:!0,result:{message:n.choices[0].message.content}}:{success:!1,error:"No response from OpenAI."}})(c,r)("chat",t);if("chat-stream"===e)return((e,t)=>async(o,s)=>{const r=[];let c,a,i;return r.push({role:"user",content:s.message}),e.chat.completions.create({stream:!0,model:t,messages:r}).then((async e=>{let t=e[Symbol.asyncIterator](),o=await t.next();for(;!o.done;){const e=o.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;if(void 0===e.choices||0===e.choices.length||void 0===e.choices[0].delta){n("No response from OpenAI.");continue}const s=e.choices?.[0].delta.content;"string"==typeof s?c&&c(s):(n("Undecodable message"),n(e)),o=await t.next()}a&&(a(),c=void 0,a=void 0,i=void 0)})).catch((e=>{n(e),i&&(i(`${e}`),c=void 0,a=void 0,i=void 0)})),{success:!0,result:{onChunkReceived:e=>{c=e},onComplete:e=>{a=e},onError:e=>{i=e}}}})(c,r)("chat-stream",t);throw new Error("Unsupported action")}},a=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"];exports.actionRunner=(e,t)=>{if("openai"!==e)throw new Error("Unsupported API");return c(0,t)},exports.actions=t,exports.asOpenAiChatModel=e=>{if(a.includes(e))return e},exports.asValidAction=e=>o(e)?e:void 0,exports.isValidAction=o,exports.isValidPayloadForAction=(e,t)=>{switch(e){case"chat":return s(t);case"chat-stream":return r(t);default:return!1}},exports.openAiDefaultChatModel="gpt-3.5-turbo";

@@ -1,1 +0,1 @@

// @nlbridge/core v0.1.3-beta — ES module bundle.
// Exposes a single action, 'respond-to-prompt', with validators, a stub
// executor, and an OpenAI-backed action runner.
import OpenAI from "openai";

// The only action id this version supports.
const supportedActions = ["respond-to-prompt"];

// True when `action` is one of the supported action ids.
const validateAction = (action) => supportedActions.includes(action);

// Returns `action` unchanged when valid, otherwise undefined.
const toValidAction = (action) => (validateAction(action) ? action : void 0);

// A payload is valid for 'respond-to-prompt' when it is a non-null object
// carrying a string `message` property.
const validatePayload = (action, payload) =>
  action === "respond-to-prompt" &&
  typeof payload === "object" &&
  payload !== null &&
  typeof payload.message === "string";

// Stub executor: resolves a canned success payload for the known action id,
// rejects with 'Invalid action' for anything else.
const runStub = (action, payload) =>
  action === "respond-to-prompt"
    ? Promise.resolve({
        type: "respond-to-prompt",
        success: !0,
        payload: { message: "Hi! I executed the action respond-to-prompt!" },
      })
    : Promise.reject("Invalid action");

// Builds a runner bound to `config` ({apiKey}). The returned async function
// sends the payload's message to OpenAI (gpt-3.5-turbo, non-streaming) and
// resolves with the first choice's content; it resolves undefined for any
// action other than 'respond-to-prompt'.
const buildRunner = (api, config) => async (action, payload) => {
  if (action === "respond-to-prompt") {
    const { apiKey } = config;
    const client = new OpenAI({ apiKey });
    const model = "gpt-3.5-turbo";
    const messages = [];
    messages.push({ role: "user", content: payload.message });
    const completion = await client.chat.completions.create({
      stream: !1,
      model,
      messages,
    });
    if (!completion.choices || completion.choices.length === 0) {
      throw new Error("No response from OpenAI.");
    }
    return completion.choices[0].message.content;
  }
};

export {
  buildRunner as actionRunner,
  supportedActions as actions,
  toValidAction as asValidAction,
  runStub as execute,
  validateAction as isValidAction,
  validatePayload as isValidPayloadForAction,
};
// @nlbridge/core v0.2.0-beta — ES module bundle (minified; do not edit by hand).
// Mirrors the CommonJS build: actions "chat" and "chat-stream".
//   - actionRunner(api, config): throws Error("Unsupported API") unless
//     api === "openai"; dispatches on the action id with config.apiKey and
//     config.chatModel. "chat" resolves {success:true, result:{message}} or
//     {success:false, error:...}; "chat-stream" resolves callback registrars
//     {onChunkReceived, onComplete, onError} and forwards streamed chunks,
//     stopping on finish_reason "stop" and logging issues via console.warn.
//   - actions / isValidAction / asValidAction, isValidPayloadForAction
//     (non-null object with string `message`), asOpenAiChatModel (membership
//     check against a hard-coded model list), and
//     openAiDefaultChatModel = "gpt-3.5-turbo".
// NOTE(review): actionRunner calls the factory as i(0, t); the factory's
// first parameter is unused — presumably reserved for the api id.
import e from"openai";const t=["chat","chat-stream"],o=e=>t.includes(e),s=e=>o(e)?e:void 0,r=e=>"object"==typeof e&&null!==e&&"string"==typeof e.message,c=r,n=(e,t)=>{switch(e){case"chat":return r(t);case"chat-stream":return c(t);default:return!1}},a=console.warn,i=(t,o)=>{const{apiKey:s,chatModel:r}=o,c=new e({apiKey:s});return(e,t)=>{if("chat"===e)return((e,t)=>async(o,s)=>{const r=[];r.push({role:"user",content:s.message});const c=await e.chat.completions.create({stream:!1,model:t,messages:r});return c.choices&&0!==c.choices.length&&c.choices[0].message.content?{success:!0,result:{message:c.choices[0].message.content}}:{success:!1,error:"No response from OpenAI."}})(c,r)("chat",t);if("chat-stream"===e)return((e,t)=>async(o,s)=>{const r=[];let c,n,i;return r.push({role:"user",content:s.message}),e.chat.completions.create({stream:!0,model:t,messages:r}).then((async e=>{let t=e[Symbol.asyncIterator](),o=await t.next();for(;!o.done;){const e=o.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;if(void 0===e.choices||0===e.choices.length||void 0===e.choices[0].delta){a("No response from OpenAI.");continue}const s=e.choices?.[0].delta.content;"string"==typeof s?c&&c(s):(a("Undecodable message"),a(e)),o=await t.next()}n&&(n(),c=void 0,n=void 0,i=void 0)})).catch((e=>{a(e),i&&(i(`${e}`),c=void 0,n=void 0,i=void 0)})),{success:!0,result:{onChunkReceived:e=>{c=e},onComplete:e=>{n=e},onError:e=>{i=e}}}})(c,r)("chat-stream",t);throw new Error("Unsupported action")}},p=(e,t)=>{if("openai"!==e)throw new Error("Unsupported API");return i(0,t)},u=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"],h="gpt-3.5-turbo",g=e=>{if(u.includes(e))return e};export{p as actionRunner,t as actions,g as asOpenAiChatModel,s as asValidAction,o as isValidAction,n as isValidPayloadForAction,h as openAiDefaultChatModel};

@@ -1,2 +0,2 @@

type ActionId = 'respond-to-prompt';
type ActionId = 'chat' | 'chat-stream';
declare const actions: ReadonlyArray<ActionId>;

@@ -9,8 +9,42 @@

declare const execute: (action: ActionId, payload: any) => Promise<any>;
type ChatStreamActionPayload = {
message: string;
};
type ChatStreamActionRunner = (action: 'chat-stream', payload: ChatStreamActionPayload) => Promise<{
success: true;
result: {
onChunkReceived: (callback: (chunk: string) => void) => void;
onComplete: (callback: () => void) => void;
onError: (callback: (error: string) => void) => void;
};
} | {
success: false;
error: string;
}>;
declare const actionRunner: (api: 'openai', config: {
type ChatActionPayload = {
message: string;
};
type ChatActionRunner = (action: 'chat', payload: ChatActionPayload) => Promise<{
success: true;
result: {
message: string;
};
} | {
success: false;
error: string;
}>;
type OpenAiConfig = {
apiKey: string;
}) => (action: ActionId, payload: any) => Promise<string | null | undefined>;
chatModel: OpenAiChatModel;
};
declare const supportedOpenAiChatModels: string[];
type OpenAiChatModel = typeof supportedOpenAiChatModels[number];
declare const openAiDefaultChatModel: OpenAiChatModel;
export { type ActionId, actionRunner, actions, asValidAction, execute, isValidAction, isValidPayloadForAction };
declare const actionRunner: (api: 'openai', config: OpenAiConfig) => ChatActionRunner | ChatStreamActionRunner;
declare const asOpenAiChatModel: (value: any) => OpenAiChatModel | undefined;
export { type ActionId, type OpenAiChatModel, type OpenAiConfig, actionRunner, actions, asOpenAiChatModel, asValidAction, isValidAction, isValidPayloadForAction, openAiDefaultChatModel };
{
"name": "@nlbridge/core",
"version": "0.1.3-beta",
"version": "0.2.0-beta",
"description": "The core library content for @nlbridge",

@@ -5,0 +5,0 @@ "keywords": [

@@ -1,1 +0,18 @@

# nlbridge core
# `nlbridge` Express.js 🌲💬⚙️
![Free And Open Source](https://img.shields.io/badge/Free%20%26%20Open%20Source-1ccb61)
## About `nlbridge`
`@nlbridge` is a Node.js library that provides utilities, middleware, and a development server for **building APIs
powered by large language models**.
This package `@nlbridge/core` provides utilities, types, and functions for building custom APIs that connect to large
language models. It's designed to easily integrate with [`nlux`](https://nlux.ai) (the conversational UI React and JS
library), but it can also be used independently with any server or client.
## `nlbridge` with Express.js
To use the `nlbridge` with Express.js, you can use the [@nlbridge/express]() package. It provides an Express.js
middleware
for `nlbridge` and it offers a simple and powerful way to build custom APIs that connect to large language models.

@@ -1,1 +0,1 @@

!function(e,o){"object"==typeof exports&&"undefined"!=typeof module?o(exports,require("openai")):"function"==typeof define&&define.amd?define(["exports","openai"],o):o((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlbridge/core"]={},e.OpenAI)}(this,(function(e,o){"use strict";const t=["respond-to-prompt"],n=e=>t.includes(e);e.actionRunner=(e,t)=>async(e,n)=>{if("respond-to-prompt"===e){const{apiKey:e}=t,s=new o({apiKey:e}),i="gpt-3.5-turbo",r=[];r.push({role:"user",content:n.message});const p=await s.chat.completions.create({stream:!1,model:i,messages:r});if(!p.choices||0===p.choices.length)throw new Error("No response from OpenAI.");return p.choices[0].message.content}},e.actions=t,e.asValidAction=e=>n(e)?e:void 0,e.execute=(e,o)=>"respond-to-prompt"===e?Promise.resolve({type:"respond-to-prompt",success:!0,payload:{message:"Hi! I executed the action respond-to-prompt!"}}):Promise.reject("Invalid action"),e.isValidAction=n,e.isValidPayloadForAction=(e,o)=>"respond-to-prompt"===e&&(e=>"object"==typeof e&&null!==e&&"string"==typeof e.message)(o)}));
// @nlbridge/core v0.2.0-beta — UMD bundle (minified; do not edit by hand).
// Registers via CommonJS when available, then AMD, otherwise attaches the
// exports object to the global under the key "@nlbridge/core" (the OpenAI
// constructor is then taken from the global as e.OpenAI).
// Mirrors the other 0.2.0-beta builds: actions "chat" and "chat-stream";
// actionRunner(api, config) throws Error("Unsupported API") unless
// api === "openai", "chat" resolves {success:true, result:{message}} or
// {success:false, error:...}, "chat-stream" resolves callback registrars
// {onChunkReceived, onComplete, onError} and forwards streamed chunks,
// stopping on finish_reason "stop" and logging issues via console.warn.
// Also exports actions / isValidAction / asValidAction /
// isValidPayloadForAction, asOpenAiChatModel (hard-coded model list), and
// openAiDefaultChatModel = "gpt-3.5-turbo".
// NOTE(review): actionRunner calls the factory as i(0, t); the factory's
// first parameter is unused — presumably reserved for the api id.
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("openai")):"function"==typeof define&&define.amd?define(["exports","openai"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlbridge/core"]={},e.OpenAI)}(this,(function(e,t){"use strict";const o=["chat","chat-stream"],s=e=>o.includes(e),n=e=>"object"==typeof e&&null!==e&&"string"==typeof e.message,r=n,c=console.warn,i=(e,o)=>{const{apiKey:s,chatModel:n}=o,r=new t({apiKey:s});return(e,t)=>{if("chat"===e)return((e,t)=>async(o,s)=>{const n=[];n.push({role:"user",content:s.message});const r=await e.chat.completions.create({stream:!1,model:t,messages:n});return r.choices&&0!==r.choices.length&&r.choices[0].message.content?{success:!0,result:{message:r.choices[0].message.content}}:{success:!1,error:"No response from OpenAI."}})(r,n)("chat",t);if("chat-stream"===e)return((e,t)=>async(o,s)=>{const n=[];let r,i,a;return n.push({role:"user",content:s.message}),e.chat.completions.create({stream:!0,model:t,messages:n}).then((async e=>{let t=e[Symbol.asyncIterator](),o=await t.next();for(;!o.done;){const e=o.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;if(void 0===e.choices||0===e.choices.length||void 0===e.choices[0].delta){c("No response from OpenAI.");continue}const s=e.choices?.[0].delta.content;"string"==typeof s?r&&r(s):(c("Undecodable message"),c(e)),o=await t.next()}i&&(i(),r=void 0,i=void 0,a=void 0)})).catch((e=>{c(e),a&&(a(`${e}`),r=void 0,i=void 0,a=void 0)})),{success:!0,result:{onChunkReceived:e=>{r=e},onComplete:e=>{i=e},onError:e=>{a=e}}}})(r,n)("chat-stream",t);throw new Error("Unsupported action")}},a=["gpt-4-0125-preview","gpt-4-turbo-preview","gpt-4-1106-preview","gpt-4-vision-preview","gpt-4","gpt-4-0314","gpt-4-0613","gpt-4-32k","gpt-4-32k-0314","gpt-4-32k-0613","gpt-3.5-turbo","gpt-3.5-turbo-16k","gpt-3.5-turbo-0301","gpt-3.5-turbo-0613","gpt-3.5-turbo-1106","gpt-3.5-turbo-0125","gpt-3.5-turbo-16k-0613"];e.actionRunner=(e,t)=>{if("openai"!==e)throw new Error("Unsupported API");return i(0,t)},e.actions=o,e.asOpenAiChatModel=e=>{if(a.includes(e))return e},e.asValidAction=e=>s(e)?e:void 0,e.isValidAction=s,e.isValidPayloadForAction=(e,t)=>{switch(e){case"chat":return n(t);case"chat-stream":return r(t);default:return!1}},e.openAiDefaultChatModel="gpt-3.5-turbo"}));
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc