Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@nlux/openai

Package Overview
Dependencies
Maintainers
1
Versions
203
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@nlux/openai - npm Package Compare versions

Comparing version 0.7.3 to 0.8.0

2

cjs/openai.js

@@ -1,1 +0,1 @@

"use strict";var e=require("@nlux/nlux"),t=require("openai");const s="stream",a=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},r=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),i=Object.freeze({id:"nlux-gpt-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class n{constructor({systemMessage:a,apiKey:r,dataTransferMode:o,model:i}){this.currentStatus="disconnected",this.systemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.theDataTransferMode=o??s,this.model=i??"gpt-4",this.openai=new t({apiKey:r,dangerouslyAllowBrowser:!0}),a&&(this.systemMessage=a),e.warn('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. 
Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.info.id}get info(){return i}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class l extends n{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return o}async fetchText(t){const s=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];s.push({role:"user",content:t});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:s});return this.decode(e)}catch(t){throw e.warn("Error while making API call to OpenAI"),e.warn(t),new e.NluxUsageError({source:this.constructor.name,message:t?.message||"Error while making API call to OpenAI",exceptionId:a(t)??void 0})}}streamText(t,s){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}class c extends n{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return r}fetchText(t){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(t,s){const r=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];r.push({role:"user",content:t}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:r}).then((async t=>{let a=t[Symbol.asyncIterator](),r=await a.next();for(;!r.done;){const t=r.value;if("stop"===(t.choices?.length>0?t.choices[0].finish_reason:void 0))break;const o=await this.decode(t);void 0!==o?s.next(o):(e.warn("Undecodable message"),e.warn(t)),r=await 
a.next()}s.complete()})).catch((t=>{e.warn(t),s.error(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:a(t)??void 0}))}))}}class d{constructor(e){this.apiKey=null,this.dataTransferMode=s,this.model=null,this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,this.systemMessage=null,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}create(){if(!this.apiKey)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const t={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new c(t):new l(t)}withApiKey(t){if(this.setApiKeyCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=t,this.setApiKeyCalled=!0,this}withDataTransferMode(t){if(this.setStreamOrFetchCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=t,this.setStreamOrFetchCalled=!0,this}withModel(t){if(this.setModelCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. 
Make sure you are not calling withModel() twice."});return this.model=t,this.setModelCalled=!0,this}withSystemMessage(t){if(this.setInitialSystemMessageCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=t??null,this.setInitialSystemMessageCalled=!0,this}}Object.defineProperty(exports,"debug",{enumerable:!0,get:function(){return e.debug}}),exports.OpenAiFetchAdapter=l,exports.OpenAiStreamingAdapter=c,exports.createAdapter=()=>new d;
"use strict";var e=require("@nlux/core"),t=require("openai");const s="stream",a=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},r=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),i=Object.freeze({id:"nlux-gpt-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class n{constructor({systemMessage:a,apiKey:r,dataTransferMode:o,model:i}){this.currentStatus="disconnected",this.systemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.theDataTransferMode=o??s,this.model=i??"gpt-4",this.openai=new t({apiKey:r,dangerouslyAllowBrowser:!0}),a&&(this.systemMessage=a),e.warn('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. 
Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.info.id}get info(){return i}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class l extends n{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return o}async fetchText(t){const s=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];s.push({role:"user",content:t});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:s});return this.decode(e)}catch(t){throw e.warn("Error while making API call to OpenAI"),e.warn(t),new e.NluxUsageError({source:this.constructor.name,message:t?.message||"Error while making API call to OpenAI",exceptionId:a(t)??void 0})}}streamText(t,s){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}class c extends n{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return r}fetchText(t){throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(t,s){const r=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];r.push({role:"user",content:t}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:r}).then((async t=>{let a=t[Symbol.asyncIterator](),r=await a.next();for(;!r.done;){const t=r.value;if("stop"===(t.choices?.length>0?t.choices[0].finish_reason:void 0))break;const o=await this.decode(t);void 0!==o?s.next(o):(e.warn("Undecodable message"),e.warn(t)),r=await 
a.next()}s.complete()})).catch((t=>{e.warn(t),s.error(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:a(t)??void 0}))}))}}class d{constructor(e){this.apiKey=null,this.dataTransferMode=s,this.model=null,this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,this.systemMessage=null,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}create(){if(!this.apiKey)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const t={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new c(t):new l(t)}withApiKey(t){if(this.setApiKeyCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=t,this.setApiKeyCalled=!0,this}withDataTransferMode(t){if(this.setStreamOrFetchCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=t,this.setStreamOrFetchCalled=!0,this}withModel(t){if(this.setModelCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. 
Make sure you are not calling withModel() twice."});return this.model=t,this.setModelCalled=!0,this}withSystemMessage(t){if(this.setInitialSystemMessageCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=t??null,this.setInitialSystemMessageCalled=!0,this}}Object.defineProperty(exports,"debug",{enumerable:!0,get:function(){return e.debug}}),exports.OpenAiFetchAdapter=l,exports.OpenAiStreamingAdapter=c,exports.createAdapter=()=>new d;

@@ -1,1 +0,1 @@

/**
 * @nlux/openai — ES module bundle (v0.7.x line, depends on @nlux/nlux).
 * Un-minified for review; behavior mirrors the original minified output.
 * Exposes two OpenAI chat-completion adapters ("fetch" and "stream" data
 * transfer modes) plus a fluent builder via createAdapter().
 */
import {warn, NluxUsageError} from "@nlux/nlux";
export {debug} from "@nlux/nlux";
import OpenAI from "openai";

// Adapters default to streaming when no dataTransferMode is supplied.
const defaultDataTransferMode = "stream";

// Map a raw OpenAI SDK error to an NLUX exception id, when recognisable.
const decodeErrorToExceptionId = (error) => {
  if (typeof error === "object" && error !== null) {
    if (error.code === "invalid_api_key") {
      return "NX-NT-002";
    }
    if (error.message?.toLowerCase().includes("connection error")) {
      return "NX-NT-001";
    }
  }
  return null;
};

// Codec for streamed responses: each chunk carries choices[0].delta.content.
const streamingAdapterConfig = Object.freeze({
  encodeMessage: async (message) => ({role: "user", content: message}),
  decodeMessage: async (payload) => {
    if (!payload.choices || !payload.choices[0]) {
      throw Error("Invalid payload");
    }
    const content = payload.choices[0].delta.content;
    if (typeof content === "string") {
      return content;
    }
    // Non-string deltas (e.g. role-only chunks) decode to undefined.
  },
});

// Codec for fetch responses: full text sits in choices[0].message.content.
const fetchAdapterConfig = Object.freeze({
  encodeMessage: async (message) => ({role: "user", content: message}),
  decodeMessage: async (payload) => {
    if (!payload.choices || !payload.choices[0]) {
      throw Error("Invalid payload");
    }
    const content = payload.choices[0].message.content;
    if (typeof content === "string") {
      return content;
    }
  },
});

// Static adapter metadata advertised through the `info` getter.
const gptAdapterInfo = Object.freeze({
  id: "nlux-gpt-adapter",
  capabilities: {
    textChat: true,
    audio: false,
    fileUpload: false,
    replyToSingleMessage: false,
  },
  // NOTE(review): "chat/completion" looks like a typo for "chat/completions";
  // informational only — actual requests go through the openai client.
  remote: {url: "https://api.openai.com/v1/chat/completion"},
  inputFormats: ["text"],
  outputFormats: ["text"],
});

// Shared base: holds the OpenAI client, model, system message and mode.
class OpenAiAbstractAdapter {
  constructor({systemMessage, apiKey, dataTransferMode, model}) {
    this.currentStatus = "disconnected";
    this.systemMessage = "Act as a helpful assistant to the user";
    this.theDataTransferMode = dataTransferMode ?? defaultDataTransferMode;
    this.model = model ?? "gpt-4";
    this.openai = new OpenAI({apiKey, dangerouslyAllowBrowser: true});
    if (systemMessage) {
      this.systemMessage = systemMessage;
    }
    // Warn unconditionally: this build always enables browser mode.
    warn('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety');
  }

  get dataTransferMode() {
    return this.theDataTransferMode;
  }

  get id() {
    return this.info.id;
  }

  get info() {
    return gptAdapterInfo;
  }

  get status() {
    return this.currentStatus;
  }

  // Decode a raw API payload via the subclass-provided codec.
  async decode(payload) {
    const {decodeMessage} = this.config;
    return decodeMessage(payload);
  }

  // Encode a user message via the subclass-provided codec.
  async encode(message) {
    const {encodeMessage} = this.config;
    return encodeMessage(message);
  }
}

// Adapter performing a single non-streamed chat-completion request.
class OpenAiFetchAdapter extends OpenAiAbstractAdapter {
  constructor({apiKey, model, systemMessage}) {
    super({apiKey, model, systemMessage, dataTransferMode: "fetch"});
    if (systemMessage !== undefined && systemMessage.length > 0) {
      this.systemMessage = systemMessage;
    }
  }

  get config() {
    return fetchAdapterConfig;
  }

  async fetchText(message) {
    const messagesToSend = this.systemMessage
      ? [{role: "system", content: this.systemMessage}]
      : [];
    messagesToSend.push({role: "user", content: message});
    try {
      const response = await this.openai.chat.completions.create({
        stream: false,
        model: this.model,
        messages: messagesToSend,
      });
      return this.decode(response);
    } catch (error) {
      warn("Error while making API call to OpenAI");
      warn(error);
      throw new NluxUsageError({
        source: this.constructor.name,
        message: error?.message || "Error while making API call to OpenAI",
        exceptionId: decodeErrorToExceptionId(error) ?? undefined,
      });
    }
  }

  streamText(message, observer) {
    throw new NluxUsageError({
      source: this.constructor.name,
      message: "Cannot stream text from the fetch adapter!",
    });
  }
}

// Adapter streaming chat-completion chunks into an observer.
class OpenAiStreamingAdapter extends OpenAiAbstractAdapter {
  constructor({apiKey, model, systemMessage}) {
    super({apiKey, model, systemMessage, dataTransferMode: "stream"});
    if (systemMessage !== undefined && systemMessage.length > 0) {
      this.systemMessage = systemMessage;
    }
  }

  get config() {
    return streamingAdapterConfig;
  }

  fetchText(message) {
    throw new NluxUsageError({
      source: this.constructor.name,
      message: "Cannot fetch text from the streaming adapter!",
    });
  }

  streamText(message, observer) {
    const messagesToSend = this.systemMessage
      ? [{role: "system", content: this.systemMessage}]
      : [];
    messagesToSend.push({role: "user", content: message});
    this.openai.chat.completions
      .create({stream: true, model: this.model, messages: messagesToSend})
      .then(async (response) => {
        const iterator = response[Symbol.asyncIterator]();
        let result = await iterator.next();
        while (!result.done) {
          const chunk = result.value;
          const finishReason = chunk.choices?.length > 0
            ? chunk.choices[0].finish_reason
            : undefined;
          if (finishReason === "stop") {
            break;
          }
          const decoded = await this.decode(chunk);
          if (decoded !== undefined) {
            observer.next(decoded);
          } else {
            warn("Undecodable message");
            warn(chunk);
          }
          result = await iterator.next();
        }
        observer.complete();
      })
      .catch((error) => {
        warn(error);
        observer.error(new NluxUsageError({
          source: this.constructor.name,
          message: error.message,
          exceptionId: decodeErrorToExceptionId(error) ?? undefined,
        }));
      });
  }
}

// Fluent builder: one-shot setters guarded against repeated calls.
class OpenAiAdapterBuilder {
  constructor(cloneFrom) {
    this.apiKey = null;
    this.dataTransferMode = defaultDataTransferMode;
    this.model = null;
    this.setApiKeyCalled = false;
    this.setInitialSystemMessageCalled = false;
    this.setModelCalled = false;
    this.setStreamOrFetchCalled = false;
    this.systemMessage = null;
    if (cloneFrom) {
      this.apiKey = cloneFrom.apiKey;
      this.dataTransferMode = cloneFrom.dataTransferMode;
      this.model = cloneFrom.model;
      this.systemMessage = cloneFrom.systemMessage;
      this.setApiKeyCalled = cloneFrom.setApiKeyCalled;
      this.setInitialSystemMessageCalled = cloneFrom.setInitialSystemMessageCalled;
      this.setModelCalled = cloneFrom.setModelCalled;
      this.setStreamOrFetchCalled = cloneFrom.setStreamOrFetchCalled;
    }
  }

  create() {
    if (!this.apiKey) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create().",
      });
    }
    const options = {
      apiKey: this.apiKey,
      dataTransferMode: this.dataTransferMode,
      model: this.model ?? undefined,
      systemMessage: this.systemMessage ?? undefined,
    };
    return this.dataTransferMode === "stream"
      ? new OpenAiStreamingAdapter(options)
      : new OpenAiFetchAdapter(options);
  }

  withApiKey(apiKey) {
    if (this.setApiKeyCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice.",
      });
    }
    this.apiKey = apiKey;
    this.setApiKeyCalled = true;
    return this;
  }

  withDataTransferMode(mode) {
    if (this.setStreamOrFetchCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice.",
      });
    }
    this.dataTransferMode = mode;
    this.setStreamOrFetchCalled = true;
    return this;
  }

  withModel(model) {
    if (this.setModelCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice.",
      });
    }
    this.model = model;
    this.setModelCalled = true;
    return this;
  }

  withSystemMessage(systemMessage) {
    if (this.setInitialSystemMessageCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice.",
      });
    }
    this.systemMessage = systemMessage ?? null;
    this.setInitialSystemMessageCalled = true;
    return this;
  }
}

const createAdapter = () => new OpenAiAdapterBuilder();

export {OpenAiFetchAdapter, OpenAiStreamingAdapter, createAdapter};
/**
 * @nlux/openai — ES module bundle (v0.8.x line, depends on @nlux/core).
 * Un-minified for review; behavior mirrors the original minified output.
 * Exposes two OpenAI chat-completion adapters ("fetch" and "stream" data
 * transfer modes) plus a fluent builder via createAdapter().
 */
import {warn, NluxUsageError} from "@nlux/core";
export {debug} from "@nlux/core";
import OpenAI from "openai";

// Adapters default to streaming when no dataTransferMode is supplied.
const defaultDataTransferMode = "stream";

// Map a raw OpenAI SDK error to an NLUX exception id, when recognisable.
const decodeErrorToExceptionId = (error) => {
  if (typeof error === "object" && error !== null) {
    if (error.code === "invalid_api_key") {
      return "NX-NT-002";
    }
    if (error.message?.toLowerCase().includes("connection error")) {
      return "NX-NT-001";
    }
  }
  return null;
};

// Codec for streamed responses: each chunk carries choices[0].delta.content.
const streamingAdapterConfig = Object.freeze({
  encodeMessage: async (message) => ({role: "user", content: message}),
  decodeMessage: async (payload) => {
    if (!payload.choices || !payload.choices[0]) {
      throw Error("Invalid payload");
    }
    const content = payload.choices[0].delta.content;
    if (typeof content === "string") {
      return content;
    }
    // Non-string deltas (e.g. role-only chunks) decode to undefined.
  },
});

// Codec for fetch responses: full text sits in choices[0].message.content.
const fetchAdapterConfig = Object.freeze({
  encodeMessage: async (message) => ({role: "user", content: message}),
  decodeMessage: async (payload) => {
    if (!payload.choices || !payload.choices[0]) {
      throw Error("Invalid payload");
    }
    const content = payload.choices[0].message.content;
    if (typeof content === "string") {
      return content;
    }
  },
});

// Static adapter metadata advertised through the `info` getter.
const gptAdapterInfo = Object.freeze({
  id: "nlux-gpt-adapter",
  capabilities: {
    textChat: true,
    audio: false,
    fileUpload: false,
    replyToSingleMessage: false,
  },
  // NOTE(review): "chat/completion" looks like a typo for "chat/completions";
  // informational only — actual requests go through the openai client.
  remote: {url: "https://api.openai.com/v1/chat/completion"},
  inputFormats: ["text"],
  outputFormats: ["text"],
});

// Shared base: holds the OpenAI client, model, system message and mode.
class OpenAiAbstractAdapter {
  constructor({systemMessage, apiKey, dataTransferMode, model}) {
    this.currentStatus = "disconnected";
    this.systemMessage = "Act as a helpful assistant to the user";
    this.theDataTransferMode = dataTransferMode ?? defaultDataTransferMode;
    this.model = model ?? "gpt-4";
    this.openai = new OpenAI({apiKey, dangerouslyAllowBrowser: true});
    if (systemMessage) {
      this.systemMessage = systemMessage;
    }
    // Warn unconditionally: this build always enables browser mode.
    warn('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety');
  }

  get dataTransferMode() {
    return this.theDataTransferMode;
  }

  get id() {
    return this.info.id;
  }

  get info() {
    return gptAdapterInfo;
  }

  get status() {
    return this.currentStatus;
  }

  // Decode a raw API payload via the subclass-provided codec.
  async decode(payload) {
    const {decodeMessage} = this.config;
    return decodeMessage(payload);
  }

  // Encode a user message via the subclass-provided codec.
  async encode(message) {
    const {encodeMessage} = this.config;
    return encodeMessage(message);
  }
}

// Adapter performing a single non-streamed chat-completion request.
class OpenAiFetchAdapter extends OpenAiAbstractAdapter {
  constructor({apiKey, model, systemMessage}) {
    super({apiKey, model, systemMessage, dataTransferMode: "fetch"});
    if (systemMessage !== undefined && systemMessage.length > 0) {
      this.systemMessage = systemMessage;
    }
  }

  get config() {
    return fetchAdapterConfig;
  }

  async fetchText(message) {
    const messagesToSend = this.systemMessage
      ? [{role: "system", content: this.systemMessage}]
      : [];
    messagesToSend.push({role: "user", content: message});
    try {
      const response = await this.openai.chat.completions.create({
        stream: false,
        model: this.model,
        messages: messagesToSend,
      });
      return this.decode(response);
    } catch (error) {
      warn("Error while making API call to OpenAI");
      warn(error);
      throw new NluxUsageError({
        source: this.constructor.name,
        message: error?.message || "Error while making API call to OpenAI",
        exceptionId: decodeErrorToExceptionId(error) ?? undefined,
      });
    }
  }

  streamText(message, observer) {
    throw new NluxUsageError({
      source: this.constructor.name,
      message: "Cannot stream text from the fetch adapter!",
    });
  }
}

// Adapter streaming chat-completion chunks into an observer.
class OpenAiStreamingAdapter extends OpenAiAbstractAdapter {
  constructor({apiKey, model, systemMessage}) {
    super({apiKey, model, systemMessage, dataTransferMode: "stream"});
    if (systemMessage !== undefined && systemMessage.length > 0) {
      this.systemMessage = systemMessage;
    }
  }

  get config() {
    return streamingAdapterConfig;
  }

  fetchText(message) {
    throw new NluxUsageError({
      source: this.constructor.name,
      message: "Cannot fetch text from the streaming adapter!",
    });
  }

  streamText(message, observer) {
    const messagesToSend = this.systemMessage
      ? [{role: "system", content: this.systemMessage}]
      : [];
    messagesToSend.push({role: "user", content: message});
    this.openai.chat.completions
      .create({stream: true, model: this.model, messages: messagesToSend})
      .then(async (response) => {
        const iterator = response[Symbol.asyncIterator]();
        let result = await iterator.next();
        while (!result.done) {
          const chunk = result.value;
          const finishReason = chunk.choices?.length > 0
            ? chunk.choices[0].finish_reason
            : undefined;
          if (finishReason === "stop") {
            break;
          }
          const decoded = await this.decode(chunk);
          if (decoded !== undefined) {
            observer.next(decoded);
          } else {
            warn("Undecodable message");
            warn(chunk);
          }
          result = await iterator.next();
        }
        observer.complete();
      })
      .catch((error) => {
        warn(error);
        observer.error(new NluxUsageError({
          source: this.constructor.name,
          message: error.message,
          exceptionId: decodeErrorToExceptionId(error) ?? undefined,
        }));
      });
  }
}

// Fluent builder: one-shot setters guarded against repeated calls.
class OpenAiAdapterBuilder {
  constructor(cloneFrom) {
    this.apiKey = null;
    this.dataTransferMode = defaultDataTransferMode;
    this.model = null;
    this.setApiKeyCalled = false;
    this.setInitialSystemMessageCalled = false;
    this.setModelCalled = false;
    this.setStreamOrFetchCalled = false;
    this.systemMessage = null;
    if (cloneFrom) {
      this.apiKey = cloneFrom.apiKey;
      this.dataTransferMode = cloneFrom.dataTransferMode;
      this.model = cloneFrom.model;
      this.systemMessage = cloneFrom.systemMessage;
      this.setApiKeyCalled = cloneFrom.setApiKeyCalled;
      this.setInitialSystemMessageCalled = cloneFrom.setInitialSystemMessageCalled;
      this.setModelCalled = cloneFrom.setModelCalled;
      this.setStreamOrFetchCalled = cloneFrom.setStreamOrFetchCalled;
    }
  }

  create() {
    if (!this.apiKey) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create().",
      });
    }
    const options = {
      apiKey: this.apiKey,
      dataTransferMode: this.dataTransferMode,
      model: this.model ?? undefined,
      systemMessage: this.systemMessage ?? undefined,
    };
    return this.dataTransferMode === "stream"
      ? new OpenAiStreamingAdapter(options)
      : new OpenAiFetchAdapter(options);
  }

  withApiKey(apiKey) {
    if (this.setApiKeyCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice.",
      });
    }
    this.apiKey = apiKey;
    this.setApiKeyCalled = true;
    return this;
  }

  withDataTransferMode(mode) {
    if (this.setStreamOrFetchCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice.",
      });
    }
    this.dataTransferMode = mode;
    this.setStreamOrFetchCalled = true;
    return this;
  }

  withModel(model) {
    if (this.setModelCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice.",
      });
    }
    this.model = model;
    this.setModelCalled = true;
    return this;
  }

  withSystemMessage(systemMessage) {
    if (this.setInitialSystemMessageCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice.",
      });
    }
    this.systemMessage = systemMessage ?? null;
    this.setInitialSystemMessageCalled = true;
    return this;
  }
}

const createAdapter = () => new OpenAiAdapterBuilder();

export {OpenAiFetchAdapter, OpenAiStreamingAdapter, createAdapter};

@@ -1,3 +0,3 @@

import { DataTransferMode, AdapterBuilder, StandardAdapter, StandardAdapterStatus, StandardAdapterConfig, StandardAdapterInfo, StreamingAdapterObserver } from '@nlux/nlux';
export { Adapter, DataTransferMode, StandardAdapter, StreamingAdapterObserver, debug } from '@nlux/nlux';
import { DataTransferMode, AdapterBuilder, StandardAdapter, StandardAdapterStatus, StandardAdapterConfig, StandardAdapterInfo, StreamingAdapterObserver } from '@nlux/core';
export { Adapter, DataTransferMode, StandardAdapter, StreamingAdapterObserver, debug } from '@nlux/core';
import OpenAI from 'openai';

@@ -4,0 +4,0 @@

{
"name": "@nlux/openai",
"version": "0.7.3",
"version": "0.8.0",
"description": "The OpenAI adapters for NLUX, the javascript library for building conversational AI interfaces.",

@@ -11,3 +11,5 @@ "keywords": [

"large-language-model",
"convo",
"ai-chatbot",
"ai-chat",
"ai-bot",
"chat-gpt",

@@ -57,3 +59,3 @@ "openai",

"openai": "^4.20.1",
"@nlux/nlux": "0.7.3"
"@nlux/core": "0.8.0"
},

@@ -60,0 +62,0 @@ "peerDependencies": {},

@@ -6,2 +6,2 @@ # NLUX OpenAI Adapter

Please check out the [@nlux/nlux](https://www.npmjs.com/package/@nlux/nlux) package for more information.
Please check the [@nlux/core](https://www.npmjs.com/package/@nlux/core) package for more information.

@@ -1,1 +0,1 @@

!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/nlux"),require("openai")):"function"==typeof define&&define.amd?define(["exports","@nlux/nlux","openai"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/openai"]={},e.nlux,e.OpenAI)}(this,(function(e,t,s){"use strict";const a="stream",r=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),i=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),n=Object.freeze({id:"nlux-gpt-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class l{constructor({systemMessage:e,apiKey:r,dataTransferMode:o,model:i}){this.currentStatus="disconnected",this.systemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.theDataTransferMode=o??a,this.model=i??"gpt-4",this.openai=new s({apiKey:r,dangerouslyAllowBrowser:!0}),e&&(this.systemMessage=e),t.warn('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. 
Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.info.id}get info(){return n}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class c extends l{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return i}async fetchText(e){const s=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];s.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:s});return this.decode(e)}catch(e){throw t.warn("Error while making API call to OpenAI"),t.warn(e),new t.NluxUsageError({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:r(e)??void 0})}}streamText(e,s){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}class d extends l{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return o}fetchText(e){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,s){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];a.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:a}).then((async e=>{let a=e[Symbol.asyncIterator](),r=await a.next();for(;!r.done;){const e=r.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const o=await this.decode(e);void 0!==o?s.next(o):(t.warn("Undecodable message"),t.warn(e)),r=await 
a.next()}s.complete()})).catch((e=>{t.warn(e),s.error(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:r(e)??void 0}))}))}}class h{constructor(e){this.apiKey=null,this.dataTransferMode=a,this.model=null,this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,this.systemMessage=null,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}create(){if(!this.apiKey)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new d(e):new c(e)}withApiKey(e){if(this.setApiKeyCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.setApiKeyCalled=!0,this}withDataTransferMode(e){if(this.setStreamOrFetchCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.setStreamOrFetchCalled=!0,this}withModel(e){if(this.setModelCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. 
Make sure you are not calling withModel() twice."});return this.model=e,this.setModelCalled=!0,this}withSystemMessage(e){if(this.setInitialSystemMessageCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.setInitialSystemMessageCalled=!0,this}}Object.defineProperty(e,"debug",{enumerable:!0,get:function(){return t.debug}}),e.OpenAiFetchAdapter=c,e.OpenAiStreamingAdapter=d,e.createAdapter=()=>new h}));
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/core"),require("openai")):"function"==typeof define&&define.amd?define(["exports","@nlux/core","openai"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/openai"]={},e.core,e.OpenAI)}(this,(function(e,t,s){"use strict";const a="stream",r=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),i=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),n=Object.freeze({id:"nlux-gpt-adapter",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class l{constructor({systemMessage:e,apiKey:r,dataTransferMode:o,model:i}){this.currentStatus="disconnected",this.systemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.theDataTransferMode=o??a,this.model=i??"gpt-4",this.openai=new s({apiKey:r,dangerouslyAllowBrowser:!0}),e&&(this.systemMessage=e),t.warn('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. 
Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.info.id}get info(){return n}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class c extends l{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return i}async fetchText(e){const s=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];s.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:s});return this.decode(e)}catch(e){throw t.warn("Error while making API call to OpenAI"),t.warn(e),new t.NluxUsageError({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:r(e)??void 0})}}streamText(e,s){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}class d extends l{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}get config(){return o}fetchText(e){throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,s){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];a.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:a}).then((async e=>{let a=e[Symbol.asyncIterator](),r=await a.next();for(;!r.done;){const e=r.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const o=await this.decode(e);void 0!==o?s.next(o):(t.warn("Undecodable message"),t.warn(e)),r=await 
a.next()}s.complete()})).catch((e=>{t.warn(e),s.error(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:r(e)??void 0}))}))}}class h{constructor(e){this.apiKey=null,this.dataTransferMode=a,this.model=null,this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,this.systemMessage=null,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}create(){if(!this.apiKey)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new d(e):new c(e)}withApiKey(e){if(this.setApiKeyCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.setApiKeyCalled=!0,this}withDataTransferMode(e){if(this.setStreamOrFetchCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.setStreamOrFetchCalled=!0,this}withModel(e){if(this.setModelCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. 
Make sure you are not calling withModel() twice."});return this.model=e,this.setModelCalled=!0,this}withSystemMessage(e){if(this.setInitialSystemMessageCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.setInitialSystemMessageCalled=!0,this}}Object.defineProperty(e,"debug",{enumerable:!0,get:function(){return t.debug}}),e.OpenAiFetchAdapter=c,e.OpenAiStreamingAdapter=d,e.createAdapter=()=>new h}));
Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc