@nlux/openai - npm package version comparison

Comparing version 2.0.7-alpha to 2.0.8-alpha

cjs/openai.js

@@ -1,1 +0,1 @@

"use strict";var e=require("openai");const t=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},s=[];var a=Object.defineProperty,r=(e,t,s)=>((e,t,s)=>t in e?a(e,t,{enumerable:!0,configurable:!0,writable:!0,value:s}):e[t]=s)(e,"symbol"!=typeof t?t+"":t,s);class o extends Error{constructor(e={}){super(e.message),r(this,"exceptionId"),r(this,"message"),r(this,"source"),r(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class n extends o{}const i="stream",l=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},c=e=>{switch(e){case"system":return"system";case"user":default:return"user";case"ai":return"assistant"}},h=e=>e.map((e=>{let s;if("string"==typeof e.message||"number"===e.message?s=`${e.message}`:"object"===e.message&&(s=JSON.stringify(e.message)),void 0!==s)return{role:c(e.role),content:s};t("Empty message or unsupported message format found in conversation history and will not be included in the conversation history sent to OpenAI.")})).filter((e=>void 0!==e)),d=Object.freeze({id:"nlux-openai-adapter",capabilities:{chat:!0,fileUpload:!1,speechToText:!1,textToSpeech:!1}});class m{constructor({systemMessage:s,apiKey:a,dataTransferMode:r,model:o}){this.systemMessage="Act as a helpful assistant to the user",this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{let t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.theDataTransferMode=r??i,this.model=o??"gpt-3.5-turbo",this.openai=new e({apiKey:a,dangerouslyAllowBrowser:!0}),s&&(this.systemMessage=s),t('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". To learn more about OpenAI\' recommendation for handling API keys, please visit:\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\nThe useUnsafeChatAdapter/createUnsafeChatAdapter are only intended for development and testing purposes.\n\nFor production use, we recommend that you implement a server-side proxy and configure a customized adapter for it. 
To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.__instanceId}get info(){return d}}class u extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}async fetchText(e,s){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(s.conversationHistory){const e=h(s.conversationHistory);a.push(...e)}a.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:a}),s=await(async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message?.content;if(null!==t)return t})(e);if(void 0===s)throw t("Undecodable message received from OpenAI"),new n({source:this.constructor.name,message:"Undecodable message received from OpenAI"});return s}catch(e){throw t("Error while making API call to OpenAI"),t(e),new n({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:l(e)??void 0})}}streamText(e,t,s){throw new n({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const p=async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t};class y extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}fetchText(e){throw new n({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,s,a){const r=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(a.conversationHistory){const e=h(a.conversationHistory).map((e=>({content:"string"==typeof e.content?e.content:JSON.stringify(e.content),role:e.role})));r.push(...e)}r.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:r}).then((async e=>{let a=e[Symbol.asyncIterator](),r=await a.next();for(;!r.done;){const e=r.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const o=await p(e);void 0!==o?s.next(o):(t("Undecodable message"),t(e)),r=await a.next()}s.complete()})).catch((e=>{t(e),s.error(new n({source:this.constructor.name,message:e.message,exceptionId:l(e)??void 0}))}))}}class g{constructor(e){this.apiKey=null,this.dataTransferMode=i,this.model=null,this.systemMessage=null,this.withApiKeyCalled=!1,this.withDataTransferModeCalled=!1,this.withModelCalled=!1,this.withSystemMessageCalled=!1,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.withApiKeyCalled=e.withApiKeyCalled,this.withSystemMessageCalled=e.withSystemMessageCalled,this.withModelCalled=e.withModelCalled,this.withDataTransferModeCalled=e.withDataTransferModeCalled)}create(){if(!this.apiKey)throw new n({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new y(e):new u(e)}withApiKey(e){if(this.withApiKeyCalled)throw new n({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.withApiKeyCalled=!0,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new n({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.withDataTransferModeCalled=!0,this}withModel(e){if(this.withModelCalled)throw new n({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=e,this.withModelCalled=!0,this}withSystemMessage(e){if(this.withSystemMessageCalled)throw new n({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.withSystemMessageCalled=!0,this}}exports.createUnsafeChatAdapter=()=>{var e;return e="You just have created an OpenAI adapter that connects to the API directly from the browser. This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters",s.includes(e)||(s.push(e),t(e)),new g};
"use strict";var e=require("openai");const t=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},s=[];var a=Object.defineProperty,r=(e,t,s)=>((e,t,s)=>t in e?a(e,t,{enumerable:!0,configurable:!0,writable:!0,value:s}):e[t]=s)(e,"symbol"!=typeof t?t+"":t,s);class o extends Error{constructor(e={}){super(e.message),r(this,"exceptionId"),r(this,"message"),r(this,"source"),r(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class n extends o{}const i="stream",c=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},l=e=>{switch(e){case"system":return"system";case"user":default:return"user";case"ai":return"assistant"}},h=e=>e.map((e=>{let s;if("string"==typeof e.message||"number"===e.message?s=`${e.message}`:"object"===e.message&&(s=JSON.stringify(e.message)),void 0!==s)return{role:l(e.role),content:s};t("Empty message or unsupported message format found in conversation history and will not be included in the conversation history sent to OpenAI.")})).filter((e=>void 0!==e)),d=Object.freeze({id:"nlux-openai-adapter",capabilities:{chat:!0,fileUpload:!1,speechToText:!1,textToSpeech:!1}});class m{constructor({systemMessage:s,apiKey:a,dataTransferMode:r,model:o}){this.systemMessage="Act as a helpful assistant to the user",this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.theDataTransferMode=r??i,this.model=o??"gpt-3.5-turbo",this.openai=new e({apiKey:a,dangerouslyAllowBrowser:!0}),s&&(this.systemMessage=s),t('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". To learn more about OpenAI\' recommendation for handling API keys, please visit:\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\nThe useUnsafeChatAdapter/createUnsafeChatAdapter are only intended for development and testing purposes.\n\nFor production use, we recommend that you implement a server-side proxy and configure a customized adapter for it. 
To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.__instanceId}get info(){return d}}class u extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}async fetchText(e,s){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(s.conversationHistory){const e=h(s.conversationHistory);a.push(...e)}a.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:a}),s=await(async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message?.content;if(null!==t)return t})(e);if(void 0===s)throw t("Undecodable message received from OpenAI"),new n({source:this.constructor.name,message:"Undecodable message received from OpenAI"});return s}catch(e){throw t("Error while making API call to OpenAI"),t(e),new n({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:c(e)??void 0})}}streamText(e,t,s){throw new n({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const p=async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t};class y extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}fetchText(e){throw new n({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,s,a){const r=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(a.conversationHistory){const e=h(a.conversationHistory).map((e=>({content:"string"==typeof e.content?e.content:JSON.stringify(e.content),role:e.role})));r.push(...e)}r.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:r}).then((async e=>{const a=e[Symbol.asyncIterator]();let r=await a.next();for(;!r.done;){const e=r.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const o=await p(e);void 0!==o?s.next(o):(t("Undecodable message"),t(e)),r=await a.next()}s.complete()})).catch((e=>{t(e),s.error(new n({source:this.constructor.name,message:e.message,exceptionId:c(e)??void 0}))}))}}class g{constructor(e){this.apiKey=null,this.dataTransferMode=i,this.model=null,this.systemMessage=null,this.withApiKeyCalled=!1,this.withDataTransferModeCalled=!1,this.withModelCalled=!1,this.withSystemMessageCalled=!1,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.withApiKeyCalled=e.withApiKeyCalled,this.withSystemMessageCalled=e.withSystemMessageCalled,this.withModelCalled=e.withModelCalled,this.withDataTransferModeCalled=e.withDataTransferModeCalled)}create(){if(!this.apiKey)throw new n({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new y(e):new u(e)}withApiKey(e){if(this.withApiKeyCalled)throw new n({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.withApiKeyCalled=!0,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new n({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.withDataTransferModeCalled=!0,this}withModel(e){if(this.withModelCalled)throw new n({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=e,this.withModelCalled=!0,this}withSystemMessage(e){if(this.withSystemMessageCalled)throw new n({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.withSystemMessageCalled=!0,this}}exports.createUnsafeChatAdapter=()=>{var e;return e="You just have created an OpenAI adapter that connects to the API directly from the browser. This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters",s.includes(e)||(s.push(e),t(e)),new g};
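
Both sides of this cjs/openai.js diff are single minified lines, so the change is easy to miss: the visible differences appear limited to a few let declarations becoming const where the binding is never reassigned (in the instance-ID generator and in the streaming loop), plus the identifier reshuffling the minifier produces as a result. One piece of logic embedded in both builds is the conversation-history conversion, which maps nlux roles to OpenAI roles and drops entries it cannot serialize; note that the minified code compares e.message against the literal strings "number" and "object" rather than against typeof e.message. The sketch below shows the apparent intent; the names are illustrative and not part of the package's public API.

// Hedged sketch of the role-mapping and history-conversion logic visible in
// the minified builds above. Names are illustrative, not package exports.
type NluxRole = 'system' | 'user' | 'ai';
type OpenAiRole = 'system' | 'user' | 'assistant';

interface HistoryItem {
    role: NluxRole;
    message: unknown;
}

const toOpenAiRole = (role: NluxRole): OpenAiRole => {
    switch (role) {
        case 'system':
            return 'system';
        case 'ai':
            return 'assistant';
        case 'user':
        default:
            return 'user';
    }
};

// Convert nlux conversation history into OpenAI chat messages, skipping
// entries whose content cannot be turned into a string.
const historyToMessages = (history: HistoryItem[]): {role: OpenAiRole; content: string}[] =>
    history
        .map((item) => {
            let content: string | undefined;
            if (typeof item.message === 'string' || typeof item.message === 'number') {
                content = `${item.message}`;
            } else if (typeof item.message === 'object' && item.message !== null) {
                content = JSON.stringify(item.message);
            }
            if (content === undefined) {
                console.warn('[nlux] Unsupported message format in conversation history; entry skipped.');
                return undefined;
            }
            return {role: toOpenAiRole(item.role), content};
        })
        .filter((m): m is {role: OpenAiRole; content: string} => m !== undefined);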

@@ -1,1 +0,1 @@

import e from"openai";const t=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},s=[];var a=Object.defineProperty,o=(e,t,s)=>((e,t,s)=>t in e?a(e,t,{enumerable:!0,configurable:!0,writable:!0,value:s}):e[t]=s)(e,"symbol"!=typeof t?t+"":t,s);class r extends Error{constructor(e={}){super(e.message),o(this,"exceptionId"),o(this,"message"),o(this,"source"),o(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class n extends r{}const i="stream",l=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},c=e=>{switch(e){case"system":return"system";case"user":default:return"user";case"ai":return"assistant"}},h=e=>e.map((e=>{let s;if("string"==typeof e.message||"number"===e.message?s=`${e.message}`:"object"===e.message&&(s=JSON.stringify(e.message)),void 0!==s)return{role:c(e.role),content:s};t("Empty message or unsupported message format found in conversation history and will not be included in the conversation history sent to OpenAI.")})).filter((e=>void 0!==e)),d=Object.freeze({id:"nlux-openai-adapter",capabilities:{chat:!0,fileUpload:!1,speechToText:!1,textToSpeech:!1}});class m{constructor({systemMessage:s,apiKey:a,dataTransferMode:o,model:r}){this.systemMessage="Act as a helpful assistant to the user",this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{let t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.theDataTransferMode=o??i,this.model=r??"gpt-3.5-turbo",this.openai=new e({apiKey:a,dangerouslyAllowBrowser:!0}),s&&(this.systemMessage=s),t('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". To learn more about OpenAI\' recommendation for handling API keys, please visit:\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\nThe useUnsafeChatAdapter/createUnsafeChatAdapter are only intended for development and testing purposes.\n\nFor production use, we recommend that you implement a server-side proxy and configure a customized adapter for it. 
To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.__instanceId}get info(){return d}}class u extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}async fetchText(e,s){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(s.conversationHistory){const e=h(s.conversationHistory);a.push(...e)}a.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:a}),s=await(async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message?.content;if(null!==t)return t})(e);if(void 0===s)throw t("Undecodable message received from OpenAI"),new n({source:this.constructor.name,message:"Undecodable message received from OpenAI"});return s}catch(e){throw t("Error while making API call to OpenAI"),t(e),new n({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:l(e)??void 0})}}streamText(e,t,s){throw new n({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const p=async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t};class y extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}fetchText(e){throw new n({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,s,a){const o=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(a.conversationHistory){const e=h(a.conversationHistory).map((e=>({content:"string"==typeof e.content?e.content:JSON.stringify(e.content),role:e.role})));o.push(...e)}o.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:o}).then((async e=>{let a=e[Symbol.asyncIterator](),o=await a.next();for(;!o.done;){const e=o.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const r=await p(e);void 0!==r?s.next(r):(t("Undecodable message"),t(e)),o=await a.next()}s.complete()})).catch((e=>{t(e),s.error(new n({source:this.constructor.name,message:e.message,exceptionId:l(e)??void 0}))}))}}class g{constructor(e){this.apiKey=null,this.dataTransferMode=i,this.model=null,this.systemMessage=null,this.withApiKeyCalled=!1,this.withDataTransferModeCalled=!1,this.withModelCalled=!1,this.withSystemMessageCalled=!1,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.withApiKeyCalled=e.withApiKeyCalled,this.withSystemMessageCalled=e.withSystemMessageCalled,this.withModelCalled=e.withModelCalled,this.withDataTransferModeCalled=e.withDataTransferModeCalled)}create(){if(!this.apiKey)throw new n({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new y(e):new u(e)}withApiKey(e){if(this.withApiKeyCalled)throw new n({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.withApiKeyCalled=!0,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new n({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.withDataTransferModeCalled=!0,this}withModel(e){if(this.withModelCalled)throw new n({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=e,this.withModelCalled=!0,this}withSystemMessage(e){if(this.withSystemMessageCalled)throw new n({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.withSystemMessageCalled=!0,this}}const f=()=>{var e;return e="You just have created an OpenAI adapter that connects to the API directly from the browser. This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters",s.includes(e)||(s.push(e),t(e)),new g};export{f as createUnsafeChatAdapter};
import e from"openai";const t=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},s=[];var a=Object.defineProperty,o=(e,t,s)=>((e,t,s)=>t in e?a(e,t,{enumerable:!0,configurable:!0,writable:!0,value:s}):e[t]=s)(e,"symbol"!=typeof t?t+"":t,s);class r extends Error{constructor(e={}){super(e.message),o(this,"exceptionId"),o(this,"message"),o(this,"source"),o(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class n extends r{}const i="stream",c=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},l=e=>{switch(e){case"system":return"system";case"user":default:return"user";case"ai":return"assistant"}},h=e=>e.map((e=>{let s;if("string"==typeof e.message||"number"===e.message?s=`${e.message}`:"object"===e.message&&(s=JSON.stringify(e.message)),void 0!==s)return{role:l(e.role),content:s};t("Empty message or unsupported message format found in conversation history and will not be included in the conversation history sent to OpenAI.")})).filter((e=>void 0!==e)),d=Object.freeze({id:"nlux-openai-adapter",capabilities:{chat:!0,fileUpload:!1,speechToText:!1,textToSpeech:!1}});class m{constructor({systemMessage:s,apiKey:a,dataTransferMode:o,model:r}){this.systemMessage="Act as a helpful assistant to the user",this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.theDataTransferMode=o??i,this.model=r??"gpt-3.5-turbo",this.openai=new e({apiKey:a,dangerouslyAllowBrowser:!0}),s&&(this.systemMessage=s),t('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". To learn more about OpenAI\' recommendation for handling API keys, please visit:\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\nThe useUnsafeChatAdapter/createUnsafeChatAdapter are only intended for development and testing purposes.\n\nFor production use, we recommend that you implement a server-side proxy and configure a customized adapter for it. 
To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.__instanceId}get info(){return d}}class u extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}async fetchText(e,s){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(s.conversationHistory){const e=h(s.conversationHistory);a.push(...e)}a.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:a}),s=await(async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message?.content;if(null!==t)return t})(e);if(void 0===s)throw t("Undecodable message received from OpenAI"),new n({source:this.constructor.name,message:"Undecodable message received from OpenAI"});return s}catch(e){throw t("Error while making API call to OpenAI"),t(e),new n({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:c(e)??void 0})}}streamText(e,t,s){throw new n({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const p=async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t};class y extends m{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}fetchText(e){throw new n({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,s,a){const o=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(a.conversationHistory){const e=h(a.conversationHistory).map((e=>({content:"string"==typeof e.content?e.content:JSON.stringify(e.content),role:e.role})));o.push(...e)}o.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:o}).then((async e=>{const a=e[Symbol.asyncIterator]();let o=await a.next();for(;!o.done;){const e=o.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const r=await p(e);void 0!==r?s.next(r):(t("Undecodable message"),t(e)),o=await a.next()}s.complete()})).catch((e=>{t(e),s.error(new n({source:this.constructor.name,message:e.message,exceptionId:c(e)??void 0}))}))}}class g{constructor(e){this.apiKey=null,this.dataTransferMode=i,this.model=null,this.systemMessage=null,this.withApiKeyCalled=!1,this.withDataTransferModeCalled=!1,this.withModelCalled=!1,this.withSystemMessageCalled=!1,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.withApiKeyCalled=e.withApiKeyCalled,this.withSystemMessageCalled=e.withSystemMessageCalled,this.withModelCalled=e.withModelCalled,this.withDataTransferModeCalled=e.withDataTransferModeCalled)}create(){if(!this.apiKey)throw new n({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new y(e):new u(e)}withApiKey(e){if(this.withApiKeyCalled)throw new n({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.withApiKeyCalled=!0,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new n({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.withDataTransferModeCalled=!0,this}withModel(e){if(this.withModelCalled)throw new n({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=e,this.withModelCalled=!0,this}withSystemMessage(e){if(this.withSystemMessageCalled)throw new n({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.withSystemMessageCalled=!0,this}}const f=()=>{var e;return e="You just have created an OpenAI adapter that connects to the API directly from the browser. This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters",s.includes(e)||(s.push(e),t(e)),new g};export{f as createUnsafeChatAdapter};
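
The ESM build diff mirrors the CommonJS one: let becomes const for the UUID helper and for the stream's async-iterator binding, and two minified identifiers trade places. The streaming path itself is unchanged between the two versions: it iterates the async-iterable stream returned by the openai SDK, stops when a choice reports finish_reason of "stop", and forwards each decoded delta to an observer. The following is a minimal sketch of that loop, assuming the openai v4 SDK and an nlux-style observer with next/complete/error; the names StreamObserver and streamCompletion are assumptions, not the package's internals.

import OpenAI from 'openai';

// Observer shape used by nlux streaming adapters (assumed here).
interface StreamObserver {
    next(chunk: string): void;
    complete(): void;
    error(err: Error): void;
}

// Hedged sketch of the streaming loop visible in the builds above.
async function streamCompletion(
    openai: OpenAI,
    model: string,
    messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
    observer: StreamObserver,
): Promise<void> {
    try {
        const stream = await openai.chat.completions.create({stream: true, model, messages});
        for await (const chunk of stream) {
            // Stop once the model signals completion.
            if (chunk.choices?.[0]?.finish_reason === 'stop') {
                break;
            }
            const delta = chunk.choices?.[0]?.delta?.content;
            if (typeof delta === 'string') {
                observer.next(delta);
            } else {
                console.warn('[nlux] Undecodable message');
            }
        }
        observer.complete();
    } catch (err) {
        observer.error(err instanceof Error ? err : new Error(String(err)));
    }
}
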
import { DataTransferMode, ChatAdapterBuilder as ChatAdapterBuilder$1, StandardChatAdapter } from '@nlux/core';
export { ChatAdapter, DataTransferMode, StandardChatAdapter, StreamingAdapterObserver } from '@nlux/core';
type OpenAiModel = (string & {}) | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0301' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613';
type OpenAiModel = (string & NonNullable<unknown>) | 'gpt-4-0125-preview' | 'gpt-4-turbo-preview' | 'gpt-4-1106-preview' | 'gpt-4-vision-preview' | 'gpt-4' | 'gpt-4-0314' | 'gpt-4-0613' | 'gpt-4-32k' | 'gpt-4-32k-0314' | 'gpt-4-32k-0613' | 'gpt-3.5-turbo' | 'gpt-3.5-turbo-16k' | 'gpt-3.5-turbo-0301' | 'gpt-3.5-turbo-0613' | 'gpt-3.5-turbo-1106' | 'gpt-3.5-turbo-0125' | 'gpt-3.5-turbo-16k-0613';

@@ -6,0 +6,0 @@ type ChatAdapterOptions = {
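
The only type-level change in the bundled declarations is in OpenAiModel: the catch-all branch moves from (string & {}) to (string & NonNullable<unknown>). Both forms accept any string while keeping the union from collapsing to plain string, so the listed model literals still show up in editor autocompletion; NonNullable<unknown> is equivalent to {} but avoids the warnings many lint setups raise for the bare {} type (for example the @typescript-eslint/ban-types rule). A short illustration of the pattern, with a deliberately shortened literal list:

// Both forms keep 'gpt-4' and 'gpt-3.5-turbo' in autocompletion while still
// accepting arbitrary model identifiers.
type OpenAiModelOld = (string & {}) | 'gpt-4' | 'gpt-3.5-turbo';
type OpenAiModelNew = (string & NonNullable<unknown>) | 'gpt-4' | 'gpt-3.5-turbo';

const known: OpenAiModelNew = 'gpt-4';             // suggested literal
const custom: OpenAiModelNew = 'my-fine-tune-123'; // any other string is still allowed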

{
"name": "@nlux/openai",
"version": "2.0.7-alpha",
"version": "2.0.8-alpha",
"description": "The OpenAI adapters for nlux, the javascript library for building conversational AI interfaces.",

@@ -63,3 +63,3 @@ "keywords": [

"openai": "^4.44.0",
"@nlux/core": "2.0.7-alpha"
"@nlux/core": "2.0.8-alpha"
},

@@ -66,0 +66,0 @@ "peerDependencies": {},

@@ -1,1 +0,1 @@

!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("openai")):"function"==typeof define&&define.amd?define(["exports","openai"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/openai"]={},e.OpenAI)}(this,(function(e,t){"use strict";const s=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},a=[];var o=Object.defineProperty,r=(e,t,s)=>((e,t,s)=>t in e?o(e,t,{enumerable:!0,configurable:!0,writable:!0,value:s}):e[t]=s)(e,"symbol"!=typeof t?t+"":t,s);class n extends Error{constructor(e={}){super(e.message),r(this,"exceptionId"),r(this,"message"),r(this,"source"),r(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class i extends n{}const l="stream",c=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},d=e=>{switch(e){case"system":return"system";case"user":default:return"user";case"ai":return"assistant"}},h=e=>e.map((e=>{let t;if("string"==typeof e.message||"number"===e.message?t=`${e.message}`:"object"===e.message&&(t=JSON.stringify(e.message)),void 0!==t)return{role:d(e.role),content:t};s("Empty message or unsupported message format found in conversation history and will not be included in the conversation history sent to OpenAI.")})).filter((e=>void 0!==e)),m=Object.freeze({id:"nlux-openai-adapter",capabilities:{chat:!0,fileUpload:!1,speechToText:!1,textToSpeech:!1}});class u{constructor({systemMessage:e,apiKey:a,dataTransferMode:o,model:r}){this.systemMessage="Act as a helpful assistant to the user",this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{let t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.theDataTransferMode=o??l,this.model=r??"gpt-3.5-turbo",this.openai=new t({apiKey:a,dangerouslyAllowBrowser:!0}),e&&(this.systemMessage=e),s('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". To learn more about OpenAI\' recommendation for handling API keys, please visit:\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\nThe useUnsafeChatAdapter/createUnsafeChatAdapter are only intended for development and testing purposes.\n\nFor production use, we recommend that you implement a server-side proxy and configure a customized adapter for it. 
To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.__instanceId}get info(){return m}}class p extends u{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}async fetchText(e,t){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(t.conversationHistory){const e=h(t.conversationHistory);a.push(...e)}a.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:a}),t=await(async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message?.content;if(null!==t)return t})(e);if(void 0===t)throw s("Undecodable message received from OpenAI"),new i({source:this.constructor.name,message:"Undecodable message received from OpenAI"});return t}catch(e){throw s("Error while making API call to OpenAI"),s(e),new i({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:c(e)??void 0})}}streamText(e,t,s){throw new i({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const y=async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t};class f extends u{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}fetchText(e){throw new i({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,t,a){const o=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(a.conversationHistory){const e=h(a.conversationHistory).map((e=>({content:"string"==typeof e.content?e.content:JSON.stringify(e.content),role:e.role})));o.push(...e)}o.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:o}).then((async e=>{let a=e[Symbol.asyncIterator](),o=await a.next();for(;!o.done;){const e=o.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const r=await y(e);void 0!==r?t.next(r):(s("Undecodable message"),s(e)),o=await a.next()}t.complete()})).catch((e=>{s(e),t.error(new i({source:this.constructor.name,message:e.message,exceptionId:c(e)??void 0}))}))}}class g{constructor(e){this.apiKey=null,this.dataTransferMode=l,this.model=null,this.systemMessage=null,this.withApiKeyCalled=!1,this.withDataTransferModeCalled=!1,this.withModelCalled=!1,this.withSystemMessageCalled=!1,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.withApiKeyCalled=e.withApiKeyCalled,this.withSystemMessageCalled=e.withSystemMessageCalled,this.withModelCalled=e.withModelCalled,this.withDataTransferModeCalled=e.withDataTransferModeCalled)}create(){if(!this.apiKey)throw new i({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new f(e):new p(e)}withApiKey(e){if(this.withApiKeyCalled)throw new i({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.withApiKeyCalled=!0,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new i({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.withDataTransferModeCalled=!0,this}withModel(e){if(this.withModelCalled)throw new i({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=e,this.withModelCalled=!0,this}withSystemMessage(e){if(this.withSystemMessageCalled)throw new i({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.withSystemMessageCalled=!0,this}}e.createUnsafeChatAdapter=()=>{var e;return e="You just have created an OpenAI adapter that connects to the API directly from the browser. This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters",a.includes(e)||(a.push(e),s(e)),new g}}));
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("openai")):"function"==typeof define&&define.amd?define(["exports","openai"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/openai"]={},e.OpenAI)}(this,(function(e,t){"use strict";const s=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},a=[];var o=Object.defineProperty,r=(e,t,s)=>((e,t,s)=>t in e?o(e,t,{enumerable:!0,configurable:!0,writable:!0,value:s}):e[t]=s)(e,"symbol"!=typeof t?t+"":t,s);class n extends Error{constructor(e={}){super(e.message),r(this,"exceptionId"),r(this,"message"),r(this,"source"),r(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class i extends n{}const l="stream",c=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},d=e=>{switch(e){case"system":return"system";case"user":default:return"user";case"ai":return"assistant"}},h=e=>e.map((e=>{let t;if("string"==typeof e.message||"number"===e.message?t=`${e.message}`:"object"===e.message&&(t=JSON.stringify(e.message)),void 0!==t)return{role:d(e.role),content:t};s("Empty message or unsupported message format found in conversation history and will not be included in the conversation history sent to OpenAI.")})).filter((e=>void 0!==e)),m=Object.freeze({id:"nlux-openai-adapter",capabilities:{chat:!0,fileUpload:!1,speechToText:!1,textToSpeech:!1}});class u{constructor({systemMessage:e,apiKey:a,dataTransferMode:o,model:r}){this.systemMessage="Act as a helpful assistant to the user",this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.theDataTransferMode=o??l,this.model=r??"gpt-3.5-turbo",this.openai=new t({apiKey:a,dangerouslyAllowBrowser:!0}),e&&(this.systemMessage=e),s('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". To learn more about OpenAI\' recommendation for handling API keys, please visit:\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\nThe useUnsafeChatAdapter/createUnsafeChatAdapter are only intended for development and testing purposes.\n\nFor production use, we recommend that you implement a server-side proxy and configure a customized adapter for it. 
To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters')}get dataTransferMode(){return this.theDataTransferMode}get id(){return this.__instanceId}get info(){return m}}class p extends u{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"fetch"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}async fetchText(e,t){const a=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(t.conversationHistory){const e=h(t.conversationHistory);a.push(...e)}a.push({role:"user",content:e});try{const e=await this.openai.chat.completions.create({stream:!1,model:this.model,messages:a}),t=await(async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message?.content;if(null!==t)return t})(e);if(void 0===t)throw s("Undecodable message received from OpenAI"),new i({source:this.constructor.name,message:"Undecodable message received from OpenAI"});return t}catch(e){throw s("Error while making API call to OpenAI"),s(e),new i({source:this.constructor.name,message:e?.message||"Error while making API call to OpenAI",exceptionId:c(e)??void 0})}}streamText(e,t,s){throw new i({source:this.constructor.name,message:"Cannot stream text from the fetch adapter!"})}}const y=async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t};class f extends u{constructor({apiKey:e,model:t,systemMessage:s}){super({apiKey:e,model:t,systemMessage:s,dataTransferMode:"stream"}),void 0!==s&&s.length>0&&(this.systemMessage=s)}fetchText(e){throw new i({source:this.constructor.name,message:"Cannot fetch text from the streaming adapter!"})}streamText(e,t,a){const o=this.systemMessage?[{role:"system",content:this.systemMessage}]:[];if(a.conversationHistory){const e=h(a.conversationHistory).map((e=>({content:"string"==typeof e.content?e.content:JSON.stringify(e.content),role:e.role})));o.push(...e)}o.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:o}).then((async e=>{const a=e[Symbol.asyncIterator]();let o=await a.next();for(;!o.done;){const e=o.value;if("stop"===(e.choices?.length>0?e.choices[0].finish_reason:void 0))break;const r=await y(e);void 0!==r?t.next(r):(s("Undecodable message"),s(e)),o=await a.next()}t.complete()})).catch((e=>{s(e),t.error(new i({source:this.constructor.name,message:e.message,exceptionId:c(e)??void 0}))}))}}class g{constructor(e){this.apiKey=null,this.dataTransferMode=l,this.model=null,this.systemMessage=null,this.withApiKeyCalled=!1,this.withDataTransferModeCalled=!1,this.withModelCalled=!1,this.withSystemMessageCalled=!1,e&&(this.apiKey=e.apiKey,this.dataTransferMode=e.dataTransferMode,this.model=e.model,this.systemMessage=e.systemMessage,this.withApiKeyCalled=e.withApiKeyCalled,this.withSystemMessageCalled=e.withSystemMessageCalled,this.withModelCalled=e.withModelCalled,this.withDataTransferModeCalled=e.withDataTransferModeCalled)}create(){if(!this.apiKey)throw new i({source:this.constructor.name,message:"Unable to create OpenAI adapter. API key is missing. Make sure you are calling withApiKey() before calling create()."});const e={apiKey:this.apiKey,dataTransferMode:this.dataTransferMode,model:this.model??void 0,systemMessage:this.systemMessage??void 0};return"stream"===this.dataTransferMode?new f(e):new p(e)}withApiKey(e){if(this.withApiKeyCalled)throw new i({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.withApiKeyCalled=!0,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new i({source:this.constructor.name,message:"Unable to set data loading mode. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataTransferMode=e,this.withDataTransferModeCalled=!0,this}withModel(e){if(this.withModelCalled)throw new i({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=e,this.withModelCalled=!0,this}withSystemMessage(e){if(this.withSystemMessageCalled)throw new i({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withSystemMessage() twice."});return this.systemMessage=e??null,this.withSystemMessageCalled=!0,this}}e.createUnsafeChatAdapter=()=>{var e;return e="You just have created an OpenAI adapter that connects to the API directly from the browser. This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. To learn more about how to create custom adapters for nlux, visit:\nhttps://nlux.dev/learn/adapters/custom-adapters",a.includes(e)||(a.push(e),s(e)),new g}}));
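
Across all three bundles (CommonJS, ESM, and UMD) the public surface is the same: a createUnsafeChatAdapter() factory returning a builder whose withApiKey(), withDataTransferMode(), withModel(), and withSystemMessage() setters each throw if called twice, and whose create() throws if no API key was supplied. A hedged usage sketch based on the builder methods visible above (the API key placeholder is illustrative; as the package's own runtime warning notes, this browser-only adapter is intended for development and testing, with a server-side proxy and custom adapter recommended for production):

import {createUnsafeChatAdapter} from '@nlux/openai';

// Builds a streaming adapter that talks to the OpenAI API directly from the
// browser. Intended for development and testing only.
const adapter = createUnsafeChatAdapter()
    .withApiKey('<your-openai-api-key>')          // required; create() throws without it
    .withDataTransferMode('stream')               // 'stream' (the default) or 'fetch'
    .withModel('gpt-3.5-turbo')                   // defaults to 'gpt-3.5-turbo'
    .withSystemMessage('Act as a helpful assistant to the user')
    .create();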