@nlux/openai
Comparing version 0.4.0 to 0.4.1
@@ -1,1 +0,1 @@
"use strict";var e=require("@nlux/nlux"),t=require("openai");const s=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},a=e=>{"string"==typeof e?console.warn(`${e}`):console.warn(JSON.stringify(e,null,2))},i=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),r=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),o=Object.freeze({id:"nlux-gpt-adapter",connectionType:"http",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class n{constructor({initialSystemMessage:e,apiKey:s,dataExchangeMode:i,model:r}){this.dataExchangeMode="fetch",this.currentStatus="disconnected",this.initialSystemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.dataExchangeMode=i,this.openai=new t({apiKey:s,dangerouslyAllowBrowser:!0}),this.model=r,e&&(this.initialSystemMessage=e),a('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get id(){return this.info.id}get info(){return o}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class l extends n{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"stream"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return i}send(t){if("string"!=typeof t||0===t.length)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const i=new e.Observable,r=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];return r.push({role:"user",content:t}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:r}).then((async e=>{let t=e[Symbol.asyncIterator](),s=await t.next();for(;!s.done;){const e=s.value,r=await this.decode(e);void 0!==r?i.next(r):(a("Undecodable message"),a(e)),s=await t.next()}i.complete()})).catch((t=>{a(t),i.error(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:s(t)??void 0}))})),i}}class c{constructor(e){this.apiKey=null,this.dataExchangeMode="stream",this.initialSystemMessage=null,this.model="gpt-4",this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,e&&(this.apiKey=e.apiKey,this.dataExchangeMode=e.dataExchangeMode,this.initialSystemMessage=e.initialSystemMessage,this.model=e.model,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}withApiKey(t){if(this.setApiKeyCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=t,this.setApiKeyCalled=!0,this}withInitialSystemMessage(t){if(this.setInitialSystemMessageCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withInitialSystemMessage() twice."});return this.initialSystemMessage=t??null,this.setInitialSystemMessageCalled=!0,this}withModel(t){if(this.setModelCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=t,this.setModelCalled=!0,this}}class h extends n{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"fetch"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return r}send(t){return new Promise(((i,r)=>{if("string"!=typeof t||0===t.length)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const o=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];o.push({role:"user",content:t}),this.openai.chat.completions.create({stream:!1,model:this.model,messages:o}).then((async t=>{const s=await this.decode(t);"string"!=typeof s?r(new e.NluxUsageError({source:this.constructor.name,message:"Unable to decode response from OpenAI"})):i(s)})).catch((t=>{a("Error while making API call to OpenAI"),a(t),r(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:s(t)??void 0}))}))}))}}class d extends c{constructor(e){super(e)}create(){if(!this.apiKey)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new h({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){return this}useStreamingMode(){if(this.setStreamOrFetchCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to stream. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="stream",this.setStreamOrFetchCalled=!0,new u(this)}}class u extends c{constructor(e){super(e)}create(){if(!this.apiKey)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new l({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){if(this.setStreamOrFetchCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to fetch. Stream or fetch setter has already been called by this builder. 
Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="fetch",this.setStreamOrFetchCalled=!0,new d(this)}useStreamingMode(){return this}}exports.GptFetchAdapter=h,exports.GptStreamingAdapter=l,exports.createAdapter=t=>{if("openai/gpt"!==t)throw new e.NluxUsageError({source:"createAdapter",message:"Adapter type not supported"});return new u}; | ||
"use strict";var e=require("@nlux/nlux"),t=require("openai");const s=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},a=e=>{"string"==typeof e?console.warn(`${e}`):console.warn(JSON.stringify(e,null,2))},i=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),r=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),o=Object.freeze({id:"nlux-gpt-adapter",connectionType:"http",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class n{constructor({initialSystemMessage:e,apiKey:s,dataExchangeMode:i,model:r}){this.dataExchangeMode="fetch",this.currentStatus="disconnected",this.initialSystemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.dataExchangeMode=i,this.openai=new t({apiKey:s,dangerouslyAllowBrowser:!0}),this.model=r,e&&(this.initialSystemMessage=e),a('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get id(){return this.info.id}get info(){return o}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class l extends n{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"stream"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return i}send(t,i){const r=t;if("string"!=typeof r||0===r.length)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const o=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];o.push({role:"user",content:r}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:o}).then((async e=>{let t=e[Symbol.asyncIterator](),s=await t.next();for(;!s.done;){const e=s.value,r=await this.decode(e);void 0!==r?i.next(r):(a("Undecodable message"),a(e)),s=await t.next()}i.complete()})).catch((t=>{a(t),i.error(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:s(t)??void 0}))}))}}class c{constructor(e){this.apiKey=null,this.dataExchangeMode="stream",this.initialSystemMessage=null,this.model="gpt-4",this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,e&&(this.apiKey=e.apiKey,this.dataExchangeMode=e.dataExchangeMode,this.initialSystemMessage=e.initialSystemMessage,this.model=e.model,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}withApiKey(t){if(this.setApiKeyCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=t,this.setApiKeyCalled=!0,this}withInitialSystemMessage(t){if(this.setInitialSystemMessageCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withInitialSystemMessage() twice."});return this.initialSystemMessage=t??null,this.setInitialSystemMessageCalled=!0,this}withModel(t){if(this.setModelCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=t,this.setModelCalled=!0,this}}class h extends n{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"fetch"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return r}send(t,i){const r=t;if("string"!=typeof r||0===r.length)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const o=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];o.push({role:"user",content:r}),this.openai.chat.completions.create({stream:!1,model:this.model,messages:o}).then((async t=>{const s=await this.decode(t);"string"!=typeof s?i.error(new e.NluxUsageError({source:this.constructor.name,message:"Unable to decode response from OpenAI"})):i.next(s)})).catch((t=>{a("Error while making API call to OpenAI"),a(t),i.error(new e.NluxUsageError({source:this.constructor.name,message:t.message,exceptionId:s(t)??void 0}))}))}}class d extends c{constructor(e){super(e)}create(){if(!this.apiKey)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new h({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){return this}useStreamingMode(){if(this.setStreamOrFetchCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to stream. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="stream",this.setStreamOrFetchCalled=!0,new u(this)}}class u extends c{constructor(e){super(e)}create(){if(!this.apiKey)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new l({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){if(this.setStreamOrFetchCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to fetch. Stream or fetch setter has already been called by this builder. 
Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="fetch",this.setStreamOrFetchCalled=!0,new d(this)}useStreamingMode(){return this}}exports.GptFetchAdapter=h,exports.GptStreamingAdapter=l,exports.createAdapter=t=>{if("openai/gpt"!==t)throw new e.NluxUsageError({source:"createAdapter",message:"Adapter type not supported"});return new u}; |
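For orientation, the CommonJS bundle above exposes a builder-style API. A minimal usage sketch, assuming only the identifiers visible in the minified source (createAdapter, withApiKey, withModel, withInitialSystemMessage, useStreamingMode, create); the API key value is a placeholder:

// Illustrative sketch only, based on the exports visible in the bundle above.
import {createAdapter} from '@nlux/openai';

const adapter = createAdapter('openai/gpt')   // any other adapter type throws NluxUsageError
    .withApiKey('sk-placeholder')             // each with*() setter may only be called once
    .withModel('gpt-4')                       // 'gpt-4' is also the default in this version
    .withInitialSystemMessage('Act as a helpful assistant to the user')
    .useStreamingMode()                       // or .useFetchingMode() for a single response
    .create();                                // throws if no API key was set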
@@ -1,1 +0,1 @@
import{NluxUsageError as e,Observable as t}from"@nlux/nlux";import s from"openai";const a=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},i=e=>{"string"==typeof e?console.warn(`${e}`):console.warn(JSON.stringify(e,null,2))},o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),n=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),r=Object.freeze({id:"nlux-gpt-adapter",connectionType:"http",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class l{constructor({initialSystemMessage:e,apiKey:t,dataExchangeMode:a,model:o}){this.dataExchangeMode="fetch",this.currentStatus="disconnected",this.initialSystemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.dataExchangeMode=a,this.openai=new s({apiKey:t,dangerouslyAllowBrowser:!0}),this.model=o,e&&(this.initialSystemMessage=e),i('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get id(){return this.info.id}get info(){return r}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class c extends l{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"stream"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return o}send(s){if("string"!=typeof s||0===s.length)throw new e({source:this.constructor.name,message:"Cannot send empty messages"});const o=new t,n=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];return n.push({role:"user",content:s}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:n}).then((async e=>{let t=e[Symbol.asyncIterator](),s=await t.next();for(;!s.done;){const e=s.value,a=await this.decode(e);void 0!==a?o.next(a):(i("Undecodable message"),i(e)),s=await t.next()}o.complete()})).catch((t=>{i(t),o.error(new e({source:this.constructor.name,message:t.message,exceptionId:a(t)??void 0}))})),o}}class h{constructor(e){this.apiKey=null,this.dataExchangeMode="stream",this.initialSystemMessage=null,this.model="gpt-4",this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,e&&(this.apiKey=e.apiKey,this.dataExchangeMode=e.dataExchangeMode,this.initialSystemMessage=e.initialSystemMessage,this.model=e.model,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}withApiKey(t){if(this.setApiKeyCalled)throw new e({source:this.constructor.name,message:"Unable to set API key. 
API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=t,this.setApiKeyCalled=!0,this}withInitialSystemMessage(t){if(this.setInitialSystemMessageCalled)throw new e({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withInitialSystemMessage() twice."});return this.initialSystemMessage=t??null,this.setInitialSystemMessageCalled=!0,this}withModel(t){if(this.setModelCalled)throw new e({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=t,this.setModelCalled=!0,this}}class d extends l{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"fetch"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return n}send(t){return new Promise(((s,o)=>{if("string"!=typeof t||0===t.length)throw new e({source:this.constructor.name,message:"Cannot send empty messages"});const n=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];n.push({role:"user",content:t}),this.openai.chat.completions.create({stream:!1,model:this.model,messages:n}).then((async t=>{const a=await this.decode(t);"string"!=typeof a?o(new e({source:this.constructor.name,message:"Unable to decode response from OpenAI"})):s(a)})).catch((t=>{i("Error while making API call to OpenAI"),i(t),o(new e({source:this.constructor.name,message:t.message,exceptionId:a(t)??void 0}))}))}))}}class m extends h{constructor(e){super(e)}create(){if(!this.apiKey)throw new e({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new d({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){return this}useStreamingMode(){if(this.setStreamOrFetchCalled)throw new e({source:this.constructor.name,message:"Unable to set data loading mode to stream. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="stream",this.setStreamOrFetchCalled=!0,new u(this)}}class u extends h{constructor(e){super(e)}create(){if(!this.apiKey)throw new e({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new c({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){if(this.setStreamOrFetchCalled)throw new e({source:this.constructor.name,message:"Unable to set data loading mode to fetch. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="fetch",this.setStreamOrFetchCalled=!0,new m(this)}useStreamingMode(){return this}}const y=t=>{if("openai/gpt"!==t)throw new e({source:"createAdapter",message:"Adapter type not supported"});return new u};export{d as GptFetchAdapter,c as GptStreamingAdapter,y as createAdapter}; | ||
import{NluxUsageError as e}from"@nlux/nlux";import t from"openai";const s=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},a=e=>{"string"==typeof e?console.warn(`${e}`):console.warn(JSON.stringify(e,null,2))},i=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),n=Object.freeze({id:"nlux-gpt-adapter",connectionType:"http",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class r{constructor({initialSystemMessage:e,apiKey:s,dataExchangeMode:i,model:o}){this.dataExchangeMode="fetch",this.currentStatus="disconnected",this.initialSystemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.dataExchangeMode=i,this.openai=new t({apiKey:s,dangerouslyAllowBrowser:!0}),this.model=o,e&&(this.initialSystemMessage=e),a('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get id(){return this.info.id}get info(){return n}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class l extends r{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"stream"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return i}send(t,i){const o=t;if("string"!=typeof o||0===o.length)throw new e({source:this.constructor.name,message:"Cannot send empty messages"});const n=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];n.push({role:"user",content:o}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:n}).then((async e=>{let t=e[Symbol.asyncIterator](),s=await t.next();for(;!s.done;){const e=s.value,o=await this.decode(e);void 0!==o?i.next(o):(a("Undecodable message"),a(e)),s=await t.next()}i.complete()})).catch((t=>{a(t),i.error(new e({source:this.constructor.name,message:t.message,exceptionId:s(t)??void 0}))}))}}class c{constructor(e){this.apiKey=null,this.dataExchangeMode="stream",this.initialSystemMessage=null,this.model="gpt-4",this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,e&&(this.apiKey=e.apiKey,this.dataExchangeMode=e.dataExchangeMode,this.initialSystemMessage=e.initialSystemMessage,this.model=e.model,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}withApiKey(t){if(this.setApiKeyCalled)throw new e({source:this.constructor.name,message:"Unable to set API key. API key setter has already been called by this builder. 
Make sure you are not calling withApiKey() twice."});return this.apiKey=t,this.setApiKeyCalled=!0,this}withInitialSystemMessage(t){if(this.setInitialSystemMessageCalled)throw new e({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withInitialSystemMessage() twice."});return this.initialSystemMessage=t??null,this.setInitialSystemMessageCalled=!0,this}withModel(t){if(this.setModelCalled)throw new e({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. Make sure you are not calling withModel() twice."});return this.model=t,this.setModelCalled=!0,this}}class h extends r{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"fetch"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return o}send(t,i){const o=t;if("string"!=typeof o||0===o.length)throw new e({source:this.constructor.name,message:"Cannot send empty messages"});const n=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];n.push({role:"user",content:o}),this.openai.chat.completions.create({stream:!1,model:this.model,messages:n}).then((async t=>{const s=await this.decode(t);"string"!=typeof s?i.error(new e({source:this.constructor.name,message:"Unable to decode response from OpenAI"})):i.next(s)})).catch((t=>{a("Error while making API call to OpenAI"),a(t),i.error(new e({source:this.constructor.name,message:t.message,exceptionId:s(t)??void 0}))}))}}class d extends c{constructor(e){super(e)}create(){if(!this.apiKey)throw new e({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new h({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){return this}useStreamingMode(){if(this.setStreamOrFetchCalled)throw new e({source:this.constructor.name,message:"Unable to set data loading mode to stream. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="stream",this.setStreamOrFetchCalled=!0,new m(this)}}class m extends c{constructor(e){super(e)}create(){if(!this.apiKey)throw new e({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new l({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){if(this.setStreamOrFetchCalled)throw new e({source:this.constructor.name,message:"Unable to set data loading mode to fetch. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="fetch",this.setStreamOrFetchCalled=!0,new d(this)}useStreamingMode(){return this}}const u=t=>{if("openai/gpt"!==t)throw new e({source:"createAdapter",message:"Adapter type not supported"});return new m};export{h as GptFetchAdapter,l as GptStreamingAdapter,u as createAdapter}; |
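Both module formats carry the same behavioural change between 0.4.0 and 0.4.1: send() no longer returns an Observable or Promise, and instead pushes results into an observer passed as a second argument. A caller-side sketch, assuming only the next(), complete() and error() methods that the bundled code actually invokes; the object literal and logging are illustrative:

import {createAdapter} from '@nlux/openai';

const adapter = createAdapter('openai/gpt').withApiKey('sk-placeholder').create();

// Only next(), complete() and error() are assumed here: they are the only
// observer methods the 0.4.1 bundles invoke.
adapter.send('Hello!', {
    next: (chunk: string) => console.log(chunk),  // decoded delta (streaming) or full reply (fetch)
    complete: () => console.log('done'),          // streaming mode signals completion
    error: (err: Error) => console.error(err),    // failures arrive as NluxUsageError
});
// In 0.4.0 the same call returned an Observable (streaming) or a Promise (fetch) instead.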
@@ -1,2 +0,2 @@
-import { ISseAdapter, AdapterStatus, AdapterConfig, AdapterInfo, Message, Observable, AdapterBuilder, Adapter } from '@nlux/nlux';
+import { NluxAdapter, NluxAdapterStatus, NluxAdapterConfig, NluxAdapterInfo, Message, StreamingAdapterObserver, AdapterBuilder } from '@nlux/nlux';
 import OpenAI from 'openai';
@@ -6,7 +6,7 @@
-declare abstract class GptAbstractAdapter<InboundPayload, OutboundPayload> implements ISseAdapter<InboundPayload, OutboundPayload> {
+declare abstract class GptAbstractAdapter<InboundPayload, OutboundPayload> implements NluxAdapter<InboundPayload, OutboundPayload> {
     protected readonly dataExchangeMode: 'stream' | 'fetch';
     protected readonly model: OpenAIChatModel;
     protected readonly openai: OpenAI;
-    protected currentStatus: AdapterStatus;
+    protected currentStatus: NluxAdapterStatus;
     protected initialSystemMessage: string | null;
@@ -19,9 +19,9 @@ protected constructor({ initialSystemMessage, apiKey, dataExchangeMode, model, }: {
     });
-    abstract get config(): AdapterConfig<InboundPayload, OutboundPayload>;
+    abstract get config(): NluxAdapterConfig<InboundPayload, OutboundPayload>;
     get id(): string;
-    get info(): AdapterInfo;
-    get status(): AdapterStatus;
+    get info(): NluxAdapterInfo;
+    get status(): NluxAdapterStatus;
     decode(payload: InboundPayload): Promise<Message>;
     encode(message: Message): Promise<OutboundPayload>;
-    abstract send(message: Message): Observable<Message> | Promise<Message>;
+    abstract send(message: Message, observer: StreamingAdapterObserver): void;
 }
@@ -39,3 +39,3 @@
     }>;
-    send(message: Message): Promise<Message>;
+    send(message: Message, observer: StreamingAdapterObserver<Message>): void;
 }
@@ -53,3 +53,3 @@
     }>;
-    send(message: Message): Observable<Message>;
+    send(message: Message, observer: StreamingAdapterObserver<Message>): void;
 }
@@ -67,3 +67,3 @@
     protected constructor(cloneFrom?: OpenAIGptAbstractBuilder);
-    abstract create(): (GptFetchAdapter | GptStreamingAdapter) & Adapter<any, any>;
+    abstract create(): (GptFetchAdapter | GptStreamingAdapter) & NluxAdapter<any, any>;
     abstract useFetchingMode(): OpenAIGptAbstractBuilder;
@@ -70,0 +70,0 @@ abstract useStreamingMode(): OpenAIGptAbstractBuilder;
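The declaration diff above amounts to renamed contracts (ISseAdapter to NluxAdapter, AdapterStatus to NluxAdapterStatus, AdapterConfig and AdapterInfo to NluxAdapterConfig and NluxAdapterInfo) plus the observer-based send() signature. A rough sketch of the new shape from an adapter author's perspective; the EchoAdapter class is hypothetical and omits the other members the abstract class declares:

import {Message, StreamingAdapterObserver} from '@nlux/nlux';

// Hypothetical adapter fragment, shown only to contrast the two signatures.
class EchoAdapter {
    // 0.4.0: send(message: Message): Observable<Message> | Promise<Message>
    // 0.4.1: send(message: Message, observer: StreamingAdapterObserver): void
    send(message: Message, observer: StreamingAdapterObserver): void {
        observer.next(`echo: ${message}`); // push a reply through the observer
        observer.complete();               // then signal completion
    }
}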
 {
   "name": "@nlux/openai",
-  "version": "0.4.0",
+  "version": "0.4.1",
   "description": "The OpenAI adapters for NLUX, the javascript library for building conversational AI interfaces.",
@@ -5,0 +5,0 @@ "keywords": [
@@ -1,1 +0,1 @@
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/nlux"),require("openai")):"function"==typeof define&&define.amd?define(["exports","@nlux/nlux","openai"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/openai"]={},e.nlux,e.OpenAI)}(this,(function(e,t,s){"use strict";const a=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},i=e=>{"string"==typeof e?console.warn(`${e}`):console.warn(JSON.stringify(e,null,2))},o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),n=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),r=Object.freeze({id:"nlux-gpt-adapter",connectionType:"http",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class l{constructor({initialSystemMessage:e,apiKey:t,dataExchangeMode:a,model:o}){this.dataExchangeMode="fetch",this.currentStatus="disconnected",this.initialSystemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.dataExchangeMode=a,this.openai=new s({apiKey:t,dangerouslyAllowBrowser:!0}),this.model=o,e&&(this.initialSystemMessage=e),i('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. 
Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get id(){return this.info.id}get info(){return r}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class c extends l{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"stream"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return o}send(e){if("string"!=typeof e||0===e.length)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const s=new t.Observable,o=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];return o.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:o}).then((async e=>{let t=e[Symbol.asyncIterator](),a=await t.next();for(;!a.done;){const e=a.value,o=await this.decode(e);void 0!==o?s.next(o):(i("Undecodable message"),i(e)),a=await t.next()}s.complete()})).catch((e=>{i(e),s.error(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:a(e)??void 0}))})),s}}class d{constructor(e){this.apiKey=null,this.dataExchangeMode="stream",this.initialSystemMessage=null,this.model="gpt-4",this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,e&&(this.apiKey=e.apiKey,this.dataExchangeMode=e.dataExchangeMode,this.initialSystemMessage=e.initialSystemMessage,this.model=e.model,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}withApiKey(e){if(this.setApiKeyCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.setApiKeyCalled=!0,this}withInitialSystemMessage(e){if(this.setInitialSystemMessageCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withInitialSystemMessage() twice."});return this.initialSystemMessage=e??null,this.setInitialSystemMessageCalled=!0,this}withModel(e){if(this.setModelCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. 
Make sure you are not calling withModel() twice."});return this.model=e,this.setModelCalled=!0,this}}class h extends l{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"fetch"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return n}send(e){return new Promise(((s,o)=>{if("string"!=typeof e||0===e.length)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const n=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];n.push({role:"user",content:e}),this.openai.chat.completions.create({stream:!1,model:this.model,messages:n}).then((async e=>{const a=await this.decode(e);"string"!=typeof a?o(new t.NluxUsageError({source:this.constructor.name,message:"Unable to decode response from OpenAI"})):s(a)})).catch((e=>{i("Error while making API call to OpenAI"),i(e),o(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:a(e)??void 0}))}))}))}}class u extends d{constructor(e){super(e)}create(){if(!this.apiKey)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new h({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){return this}useStreamingMode(){if(this.setStreamOrFetchCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to stream. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="stream",this.setStreamOrFetchCalled=!0,new m(this)}}class m extends d{constructor(e){super(e)}create(){if(!this.apiKey)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new c({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){if(this.setStreamOrFetchCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to fetch. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="fetch",this.setStreamOrFetchCalled=!0,new u(this)}useStreamingMode(){return this}}e.GptFetchAdapter=h,e.GptStreamingAdapter=c,e.createAdapter=e=>{if("openai/gpt"!==e)throw new t.NluxUsageError({source:"createAdapter",message:"Adapter type not supported"});return new m}})); | ||
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/nlux"),require("openai")):"function"==typeof define&&define.amd?define(["exports","@nlux/nlux","openai"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/openai"]={},e.nlux,e.OpenAI)}(this,(function(e,t,s){"use strict";const a=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},i=e=>{"string"==typeof e?console.warn(`${e}`):console.warn(JSON.stringify(e,null,2))},o=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].delta.content;if("string"==typeof t)return t}}),r=Object.freeze({encodeMessage:async e=>({role:"user",content:e}),decodeMessage:async e=>{if(!e.choices||!e.choices[0])throw Error("Invalid payload");const t=e.choices[0].message.content;if("string"==typeof t)return t}}),n=Object.freeze({id:"nlux-gpt-adapter",connectionType:"http",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:"https://api.openai.com/v1/chat/completion"},inputFormats:["text"],outputFormats:["text"]});class l{constructor({initialSystemMessage:e,apiKey:t,dataExchangeMode:a,model:o}){this.dataExchangeMode="fetch",this.currentStatus="disconnected",this.initialSystemMessage="Act as a helpful assistant to the user",this.currentStatus="disconnected",this.dataExchangeMode=a,this.openai=new s({apiKey:t,dangerouslyAllowBrowser:!0}),this.model=o,e&&(this.initialSystemMessage=e),i('OpenAI GPT adapter has been initialized in browser mode using option "dangerouslyAllowBrowser". This is not recommended for production use. We recommend that you implement a server-side proxy and configure a customized adapter for it. 
Read more at https://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety')}get id(){return this.info.id}get info(){return n}get status(){return this.currentStatus}async decode(e){const{decodeMessage:t}=this.config;return t(e)}async encode(e){const{encodeMessage:t}=this.config;return t(e)}}class c extends l{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"stream"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return o}send(e,s){const o=e;if("string"!=typeof o||0===o.length)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const r=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];r.push({role:"user",content:o}),this.openai.chat.completions.create({stream:!0,model:this.model,messages:r}).then((async e=>{let t=e[Symbol.asyncIterator](),a=await t.next();for(;!a.done;){const e=a.value,o=await this.decode(e);void 0!==o?s.next(o):(i("Undecodable message"),i(e)),a=await t.next()}s.complete()})).catch((e=>{i(e),s.error(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:a(e)??void 0}))}))}}class d{constructor(e){this.apiKey=null,this.dataExchangeMode="stream",this.initialSystemMessage=null,this.model="gpt-4",this.setApiKeyCalled=!1,this.setInitialSystemMessageCalled=!1,this.setModelCalled=!1,this.setStreamOrFetchCalled=!1,e&&(this.apiKey=e.apiKey,this.dataExchangeMode=e.dataExchangeMode,this.initialSystemMessage=e.initialSystemMessage,this.model=e.model,this.setApiKeyCalled=e.setApiKeyCalled,this.setInitialSystemMessageCalled=e.setInitialSystemMessageCalled,this.setModelCalled=e.setModelCalled,this.setStreamOrFetchCalled=e.setStreamOrFetchCalled)}withApiKey(e){if(this.setApiKeyCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set API key. API key setter has already been called by this builder. Make sure you are not calling withApiKey() twice."});return this.apiKey=e,this.setApiKeyCalled=!0,this}withInitialSystemMessage(e){if(this.setInitialSystemMessageCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set initial system message. Initial system message setter has already been called by this builder. Make sure you are not calling withInitialSystemMessage() twice."});return this.initialSystemMessage=e??null,this.setInitialSystemMessageCalled=!0,this}withModel(e){if(this.setModelCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set model. Model setter has already been called by this builder. 
Make sure you are not calling withModel() twice."});return this.model=e,this.setModelCalled=!0,this}}class h extends l{constructor({apiKey:e,model:t,initialSystemMessage:s}){super({apiKey:e,model:t,initialSystemMessage:s,dataExchangeMode:"fetch"}),void 0!==s&&s.length>0&&(this.initialSystemMessage=s)}get config(){return r}send(e,s){const o=e;if("string"!=typeof o||0===o.length)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot send empty messages"});const r=this.initialSystemMessage?[{role:"system",content:this.initialSystemMessage}]:[];r.push({role:"user",content:o}),this.openai.chat.completions.create({stream:!1,model:this.model,messages:r}).then((async e=>{const a=await this.decode(e);"string"!=typeof a?s.error(new t.NluxUsageError({source:this.constructor.name,message:"Unable to decode response from OpenAI"})):s.next(a)})).catch((e=>{i("Error while making API call to OpenAI"),i(e),s.error(new t.NluxUsageError({source:this.constructor.name,message:e.message,exceptionId:a(e)??void 0}))}))}}class u extends d{constructor(e){super(e)}create(){if(!this.apiKey)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new h({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){return this}useStreamingMode(){if(this.setStreamOrFetchCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to stream. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="stream",this.setStreamOrFetchCalled=!0,new m(this)}}class m extends d{constructor(e){super(e)}create(){if(!this.apiKey)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to create ChatGPT adapter. API key is not set. You should call useApiKey() on instance to set the API key, orpass the API key as an option with useAdapter() hook."});return new c({apiKey:this.apiKey,model:this.model,initialSystemMessage:this.initialSystemMessage??void 0})}useFetchingMode(){if(this.setStreamOrFetchCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Unable to set data loading mode to fetch. Stream or fetch setter has already been called by this builder. Make sure you are not calling stream() or fetch() twice."});return this.dataExchangeMode="fetch",this.setStreamOrFetchCalled=!0,new u(this)}useStreamingMode(){return this}}e.GptFetchAdapter=h,e.GptStreamingAdapter=c,e.createAdapter=e=>{if("openai/gpt"!==e)throw new t.NluxUsageError({source:"createAdapter",message:"Adapter type not supported"});return new m}})); |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package