@nlux/hf - npm Package Compare versions

Comparing version 0.5.3 to 0.5.4


cjs/hf.js

@@ -1,1 +0,1 @@

"use strict";var e=require("@nlux/nlux");const t=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},s=e=>e.ok?null:e.status>=500&&e.status<600?"NX-NT-003":e.status>=400&&e.status<500?"NX-NT-004":"NX-NT-001",o=class o{constructor(t){if(!t.model)throw new e.NluxValidationError({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.options={...t}}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.options.dataTransferMode}get id(){return""}get info(){return{id:"",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:""},inputFormats:["text"],outputFormats:["text"]}}get status(){return"idle"}async decode(e){if("string"==typeof e)return Promise.resolve(e);if(Array.isArray(e)){if(0===e.length)return Promise.resolve("");const t=e[0];if("object"==typeof t&&t.hasOwnProperty("generated_text"))return Promise.resolve(t.generated_text)}return""}async encode(t){const s=t,{inputPreProcessor:o}=this.options;if(o&&s){if("string"==typeof s)return o(s,null,{...this.options});e.warn("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return t}send(t,s){if(!t)throw new e.NluxValidationError({source:this.constructor.name,message:"The first argument to the send() method must be a non-empty string"});if("stream"===this.dataTransferMode&&!s)throw new e.NluxValidationError({source:this.constructor.name,message:"The Hugging Face adapter is set to be used in streaming mode, but no observer was provided to the send() method! 
You should either provide an observer as a second argument to the send() NaN"});if("fetch"===this.dataTransferMode)return this.sendFetch(t);this.sendStream(t,s)}async sendFetch(r){if(!this.options.model)throw new e.NluxValidationError({source:this.constructor.name,message:'You must provide a valid model or endpoint through the "model" option when creating the Hugging Face adapter!'});const n={"Content-Type":"application/json"};this.options.authToken&&(n.Authorization=`Bearer ${this.options.authToken}`);const a="endpoint"===(e=>{const t=e.toLowerCase()||"";return t.startsWith("https://")||t.startsWith("http://")?"endpoint":"model"})(this.options.model)?this.options.model:`${o.baseUrl}/${this.options.model}`;let i;try{const e=await this.encode(r);i=await fetch(a,{headers:n,method:"POST",body:JSON.stringify({inputs:e,parameters:{max_new_tokens:this.options.maxNewTokens??o.defaultMaxNewTokens}})})}catch(s){throw new e.NluxError({source:this.constructor.name,message:`An error occurred while sending the message to the Hugging Face API: ${s.message}`,exceptionId:t(s)??void 0})}if(!i.ok)throw new e.NluxError({source:this.constructor.name,message:`The Hugging Face adapter received an invalid response from the server: ${i.status}`,exceptionId:s(i)??void 0});return await this.decode(await i.json())}sendStream(e,t){throw new Error("Not implemented yet!")}};o.baseUrl="https://api-inference.huggingface.co/models",o.defaultMaxNewTokens=500;let r=o;class n{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModelOrEndpoint=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(null===this.theModelOrEndpoint)throw new e.NluxValidationError({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method to create a Hugging Face adapter!'});return new r({dataTransferMode:this.theDataTransferMode,model:this.theModelOrEndpoint,authToken:this.theAuthToken??void 0,inputPreProcessor:this.theInputPreProcessor??void 0,maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(t){if(null!==this.theAuthToken)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=t,this}withDataTransferMode(t){if(this.withDataTransferModeCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=t,this.withDataTransferModeCalled=!0,this}withEndpoint(t){if(null!==this.theModelOrEndpoint)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theModelOrEndpoint=t,this}withInputPreProcessor(t){if(null!==this.theInputPreProcessor)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor more than once"});return this.theInputPreProcessor=t,this}withMaxNewTokens(t){if(null!==this.theMaxNewTokens)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=t,this}withModel(t){if(null!==this.theModelOrEndpoint)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModelOrEndpoint=t,this}withSystemMessage(t){if(null!==this.theSystemMessage)throw new 
e.NluxUsageError({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=t,this}}Object.defineProperty(exports,"debug",{enumerable:!0,get:function(){return e.debug}}),exports.createAdapter=()=>new n,exports.llama2InputPreProcessor=(e,t,s)=>`<s>[INST] <<SYS>> ${s?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>></s><s>[INST] ${e} [/INST]`;
"use strict";var e=require("@nlux/nlux"),t=require("@huggingface/inference");const s=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},o=class o{constructor(s){if(!s.model)throw new e.NluxValidationError({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.options={...s},this.inference=new t.HfInference(s.authToken)}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.options.dataTransferMode??o.defaultDataTransferMode}get id(){return""}get info(){return{id:"",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:""},inputFormats:["text"],outputFormats:["text"]}}get status(){return"idle"}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}return"object"==typeof e&&e&&"string"==typeof e.generated_text?e.generated_text:"object"==typeof e&&e&&"string"==typeof e.text?e.text:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(t){const s=t,{preProcessors:{input:o}={}}=this.options;if(o&&s){if("string"==typeof s)return o(s,null,this.options);e.warn("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return t}send(t,s){const o=new Promise((async(o,n)=>{if(!t)throw new e.NluxValidationError({source:this.constructor.name,message:"The first argument to the send() method must be a non-empty string"});if("stream"===this.dataTransferMode&&!s)throw new e.NluxValidationError({source:this.constructor.name,message:"The Hugging Face adapter is set to be used in streaming mode, but no observer was provided to the send() method! You should either provide an observer as a second argument to the send() method or set the data loading mode to fetch when creating the adapter."});try{const e=await this.encode(t);if("stream"===this.dataTransferMode)return void this.sendStream(e,s);o(await this.sendFetch(e))}catch(e){n(e)}}));if("fetch"===this.dataTransferMode)return o}async sendFetch(t){if(!this.options.model&&!this.options.endpoint)throw new e.NluxValidationError({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:t,parameters:{max_new_tokens:this.options.maxNewTokens??o.defaultMaxNewTokens}};let r;try{if(this.options.endpoint){const e=this.inference.endpoint(this.options.endpoint);r=await e.textGeneration(n)}else r=await this.inference.textGeneration({model:this.options.model,...n})}catch(t){throw new e.NluxError({source:this.constructor.name,message:`An error occurred while sending the message to the Hugging Face API: ${t.message}`,exceptionId:s(t)??void 0})}return await this.decode(r)}sendStream(t,s){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new e.NluxValidationError({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:t,parameters:{max_new_tokens:this.options.maxNewTokens??o.defaultMaxNewTokens}};let r;try{if(this.options.endpoint){r=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else r=this.inference.textGenerationStream({model:this.options.model,...n});for(;;){const e=await r.next(),{done:t,value:o}=e;if(t)break;s.next(await this.decode(o.token))}s.complete()}catch(t){s.error(t),e.warn("An error occurred while sending the message to the Hugging Face streaming API: \n"+t.message)}}))}};o.baseUrl="https://api-inference.huggingface.co/models",o.defaultDataTransferMode="fetch",o.defaultMaxNewTokens=500;let n=o;class r{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new e.NluxValidationError({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new n({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(t){if(null!==this.theAuthToken)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=t,this}withDataTransferMode(t){if(this.withDataTransferModeCalled)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=t,this.withDataTransferModeCalled=!0,this}withEndpoint(t){if(null!==this.theEndpoint)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=t,this}withInputPreProcessor(t){if(null!==this.theInputPreProcessor)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor more than once"});return this.theInputPreProcessor=t,this}withMaxNewTokens(t){if(null!==this.theMaxNewTokens)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=t,this}withModel(t){if(null!==this.theModel)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=t,this}withOutputPreProcessor(t){if(null!==this.theOutputPreProcessor)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=t,this}withSystemMessage(t){if(null!==this.theSystemMessage)throw new e.NluxUsageError({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=t,this}}Object.defineProperty(exports,"debug",{enumerable:!0,get:function(){return e.debug}}),exports.createAdapter=()=>new r,exports.llama2InputPreProcessor=(e,t,s)=>`<s> [INST] <<SYS>> ${s?.systemMessage??"You are a 
helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,exports.llama2OutputPreProcessor=e=>e?e.replace(/<[^>]*>/g,""):"";
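For orientation: the second bundle above is the 0.5.4 build. It routes requests through @huggingface/inference (HfInference), accepts an endpoint option as an alternative to model, adds an output pre-processor, and implements streaming via textGenerationStream. A minimal usage sketch of the builder exported above, assuming a placeholder model id and auth token (in a real app the adapter would typically be handed to nlux rather than having send() called directly):

import {createAdapter, llama2InputPreProcessor, llama2OutputPreProcessor} from '@nlux/hf';

// Placeholder model id and token -- substitute your own.
const adapter = createAdapter()
    .withModel('your-org/your-text-generation-model') // or .withEndpoint('https://...') for a dedicated Inference Endpoint
    .withAuthToken('hf_...')
    .withDataTransferMode('fetch')                     // the builder defaults to 'stream', which requires an observer
    .withMaxNewTokens(200)
    .withInputPreProcessor(llama2InputPreProcessor)
    .withOutputPreProcessor(llama2OutputPreProcessor)  // new in 0.5.4
    .create();

// In fetch mode, send() resolves with the decoded generated text.
const reply = await adapter.send('What is the capital of France?');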

@@ -1,1 +0,1 @@

import{NluxValidationError as e,warn as t,NluxError as s,NluxUsageError as o}from"@nlux/nlux";export{debug}from"@nlux/nlux";const n=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},r=e=>e.ok?null:e.status>=500&&e.status<600?"NX-NT-003":e.status>=400&&e.status<500?"NX-NT-004":"NX-NT-001",a=class o{constructor(t){if(!t.model)throw new e({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.options={...t}}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.options.dataTransferMode}get id(){return""}get info(){return{id:"",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:""},inputFormats:["text"],outputFormats:["text"]}}get status(){return"idle"}async decode(e){if("string"==typeof e)return Promise.resolve(e);if(Array.isArray(e)){if(0===e.length)return Promise.resolve("");const t=e[0];if("object"==typeof t&&t.hasOwnProperty("generated_text"))return Promise.resolve(t.generated_text)}return""}async encode(e){const s=e,{inputPreProcessor:o}=this.options;if(o&&s){if("string"==typeof s)return o(s,null,{...this.options});t("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}send(t,s){if(!t)throw new e({source:this.constructor.name,message:"The first argument to the send() method must be a non-empty string"});if("stream"===this.dataTransferMode&&!s)throw new e({source:this.constructor.name,message:"The Hugging Face adapter is set to be used in streaming mode, but no observer was provided to the send() method! 
You should either provide an observer as a second argument to the send() NaN"});if("fetch"===this.dataTransferMode)return this.sendFetch(t);this.sendStream(t,s)}async sendFetch(t){if(!this.options.model)throw new e({source:this.constructor.name,message:'You must provide a valid model or endpoint through the "model" option when creating the Hugging Face adapter!'});const a={"Content-Type":"application/json"};this.options.authToken&&(a.Authorization=`Bearer ${this.options.authToken}`);const i="endpoint"===(e=>{const t=e.toLowerCase()||"";return t.startsWith("https://")||t.startsWith("http://")?"endpoint":"model"})(this.options.model)?this.options.model:`${o.baseUrl}/${this.options.model}`;let h;try{const e=await this.encode(t);h=await fetch(i,{headers:a,method:"POST",body:JSON.stringify({inputs:e,parameters:{max_new_tokens:this.options.maxNewTokens??o.defaultMaxNewTokens}})})}catch(e){throw new s({source:this.constructor.name,message:`An error occurred while sending the message to the Hugging Face API: ${e.message}`,exceptionId:n(e)??void 0})}if(!h.ok)throw new s({source:this.constructor.name,message:`The Hugging Face adapter received an invalid response from the server: ${h.status}`,exceptionId:r(h)??void 0});return await this.decode(await h.json())}sendStream(e,t){throw new Error("Not implemented yet!")}};a.baseUrl="https://api-inference.huggingface.co/models",a.defaultMaxNewTokens=500;let i=a;class h{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModelOrEndpoint=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(null===this.theModelOrEndpoint)throw new e({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method to create a Hugging Face adapter!'});return new i({dataTransferMode:this.theDataTransferMode,model:this.theModelOrEndpoint,authToken:this.theAuthToken??void 0,inputPreProcessor:this.theInputPreProcessor??void 0,maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new o({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theModelOrEndpoint)throw new o({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theModelOrEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new o({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModelOrEndpoint)throw new o({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModelOrEndpoint=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new o({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}const u=()=>new 
h,d=(e,t,s)=>`<s>[INST] <<SYS>> ${s?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>></s><s>[INST] ${e} [/INST]`;export{u as createAdapter,d as llama2InputPreProcessor};
import{NluxValidationError as e,warn as t,NluxError as s,NluxUsageError as o}from"@nlux/nlux";export{debug}from"@nlux/nlux";import{HfInference as n}from"@huggingface/inference";const r=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},i=class o{constructor(t){if(!t.model)throw new e({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.options={...t},this.inference=new n(t.authToken)}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.options.dataTransferMode??o.defaultDataTransferMode}get id(){return""}get info(){return{id:"",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:""},inputFormats:["text"],outputFormats:["text"]}}get status(){return"idle"}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}return"object"==typeof e&&e&&"string"==typeof e.generated_text?e.generated_text:"object"==typeof e&&e&&"string"==typeof e.text?e.text:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const s=e,{preProcessors:{input:o}={}}=this.options;if(o&&s){if("string"==typeof s)return o(s,null,this.options);t("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}send(t,s){const o=new Promise((async(o,n)=>{if(!t)throw new e({source:this.constructor.name,message:"The first argument to the send() method must be a non-empty string"});if("stream"===this.dataTransferMode&&!s)throw new e({source:this.constructor.name,message:"The Hugging Face adapter is set to be used in streaming mode, but no observer was provided to the send() method! You should either provide an observer as a second argument to the send() method or set the data loading mode to fetch when creating the adapter."});try{const e=await this.encode(t);if("stream"===this.dataTransferMode)return void this.sendStream(e,s);o(await this.sendFetch(e))}catch(e){n(e)}}));if("fetch"===this.dataTransferMode)return o}async sendFetch(t){if(!this.options.model&&!this.options.endpoint)throw new e({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:t,parameters:{max_new_tokens:this.options.maxNewTokens??o.defaultMaxNewTokens}};let i;try{if(this.options.endpoint){const e=this.inference.endpoint(this.options.endpoint);i=await e.textGeneration(n)}else i=await this.inference.textGeneration({model:this.options.model,...n})}catch(e){throw new s({source:this.constructor.name,message:`An error occurred while sending the message to the Hugging Face API: ${e.message}`,exceptionId:r(e)??void 0})}return await this.decode(i)}sendStream(s,n){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new e({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const r={inputs:s,parameters:{max_new_tokens:this.options.maxNewTokens??o.defaultMaxNewTokens}};let i;try{if(this.options.endpoint){i=this.inference.endpoint(this.options.endpoint).textGenerationStream(r)}else i=this.inference.textGenerationStream({model:this.options.model,...r});for(;;){const e=await i.next(),{done:t,value:s}=e;if(t)break;n.next(await this.decode(s.token))}n.complete()}catch(e){n.error(e),t("An error occurred while sending the message to the Hugging Face streaming API: \n"+e.message)}}))}};i.baseUrl="https://api-inference.huggingface.co/models",i.defaultDataTransferMode="fetch",i.defaultMaxNewTokens=500;let a=i;class h{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new e({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new a({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new o({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new o({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new o({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new o({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new o({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}const u=()=>new h,d=(e,t,s)=>`<s> [INST] <<SYS>> ${s?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,c=e=>e?e.replace(/<[^>]*>/g,""):"";export{u as createAdapter,d as llama2InputPreProcessor,c as llama2OutputPreProcessor};
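The two llama2 helpers exported by the 0.5.4 build are plain string transforms: the input pre-processor wraps the user message in Llama-2 chat tags (reading an optional systemMessage from the adapter options, passed as its third argument), and the new output pre-processor strips any remaining tags. A standalone sketch of the same transforms, with simplified signatures for readability:

// Re-statement of the transforms visible in the bundle above (not the package's own source layout).
const llama2Input = (message: string, systemMessage?: string): string =>
    `<s> [INST] <<SYS>> ${systemMessage ?? 'You are a helpful assistant. You keep your answers short.'} <</SYS>> </s><s> [INST] ${message} [/INST]`;

const llama2Output = (output: string): string =>
    output ? output.replace(/<[^>]*>/g, '') : '';

console.log(llama2Input('What is nlux?'));
// -> <s> [INST] <<SYS>> You are a helpful assistant. You keep your answers short. <</SYS>> </s><s> [INST] What is nlux? [/INST]

console.log(llama2Output('Paris is the capital of France.</s>'));
// -> Paris is the capital of France.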

@@ -6,7 +6,13 @@ import { DataTransferMode, AdapterBuilder, StandardAdapter } from '@nlux/nlux';

+ type HfOutputPreProcessor = (output: string) => string;
  type HfAdapterOptions = {
-     dataTransferMode: DataTransferMode;
-     model: string;
+     dataTransferMode?: DataTransferMode;
+     model?: string;
+     endpoint?: string;
      authToken?: string;
-     inputPreProcessor?: HfInputPreProcessor;
+     preProcessors?: {
+         input?: HfInputPreProcessor;
+         output?: HfOutputPreProcessor;
+     };
      maxNewTokens?: number;

@@ -91,2 +97,12 @@ systemMessage?: string;

+     /**
+      * This function will be called after receiving the output from the Hugging Face Inference API, and before
+      * displaying it to the user. It can be used to preprocess that output, which is useful if the model returns
+      * it in an unexpected format (JSON, special syntax) and you want to convert it to something that can be
+      * rendered in the UI.
+      *
+      * @param {HfOutputPreProcessor} outputPreProcessor
+      * @returns {HfAdapterBuilder}
+      */
+     withOutputPreProcessor(outputPreProcessor: HfOutputPreProcessor): HfAdapterBuilder;
      /**
       * The initial system message to send to the Hugging Face Inference API.

@@ -105,3 +121,4 @@ * This will be used during the pre-processing step to construct the payload that will be sent to the API.

  declare const llama2InputPreProcessor: HfInputPreProcessor;
+ declare const llama2OutputPreProcessor: HfOutputPreProcessor;
- export { type HfAdapterBuilder, type HfAdapterOptions, type HfInputPreProcessor, createAdapter, llama2InputPreProcessor };
+ export { type HfAdapterBuilder, type HfAdapterOptions, type HfInputPreProcessor, createAdapter, llama2InputPreProcessor, llama2OutputPreProcessor };
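In short, the typings now make model and endpoint both optional (one of the two still has to be provided when building the adapter) and fold the input pre-processor into a preProcessors group alongside the new output hook. A sketch of an object satisfying the new HfAdapterOptions shape, using a hypothetical Inference Endpoint URL and a placeholder token:

import type {HfAdapterOptions} from '@nlux/hf';
import {llama2InputPreProcessor, llama2OutputPreProcessor} from '@nlux/hf';

// 0.5.3 required `model` and had a flat `inputPreProcessor`;
// 0.5.4 accepts `model` or `endpoint` and groups the hooks under `preProcessors`.
const options: HfAdapterOptions = {
    dataTransferMode: 'fetch',
    endpoint: 'https://example.endpoints.huggingface.cloud', // hypothetical dedicated endpoint URL
    authToken: 'hf_...',                                      // placeholder
    preProcessors: {
        input: llama2InputPreProcessor,
        output: llama2OutputPreProcessor,
    },
    maxNewTokens: 200,
    systemMessage: 'You answer in one short sentence.',
};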
  {
      "name": "@nlux/hf",
-     "version": "0.5.3",
+     "version": "0.5.4",
      "description": "The Hugging Face adapters for NLUX, the javascript library for building conversational AI interfaces.",

@@ -56,3 +56,3 @@ "keywords": [

"peerDependencies": {
"@nlux/nlux": "0.5.3"
"@nlux/nlux": "0.5.4"
},

@@ -59,0 +59,0 @@ "main": "index.js",

@@ -1,1 +0,1 @@

!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/nlux")):"function"==typeof define&&define.amd?define(["exports","@nlux/nlux"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/hf"]={},e.nlux)}(this,(function(e,t){"use strict";const s=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},o=e=>e.ok?null:e.status>=500&&e.status<600?"NX-NT-003":e.status>=400&&e.status<500?"NX-NT-004":"NX-NT-001",r=class e{constructor(e){if(!e.model)throw new t.NluxValidationError({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.options={...e}}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.options.dataTransferMode}get id(){return""}get info(){return{id:"",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:""},inputFormats:["text"],outputFormats:["text"]}}get status(){return"idle"}async decode(e){if("string"==typeof e)return Promise.resolve(e);if(Array.isArray(e)){if(0===e.length)return Promise.resolve("");const t=e[0];if("object"==typeof t&&t.hasOwnProperty("generated_text"))return Promise.resolve(t.generated_text)}return""}async encode(e){const s=e,{inputPreProcessor:o}=this.options;if(o&&s){if("string"==typeof s)return o(s,null,{...this.options});t.warn("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}send(e,s){if(!e)throw new t.NluxValidationError({source:this.constructor.name,message:"The first argument to the send() method must be a non-empty string"});if("stream"===this.dataTransferMode&&!s)throw new t.NluxValidationError({source:this.constructor.name,message:"The Hugging Face adapter is set to be used in streaming mode, but no observer was provided to the send() method! 
You should either provide an observer as a second argument to the send() NaN"});if("fetch"===this.dataTransferMode)return this.sendFetch(e);this.sendStream(e,s)}async sendFetch(r){if(!this.options.model)throw new t.NluxValidationError({source:this.constructor.name,message:'You must provide a valid model or endpoint through the "model" option when creating the Hugging Face adapter!'});const n={"Content-Type":"application/json"};this.options.authToken&&(n.Authorization=`Bearer ${this.options.authToken}`);const a="endpoint"===(e=>{const t=e.toLowerCase()||"";return t.startsWith("https://")||t.startsWith("http://")?"endpoint":"model"})(this.options.model)?this.options.model:`${e.baseUrl}/${this.options.model}`;let i;try{const t=await this.encode(r);i=await fetch(a,{headers:n,method:"POST",body:JSON.stringify({inputs:t,parameters:{max_new_tokens:this.options.maxNewTokens??e.defaultMaxNewTokens}})})}catch(e){throw new t.NluxError({source:this.constructor.name,message:`An error occurred while sending the message to the Hugging Face API: ${e.message}`,exceptionId:s(e)??void 0})}if(!i.ok)throw new t.NluxError({source:this.constructor.name,message:`The Hugging Face adapter received an invalid response from the server: ${i.status}`,exceptionId:o(i)??void 0});return await this.decode(await i.json())}sendStream(e,t){throw new Error("Not implemented yet!")}};r.baseUrl="https://api-inference.huggingface.co/models",r.defaultMaxNewTokens=500;let n=r;class a{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModelOrEndpoint=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(null===this.theModelOrEndpoint)throw new t.NluxValidationError({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method to create a Hugging Face adapter!'});return new n({dataTransferMode:this.theDataTransferMode,model:this.theModelOrEndpoint,authToken:this.theAuthToken??void 0,inputPreProcessor:this.theInputPreProcessor??void 0,maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theModelOrEndpoint)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theModelOrEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModelOrEndpoint)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModelOrEndpoint=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new 
t.NluxUsageError({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}Object.defineProperty(e,"debug",{enumerable:!0,get:function(){return t.debug}}),e.createAdapter=()=>new a,e.llama2InputPreProcessor=(e,t,s)=>`<s>[INST] <<SYS>> ${s?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>></s><s>[INST] ${e} [/INST]`}));
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@nlux/nlux"),require("@huggingface/inference")):"function"==typeof define&&define.amd?define(["exports","@nlux/nlux","@huggingface/inference"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/hf"]={},e.nlux,e.inference)}(this,(function(e,t,s){"use strict";const o=e=>{if("object"==typeof e&&null!==e){if("invalid_api_key"===e.code)return"NX-NT-002";if(e.message?.toLowerCase().includes("connection error"))return"NX-NT-001"}return null},n=class e{constructor(e){if(!e.model)throw new t.NluxValidationError({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.options={...e},this.inference=new s.HfInference(e.authToken)}get config(){return{encodeMessage:e=>Promise.resolve(e),decodeMessage:e=>Promise.resolve(e)}}get dataTransferMode(){return this.options.dataTransferMode??e.defaultDataTransferMode}get id(){return""}get info(){return{id:"",capabilities:{textChat:!0,audio:!1,fileUpload:!1,replyToSingleMessage:!1},remote:{url:""},inputFormats:["text"],outputFormats:["text"]}}get status(){return"idle"}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}return"object"==typeof e&&e&&"string"==typeof e.generated_text?e.generated_text:"object"==typeof e&&e&&"string"==typeof e.text?e.text:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const s=e,{preProcessors:{input:o}={}}=this.options;if(o&&s){if("string"==typeof s)return o(s,null,this.options);t.warn("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}send(e,s){const o=new Promise((async(o,n)=>{if(!e)throw new t.NluxValidationError({source:this.constructor.name,message:"The first argument to the send() method must be a non-empty string"});if("stream"===this.dataTransferMode&&!s)throw new t.NluxValidationError({source:this.constructor.name,message:"The Hugging Face adapter is set to be used in streaming mode, but no observer was provided to the send() method! You should either provide an observer as a second argument to the send() method or set the data loading mode to fetch when creating the adapter."});try{const t=await this.encode(e);if("stream"===this.dataTransferMode)return void this.sendStream(t,s);o(await this.sendFetch(t))}catch(e){n(e)}}));if("fetch"===this.dataTransferMode)return o}async sendFetch(s){if(!this.options.model&&!this.options.endpoint)throw new t.NluxValidationError({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:s,parameters:{max_new_tokens:this.options.maxNewTokens??e.defaultMaxNewTokens}};let r;try{if(this.options.endpoint){const e=this.inference.endpoint(this.options.endpoint);r=await e.textGeneration(n)}else r=await this.inference.textGeneration({model:this.options.model,...n})}catch(e){throw new t.NluxError({source:this.constructor.name,message:`An error occurred while sending the message to the Hugging Face API: ${e.message}`,exceptionId:o(e)??void 0})}return await this.decode(r)}sendStream(s,o){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new t.NluxValidationError({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:s,parameters:{max_new_tokens:this.options.maxNewTokens??e.defaultMaxNewTokens}};let r;try{if(this.options.endpoint){r=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else r=this.inference.textGenerationStream({model:this.options.model,...n});for(;;){const e=await r.next(),{done:t,value:s}=e;if(t)break;o.next(await this.decode(s.token))}o.complete()}catch(e){o.error(e),t.warn("An error occurred while sending the message to the Hugging Face streaming API: \n"+e.message)}}))}};n.baseUrl="https://api-inference.huggingface.co/models",n.defaultDataTransferMode="fetch",n.defaultMaxNewTokens=500;let r=n;class i{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new t.NluxValidationError({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new r({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return 
this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new t.NluxUsageError({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}Object.defineProperty(e,"debug",{enumerable:!0,get:function(){return t.debug}}),e.createAdapter=()=>new i,e.llama2InputPreProcessor=(e,t,s)=>`<s> [INST] <<SYS>> ${s?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,e.llama2OutputPreProcessor=e=>e?e.replace(/<[^>]*>/g,""):""}));