New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

@nlux/hf

Package Overview
Dependencies
Maintainers
1
Versions
170
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@nlux/hf — npm Package: Compare versions

Comparing version 2.14.0 to 2.16.0

2

cjs/hf.js

@@ -1,1 +0,1 @@

"use strict";var e=require("@huggingface/inference"),t=Object.defineProperty,s=(e,s,n)=>((e,s,n)=>s in e?t(e,s,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[s]=n)(e,"symbol"!=typeof s?s+"":s,n);class n extends Error{constructor(e={}){super(e.message),s(this,"exceptionId"),s(this,"message"),s(this,"source"),s(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class o extends n{}class r extends n{}const i=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},a=e=>{if("object"==typeof e&&null!==e){const t=e;if("invalid_api_key"===t.code)return"invalid-api-key";if(t.message&&"string"==typeof t.message&&t.message.toLowerCase().includes("connection error"))return"connection-error"}return null},h=class t{constructor(t){if(!t.model&&!t.endpoint)throw new r({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.options={...t},this.inference=new e.HfInference(t.authToken)}get dataTransferMode(){return this.options.dataTransferMode??t.defaultDataTransferMode}get id(){return this.__instanceId}get info(){return{id:"hugging-face-adapter",capabilities:{chat:!0,fileUpload:!1,textToSpeech:!1,speechToText:!1}}}async batchText(e){if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const s={inputs:e,parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};try{let e;if(this.options.endpoint){const t=this.inference.endpoint(this.options.endpoint);e=await t.textGeneration(s)}else e=await this.inference.textGeneration({model:this.options.model,...s});return e}catch(e){const t=e.message||"An error occurred while sending the message to the Hugging Face API";throw new n({source:this.constructor.name,message:t,exceptionId:a(e)??void 0})}}preProcessAiStreamedChunk(e,t){throw new Error("Method not implemented.")}preProcessAiBatchedMessage(e,t){throw new Error("Method not implemented.")}streamText(e,s){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:await this.encode(e),parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};let o;try{if(this.options.endpoint){o=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else o=this.inference.textGenerationStream({model:this.options.model,...n});for(;o;){const e=await o.next(),{done:t,value:n}=e;if(t)break;s.next(await this.decode(n.token))}s.complete()}catch(e){const t=e;s.error(t),i("An error occurred while sending the message to the Hugging Face streaming API: \n"+t.message)}}))}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}const t=e?e.generated_text:void 0;if("string"==typeof t)return t;const s=e&&"object"==typeof e&&"text"in e?e.text:void 
0;return"string"===s?s:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const t=e,{preProcessors:{input:s}={}}=this.options;if(s&&t){if("string"==typeof t)return s(t,this.options);i("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}};h.defaultDataTransferMode="batch",h.defaultMaxNewTokens=500;let c=h;class u{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new r({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new c({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new o({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new o({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set 
the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new o({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new o({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new o({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}exports.createChatAdapter=()=>new u,exports.llama2InputPreProcessor=(e,t)=>`<s> [INST] <<SYS>> ${t?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,exports.llama2OutputPreProcessor=e=>e?e.replace(/<[^>]*>/g,""):"";
"use strict";var e=require("@huggingface/inference"),t=Object.defineProperty,s=(e,s,n)=>((e,s,n)=>s in e?t(e,s,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[s]=n)(e,"symbol"!=typeof s?s+"":s,n);class n extends Error{constructor(e={}){super(e.message),s(this,"exceptionId"),s(this,"message"),s(this,"source"),s(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class o extends n{}class r extends n{}const i=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},a=e=>{if("object"==typeof e&&null!==e){const t=e;if("invalid_api_key"===t.code)return"invalid-api-key";if(t.message&&"string"==typeof t.message&&t.message.toLowerCase().includes("connection error"))return"connection-error"}return null},h=class t{constructor(t){if(!t.model&&!t.endpoint)throw new r({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.options={...t},this.inference=new e.HfInference(t.authToken)}get dataTransferMode(){return this.options.dataTransferMode??t.defaultDataTransferMode}get id(){return this.__instanceId}get info(){return{id:"hugging-face-adapter",capabilities:{chat:!0,fileUpload:!1,textToSpeech:!1,speechToText:!1}}}async batchText(e){if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const s={inputs:e,parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};try{let e;if(this.options.endpoint){const t=this.inference.endpoint(this.options.endpoint);e=await t.textGeneration(s)}else e=await this.inference.textGeneration({model:this.options.model,...s});return e}catch(e){const t=e.message||"An error occurred while sending the message to the Hugging Face API";throw new n({source:this.constructor.name,message:t,exceptionId:a(e)??void 0})}}preProcessAiBatchedMessage(e,t){throw new Error("Method not implemented.")}preProcessAiStreamedChunk(e,t){throw new Error("Method not implemented.")}streamText(e,s){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:await this.encode(e),parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};let o;try{if(this.options.endpoint){o=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else o=this.inference.textGenerationStream({model:this.options.model,...n});for(;o;){const e=await o.next(),{done:t,value:n}=e;if(t)break;s.next(await this.decode(n?.token))}s.complete()}catch(e){const t=e;s.error(t),i("An error occurred while sending the message to the Hugging Face streaming API: \n"+t.message)}}))}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}const t=e?e.generated_text:void 0;if("string"==typeof t)return t;const s=e&&"object"==typeof e&&"text"in e?e.text:void 
0;return"string"===s?s:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const t=e,{preProcessors:{input:s}={}}=this.options;if(s&&t){if("string"==typeof t)return s(t,this.options);i("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}};h.defaultDataTransferMode="batch",h.defaultMaxNewTokens=500;let c=h;class u{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new r({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new c({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new o({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new o({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set 
the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new o({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new o({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new o({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}exports.createChatAdapter=()=>new u,exports.llama2InputPreProcessor=(e,t)=>`<s> [INST] <<SYS>> ${t?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,exports.llama2OutputPreProcessor=e=>e?e.replace(/<[^>]*>/g,""):"";

@@ -1,1 +0,1 @@

import{HfInference as e}from"@huggingface/inference";var t=Object.defineProperty,s=(e,s,n)=>((e,s,n)=>s in e?t(e,s,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[s]=n)(e,"symbol"!=typeof s?s+"":s,n);class n extends Error{constructor(e={}){super(e.message),s(this,"exceptionId"),s(this,"message"),s(this,"source"),s(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class o extends n{}class r extends n{}const i=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},a=e=>{if("object"==typeof e&&null!==e){const t=e;if("invalid_api_key"===t.code)return"invalid-api-key";if(t.message&&"string"==typeof t.message&&t.message.toLowerCase().includes("connection error"))return"connection-error"}return null},h=class t{constructor(t){if(!t.model&&!t.endpoint)throw new r({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.options={...t},this.inference=new e(t.authToken)}get dataTransferMode(){return this.options.dataTransferMode??t.defaultDataTransferMode}get id(){return this.__instanceId}get info(){return{id:"hugging-face-adapter",capabilities:{chat:!0,fileUpload:!1,textToSpeech:!1,speechToText:!1}}}async batchText(e){if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const s={inputs:e,parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};try{let e;if(this.options.endpoint){const t=this.inference.endpoint(this.options.endpoint);e=await t.textGeneration(s)}else e=await this.inference.textGeneration({model:this.options.model,...s});return e}catch(e){const t=e.message||"An error occurred while sending the message to the Hugging Face API";throw new n({source:this.constructor.name,message:t,exceptionId:a(e)??void 0})}}preProcessAiStreamedChunk(e,t){throw new Error("Method not implemented.")}preProcessAiBatchedMessage(e,t){throw new Error("Method not implemented.")}streamText(e,s){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:await this.encode(e),parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};let o;try{if(this.options.endpoint){o=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else o=this.inference.textGenerationStream({model:this.options.model,...n});for(;o;){const e=await o.next(),{done:t,value:n}=e;if(t)break;s.next(await this.decode(n.token))}s.complete()}catch(e){const t=e;s.error(t),i("An error occurred while sending the message to the Hugging Face streaming API: \n"+t.message)}}))}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}const t=e?e.generated_text:void 0;if("string"==typeof t)return t;const s=e&&"object"==typeof e&&"text"in e?e.text:void 
0;return"string"===s?s:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const t=e,{preProcessors:{input:s}={}}=this.options;if(s&&t){if("string"==typeof t)return s(t,this.options);i("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}};h.defaultDataTransferMode="batch",h.defaultMaxNewTokens=500;let c=h;class u{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new r({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new c({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new o({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new o({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set 
the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new o({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new o({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new o({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}const d=()=>new u,p=(e,t)=>`<s> [INST] <<SYS>> ${t?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,l=e=>e?e.replace(/<[^>]*>/g,""):"";export{d as createChatAdapter,p as llama2InputPreProcessor,l as llama2OutputPreProcessor};
import{HfInference as e}from"@huggingface/inference";var t=Object.defineProperty,s=(e,s,n)=>((e,s,n)=>s in e?t(e,s,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[s]=n)(e,"symbol"!=typeof s?s+"":s,n);class n extends Error{constructor(e={}){super(e.message),s(this,"exceptionId"),s(this,"message"),s(this,"source"),s(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class o extends n{}class r extends n{}const i=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},a=e=>{if("object"==typeof e&&null!==e){const t=e;if("invalid_api_key"===t.code)return"invalid-api-key";if(t.message&&"string"==typeof t.message&&t.message.toLowerCase().includes("connection error"))return"connection-error"}return null},h=class t{constructor(t){if(!t.model&&!t.endpoint)throw new r({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.options={...t},this.inference=new e(t.authToken)}get dataTransferMode(){return this.options.dataTransferMode??t.defaultDataTransferMode}get id(){return this.__instanceId}get info(){return{id:"hugging-face-adapter",capabilities:{chat:!0,fileUpload:!1,textToSpeech:!1,speechToText:!1}}}async batchText(e){if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const s={inputs:e,parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};try{let e;if(this.options.endpoint){const t=this.inference.endpoint(this.options.endpoint);e=await t.textGeneration(s)}else e=await this.inference.textGeneration({model:this.options.model,...s});return e}catch(e){const t=e.message||"An error occurred while sending the message to the Hugging Face API";throw new n({source:this.constructor.name,message:t,exceptionId:a(e)??void 0})}}preProcessAiBatchedMessage(e,t){throw new Error("Method not implemented.")}preProcessAiStreamedChunk(e,t){throw new Error("Method not implemented.")}streamText(e,s){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new r({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:await this.encode(e),parameters:{max_new_tokens:this.options.maxNewTokens??t.defaultMaxNewTokens}};let o;try{if(this.options.endpoint){o=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else o=this.inference.textGenerationStream({model:this.options.model,...n});for(;o;){const e=await o.next(),{done:t,value:n}=e;if(t)break;s.next(await this.decode(n?.token))}s.complete()}catch(e){const t=e;s.error(t),i("An error occurred while sending the message to the Hugging Face streaming API: \n"+t.message)}}))}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}const t=e?e.generated_text:void 0;if("string"==typeof t)return t;const s=e&&"object"==typeof e&&"text"in e?e.text:void 
0;return"string"===s?s:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const t=e,{preProcessors:{input:s}={}}=this.options;if(s&&t){if("string"==typeof t)return s(t,this.options);i("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}};h.defaultDataTransferMode="batch",h.defaultMaxNewTokens=500;let c=h;class u{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new r({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new c({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new o({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new o({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new o({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set 
the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new o({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new o({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new o({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new o({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}const d=()=>new u,p=(e,t)=>`<s> [INST] <<SYS>> ${t?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,l=e=>e?e.replace(/<[^>]*>/g,""):"";export{d as createChatAdapter,p as llama2InputPreProcessor,l as llama2OutputPreProcessor};
{
"name": "@nlux/hf",
"version": "2.14.0",
"version": "2.16.0",
"description": "The Hugging Face adapters for nlux, the javascript library for building conversational AI interfaces.",

@@ -61,3 +61,3 @@ "keywords": [

"dependencies": {
"@nlux/core": "2.14.0",
"@nlux/core": "2.16.0",
"@huggingface/inference": "^2"

@@ -64,0 +64,0 @@ },

@@ -1,1 +0,1 @@

// UMD build of @nlux/hf v2.14.0 (minified, machine-generated output — do not edit by hand).
// Loader shim dispatches between CommonJS (`exports`/`require`), AMD (`define`), and a
// browser global (`globalThis["@nlux/hf"]`), with "@huggingface/inference" as the sole dependency.
// Exports: createChatAdapter (fluent builder for a Hugging Face chat adapter supporting
// batchText and streamText via HfInference textGeneration/textGenerationStream),
// llama2InputPreProcessor (wraps a prompt in Llama-2 [INST]/<<SYS>> tags), and
// llama2OutputPreProcessor (strips <...> tags from model output).
// NOTE(review): inside decode(), the final fallback reads `"string"===s?s:""`, which compares
// the value of `s` to the literal string "string" rather than checking its type; the unminified
// source presumably reads `"string"==typeof s` — confirm against the original before relying on
// the `.text` fallback path.
// NOTE(review): the line breaks below fall inside string literals (page-scrape wrapping); the
// shipped artifact is a single physical line. Bytes are preserved verbatim here.
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@huggingface/inference")):"function"==typeof define&&define.amd?define(["exports","@huggingface/inference"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/hf"]={},e.inference)}(this,(function(e,t){"use strict";var s=Object.defineProperty,n=(e,t,n)=>((e,t,n)=>t in e?s(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n)(e,"symbol"!=typeof t?t+"":t,n);class o extends Error{constructor(e={}){super(e.message),n(this,"exceptionId"),n(this,"message"),n(this,"source"),n(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class r extends o{}class i extends o{}const a=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},h=e=>{if("object"==typeof e&&null!==e){const t=e;if("invalid_api_key"===t.code)return"invalid-api-key";if(t.message&&"string"==typeof t.message&&t.message.toLowerCase().includes("connection error"))return"connection-error"}return null},c=class e{constructor(e){if(!e.model&&!e.endpoint)throw new i({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.options={...e},this.inference=new t.HfInference(e.authToken)}get dataTransferMode(){return this.options.dataTransferMode??e.defaultDataTransferMode}get id(){return this.__instanceId}get info(){return{id:"hugging-face-adapter",capabilities:{chat:!0,fileUpload:!1,textToSpeech:!1,speechToText:!1}}}async batchText(t){if(!this.options.model&&!this.options.endpoint)throw new i({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const s={inputs:t,parameters:{max_new_tokens:this.options.maxNewTokens??e.defaultMaxNewTokens}};try{let e;if(this.options.endpoint){const t=this.inference.endpoint(this.options.endpoint);e=await t.textGeneration(s)}else e=await this.inference.textGeneration({model:this.options.model,...s});return e}catch(e){const t=e.message||"An error occurred while sending the message to the Hugging Face API";throw new o({source:this.constructor.name,message:t,exceptionId:h(e)??void 0})}}preProcessAiStreamedChunk(e,t){throw new Error("Method not implemented.")}preProcessAiBatchedMessage(e,t){throw new Error("Method not implemented.")}streamText(t,s){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new i({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:await this.encode(t),parameters:{max_new_tokens:this.options.maxNewTokens??e.defaultMaxNewTokens}};let o;try{if(this.options.endpoint){o=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else o=this.inference.textGenerationStream({model:this.options.model,...n});for(;o;){const e=await o.next(),{done:t,value:n}=e;if(t)break;s.next(await this.decode(n.token))}s.complete()}catch(e){const t=e;s.error(t),a("An error occurred while sending the message to the Hugging Face streaming API: \n"+t.message)}}))}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}const t=e?e.generated_text:void 0;if("string"==typeof t)return t;const s=e&&"object"==typeof e&&"text"in e?e.text:void 
0;return"string"===s?s:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const t=e,{preProcessors:{input:s}={}}=this.options;if(s&&t){if("string"==typeof t)return s(t,this.options);a("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}};c.defaultDataTransferMode="batch",c.defaultMaxNewTokens=500;let u=c;class d{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new i({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new u({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new r({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new r({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new r({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new r({source:this.constructor.name,message:"Cannot set 
the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new r({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new r({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new r({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new r({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}e.createChatAdapter=()=>new d,e.llama2InputPreProcessor=(e,t)=>`<s> [INST] <<SYS>> ${t?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,e.llama2OutputPreProcessor=e=>e?e.replace(/<[^>]*>/g,""):""}));
// UMD build of @nlux/hf v2.16.0 (minified, machine-generated output — do not edit by hand).
// Same structure as the 2.14.0 bundle: CommonJS/AMD/browser-global loader shim over
// "@huggingface/inference", exporting createChatAdapter, llama2InputPreProcessor, and
// llama2OutputPreProcessor. Visible differences from 2.14.0: the stream loop decodes
// `n?.token` with optional chaining (guards a nullish stream chunk), and the two
// `preProcessAi*` stub methods are declared in the opposite order.
// NOTE(review): inside decode(), the final fallback reads `"string"===s?s:""`, which compares
// the value of `s` to the literal string "string" rather than checking its type; the unminified
// source presumably reads `"string"==typeof s` — confirm against the original before relying on
// the `.text` fallback path.
// NOTE(review): the line breaks below fall inside string literals (page-scrape wrapping); the
// shipped artifact is a single physical line. Bytes are preserved verbatim here.
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("@huggingface/inference")):"function"==typeof define&&define.amd?define(["exports","@huggingface/inference"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@nlux/hf"]={},e.inference)}(this,(function(e,t){"use strict";var s=Object.defineProperty,n=(e,t,n)=>((e,t,n)=>t in e?s(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n)(e,"symbol"!=typeof t?t+"":t,n);class o extends Error{constructor(e={}){super(e.message),n(this,"exceptionId"),n(this,"message"),n(this,"source"),n(this,"type"),this.message=e.message??"",this.source=e.source,this.type=this.constructor.name,this.exceptionId=e.exceptionId}}class r extends o{}class i extends o{}const a=e=>{"string"!=typeof e?e&&"function"==typeof e.toString?console.warn(`[nlux] ${e.toString()}`):console.warn("[nlux]"):console.warn(`[nlux] ${e}`)},h=e=>{if("object"==typeof e&&null!==e){const t=e;if("invalid_api_key"===t.code)return"invalid-api-key";if(t.message&&"string"==typeof t.message&&t.message.toLowerCase().includes("connection error"))return"connection-error"}return null},c=class e{constructor(e){if(!e.model&&!e.endpoint)throw new i({source:this.constructor.name,message:'when creating the Hugging Face adapter, you must set either the model or the endpoint using the "endpoint" option!'});this.__instanceId=`${this.info.id}-${"xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g,(e=>{const t=16*Math.random()|0;return("x"==e?t:3&t|8).toString(16)}))}`,this.options={...e},this.inference=new t.HfInference(e.authToken)}get dataTransferMode(){return this.options.dataTransferMode??e.defaultDataTransferMode}get id(){return this.__instanceId}get info(){return{id:"hugging-face-adapter",capabilities:{chat:!0,fileUpload:!1,textToSpeech:!1,speechToText:!1}}}async batchText(t){if(!this.options.model&&!this.options.endpoint)throw new i({source:this.constructor.name,message:'Unable to send message! 
When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const s={inputs:t,parameters:{max_new_tokens:this.options.maxNewTokens??e.defaultMaxNewTokens}};try{let e;if(this.options.endpoint){const t=this.inference.endpoint(this.options.endpoint);e=await t.textGeneration(s)}else e=await this.inference.textGeneration({model:this.options.model,...s});return e}catch(e){const t=e.message||"An error occurred while sending the message to the Hugging Face API";throw new o({source:this.constructor.name,message:t,exceptionId:h(e)??void 0})}}preProcessAiBatchedMessage(e,t){throw new Error("Method not implemented.")}preProcessAiStreamedChunk(e,t){throw new Error("Method not implemented.")}streamText(t,s){Promise.resolve().then((async()=>{if(!this.options.model&&!this.options.endpoint)throw new i({source:this.constructor.name,message:'Unable to send message! When sending a message to the Hugging Face API, you must set either the model using the "model" option or the endpoint using the "endpoint" option!'});const n={inputs:await this.encode(t),parameters:{max_new_tokens:this.options.maxNewTokens??e.defaultMaxNewTokens}};let o;try{if(this.options.endpoint){o=this.inference.endpoint(this.options.endpoint).textGenerationStream(n)}else o=this.inference.textGenerationStream({model:this.options.model,...n});for(;o;){const e=await o.next(),{done:t,value:n}=e;if(t)break;s.next(await this.decode(n?.token))}s.complete()}catch(e){const t=e;s.error(t),a("An error occurred while sending the message to the Hugging Face streaming API: \n"+t.message)}}))}async decode(e){const t=(()=>{if("string"==typeof e)return e;if(Array.isArray(e)){if(0===e.length)return"";const t=e[0];if("object"==typeof t&&t&&"string"==typeof t.generated_text)return t.generated_text}const t=e?e.generated_text:void 0;if("string"==typeof t)return t;const s=e&&"object"==typeof e&&"text"in e?e.text:void 
0;return"string"===s?s:""})(),{preProcessors:{output:s}={}}=this.options;return s?Promise.resolve(s(t)):Promise.resolve(t)}async encode(e){const t=e,{preProcessors:{input:s}={}}=this.options;if(s&&t){if("string"==typeof t)return s(t,this.options);a("The input pre-processor function was provided, but the message is not a string! Input pre-processor will not be applied.")}return e}};c.defaultDataTransferMode="batch",c.defaultMaxNewTokens=500;let u=c;class d{constructor(){this.theAuthToken=null,this.theDataTransferMode="stream",this.theEndpoint=null,this.theInputPreProcessor=null,this.theMaxNewTokens=null,this.theModel=null,this.theOutputPreProcessor=null,this.theSystemMessage=null,this.withDataTransferModeCalled=!1}create(){if(!this.theModel&&!this.theEndpoint)throw new i({source:this.constructor.name,message:'You must provide a model or an endpoint using the "withModel()" method or the "withEndpoint()" method!'});return new u({dataTransferMode:this.theDataTransferMode,model:this.theModel??void 0,endpoint:this.theEndpoint??void 0,authToken:this.theAuthToken??void 0,preProcessors:{input:this.theInputPreProcessor??void 0,output:this.theOutputPreProcessor??void 0},maxNewTokens:this.theMaxNewTokens??void 0,systemMessage:this.theSystemMessage??void 0})}withAuthToken(e){if(null!==this.theAuthToken)throw new r({source:this.constructor.name,message:"Cannot set the auth token more than once"});return this.theAuthToken=e,this}withDataTransferMode(e){if(this.withDataTransferModeCalled)throw new r({source:this.constructor.name,message:"Cannot set the data loading mode more than once"});return this.theDataTransferMode=e,this.withDataTransferModeCalled=!0,this}withEndpoint(e){if(null!==this.theEndpoint)throw new r({source:this.constructor.name,message:"Cannot set the endpoint because a model or an endpoint has already been set"});return this.theEndpoint=e,this}withInputPreProcessor(e){if(null!==this.theInputPreProcessor)throw new r({source:this.constructor.name,message:"Cannot set 
the input pre-processor more than once"});return this.theInputPreProcessor=e,this}withMaxNewTokens(e){if(null!==this.theMaxNewTokens)throw new r({source:this.constructor.name,message:"Cannot set the max new tokens more than once"});return this.theMaxNewTokens=e,this}withModel(e){if(null!==this.theModel)throw new r({source:this.constructor.name,message:"Cannot set the model because a model or an endpoint has already been set"});return this.theModel=e,this}withOutputPreProcessor(e){if(null!==this.theOutputPreProcessor)throw new r({source:this.constructor.name,message:"Cannot set the output pre-processor more than once"});return this.theOutputPreProcessor=e,this}withSystemMessage(e){if(null!==this.theSystemMessage)throw new r({source:this.constructor.name,message:"Cannot set the system message more than once"});return this.theSystemMessage=e,this}}e.createChatAdapter=()=>new d,e.llama2InputPreProcessor=(e,t)=>`<s> [INST] <<SYS>> ${t?.systemMessage??"You are a helpful assistant. You keep your answers short."} <</SYS>> </s><s> [INST] ${e} [/INST]`,e.llama2OutputPreProcessor=e=>e?e.replace(/<[^>]*>/g,""):""}));
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight to your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc