🤗 Hugging Face Inference API
A TypeScript-powered wrapper for the Hugging Face Inference API. Learn more about the Inference API at Hugging Face.
Install
npm install huggingface
yarn add huggingface
pnpm add huggingface
Usage
❗Important note: using an API key is optional to get started (simply provide a random string); however, you will eventually be rate limited. Join Hugging Face and then visit your access tokens page to generate an API key.
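If you prefer not to hard-code the key, you can read it from an environment variable. A minimal sketch, assuming the key is exported as HF_API_KEY (the same variable used by the test command further below):

import HuggingFace from 'huggingface'

// Assumes the key is exported as HF_API_KEY; any non-empty string also
// works to get started, subject to the rate limits mentioned above.
const hf = new HuggingFace(process.env.HF_API_KEY ?? 'random-string')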
Basic examples
import HuggingFace from 'huggingface'
import { readFileSync } from 'fs'
const hf = new HuggingFace('your api key')
await hf.fillMask({
model: 'bert-base-uncased',
inputs: '[MASK] world!'
})
await hf.summarization({
model: 'facebook/bart-large-cnn',
inputs:
'The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930.',
parameters: {
max_length: 100
}
})
await hf.questionAnswer({
model: 'deepset/roberta-base-squad2',
inputs: {
question: 'What is the capital of France?',
context: 'The capital of France is Paris.'
}
})
await hf.tableQuestionAnswer({
model: 'google/tapas-base-finetuned-wtq',
inputs: {
query: 'How many stars does the transformers repository have?',
table: {
Repository: ['Transformers', 'Datasets', 'Tokenizers'],
Stars: ['36542', '4512', '3934'],
Contributors: ['651', '77', '34'],
'Programming language': ['Python', 'Python', 'Rust, Python and NodeJS']
}
}
})
await hf.textClassification({
model: 'distilbert-base-uncased-finetuned-sst-2-english',
inputs: 'I like you. I love you.'
})
await hf.textGeneration({
model: 'gpt2',
inputs: 'The answer to the universe is'
})
await hf.tokenClassification({
model: 'dbmdz/bert-large-cased-finetuned-conll03-english',
inputs: 'My name is Sarah Jessica Parker but you can call me Jessica'
})
await hf.translation({
model: 't5-base',
inputs: 'My name is Wolfgang and I live in Berlin'
})
await hf.zeroShotClassification({
model: 'facebook/bart-large-mnli',
inputs: [
'Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!'
],
parameters: { candidate_labels: ['refund', 'legal', 'faq'] }
})
await hf.conversational({
model: 'microsoft/DialoGPT-large',
inputs: {
past_user_inputs: ['Which movie is the best ?'],
generated_responses: ['It is Die Hard for sure.'],
text: 'Can you explain why ?'
}
})
await hf.featureExtraction({
model: 'sentence-transformers/paraphrase-xlm-r-multilingual-v1',
inputs: {
source_sentence: 'That is a happy person',
sentences: [
'That is a happy dog',
'That is a very happy person',
'Today is a sunny day'
]
}
})
await hf.automaticSpeechRecognition({
model: 'facebook/wav2vec2-large-960h-lv60-self',
data: readFileSync('test/sample1.flac')
})
await hf.audioClassification({
model: 'superb/hubert-large-superb-er',
data: readFileSync('test/sample1.flac')
})
await hf.imageClassification({
data: readFileSync('test/cheetah.png'),
model: 'google/vit-base-patch16-224'
})
await hf.objectDetection({
data: readFileSync('test/cats.png'),
model: 'facebook/detr-resnet-50'
})
await hf.imageSegmentation({
data: readFileSync('test/cats.png'),
model: 'facebook/detr-resnet-50-panoptic'
})
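Every call above goes through a task-specific helper. The class also declares a generic request method (see its signature in the Options section below). A minimal sketch of how it might be used for a binary payload, assuming that binary: true sends the data as the raw request body:

// Hypothetical use of the generic request() escape hatch; the typed
// helpers above are usually preferable when one exists for the task.
await hf.request(
  {
    model: 'facebook/detr-resnet-50',
    data: readFileSync('test/cats.png')
  },
  { binary: true }
)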
Supported APIs
Natural Language Processing: fill mask, summarization, question answering, table question answering, text classification, text generation, token classification, translation, zero-shot classification, conversational, feature extraction
Audio: automatic speech recognition, audio classification
Computer Vision: image classification, object detection, image segmentation
Running tests
HF_API_KEY="your api key" yarn test
Options
export declare class HuggingFace {
private readonly apiKey
private readonly defaultOptions
constructor(apiKey: string, defaultOptions?: Options)
fillMask(args: FillMaskArgs, options?: Options): Promise<FillMaskReturn>
summarization(
args: SummarizationArgs,
options?: Options
): Promise<SummarizationReturn>
questionAnswer(
args: QuestionAnswerArgs,
options?: Options
): Promise<QuestionAnswerReturn>
tableQuestionAnswer(
args: TableQuestionAnswerArgs,
options?: Options
): Promise<TableQuestionAnswerReturn>
textClassification(
args: TextClassificationArgs,
options?: Options
): Promise<TextClassificationReturn>
textGeneration(
args: TextGenerationArgs,
options?: Options
): Promise<TextGenerationReturn>
tokenClassification(
args: TokenClassificationArgs,
options?: Options
): Promise<TokenClassificationReturn>
translation(
args: TranslationArgs,
options?: Options
): Promise<TranslationReturn>
zeroShotClassification(
args: ZeroShotClassificationArgs,
options?: Options
): Promise<ZeroShotClassificationReturn>
conversational(
args: ConversationalArgs,
options?: Options
): Promise<ConversationalReturn>
featureExtraction(
args: FeatureExtractionArgs,
options?: Options
): Promise<FeatureExtractionReturn>
automaticSpeechRecognition(
args: AutomaticSpeechRecognitionArgs,
options?: Options
): Promise<AutomaticSpeechRecognitionReturn>
audioClassification(
args: AudioClassificationArgs,
options?: Options
): Promise<AudioClassificationReturn>
imageClassification(
args: ImageClassificationArgs,
options?: Options
): Promise<ImageClassificationReturn>
objectDetection(
args: ObjectDetectionArgs,
options?: Options
): Promise<ObjectDetectionReturn>
imageSegmentation(
args: ImageSegmentationArgs,
options?: Options
): Promise<ImageSegmentationReturn>
request(
args: Args & {
data?: any
},
options?: Options & {
binary?: boolean
}
): Promise<any>
private static toArray
}
export declare type Options = {
use_gpu?: boolean
use_cache?: boolean
wait_for_model?: boolean
retry_on_error?: boolean
}
export declare type Args = {
model: string
}
export declare type FillMaskArgs = Args & {
inputs: string
}
export declare type FillMaskReturn = {
score: number
token: number
token_str: string
sequence: string
}[]
export declare type SummarizationArgs = Args & {
inputs: string
parameters?: {
min_length?: number
max_length?: number
top_k?: number
top_p?: number
temperature?: number
repetition_penalty?: number
max_time?: number
}
}
export declare type SummarizationReturn = {
summary_text: string
}
export declare type QuestionAnswerArgs = Args & {
inputs: {
question: string
context: string
}
}
export declare type QuestionAnswerReturn = {
answer: string
score: number
start: number
end: number
}
export declare type TableQuestionAnswerArgs = Args & {
inputs: {
query: string
table: Record<string, string[]>
}
}
export declare type TableQuestionAnswerReturn = {
answer: string
coordinates: number[][]
cells: string[]
aggregator: string
}
export declare type TextClassificationArgs = Args & {
inputs: string
}
export declare type TextClassificationReturn = {
label: string
score: number
}[]
export declare type TextGenerationArgs = Args & {
inputs: string
parameters?: {
top_k?: number
top_p?: number
temperature?: number
repetition_penalty?: number
max_new_tokens?: number
max_time?: number
return_full_text?: boolean
num_return_sequences?: number
do_sample?: boolean
}
}
export declare type TextGenerationReturn = {
generated_text: string
}
export declare type TokenClassificationArgs = Args & {
inputs: string
parameters?: {
aggregation_strategy?: 'none' | 'simple' | 'first' | 'average' | 'max'
}
}
export declare type TokenClassificationReturnValue = {
entity_group: string
score: number
word: string
start: number
end: number
}
export declare type TokenClassificationReturn = TokenClassificationReturnValue[]
export declare type TranslationArgs = Args & {
inputs: string
}
export declare type TranslationReturn = {
translation_text: string
}
export declare type ZeroShotClassificationArgs = Args & {
inputs: string | string[]
parameters: {
candidate_labels: string[]
multi_label?: boolean
}
}
export declare type ZeroShotClassificationReturnValue = {
sequence: string
labels: string[]
scores: number[]
}
export declare type ZeroShotClassificationReturn =
ZeroShotClassificationReturnValue[]
export declare type ConversationalArgs = Args & {
inputs: {
text: string
generated_responses?: string[]
past_user_inputs?: string[]
}
parameters?: {
min_length?: number
max_length?: number
top_k?: number
top_p?: number
temperature?: number
repetition_penalty?: number
max_time?: number
}
}
export declare type ConversationalReturn = {
generated_text: string
conversation: {
generated_responses: string[]
past_user_inputs: string[]
}
warnings: string[]
}
export declare type FeatureExtractionArgs = Args & {
inputs: Record<string, any> | Record<string, any>[]
}
export declare type FeatureExtractionReturn = (number | number[])[]
export declare type ImageClassificationArgs = Args & {
data: any
}
export declare type ImageClassificationReturnValue = {
score: number
label: string
}
export declare type ImageClassificationReturn = ImageClassificationReturnValue[]
export declare type ObjectDetectionArgs = Args & {
data: any
}
export declare type ObjectDetectionReturnValue = {
score: number
label: string
box: {
xmin: number
ymin: number
xmax: number
ymax: number
}
}
export declare type ObjectDetectionReturn = ObjectDetectionReturnValue[]
export declare type ImageSegmentationArgs = Args & {
data: any
}
export declare type ImageSegmentationReturnValue = {
score: number
label: string
mask: string
}
export declare type ImageSegmentationReturn = ImageSegmentationReturnValue[]
export declare type AutomaticSpeechRecognitionArgs = Args & {
data: any
}
export declare type AutomaticSpeechRecognitionReturn = {
text: string
}
export declare type AudioClassificationArgs = Args & {
data: any
}
export declare type AudioClassificationReturnValue = {
label: string
score: number
}
export declare type AudioClassificationReturn = AudioClassificationReturnValue[]
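The Options type above can be passed once to the constructor as defaultOptions and overridden per call via the optional second argument of every method. A minimal sketch using option names from the declarations; the exact server-side behaviour (for example how long wait_for_model blocks) is determined by the Inference API:

import HuggingFace from 'huggingface'

const hf = new HuggingFace('your api key', {
  // default for every call: retry when the API returns an error response
  retry_on_error: true
})

await hf.textGeneration(
  { model: 'gpt2', inputs: 'The answer to the universe is' },
  {
    // per-call overrides of the constructor defaults
    wait_for_model: true,
    use_cache: false
  }
)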