@huggingface/tasks
Comparing version 0.17.0 to 0.17.1
@@ -488,2 +488,9 @@ import type { ModelData } from "./model-data.js";
 	};
+	medvae: {
+		prettyLabel: string;
+		repoName: string;
+		repoUrl: string;
+		filter: false;
+		countDownloads: string;
+	};
 	mitie: {
@@ -924,3 +931,3 @@ prettyLabel: string;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "anemoi" | "asteroid" | "audiocraft" | "audioseal" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "comet" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "keras" | "tf-keras" | "keras-hub" | "k2" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "merlin" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open-oasis" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "unity-sentis" | "sana" | "vfi-mamba" | "voicecraft" | "whisperkit" | "yolov10" | "3dtopia-xl")[];
+export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "anemoi" | "asteroid" | "audiocraft" | "audioseal" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "comet" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "keras" | "tf-keras" | "keras-hub" | "k2" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open-oasis" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "unity-sentis" | "sana" | "vfi-mamba" | "voicecraft" | "whisperkit" | "yolov10" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map
@@ -476,2 +476,9 @@ "use strict";
 	},
+	medvae: {
+		prettyLabel: "MedVAE",
+		repoName: "MedVAE",
+		repoUrl: "https://github.com/StanfordMIMI/MedVAE",
+		filter: false,
+		countDownloads: `path_extension:"ckpt"`,
+	},
 	mitie: {
@@ -478,0 +485,0 @@ prettyLabel: "MITIE",
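For readers unfamiliar with this registry, the new entry follows the same shape as the other libraries declared in model-libraries: a display label, a repository name and URL, a flag controlling whether the library is offered as a Hub filter, and an optional download-counting query. The sketch below is a minimal approximation against a simplified, illustrative interface (the interface name and comments are assumptions, not the package's exact export); the `path_extension:"ckpt"` query appears to attribute download counts to checkpoint files for MedVAE models.

// Illustrative sketch only; assumes a simplified interface, not the package's exact type.
interface LibraryUiEntrySketch {
	prettyLabel: string;      // label rendered on huggingface.co
	repoName: string;         // canonical repository name
	repoUrl: string;          // link to the library's source code
	filter: boolean;          // whether the library is offered as a model filter
	countDownloads?: string;  // query used to attribute download counts to files
}

// The entry added in 0.17.1, restated against the sketch above.
const medvae: LibraryUiEntrySketch = {
	prettyLabel: "MedVAE",
	repoName: "MedVAE",
	repoUrl: "https://github.com/StanfordMIMI/MedVAE",
	filter: false,
	// Count downloads of files whose extension is "ckpt".
	countDownloads: `path_extension:"ckpt"`,
};

console.log(`${medvae.prettyLabel} is hosted at ${medvae.repoUrl}`);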
@@ -21,5 +21,5 @@ "use strict";
 const inputsVisualQuestionAnswering = () => `{
-	"image": "cat.png",
-	"question": "What is in this image?"
-}`;
+	"image": "cat.png",
+	"question": "What is in this image?"
+}`;
 const inputsQuestionAnswering = () => `{
@@ -68,2 +68,6 @@ "question": "What is my name?",
 const inputsImageToText = () => `"cats.jpg"`;
+const inputsImageToImage = () => `{
+	"image": "cat.png",
+	"prompt": "Turn the cat into a tiger."
+}`;
 const inputsImageSegmentation = () => `"cats.jpg"`;
@@ -89,2 +93,3 @@ const inputsObjectDetection = () => `"cats.jpg"`;
 	"image-to-text": inputsImageToText,
+	"image-to-image": inputsImageToImage,
 	"image-segmentation": inputsImageSegmentation,
@@ -91,0 +96,0 @@ "object-detection": inputsObjectDetection,
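The second change in this release wires a default image-to-image payload into the per-pipeline snippet map. The sketch below shows the pattern in play, not the package's actual exports (the record and helper names are illustrative): a caller looks up a default input by pipeline type and, as of 0.17.1, gets one for "image-to-image" as well.

// Illustrative sketch of the pipeline-type -> default-input pattern; names are assumptions.
type InputSnippet = () => string;

const inputsImageToText: InputSnippet = () => `"cats.jpg"`;
// New in 0.17.1: a default payload for image-to-image.
const inputsImageToImage: InputSnippet = () => `{
	"image": "cat.png",
	"prompt": "Turn the cat into a tiger."
}`;
const inputsImageSegmentation: InputSnippet = () => `"cats.jpg"`;

const modelInputSnippets: Record<string, InputSnippet> = {
	"image-to-text": inputsImageToText,
	"image-to-image": inputsImageToImage,
	"image-segmentation": inputsImageSegmentation,
};

// Return the default input for a pipeline type, if one is registered.
function getDefaultInput(pipelineType: string): string | undefined {
	return modelInputSnippets[pipelineType]?.();
}

console.log(getDefaultInput("image-to-image"));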
@@ -488,2 +488,9 @@ import type { ModelData } from "./model-data.js";
 	};
+	medvae: {
+		prettyLabel: string;
+		repoName: string;
+		repoUrl: string;
+		filter: false;
+		countDownloads: string;
+	};
 	mitie: {
@@ -924,3 +931,3 @@ prettyLabel: string;
 export declare const ALL_MODEL_LIBRARY_KEYS: ModelLibraryKey[];
-export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "anemoi" | "asteroid" | "audiocraft" | "audioseal" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "comet" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "keras" | "tf-keras" | "keras-hub" | "k2" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "merlin" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open-oasis" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "unity-sentis" | "sana" | "vfi-mamba" | "voicecraft" | "whisperkit" | "yolov10" | "3dtopia-xl")[];
+export declare const ALL_DISPLAY_MODEL_LIBRARY_KEYS: ("adapter-transformers" | "allennlp" | "anemoi" | "asteroid" | "audiocraft" | "audioseal" | "ben2" | "bertopic" | "big_vision" | "birder" | "birefnet" | "bm25s" | "champ" | "chat_tts" | "colpali" | "comet" | "cosmos" | "cxr-foundation" | "deepforest" | "depth-anything-v2" | "depth-pro" | "derm-foundation" | "diffree" | "diffusers" | "diffusionkit" | "doctr" | "cartesia_pytorch" | "cartesia_mlx" | "clipscope" | "cosyvoice" | "cotracker" | "edsnlp" | "elm" | "espnet" | "fairseq" | "fastai" | "fasttext" | "flair" | "gemma.cpp" | "gliner" | "glyph-byt5" | "grok" | "hallo" | "hezar" | "htrflow" | "hunyuan-dit" | "hunyuan3d-2" | "imstoucan" | "keras" | "tf-keras" | "keras-hub" | "k2" | "liveportrait" | "llama-cpp-python" | "mini-omni2" | "mindspore" | "mamba-ssm" | "mars5-tts" | "mesh-anything" | "merlin" | "medvae" | "mitie" | "ml-agents" | "mlx" | "mlx-image" | "mlc-llm" | "model2vec" | "moshi" | "nemo" | "open-oasis" | "open_clip" | "paddlenlp" | "peft" | "pxia" | "pyannote-audio" | "py-feat" | "pythae" | "recurrentgemma" | "relik" | "refiners" | "reverb" | "saelens" | "sam2" | "sample-factory" | "sapiens" | "sentence-transformers" | "setfit" | "sklearn" | "spacy" | "span-marker" | "speechbrain" | "ssr-speech" | "stable-audio-tools" | "diffusion-single-file" | "seed-story" | "soloaudio" | "stable-baselines3" | "stanza" | "swarmformer" | "f5-tts" | "genmo" | "tensorflowtts" | "tabpfn" | "terratorch" | "tic-clip" | "timesfm" | "timm" | "transformers" | "transformers.js" | "trellis" | "ultralytics" | "unity-sentis" | "sana" | "vfi-mamba" | "voicecraft" | "whisperkit" | "yolov10" | "3dtopia-xl")[];
 //# sourceMappingURL=model-libraries.d.ts.map
@@ -450,2 +450,9 @@ import * as snippets from "./model-libraries-snippets.js";
 	},
+	medvae: {
+		prettyLabel: "MedVAE",
+		repoName: "MedVAE",
+		repoUrl: "https://github.com/StanfordMIMI/MedVAE",
+		filter: false,
+		countDownloads: `path_extension:"ckpt"`,
+	},
 	mitie: {
@@ -452,0 +459,0 @@ prettyLabel: "MITIE",
@@ -18,5 +18,5 @@ const inputsZeroShotClassification = () => `"Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!"`;
 const inputsVisualQuestionAnswering = () => `{
-	"image": "cat.png",
-	"question": "What is in this image?"
-}`;
+	"image": "cat.png",
+	"question": "What is in this image?"
+}`;
 const inputsQuestionAnswering = () => `{
@@ -65,2 +65,6 @@ "question": "What is my name?",
 const inputsImageToText = () => `"cats.jpg"`;
+const inputsImageToImage = () => `{
+	"image": "cat.png",
+	"prompt": "Turn the cat into a tiger."
+}`;
 const inputsImageSegmentation = () => `"cats.jpg"`;
@@ -86,2 +90,3 @@ const inputsObjectDetection = () => `"cats.jpg"`;
 	"image-to-text": inputsImageToText,
+	"image-to-image": inputsImageToImage,
 	"image-segmentation": inputsImageSegmentation,
@@ -88,0 +93,0 @@ "object-detection": inputsObjectDetection,
 {
 	"name": "@huggingface/tasks",
 	"packageManager": "pnpm@8.10.5",
-	"version": "0.17.0",
+	"version": "0.17.1",
 	"description": "List of ML tasks for huggingface.co/tasks",
@@ -6,0 +6,0 @@ "repository": "https://github.com/huggingface/huggingface.js.git",
@@ -494,2 +494,9 @@ import * as snippets from "./model-libraries-snippets.js";
 	},
+	medvae: {
+		prettyLabel: "MedVAE",
+		repoName: "MedVAE",
+		repoUrl: "https://github.com/StanfordMIMI/MedVAE",
+		filter: false,
+		countDownloads: `path_extension:"ckpt"`,
+	},
 	mitie: {
@@ -496,0 +503,0 @@ prettyLabel: "MITIE",
@@ -30,5 +30,5 @@ import type { PipelineType } from "../pipelines.js";
 	`{
-	"image": "cat.png",
-	"question": "What is in this image?"
-}`;
+	"image": "cat.png",
+	"question": "What is in this image?"
+}`;
@@ -90,2 +90,7 @@ const inputsQuestionAnswering = () =>
+const inputsImageToImage = () => `{
+	"image": "cat.png",
+	"prompt": "Turn the cat into a tiger."
+}`;
 const inputsImageSegmentation = () => `"cats.jpg"`;
@@ -125,2 +130,3 @@
 	"image-to-text": inputsImageToText,
+	"image-to-image": inputsImageToImage,
 	"image-segmentation": inputsImageSegmentation,
@@ -127,0 +133,0 @@ "object-detection": inputsObjectDetection,