@huggingface/tasks
Comparing version 0.12.15 to 0.12.16
@@ -15,3 +15,3 @@ import type { ModelData } from "./model-data";
  */
-	content: string;
+	content: string | string[];
 }
@@ -18,0 +18,0 @@ /**
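The snippet `content` field is widened from `string` to `string | string[]`, so each entry can carry a separate command block. A minimal sketch of hypothetical consumer code (the helper name is illustrative, not part of the package) that handles both shapes:

```ts
// Hypothetical helper: accept either the old single-string form or the new
// array-of-blocks form and flatten it for display.
function renderSnippetContent(content: string | string[]): string {
	return Array.isArray(content) ? content.join("\n\n") : content;
}
```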
 {
 	"name": "@huggingface/tasks",
 	"packageManager": "pnpm@8.10.5",
-	"version": "0.12.15",
+	"version": "0.12.16",
 	"description": "List of ML tasks for huggingface.co/tasks",
@@ -6,0 +6,0 @@ "repository": "https://github.com/huggingface/huggingface.js.git",
@@ -16,3 +16,3 @@ import type { ModelData } from "./model-data";
  */
-	content: string;
+	content: string | string[];
 }
@@ -62,6 +62,2 @@
-function isGgufModel(model: ModelData): boolean {
-	return model.tags.includes("gguf");
-}
 function isAwqModel(model: ModelData): boolean {
@@ -158,3 +154,3 @@ return model.config?.quantization_config?.quant_method === "awq";
 		` --data '{`,
-		` "model": "${model.id}"`,
+		` "model": "${model.id}",`,
 		` "messages": [`,
@@ -169,3 +165,3 @@ ` {"role": "user", "content": "Hello!"}`,
 		setup: ["# Install vLLM from pip:", "pip install vllm"].join("\n"),
-		content: ["# Load and run the model:", `vllm serve "${model.id}"`, ...runCommand].join("\n"),
+		content: [`# Load and run the model:\nvllm serve "${model.id}"`, runCommand.join("\n")],
 	},
@@ -186,6 +182,5 @@ {
 		content: [
-			"# Load and run the model:",
-			`docker exec -it my_vllm_container bash -c "vllm serve ${model.id}"`,
-			...runCommand,
-		].join("\n"),
+			`# Load and run the model:\ndocker exec -it my_vllm_container bash -c "vllm serve ${model.id}"`,
+			runCommand.join("\n"),
+		],
 	},
@@ -223,3 +218,3 @@ ];
 		isMarlinModel(model) ||
-		isGgufModel(model) ||
+		isLlamaCppGgufModel(model) ||
 		isTransformersModel(model),
@@ -226,0 +221,0 @@ snippet: snippetVllm,
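With this change the vLLM snippet's `content` is returned as a two-element array (the `vllm serve` block and the joined `runCommand` curl request) instead of one joined string, and the added trailing comma fixes the JSON body of that curl request. A sketch of the resulting value under assumptions: the model id and the abbreviated curl text below are placeholders, not taken from the source.

```ts
// Illustrative shape only; the real curl command comes from `runCommand` in the source file.
const exampleVllmSnippet = {
	setup: "# Install vLLM from pip:\npip install vllm",
	content: [
		'# Load and run the model:\nvllm serve "my-org/my-model"',
		`curl ... --data '{ "model": "my-org/my-model", "messages": [ ... ] }'`,
	],
};
```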