New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

@huggingface/inference

Package Overview
Dependencies
Maintainers
0
Versions
68
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@huggingface/inference - npm Package Compare versions

Comparing version

3.4.1

to
3.5.0

dist/src/providers/cerebras.d.ts

2

dist/src/index.d.ts

@@ -5,2 +5,4 @@ export { HfInference, HfInferenceEndpoint } from "./HfInference";

export * from "./tasks";
import * as snippets from "./snippets/index.js";
export { snippets };
//# sourceMappingURL=index.d.ts.map

3

dist/src/types.d.ts

@@ -25,3 +25,3 @@ import type { ChatCompletionInput, PipelineType } from "@huggingface/tasks";

export type InferenceTask = Exclude<PipelineType, "other">;
export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "cohere", "fal-ai", "fireworks-ai", "hf-inference", "hyperbolic", "nebius", "novita", "replicate", "sambanova", "together"];
export declare const INFERENCE_PROVIDERS: readonly ["black-forest-labs", "cerebras", "cohere", "fal-ai", "fireworks-ai", "hf-inference", "hyperbolic", "nebius", "novita", "openai", "replicate", "sambanova", "together"];
export type InferenceProvider = (typeof INFERENCE_PROVIDERS)[number];

@@ -77,2 +77,3 @@ export interface BaseArgs {

makeUrl: (params: UrlParams) => string;
clientSideRoutingOnly?: boolean;
}

@@ -79,0 +80,0 @@ export interface HeaderParams {

{
"name": "@huggingface/inference",
"version": "3.4.1",
"version": "3.5.0",
"packageManager": "pnpm@8.10.5",

@@ -42,3 +42,3 @@ "license": "MIT",

"dependencies": {
"@huggingface/tasks": "^0.16.5"
"@huggingface/tasks": "^0.17.0"
},

@@ -45,0 +45,0 @@ "devDependencies": {

@@ -60,2 +60,3 @@ # 🤗 Hugging Face Inference

- [Cohere](https://cohere.com)
- [Cerebras](https://cerebras.ai/)

@@ -86,2 +87,3 @@ To send requests to a third-party provider, you have to pass the `provider` parameter to the inference function. Make sure your request is authenticated with an access token.

- [Cohere supported models](https://huggingface.co/api/partners/cohere/models)
- [Cerebras supported models](https://huggingface.co/api/partners/cerebras/models)
- [HF Inference API (serverless)](https://huggingface.co/models?inference=warm&sort=trending)

@@ -88,0 +90,0 @@

@@ -5,1 +5,4 @@ export { HfInference, HfInferenceEndpoint } from "./HfInference";

export * from "./tasks";
import * as snippets from "./snippets/index.js";
export { snippets };
import { HF_HUB_URL, HF_ROUTER_URL } from "../config";
import { BLACK_FOREST_LABS_CONFIG } from "../providers/black-forest-labs";
import { CEREBRAS_CONFIG } from "../providers/cerebras";
import { COHERE_CONFIG } from "../providers/cohere";

@@ -13,2 +14,3 @@ import { FAL_AI_CONFIG } from "../providers/fal-ai";

import { TOGETHER_CONFIG } from "../providers/together";
import { OPENAI_CONFIG } from "../providers/openai";
import type { InferenceProvider, InferenceTask, Options, ProviderConfig, RequestArgs } from "../types";

@@ -32,2 +34,3 @@ import { isUrl } from "./isUrl";

"black-forest-labs": BLACK_FOREST_LABS_CONFIG,
cerebras: CEREBRAS_CONFIG,
cohere: COHERE_CONFIG,

@@ -38,2 +41,3 @@ "fal-ai": FAL_AI_CONFIG,

hyperbolic: HYPERBOLIC_CONFIG,
openai: OPENAI_CONFIG,
nebius: NEBIUS_CONFIG,

@@ -78,18 +82,34 @@ novita: NOVITA_CONFIG,

}
if (providerConfig.clientSideRoutingOnly && !maybeModel) {
throw new Error(`Provider ${provider} requires a model ID to be passed directly.`);
}
// eslint-disable-next-line @typescript-eslint/no-non-null-assertion
const hfModel = maybeModel ?? (await loadDefaultModel(task!));
const model = await getProviderModelId({ model: hfModel, provider }, args, {
task,
chatCompletion,
fetch: options?.fetch,
});
const model = providerConfig.clientSideRoutingOnly
? // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
removeProviderPrefix(maybeModel!, provider)
: // For closed-models API providers, one needs to pass the model ID directly (e.g. "gpt-3.5-turbo")
await getProviderModelId({ model: hfModel, provider }, args, {
task,
chatCompletion,
fetch: options?.fetch,
});
/// If accessToken is passed, it should take precedence over includeCredentials
const authMethod = accessToken
? accessToken.startsWith("hf_")
? "hf-token"
: "provider-key"
: includeCredentials === "include"
? "credentials-include"
: "none";
const authMethod = (() => {
if (providerConfig.clientSideRoutingOnly) {
// Closed-source providers require an accessToken (cannot be routed).
if (accessToken && accessToken.startsWith("hf_")) {
throw new Error(`Provider ${provider} is closed-source and does not support HF tokens.`);
}
return "provider-key";
}
if (accessToken) {
return accessToken.startsWith("hf_") ? "hf-token" : "provider-key";
}
if (includeCredentials === "include") {
// If accessToken is passed, it should take precedence over includeCredentials
return "credentials-include";
}
return "none";
})();

@@ -183,1 +203,8 @@ // Make URL

}
/**
 * Strips the leading `provider/` segment from a client-side-routed model ID.
 *
 * @param model - Model identifier expected to be prefixed with `${provider}/`.
 * @param provider - Provider name whose prefix must be present on the model ID.
 * @returns The model ID with the `${provider}/` prefix removed.
 * @throws Error when the model ID does not start with `${provider}/`.
 */
function removeProviderPrefix(model: string, provider: string): string {
	const expectedPrefix = `${provider}/`;
	if (model.startsWith(expectedPrefix)) {
		return model.slice(expectedPrefix.length);
	}
	throw new Error(`Models from ${provider} must be prefixed by "${provider}/". Got "${model}".`);
}

@@ -20,2 +20,3 @@ import type { InferenceProvider } from "../types";

"black-forest-labs": {},
cerebras: {},
cohere: {},

@@ -28,2 +29,3 @@ "fal-ai": {},

novita: {},
openai: {},
replicate: {},

@@ -30,0 +32,0 @@ sambanova: {},

@@ -33,2 +33,3 @@ import type { ChatCompletionInput, PipelineType } from "@huggingface/tasks";

"black-forest-labs",
"cerebras",
"cohere",

@@ -41,2 +42,3 @@ "fal-ai",

"novita",
"openai",
"replicate",

@@ -101,2 +103,3 @@ "sambanova",

makeUrl: (params: UrlParams) => string;
clientSideRoutingOnly?: boolean;
}

@@ -103,0 +106,0 @@

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet