New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in | Demo | Install
Socket

ollama-ai-provider

Package Overview
Dependencies
Maintainers
0
Versions
25
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

ollama-ai-provider - npm Package Compare versions

Comparing version 1.1.0 to 1.2.0

13

dist/index.d.ts
import { ProviderV1, LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';
/**
 * Known Ollama chat model identifiers (as of package v1.1.0).
 * Literal members give editor autocomplete for common models; the trailing
 * `(string & NonNullable<unknown>)` member additionally accepts any other
 * string without widening the listed literals away — the standard TS
 * "literal union + string" autocomplete pattern.
 */
type OllamaChatModelId = 'athene-v2' | 'athene-v2:72b' | 'aya-expanse' | 'aya-expanse:8b' | 'aya-expanse:32b' | 'codegemma' | 'codegemma:2b' | 'codegemma:7b' | 'codellama' | 'codellama:7b' | 'codellama:13b' | 'codellama:34b' | 'codellama:70b' | 'codellama:code' | 'codellama:python' | 'command-r' | 'command-r:35b' | 'command-r-plus' | 'command-r-plus:104b' | 'deepseek-coder-v2' | 'deepseek-coder-v2:16b' | 'deepseek-coder-v2:236b' | 'falcon2' | 'falcon2:11b' | 'firefunction-v2' | 'firefunction-v2:70b' | 'gemma' | 'gemma:2b' | 'gemma:7b' | 'gemma2' | 'gemma2:2b' | 'gemma2:9b' | 'gemma2:27b' | 'granite3-dense' | 'granite3-dense:2b' | 'granite3-dense:8b' | 'granite3-guardian' | 'granite3-guardian:2b' | 'granite3-guardian:8b' | 'granite3-moe' | 'granite3-moe:1b' | 'granite3-moe:3b' | 'llama2' | 'llama2:7b' | 'llama2:13b' | 'llama2:70b' | 'llama3' | 'llama3:8b' | 'llama3:70b' | 'llama3-chatqa' | 'llama3-chatqa:8b' | 'llama3-chatqa:70b' | 'llama3-gradient' | 'llama3-gradient:8b' | 'llama3-gradient:70b' | 'llama3.1' | 'llama3.1:8b' | 'llama3.1:70b' | 'llama3.1:405b' | 'llama3.2' | 'llama3.2:1b' | 'llama3.2:3b' | 'llama3.2-vision' | 'llama3.2-vision:11b' | 'llama3.2-vision:90b' | 'llama3.3' | 'llama3.3:70b' | 'llama-guard3' | 'llama-guard3:1b' | 'llama-guard3:8b' | 'llava' | 'llava:7b' | 'llava:13b' | 'llava:34b' | 'llava-llama3' | 'llava-llama3:8b' | 'llava-phi3' | 'llava-phi3:3.8b' | 'marco-o1' | 'marco-o1:7b' | 'mistral' | 'mistral:7b' | 'mistral-large' | 'mistral-large:123b' | 'mistral-nemo' | 'mistral-nemo:12b' | 'mistral-small' | 'mistral-small:22b' | 'mixtral' | 'mixtral:8x7b' | 'mixtral:8x22b' | 'moondream' | 'moondream:1.8b' | 'openhermes' | 'openhermes:v2.5' | 'nemotron' | 'nemotron:70b' | 'nemotron-mini' | 'nemotron-mini:4b' | 'opencoder' | 'opencoder:1.5b' | 'opencoder:8b' | 'phi3' | 'phi3:3.8b' | 'phi3:14b' | 'phi3.5' | 'phi3.5:3.8b' | 'qwen' | 'qwen:7b' | 'qwen:14b' | 'qwen:32b' | 'qwen:72b' | 'qwen:110b' | 'qwen2' | 'qwen2:0.5b' | 'qwen2:1.5b' | 'qwen2:7b' | 
'qwen2:72b' | 'qwen2.5' | 'qwen2.5:0.5b' | 'qwen2.5:1.5b' | 'qwen2.5:3b' | 'qwen2.5:7b' | 'qwen2.5:14b' | 'qwen2.5:32b' | 'qwen2.5:72b' | 'qwen2.5-coder' | 'qwen2.5-coder:0.5b' | 'qwen2.5-coder:1.5b' | 'qwen2.5-coder:3b' | 'qwen2.5-coder:7b' | 'qwen2.5-coder:14b' | 'qwen2.5-coder:32b' | 'qwq' | 'qwq:32b' | 'sailor2' | 'sailor2:1b' | 'sailor2:8b' | 'sailor2:20b' | 'shieldgemma' | 'shieldgemma:2b' | 'shieldgemma:9b' | 'shieldgemma:27b' | 'smollm' | 'smollm:135m' | 'smollm:360m' | 'smollm:1.7b' | 'tinyllama' | 'tinyllama:1.1b' | 'tulu3' | 'tulu3:8b' | 'tulu3:70b' | (string & NonNullable<unknown>);
/**
 * Known Ollama chat model identifiers (as of package v1.2.0).
 * Adds, relative to v1.1.0: command-r7b, deepseek-v3, dolphin3, exaone3.5,
 * falcon3, granite3.1-dense, granite3.1-moe, olmo, phi4, and smallthinker
 * families. Accepts any other string via the trailing
 * `(string & NonNullable<unknown>)` member while keeping the literals for
 * editor autocomplete.
 */
type OllamaChatModelId = 'athene-v2' | 'athene-v2:72b' | 'aya-expanse' | 'aya-expanse:8b' | 'aya-expanse:32b' | 'codegemma' | 'codegemma:2b' | 'codegemma:7b' | 'codellama' | 'codellama:7b' | 'codellama:13b' | 'codellama:34b' | 'codellama:70b' | 'codellama:code' | 'codellama:python' | 'command-r' | 'command-r:35b' | 'command-r-plus' | 'command-r-plus:104b' | 'command-r7b' | 'command-r7b:7b' | 'deepseek-coder-v2' | 'deepseek-coder-v2:16b' | 'deepseek-coder-v2:236b' | 'deepseek-v3' | 'deepseek-v3:671b' | 'dolphin3' | 'dolphin3:8b' | 'exaone3.5' | 'exaone3.5:2.4b' | 'exaone3.5:7.8b' | 'exaone3.5:32b' | 'falcon2' | 'falcon2:11b' | 'falcon3' | 'falcon3:1b' | 'falcon3:3b' | 'falcon3:7b' | 'falcon3:10b' | 'firefunction-v2' | 'firefunction-v2:70b' | 'gemma' | 'gemma:2b' | 'gemma:7b' | 'gemma2' | 'gemma2:2b' | 'gemma2:9b' | 'gemma2:27b' | 'granite3-dense' | 'granite3-dense:2b' | 'granite3-dense:8b' | 'granite3-guardian' | 'granite3-guardian:2b' | 'granite3-guardian:8b' | 'granite3-moe' | 'granite3-moe:1b' | 'granite3-moe:3b' | 'granite3.1-dense' | 'granite3.1-dense:2b' | 'granite3.1-dense:8b' | 'granite3.1-moe' | 'granite3.1-moe:1b' | 'granite3.1-moe:3b' | 'llama2' | 'llama2:7b' | 'llama2:13b' | 'llama2:70b' | 'llama3' | 'llama3:8b' | 'llama3:70b' | 'llama3-chatqa' | 'llama3-chatqa:8b' | 'llama3-chatqa:70b' | 'llama3-gradient' | 'llama3-gradient:8b' | 'llama3-gradient:70b' | 'llama3.1' | 'llama3.1:8b' | 'llama3.1:70b' | 'llama3.1:405b' | 'llama3.2' | 'llama3.2:1b' | 'llama3.2:3b' | 'llama3.2-vision' | 'llama3.2-vision:11b' | 'llama3.2-vision:90b' | 'llama3.3' | 'llama3.3:70b' | 'llama-guard3' | 'llama-guard3:1b' | 'llama-guard3:8b' | 'llava' | 'llava:7b' | 'llava:13b' | 'llava:34b' | 'llava-llama3' | 'llava-llama3:8b' | 'llava-phi3' | 'llava-phi3:3.8b' | 'marco-o1' | 'marco-o1:7b' | 'mistral' | 'mistral:7b' | 'mistral-large' | 'mistral-large:123b' | 'mistral-nemo' | 'mistral-nemo:12b' | 'mistral-small' | 'mistral-small:22b' | 'mixtral' | 'mixtral:8x7b' | 'mixtral:8x22b' | 
'moondream' | 'moondream:1.8b' | 'openhermes' | 'openhermes:v2.5' | 'nemotron' | 'nemotron:70b' | 'nemotron-mini' | 'nemotron-mini:4b' | 'olmo' | 'olmo:7b' | 'olmo:13b' | 'opencoder' | 'opencoder:1.5b' | 'opencoder:8b' | 'phi3' | 'phi3:3.8b' | 'phi3:14b' | 'phi3.5' | 'phi3.5:3.8b' | 'phi4' | 'phi4:14b' | 'qwen' | 'qwen:7b' | 'qwen:14b' | 'qwen:32b' | 'qwen:72b' | 'qwen:110b' | 'qwen2' | 'qwen2:0.5b' | 'qwen2:1.5b' | 'qwen2:7b' | 'qwen2:72b' | 'qwen2.5' | 'qwen2.5:0.5b' | 'qwen2.5:1.5b' | 'qwen2.5:3b' | 'qwen2.5:7b' | 'qwen2.5:14b' | 'qwen2.5:32b' | 'qwen2.5:72b' | 'qwen2.5-coder' | 'qwen2.5-coder:0.5b' | 'qwen2.5-coder:1.5b' | 'qwen2.5-coder:3b' | 'qwen2.5-coder:7b' | 'qwen2.5-coder:14b' | 'qwen2.5-coder:32b' | 'qwq' | 'qwq:32b' | 'sailor2' | 'sailor2:1b' | 'sailor2:8b' | 'sailor2:20b' | 'shieldgemma' | 'shieldgemma:2b' | 'shieldgemma:9b' | 'shieldgemma:27b' | 'smallthinker' | 'smallthinker:3b' | 'smollm' | 'smollm:135m' | 'smollm:360m' | 'smollm:1.7b' | 'tinyllama' | 'tinyllama:1.1b' | 'tulu3' | 'tulu3:8b' | 'tulu3:70b' | (string & NonNullable<unknown>);
interface OllamaChatSettings {

@@ -8,2 +8,4 @@ /**

* default to maintain backward compatibility, disable it if you encounter any issues.
*
* @deprecated Use `simulateStreaming` instead.
*/

@@ -83,2 +85,9 @@ experimentalStreamTools?: boolean;

/**
Simulates streaming by using a normal generate call and returning it as a stream.
Enable this if the model that you are using does not support streaming.
Defaults to `false`.
*/
simulateStreaming?: boolean;
/**
* Whether to use structured outputs. Defaults to false.

@@ -112,3 +121,3 @@ *

/**
 * Known Ollama embedding model identifiers (as of package v1.1.0).
 * Also admits every chat model ID (via `OllamaChatModelId`) and any other
 * string through the `(string & NonNullable<unknown>)` member, which
 * preserves the listed literals for editor autocomplete.
 */
type OllamaEmbeddingModelId = 'all-minilm' | 'all-minilm:22m' | 'all-minilm:33m' | 'bge-large' | 'bge-m3' | 'mxbai-embed-large' | 'nomic-embed-text' | 'paraphrase-multilingual' | 'snowflake-arctic-embed' | 'snowflake-arctic-embed:110m' | 'snowflake-arctic-embed:137m' | 'snowflake-arctic-embed:22m' | 'snowflake-arctic-embed:335m' | 'snowflake-arctic-embed:33m' | 'snowflake-arctic-embed2' | 'snowflake-arctic-embed2:568m' | OllamaChatModelId | (string & NonNullable<unknown>);
/**
 * Known Ollama embedding model identifiers (as of package v1.2.0).
 * Adds, relative to v1.1.0: the granite-embedding family (30m / 278m).
 * Also admits every chat model ID (via `OllamaChatModelId`) and any other
 * string through the `(string & NonNullable<unknown>)` member.
 */
type OllamaEmbeddingModelId = 'all-minilm' | 'all-minilm:22m' | 'all-minilm:33m' | 'bge-large' | 'bge-m3' | 'granite-embedding' | 'granite-embedding:30m' | 'granite-embedding:278m' | 'mxbai-embed-large' | 'nomic-embed-text' | 'paraphrase-multilingual' | 'snowflake-arctic-embed' | 'snowflake-arctic-embed:110m' | 'snowflake-arctic-embed:137m' | 'snowflake-arctic-embed:22m' | 'snowflake-arctic-embed:335m' | 'snowflake-arctic-embed:33m' | 'snowflake-arctic-embed2' | 'snowflake-arctic-embed2:568m' | OllamaChatModelId | (string & NonNullable<unknown>);
interface OllamaEmbeddingSettings {

@@ -115,0 +124,0 @@ maxEmbeddingsPerCall?: number;

@@ -542,2 +542,45 @@ "use strict";

async doStream(options) {
if (this.settings.simulateStreaming) {
const result = await this.doGenerate(options);
const simulatedStream = new ReadableStream({
start(controller) {
controller.enqueue({ type: "response-metadata", ...result.response });
if (result.text) {
controller.enqueue({
textDelta: result.text,
type: "text-delta"
});
}
if (result.toolCalls) {
for (const toolCall of result.toolCalls) {
controller.enqueue({
argsTextDelta: toolCall.args,
toolCallId: toolCall.toolCallId,
toolCallType: "function",
toolName: toolCall.toolName,
type: "tool-call-delta"
});
controller.enqueue({
type: "tool-call",
...toolCall
});
}
}
controller.enqueue({
finishReason: result.finishReason,
logprobs: result.logprobs,
providerMetadata: result.providerMetadata,
type: "finish",
usage: result.usage
});
controller.close();
}
});
return {
rawCall: result.rawCall,
rawResponse: result.rawResponse,
stream: simulatedStream,
warnings: result.warnings
};
}
const { args: body, type, warnings } = this.getArguments(options);

@@ -544,0 +587,0 @@ const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({

2

package.json
{
"name": "ollama-ai-provider",
"version": "1.1.0",
"version": "1.2.0",
"description": "Vercel AI Provider for running LLMs locally using Ollama",

@@ -5,0 +5,0 @@ "main": "./dist/index.js",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc