
ollama-ai-provider - npm package version comparison

Comparing version 0.11.0 to 0.12.0


dist/index.d.ts

@@ -6,2 +6,7 @@ import { LanguageModelV1, EmbeddingModelV1 } from '@ai-sdk/provider';

+  /**
+   * Until Ollama officially supports tool calling in streams, the provider can try to detect function calls. Enabled by
+   * default to maintain backward compatibility, disable it if you encounter any issues.
+   */
+  experimentalStreamTools?: boolean;
   /**
    * Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0)
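The new setting defaults to true, so existing streaming behavior is unchanged. A minimal sketch of opting out, assuming the package's createOllama factory and an illustrative local model name:

import { createOllama } from 'ollama-ai-provider';

// Hypothetical usage: settings are passed per model as the second argument.
const ollama = createOllama();
const model = ollama('llama3', {
  // Disable stream tool-call detection if it misbehaves for your models.
  experimentalStreamTools: false
});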

@@ -36,2 +41,4 @@ */

   * Multiple stop patterns may be set by specifying multiple separate `stop` parameters in a modelfile.
+  *
+  * @deprecated Use `stopSequences` from AI SDK functions.
   */

@@ -47,2 +54,4 @@ stop?: string;

   * lower value (e.g. 10) will be more conservative. (Default: 40)
+  *
+  * @deprecated Use `topK` from AI SDK functions.
   */

@@ -49,0 +58,0 @@ topK?: number;
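With stop and topK deprecated on the provider settings, the same controls can now be passed per call through the AI SDK instead. A hedged sketch, assuming the ai package's generateText and the model from the sketch above:

import { generateText } from 'ai';

// Per-call sampling controls replace the deprecated provider settings.
const { text } = await generateText({
  model,
  prompt: 'Write a haiku about local LLMs.',
  stopSequences: ['\n\n'], // previously settings.stop
  topK: 10                 // previously settings.topK
});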


dist/index.js

@@ -127,4 +127,8 @@ "use strict";

 var InferToolCallsFromStream = class {
-  constructor({ type }) {
+  constructor({
+    tools,
+    type
+  }) {
     this._firstMessage = true;
+    this._tools = tools;
     this._toolPartial = "";

@@ -197,2 +201,5 @@ this._toolCalls = [];

   detectToolCall(delta) {
+    if (!this._tools || this._tools.length === 0) {
+      return;
+    }
     if (this._firstMessage) {

@@ -322,10 +329,21 @@ if (this._type === "object-tool") {

     prompt,
+    responseFormat,
     seed,
+    stopSequences,
     temperature,
+    topK,
     topP
   }) {
-    var _a;
+    var _a, _b, _c;
     const type = mode.type;
+    const warnings = [];
+    if (responseFormat !== void 0 && responseFormat.type === "json" && responseFormat.schema !== void 0) {
+      warnings.push({
+        details: "JSON response format schema is not supported",
+        setting: "responseFormat",
+        type: "unsupported-setting"
+      });
+    }
     const baseArguments = {
       format: responseFormat == null ? void 0 : responseFormat.type,
       model: this.modelId,

@@ -343,6 +361,6 @@ options: removeUndefined({

       seed,
-      stop: this.settings.stop,
+      stop: (_a = this.settings.stop) != null ? _a : stopSequences,
       temperature,
       tfs_z: this.settings.tfsZ,
-      top_k: this.settings.topK,
+      top_k: (_b = this.settings.topK) != null ? _b : topK,
       top_p: topP
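Note the fallback direction in the nullish checks above: a value set in the provider settings still wins over the per-call value. An illustrative sketch with hypothetical values, reusing the ollama factory and generateText from the earlier sketches:

// settings.topK takes precedence, so Ollama receives top_k: 20 here,
// not the per-call topK of 40.
const model = ollama('llama3', { topK: 20 });
await generateText({ model, prompt: '...', topK: 40 });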

@@ -353,3 +371,3 @@ })

     case "regular": {
-      const tools = ((_a = mode.tools) == null ? void 0 : _a.length) ? mode.tools : void 0;
+      const tools = ((_c = mode.tools) == null ? void 0 : _c.length) ? mode.tools : void 0;
       return {

@@ -424,3 +442,3 @@ args: {

       fetch: this.config.fetch,
-      headers: this.config.headers(),
+      headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
       successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(

@@ -464,3 +482,3 @@ ollamaChatResponseSchema

       fetch: this.config.fetch,
-      headers: this.config.headers(),
+      headers: (0, import_provider_utils5.combineHeaders)(this.config.headers(), options.headers),
       successfulResponseHandler: createJsonStreamResponseHandler(
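Both the generate and stream requests now merge provider-level headers with per-request headers via combineHeaders, instead of sending only the provider's. A sketch of what that enables, assuming a hypothetical authenticated proxy in front of Ollama (endpoint and token are illustrative):

import { createOllama } from 'ollama-ai-provider';
import { generateText } from 'ai';

// Hypothetical endpoint and token: provider headers are set once...
const ollama = createOllama({
  baseURL: 'https://ollama.internal.example/api',
  headers: { Authorization: 'Bearer my-token' }
});

// ...and per-request headers are merged in rather than dropped.
const { text } = await generateText({
  model: ollama('llama3'),
  prompt: 'Hello!',
  headers: { 'X-Request-Id': 'req-42' }
});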

@@ -472,3 +490,7 @@ ollamaChatStreamChunkSchema

     const { messages: rawPrompt, ...rawSettings } = args;
-    const inferToolCallsFromStream = new InferToolCallsFromStream({ type });
+    const tools = options.mode.type === "regular" ? options.mode.tools : options.mode.type === "object-tool" ? [options.mode.tool] : void 0;
+    const inferToolCallsFromStream = new InferToolCallsFromStream({
+      tools,
+      type
+    });
     let finishReason = "other";

@@ -479,2 +501,3 @@ let usage = {

     };
+    const { experimentalStreamTools = true } = this.settings;
     return {

@@ -506,8 +529,10 @@ rawCall: { rawPrompt, rawSettings },

           }
-          const isToolCallStream = inferToolCallsFromStream.parse({
-            controller,
-            delta: value.message.content
-          });
-          if (isToolCallStream) {
-            return;
-          }
+          if (experimentalStreamTools) {
+            const isToolCallStream = inferToolCallsFromStream.parse({
+              controller,
+              delta: value.message.content
+            });
+            if (isToolCallStream) {
+              return;
+            }
+          }

@@ -514,0 +539,0 @@ if (value.message.content !== null) {

package.json

@@ -1,4 +1,4 @@

 {
   "name": "ollama-ai-provider",
-  "version": "0.11.0",
+  "version": "0.12.0",
   "description": "Vercel AI Provider for running LLMs locally using Ollama",

@@ -18,4 +18,4 @@ "main": "./dist/index.js",

   "dependencies": {
-    "@ai-sdk/provider": "0.0.14",
-    "@ai-sdk/provider-utils": "1.0.5",
+    "@ai-sdk/provider": "0.0.15",
+    "@ai-sdk/provider-utils": "1.0.7",
     "partial-json": "0.1.7"

@@ -25,6 +25,6 @@ },

     "@edge-runtime/vm": "^3.2.0",
-    "@types/node": "^18.19.39",
-    "tsup": "^8.1.0",
+    "@types/node": "^18.19.43",
+    "tsup": "^8.2.4",
     "typescript": "5.1.3",
-    "zod": "3.22.4"
+    "zod": "3.23.8"
   },

@@ -31,0 +31,0 @@ "peerDependencies": {

