New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign inDemoInstall
Socket

ollama-ai-provider

Package Overview
Dependencies
Maintainers
1
Versions
25
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

ollama-ai-provider - npm Package Compare versions

Comparing version 0.6.0 to 0.7.0

179

dist/index.js

@@ -30,7 +30,7 @@ "use strict";

// src/ollama-facade.ts
var import_provider_utils6 = require("@ai-sdk/provider-utils");
var import_provider_utils7 = require("@ai-sdk/provider-utils");
// src/ollama-chat-language-model.ts
var import_provider3 = require("@ai-sdk/provider");
var import_provider_utils5 = require("@ai-sdk/provider-utils");
var import_provider_utils6 = require("@ai-sdk/provider-utils");
var import_zod3 = require("zod");

@@ -55,19 +55,16 @@

system,
toolChoice,
tools
}) {
if (!tools) {
const selectedTools = tools == null ? void 0 : tools.filter(
(tool) => !toolChoice || tool.name === toolChoice
);
if (!selectedTools) {
return system;
}
return [
system,
system === null ? null : "",
// add a newline if system is not null
schemaPrefix,
JSON.stringify(tools),
schemaSuffix
].filter((line) => line !== null).join("\n");
return [system, schemaPrefix, JSON.stringify(selectedTools), schemaSuffix].filter((line) => line !== null).join("\n");
}
// src/convert-to-ollama-chat-messages.ts
function convertToOllamaChatMessages(prompt, tools) {
function convertToOllamaChatMessages(prompt, tools, toolChoice) {
const messages = [];

@@ -81,2 +78,3 @@ let hasSystem = false;

system: content,
toolChoice,
tools

@@ -134,2 +132,3 @@ }),

system: "",
toolChoice,
tools

@@ -149,3 +148,6 @@ }),

const tool = JSON.parse(response.message.content);
const parsedTools = toolResponseSchema.parse(tool);
let parsedTools = toolResponseSchema.parse(tool);
if (!Array.isArray(parsedTools)) {
parsedTools = [parsedTools];
}
return {

@@ -174,3 +176,9 @@ ...response,

}
var toolResponseSchema = import_zod.z.array(
var toolResponseSchema = import_zod.z.union([
import_zod.z.array(
import_zod.z.object({
arguments: import_zod.z.record(import_zod.z.unknown()),
name: import_zod.z.string()
})
),
import_zod.z.object({

@@ -180,4 +188,94 @@ arguments: import_zod.z.record(import_zod.z.unknown()),

})
);
]);
// src/generate-tool/infer-tool-calls-from-stream.ts
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var import_partial_json = require("partial-json");
var InferToolCallsFromStream = class {
  /**
   * Incrementally detects and reconstructs tool calls from a streamed model
   * response. Raw text deltas are accumulated and re-parsed with partial-json
   * on every chunk, so `tool-call-delta` parts can be emitted while the
   * arguments object is still incomplete.
   *
   * @param {{ type: string }} options - generation mode; "object-tool" streams
   *   are always treated as tool calls, "regular" streams only when the first
   *   chunk looks like JSON.
   */
  constructor({ type }) {
    this._type = type;
    this._firstMessage = true;
    this._detectedToolCall = false;
    this._toolPartial = "";
    this._toolCalls = [];
  }
  /** Tool calls reconstructed from the stream so far. */
  get toolCalls() {
    return this._toolCalls;
  }
  /** Whether this stream has been classified as a tool-call stream. */
  get detectedToolCall() {
    return this._detectedToolCall;
  }
  /**
   * Feeds one text delta into the detector.
   * Returns true when the stream carries tool calls (the delta was consumed
   * here and deltas were enqueued), false when the caller should treat the
   * delta as plain text.
   */
  parse({
    controller,
    delta
  }) {
    this.detectToolCall(delta);
    if (!this._detectedToolCall) {
      return false;
    }
    this._toolPartial += delta;
    // Re-parse the whole accumulated buffer each time; partial-json tolerates
    // incomplete JSON and yields a best-effort structure.
    let parsedFunctions = (0, import_partial_json.parse)(this._toolPartial);
    if (!Array.isArray(parsedFunctions)) {
      parsedFunctions = [parsedFunctions];
    }
    parsedFunctions.forEach((parsedFunction, index) => {
      const parsedArguments = JSON.stringify(parsedFunction?.arguments) ?? "";
      // Skip entries whose arguments have not started streaming yet.
      if (parsedArguments === "") {
        return;
      }
      if (!this._toolCalls[index]) {
        // First time we see this positional tool call: assign a stable id.
        this._toolCalls[index] = {
          function: {
            arguments: "",
            name: parsedFunction.name
          },
          id: (0, import_provider_utils3.generateId)(),
          type: "function"
        };
      }
      const toolCall = this._toolCalls[index];
      // Keep the full serialized arguments snapshot, not just the delta.
      toolCall.function.arguments = parsedArguments;
      controller.enqueue({
        argsTextDelta: delta,
        toolCallId: toolCall.id,
        toolCallType: "function",
        toolName: toolCall.function.name,
        type: "tool-call-delta"
      });
    });
    return true;
  }
  /**
   * Flushes the completed tool calls as final `tool-call` parts and returns
   * the finish reason for the stream.
   */
  finish({
    controller
  }) {
    for (const toolCall of this.toolCalls) {
      controller.enqueue({
        args: toolCall.function.arguments,
        toolCallId: toolCall.id,
        toolCallType: "function",
        toolName: toolCall.function.name,
        type: "tool-call"
      });
    }
    return this.finishReason();
  }
  /**
   * Classifies the stream on its first chunk only; later chunks never change
   * the classification.
   */
  detectToolCall(delta) {
    if (!this._firstMessage) {
      return;
    }
    this._firstMessage = false;
    const trimmed = delta.trim();
    if (
      this._type === "object-tool" ||
      (this._type === "regular" && (trimmed.startsWith("{") || trimmed.startsWith("[")))
    ) {
      this._detectedToolCall = true;
    }
  }
  /**
   * "tool-calls" only for regular-mode streams that contained tool calls;
   * object-tool mode always finishes with "stop".
   */
  finishReason() {
    return this.detectedToolCall && this._type !== "object-tool" ? "tool-calls" : "stop";
  }
};
// src/map-ollama-finish-reason.ts

@@ -197,3 +295,3 @@ function mapOllamaFinishReason(finishReason) {

// src/ollama-error.ts
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var import_provider_utils4 = require("@ai-sdk/provider-utils");
var import_zod2 = require("zod");

@@ -208,3 +306,3 @@ var ollamaErrorDataSchema = import_zod2.z.object({

});
var ollamaFailedResponseHandler = (0, import_provider_utils3.createJsonErrorResponseHandler)({
var ollamaFailedResponseHandler = (0, import_provider_utils4.createJsonErrorResponseHandler)({
errorSchema: ollamaErrorDataSchema,

@@ -216,5 +314,5 @@ errorToMessage: (data) => data.error.message

var import_provider2 = require("@ai-sdk/provider");
var import_provider_utils4 = require("@ai-sdk/provider-utils");
var import_provider_utils5 = require("@ai-sdk/provider-utils");
var createJsonStreamResponseHandler = (chunkSchema) => async ({ response }) => {
const responseHeaders = (0, import_provider_utils4.extractResponseHeaders)(response);
const responseHeaders = (0, import_provider_utils5.extractResponseHeaders)(response);
if (response.body === null) {

@@ -229,3 +327,3 @@ throw new import_provider2.EmptyResponseBodyError({});

controller.enqueue(
(0, import_provider_utils4.safeParseJSON)({
(0, import_provider_utils5.safeParseJSON)({
schema: chunkSchema,

@@ -307,2 +405,3 @@ text: data

},
type,
warnings

@@ -318,2 +417,3 @@ };

},
type,
warnings

@@ -327,3 +427,7 @@ };

format: "json",
messages: convertToOllamaChatMessages(prompt, [mode.tool]),
messages: convertToOllamaChatMessages(
prompt,
[mode.tool],
mode.tool.name
),
tool_choice: {

@@ -344,2 +448,3 @@ function: { name: mode.tool.name },

},
type,
warnings

@@ -362,3 +467,3 @@ };

const { args, warnings } = this.getArguments(options);
const { responseHeaders, value } = await (0, import_provider_utils5.postJsonToApi)({
const { responseHeaders, value } = await (0, import_provider_utils6.postJsonToApi)({
abortSignal: options.abortSignal,

@@ -371,3 +476,3 @@ body: {

headers: this.config.headers(),
successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)(
successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)(
ollamaChatResponseSchema

@@ -386,3 +491,3 @@ ),

args: toolCall.function.arguments,
toolCallId: (0, import_provider_utils5.generateId)(),
toolCallId: (0, import_provider_utils6.generateId)(),
toolCallType: "function",

@@ -399,4 +504,4 @@ toolName: toolCall.function.name

async doStream(options) {
const { args, warnings } = this.getArguments(options);
const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({
const { args, type, warnings } = this.getArguments(options);
const { responseHeaders, value: response } = await (0, import_provider_utils6.postJsonToApi)({
abortSignal: options.abortSignal,

@@ -412,2 +517,3 @@ body: args,

const { messages: rawPrompt, ...rawSettings } = args;
const inferToolCallsFromStream = new InferToolCallsFromStream({ type });
let finishReason = "other";

@@ -437,3 +543,3 @@ let usage = {

if (value.done) {
finishReason = mapOllamaFinishReason("stop");
finishReason = inferToolCallsFromStream.finish({ controller });
usage = {

@@ -445,2 +551,9 @@ completionTokens: value.eval_count,

}
const isToolCallStream = inferToolCallsFromStream.parse({
controller,
delta: value.message.content
});
if (isToolCallStream) {
return;
}
if (value.message.content !== null) {

@@ -510,3 +623,3 @@ controller.enqueue({

var _a;
this.baseURL = (_a = (0, import_provider_utils6.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api";
this.baseURL = (_a = (0, import_provider_utils7.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api";
this.headers = options.headers;

@@ -531,7 +644,7 @@ }

// src/ollama-provider.ts
var import_provider_utils8 = require("@ai-sdk/provider-utils");
var import_provider_utils9 = require("@ai-sdk/provider-utils");
// src/ollama-embedding-model.ts
var import_provider4 = require("@ai-sdk/provider");
var import_provider_utils7 = require("@ai-sdk/provider-utils");
var import_provider_utils8 = require("@ai-sdk/provider-utils");
var import_zod4 = require("zod");

@@ -572,3 +685,3 @@ var OllamaEmbeddingModel = class {

for (const value of values) {
const { responseHeaders, value: response } = await (0, import_provider_utils7.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils8.postJsonToApi)({
abortSignal,

@@ -581,3 +694,3 @@ body: {

headers: this.config.headers(),
successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)(
successfulResponseHandler: (0, import_provider_utils8.createJsonResponseHandler)(
ollamaTextEmbeddingResponseSchema

@@ -600,3 +713,3 @@ ),

var _a;
const baseURL = (_a = (0, import_provider_utils8.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api";
const baseURL = (_a = (0, import_provider_utils9.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api";
const getHeaders = () => ({

@@ -603,0 +716,0 @@ ...options.headers

35

package.json
{
"name": "ollama-ai-provider",
"version": "0.6.0",
"version": "0.7.0",
"description": "Vercel AI Provider for running LLMs locally using Ollama",

@@ -19,27 +19,10 @@ "main": "./dist/index.js",

"@ai-sdk/provider": "0.0.5",
"@ai-sdk/provider-utils": "0.0.8"
"@ai-sdk/provider-utils": "0.0.8",
"partial-json": "^0.1.7"
},
"devDependencies": {
"@changesets/cli": "^2.27.1",
"@commitlint/cli": "^19.3.0",
"@commitlint/config-conventional": "^19.2.2",
"@edge-runtime/vm": "^3.2.0",
"@types/node": "^18.19.33",
"@typescript-eslint/eslint-plugin": "^7.9.0",
"@typescript-eslint/parser": "^7.9.0",
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-simple-import-sort": "^12.1.0",
"eslint-plugin-sort": "^3.0.2",
"eslint-plugin-unicorn": "^52.0.0",
"eslint-plugin-unused-imports": "^3.2.0",
"husky": "^9.0.11",
"lint-staged": "^15.2.2",
"prettier": "^3.2.5",
"tsup": "^8.0.2",
"typescript": "5.1.3",
"vite-tsconfig-paths": "^4.3.2",
"vitest": "^1.6.0",
"zod": "3.22.4"

@@ -61,12 +44,4 @@ },

},
"repository": {
"type": "git",
"url": "git+https://github.com/sgomez/ollama-ai-provider.git"
},
"bugs": {
"url": "https://github.com/sgomez/ollama-ai-provider/issues"
},
"scripts": {
"build": "tsup",
"changeset": "changeset",
"clean": "rm -rf dist",

@@ -79,6 +54,4 @@ "dev": "tsup --watch",

"test:edge": "vitest --config vitest.edge.config.js --run",
"test:node": "vitest --config vitest.node.config.js --run",
"ci:release": "pnpm clean && pnpm build && changeset publish",
"ci:version": "changeset version && pnpm install --no-frozen-lockfile"
"test:node": "vitest --config vitest.node.config.js --run"
}
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc