New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

ollama-ai-provider

Package Overview
Dependencies
Maintainers
1
Versions
25
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

ollama-ai-provider - npm Package Compare versions

Comparing version 0.2.0 to 0.3.0

59

dist/index.js

@@ -30,10 +30,12 @@ "use strict";

// src/ollama-facade.ts
var import_provider_utils4 = require("@ai-sdk/provider-utils");
var import_provider_utils5 = require("@ai-sdk/provider-utils");
// src/ollama-chat-language-model.ts
var import_provider2 = require("@ai-sdk/provider");
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var import_provider3 = require("@ai-sdk/provider");
var import_provider_utils4 = require("@ai-sdk/provider-utils");
var import_zod2 = require("zod");
// src/convert-to-ollama-chat-messages.ts
var import_provider = require("@ai-sdk/provider");
var import_provider_utils = require("@ai-sdk/provider-utils");
function convertToOllamaChatMessages(prompt) {

@@ -49,9 +51,18 @@ const messages = [];

messages.push({
content: content.map((part) => {
switch (part.type) {
case "text": {
return part.text;
...content.reduce(
(previous, current) => {
if (current.type === "text") {
previous.content += current.text;
} else if (current.type === "image" && current.image instanceof URL) {
throw new import_provider.UnsupportedFunctionalityError({
functionality: "image-part"
});
} else if (current.type === "image" && current.image instanceof Uint8Array) {
previous.images = previous.images || [];
previous.images.push((0, import_provider_utils.convertUint8ArrayToBase64)(current.image));
}
}
}).join(""),
return previous;
},
{ content: "" }
),
role: "user"

@@ -96,3 +107,3 @@ });

// src/ollama-error.ts
var import_provider_utils = require("@ai-sdk/provider-utils");
var import_provider_utils2 = require("@ai-sdk/provider-utils");
var import_zod = require("zod");

@@ -107,3 +118,3 @@ var ollamaErrorDataSchema = import_zod.z.object({

});
var ollamaFailedResponseHandler = (0, import_provider_utils.createJsonErrorResponseHandler)({
var ollamaFailedResponseHandler = (0, import_provider_utils2.createJsonErrorResponseHandler)({
errorSchema: ollamaErrorDataSchema,

@@ -114,8 +125,8 @@ errorToMessage: (data) => data.error.message

// src/utils.ts
var import_provider = require("@ai-sdk/provider");
var import_provider_utils2 = require("@ai-sdk/provider-utils");
var import_provider2 = require("@ai-sdk/provider");
var import_provider_utils3 = require("@ai-sdk/provider-utils");
var createJsonStreamResponseHandler = (chunkSchema) => async ({ response }) => {
const responseHeaders = (0, import_provider_utils2.extractResponseHeaders)(response);
const responseHeaders = (0, import_provider_utils3.extractResponseHeaders)(response);
if (response.body === null) {
throw new import_provider.EmptyResponseBodyError({});
throw new import_provider2.EmptyResponseBodyError({});
}

@@ -128,3 +139,3 @@ return {

controller.enqueue(
(0, import_provider_utils2.safeParseJSON)({
(0, import_provider_utils3.safeParseJSON)({
schema: chunkSchema,

@@ -185,3 +196,3 @@ text: data

case "object-tool": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "object-tool mode"

@@ -191,3 +202,3 @@ });

case "object-grammar": {
throw new import_provider2.UnsupportedFunctionalityError({
throw new import_provider3.UnsupportedFunctionalityError({
functionality: "object-grammar mode"

@@ -204,3 +215,4 @@ });

const { args, warnings } = this.getArguments(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
console.debug("doGenerate", JSON.stringify(args, null, 2));
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
abortSignal: options.abortSignal,

@@ -213,3 +225,3 @@ body: {

headers: this.config.headers(),
successfulResponseHandler: (0, import_provider_utils3.createJsonResponseHandler)(
successfulResponseHandler: (0, import_provider_utils4.createJsonResponseHandler)(
ollamaChatChunkSchema

@@ -233,4 +245,5 @@ ),

async doStream(options) {
console.debug("doStream", options);
const { args, warnings } = this.getArguments(options);
const { responseHeaders, value: response } = await (0, import_provider_utils3.postJsonToApi)({
const { responseHeaders, value: response } = await (0, import_provider_utils4.postJsonToApi)({
abortSignal: options.abortSignal,

@@ -332,4 +345,4 @@ body: args,

var _a, _b;
this.baseURL = (_a = (0, import_provider_utils4.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api";
this.generateId = (_b = options.generateId) != null ? _b : import_provider_utils4.generateId;
this.baseURL = (_a = (0, import_provider_utils5.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api";
this.generateId = (_b = options.generateId) != null ? _b : import_provider_utils5.generateId;
this.headers = options.headers;

@@ -336,0 +349,0 @@ }

{
"name": "ollama-ai-provider",
"version": "0.2.0",
"version": "0.3.0",
"description": "Vercel AI Provider for running LLMs locally using Ollama",

@@ -5,0 +5,0 @@ "main": "./dist/index.js",

@@ -56,3 +56,3 @@ # ollama-ai-provider

This provider is capable of generating and streaming text and objects. It does not
support image input and function calling (tools). Object generation may fail depending
support function calling (tools). Object generation may fail depending
on the model used and the schema used.

@@ -62,11 +62,11 @@

| Model | Image input | Object generation | Tool usage | Tool streaming |
|------------|-------------|--------------------|------------|----------------|
| llama2 | :x: | :white_check_mark: | :x: | :x: |
| llama3 | :x: | :white_check_mark: | :x: | :x: |
| llava | :x: | :white_check_mark: | :x: | :x: |
| mistral | :x: | :white_check_mark: | :x: | :x: |
| mixtral | :x: | :white_check_mark: | :x: | :x: |
| openhermes | :x: | :white_check_mark: | :x: | :x: |
| phi3 | :x: | :white_check_mark: | :x: | :x: |
| Model | Image input | Object generation | Tool usage | Tool streaming |
|------------|--------------------|--------------------|------------|----------------|
| llama2 | :x: | :white_check_mark: | :x: | :x: |
| llama3 | :x: | :white_check_mark: | :x: | :x: |
| llava | :white_check_mark: | :white_check_mark: | :x: | :x: |
| mistral | :x: | :white_check_mark: | :x: | :x: |
| mixtral | :x: | :white_check_mark: | :x: | :x: |
| openhermes | :x: | :white_check_mark: | :x: | :x: |
| phi3 | :x: | :white_check_mark: | :x: | :x: |

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket · SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc