ollama-ai-provider
Advanced tools
Comparing version 0.9.1 to 0.10.0
@@ -30,7 +30,7 @@ "use strict"; | ||
// src/ollama-facade.ts | ||
var import_provider_utils6 = require("@ai-sdk/provider-utils"); | ||
var import_provider_utils7 = require("@ai-sdk/provider-utils"); | ||
// src/ollama-chat-language-model.ts | ||
var import_provider2 = require("@ai-sdk/provider"); | ||
var import_provider_utils5 = require("@ai-sdk/provider-utils"); | ||
var import_provider3 = require("@ai-sdk/provider"); | ||
var import_provider_utils6 = require("@ai-sdk/provider-utils"); | ||
var import_zod3 = require("zod"); | ||
@@ -304,3 +304,3 @@ | ||
// src/utils.ts | ||
// src/utils/remove-undefined.ts | ||
function removeUndefined(object) { | ||
@@ -312,2 +312,52 @@ return Object.fromEntries( | ||
// src/utils/response-handler.ts | ||
var import_provider2 = require("@ai-sdk/provider"); | ||
var import_provider_utils5 = require("@ai-sdk/provider-utils"); | ||
// src/utils/text-line-stream.ts | ||
// Splits an incoming text stream into individual "\n"-delimited lines.
// Text after the final newline of a chunk is buffered until more data
// arrives; flush emits whatever trailing text never got a newline.
var TextLineStream = class extends TransformStream {
  constructor() {
    super({
      transform: (chunkText, controller) => {
        // Prepend any partial line carried over from the previous chunk.
        let pending = this.buffer + chunkText;
        let newlineAt = pending.indexOf("\n");
        while (newlineAt !== -1) {
          controller.enqueue(pending.slice(0, newlineAt));
          pending = pending.slice(newlineAt + 1);
          newlineAt = pending.indexOf("\n");
        }
        this.buffer = pending;
      },
      flush: (controller) => {
        // Emit the final unterminated line, if any.
        if (this.buffer.length > 0) {
          controller.enqueue(this.buffer);
        }
      }
    });
    this.buffer = "";
  }
};
// src/utils/response-handler.ts | ||
// Builds a response handler for newline-delimited JSON streams: decodes the
// HTTP body as text, splits it into lines, and safe-parses each line against
// `chunkSchema`. Throws EmptyResponseBodyError when the response has no body.
var createJsonStreamResponseHandler = (chunkSchema) => async ({ response }) => {
  const responseHeaders = (0, import_provider_utils5.extractResponseHeaders)(response);
  const { body } = response;
  if (body === null) {
    throw new import_provider2.EmptyResponseBodyError({});
  }
  // Parse each text line into a (safe) schema-validated JSON result.
  const parseLine = new TransformStream({
    transform(line, controller) {
      const parsed = (0, import_provider_utils5.safeParseJSON)({
        schema: chunkSchema,
        text: line
      });
      controller.enqueue(parsed);
    }
  });
  const value = body
    .pipeThrough(new TextDecoderStream())
    .pipeThrough(new TextLineStream())
    .pipeThrough(parseLine);
  return { responseHeaders, value };
};
// src/ollama-chat-language-model.ts | ||
@@ -419,3 +469,3 @@ var OllamaChatLanguageModel = class { | ||
case "object-grammar": { | ||
throw new import_provider2.UnsupportedFunctionalityError({ | ||
throw new import_provider3.UnsupportedFunctionalityError({ | ||
functionality: "object-grammar mode" | ||
@@ -433,3 +483,3 @@ }); | ||
const { args, warnings } = this.getArguments(options); | ||
const { responseHeaders, value } = await (0, import_provider_utils5.postJsonToApi)({ | ||
const { responseHeaders, value } = await (0, import_provider_utils6.postJsonToApi)({ | ||
abortSignal: options.abortSignal, | ||
@@ -443,3 +493,3 @@ body: { | ||
headers: this.config.headers(), | ||
successfulResponseHandler: (0, import_provider_utils5.createJsonResponseHandler)( | ||
successfulResponseHandler: (0, import_provider_utils6.createJsonResponseHandler)( | ||
ollamaChatResponseSchema | ||
@@ -458,3 +508,3 @@ ), | ||
args: toolCall.function.arguments, | ||
toolCallId: (0, import_provider_utils5.generateId)(), | ||
toolCallId: (0, import_provider_utils6.generateId)(), | ||
toolCallType: "function", | ||
@@ -472,3 +522,3 @@ toolName: toolCall.function.name | ||
const { args, type, warnings } = this.getArguments(options); | ||
const { responseHeaders, value: response } = await (0, import_provider_utils5.postJsonToApi)({ | ||
const { responseHeaders, value: response } = await (0, import_provider_utils6.postJsonToApi)({ | ||
abortSignal: options.abortSignal, | ||
@@ -479,3 +529,3 @@ body: args, | ||
headers: this.config.headers(), | ||
successfulResponseHandler: (0, import_provider_utils5.createJsonStreamResponseHandler)( | ||
successfulResponseHandler: createJsonStreamResponseHandler( | ||
ollamaChatStreamChunkSchema | ||
@@ -589,3 +639,3 @@ ), | ||
var _a; | ||
this.baseURL = (_a = (0, import_provider_utils6.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api"; | ||
this.baseURL = (_a = (0, import_provider_utils7.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api"; | ||
this.headers = options.headers; | ||
@@ -610,7 +660,7 @@ } | ||
// src/ollama-provider.ts | ||
var import_provider_utils8 = require("@ai-sdk/provider-utils"); | ||
var import_provider_utils9 = require("@ai-sdk/provider-utils"); | ||
// src/ollama-embedding-model.ts | ||
var import_provider3 = require("@ai-sdk/provider"); | ||
var import_provider_utils7 = require("@ai-sdk/provider-utils"); | ||
var import_provider4 = require("@ai-sdk/provider"); | ||
var import_provider_utils8 = require("@ai-sdk/provider-utils"); | ||
var import_zod4 = require("zod"); | ||
@@ -639,3 +689,3 @@ var OllamaEmbeddingModel = class { | ||
if (values.length > this.maxEmbeddingsPerCall) { | ||
throw new import_provider3.TooManyEmbeddingValuesForCallError({ | ||
throw new import_provider4.TooManyEmbeddingValuesForCallError({ | ||
maxEmbeddingsPerCall: this.maxEmbeddingsPerCall, | ||
@@ -652,3 +702,3 @@ modelId: this.modelId, | ||
for (const value of values) { | ||
const { responseHeaders, value: response } = await (0, import_provider_utils7.postJsonToApi)({ | ||
const { responseHeaders, value: response } = await (0, import_provider_utils8.postJsonToApi)({ | ||
abortSignal, | ||
@@ -662,3 +712,3 @@ body: { | ||
headers: this.config.headers(), | ||
successfulResponseHandler: (0, import_provider_utils7.createJsonResponseHandler)( | ||
successfulResponseHandler: (0, import_provider_utils8.createJsonResponseHandler)( | ||
ollamaTextEmbeddingResponseSchema | ||
@@ -681,3 +731,3 @@ ), | ||
var _a; | ||
const baseURL = (_a = (0, import_provider_utils8.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api"; | ||
const baseURL = (_a = (0, import_provider_utils9.withoutTrailingSlash)(options.baseURL)) != null ? _a : "http://127.0.0.1:11434/api"; | ||
const getHeaders = () => ({ | ||
@@ -684,0 +734,0 @@ ...options.headers |
{ | ||
"name": "ollama-ai-provider", | ||
"version": "0.9.1", | ||
"version": "0.10.0", | ||
"description": "Vercel AI Provider for running LLMs locally using Ollama", | ||
@@ -5,0 +5,0 @@ "main": "./dist/index.js", |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
142068
1578