🚀 Big News: Socket Acquires Coana to Bring Reachability Analysis to Every AppSec Team. Learn more
Socket
Demo · Install · Sign in
Socket

node-llama-cpp

Package Overview
Dependencies
Maintainers
1
Versions
112
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

node-llama-cpp - npm Package Compare versions

Comparing version 2.7.5

to
2.8.0

dist/utils/getBuildDefaults.d.ts

4

dist/cli/commands/BuildCommand.d.ts

@@ -5,4 +5,4 @@ import { CommandModule } from "yargs";

nodeTarget?: string;
metal: boolean;
cuda: boolean;
metal?: boolean;
cuda?: boolean;
};

@@ -9,0 +9,0 @@ export declare const BuildCommand: CommandModule<object, BuildCommand>;

@@ -40,3 +40,3 @@ import process from "process";

};
export async function BuildLlamaCppCommand({ arch, nodeTarget, metal, cuda }) {
export async function BuildLlamaCppCommand({ arch = undefined, nodeTarget = undefined, metal = defaultLlamaCppMetalSupport, cuda = defaultLlamaCppCudaSupport }) {
if (!(await fs.pathExists(llamaCppDirectory))) {

@@ -43,0 +43,0 @@ console.log(chalk.red('llama.cpp is not downloaded. Please run "node-llama-cpp download" first'));

import fs from "fs-extra";
import chalk from "chalk";
import { llamaCppDirectory } from "../../config.js";
import { llamaCppDirectory, llamaCppDirectoryTagFilePath } from "../../config.js";
import withOra from "../../utils/withOra.js";

@@ -31,2 +31,3 @@ import { clearLlamaBuild } from "../../utils/clearLlamaBuild.js";

await fs.remove(llamaCppDirectory);
await fs.remove(llamaCppDirectoryTagFilePath);
});

@@ -33,0 +34,0 @@ }

import { CommandModule } from "yargs";
type DownloadCommandArgs = {
repo: string;
release: "latest" | string;
repo?: string;
release?: "latest" | string;
arch?: string;
nodeTarget?: string;
metal: boolean;
cuda: boolean;
metal?: boolean;
cuda?: boolean;
skipBuild?: boolean;
noBundle?: boolean;
updateBinariesReleaseMetadataAndSaveGitBundle?: boolean;
};

@@ -13,0 +12,0 @@ export declare const DownloadCommand: CommandModule<object, DownloadCommandArgs>;

@@ -5,3 +5,3 @@ import process from "process";

import chalk from "chalk";
import { defaultLlamaCppCudaSupport, defaultLlamaCppGitHubRepo, defaultLlamaCppMetalSupport, defaultLlamaCppRelease, isCI, llamaCppDirectory } from "../../config.js";
import { defaultLlamaCppCudaSupport, defaultLlamaCppGitHubRepo, defaultLlamaCppMetalSupport, defaultLlamaCppRelease, isCI, llamaCppDirectory, llamaCppDirectoryTagFilePath } from "../../config.js";
import { compileLlamaCpp } from "../../utils/compileLLamaCpp.js";

@@ -74,3 +74,3 @@ import withOra from "../../utils/withOra.js";

};
export async function DownloadLlamaCppCommand({ repo, release, arch, nodeTarget, metal, cuda, skipBuild, noBundle, updateBinariesReleaseMetadataAndSaveGitBundle }) {
export async function DownloadLlamaCppCommand({ repo = defaultLlamaCppGitHubRepo, release = defaultLlamaCppRelease, arch = undefined, nodeTarget = undefined, metal = defaultLlamaCppMetalSupport, cuda = defaultLlamaCppCudaSupport, skipBuild = false, noBundle = false, updateBinariesReleaseMetadataAndSaveGitBundle = false }) {
const useBundle = noBundle != true;

@@ -133,2 +133,3 @@ const octokit = new Octokit();

await fs.remove(llamaCppDirectory);
await fs.remove(llamaCppDirectoryTagFilePath);
});

@@ -135,0 +136,0 @@ console.log(chalk.blue("Cloning llama.cpp"));

import { BuildLlamaCppCommand } from "./cli/commands/BuildCommand.js";
import { DownloadLlamaCppCommand } from "./cli/commands/DownloadCommand.js";
import { ClearLlamaCppBuildCommand } from "./cli/commands/ClearCommand.js";
export { BuildLlamaCppCommand, DownloadLlamaCppCommand, ClearLlamaCppBuildCommand };
import { getBuildDefaults } from "./utils/getBuildDefaults.js";
export { BuildLlamaCppCommand, DownloadLlamaCppCommand, ClearLlamaCppBuildCommand, getBuildDefaults };
import { BuildLlamaCppCommand } from "./cli/commands/BuildCommand.js";
import { DownloadLlamaCppCommand } from "./cli/commands/DownloadCommand.js";
import { ClearLlamaCppBuildCommand } from "./cli/commands/ClearCommand.js";
export { BuildLlamaCppCommand, DownloadLlamaCppCommand, ClearLlamaCppBuildCommand };
import { getBuildDefaults } from "./utils/getBuildDefaults.js";
export { BuildLlamaCppCommand, DownloadLlamaCppCommand, ClearLlamaCppBuildCommand, getBuildDefaults };
//# sourceMappingURL=commands.js.map

@@ -11,2 +11,3 @@ export declare const llamaDirectory: string;

export declare const binariesGithubReleasePath: string;
export declare const llamaCppDirectoryTagFilePath: string;
export declare const currentReleaseGitBundlePath: string;

@@ -13,0 +14,0 @@ export declare const xpackDirectory: string;

@@ -20,2 +20,3 @@ import { fileURLToPath } from "url";

export const binariesGithubReleasePath = path.join(llamaDirectory, "binariesGithubRelease.json");
export const llamaCppDirectoryTagFilePath = path.join(llamaDirectory, "llama.cpp.tag.json");
export const currentReleaseGitBundlePath = path.join(llamaDirectory, "gitRelease.bundle");

@@ -22,0 +23,0 @@ export const xpackDirectory = path.join(llamaDirectory, "xpack");

@@ -16,4 +16,5 @@ import { LlamaModel, type LlamaModelOptions } from "./llamaEvaluator/LlamaModel.js";

import { getChatWrapperByBos } from "./chatWrappers/createChatWrapperByBos.js";
import { getReleaseInfo } from "./utils/getReleaseInfo.js";
import { type ConversationInteraction, type Token } from "./types.js";
import { type GbnfJsonArraySchema, type GbnfJsonBasicSchema, type GbnfJsonConstSchema, type GbnfJsonEnumSchema, type GbnfJsonObjectSchema, type GbnfJsonOneOfSchema, type GbnfJsonSchema, type GbnfJsonSchemaImmutableType, type GbnfJsonSchemaToType } from "./utils/gbnfJson/types.js";
export { LlamaModel, type LlamaModelOptions, LlamaGrammar, type LlamaGrammarOptions, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, type LlamaGrammarEvaluationStateOptions, LlamaContext, type LlamaContextOptions, type LlamaContextRepeatPenalty, LlamaChatSession, type LlamaChatSessionOptions, type LLamaChatPromptOptions, type LlamaChatSessionRepeatPenalty, type ConversationInteraction, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper, GeneralChatPromptWrapper, ChatMLChatPromptWrapper, FalconChatPromptWrapper, getChatWrapperByBos, type Token, type GbnfJsonSchema, type GbnfJsonSchemaToType, type GbnfJsonSchemaImmutableType, type GbnfJsonBasicSchema, type GbnfJsonConstSchema, type GbnfJsonEnumSchema, type GbnfJsonOneOfSchema, type GbnfJsonObjectSchema, type GbnfJsonArraySchema };
export { LlamaModel, type LlamaModelOptions, LlamaGrammar, type LlamaGrammarOptions, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, type LlamaGrammarEvaluationStateOptions, LlamaContext, type LlamaContextOptions, type LlamaContextRepeatPenalty, LlamaChatSession, type LlamaChatSessionOptions, type LLamaChatPromptOptions, type LlamaChatSessionRepeatPenalty, type ConversationInteraction, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper, GeneralChatPromptWrapper, ChatMLChatPromptWrapper, FalconChatPromptWrapper, getChatWrapperByBos, getReleaseInfo, type Token, type GbnfJsonSchema, type GbnfJsonSchemaToType, type GbnfJsonSchemaImmutableType, type GbnfJsonBasicSchema, type GbnfJsonConstSchema, type GbnfJsonEnumSchema, type GbnfJsonOneOfSchema, type GbnfJsonObjectSchema, type GbnfJsonArraySchema };

@@ -16,3 +16,4 @@ import { LlamaModel } from "./llamaEvaluator/LlamaModel.js";

import { getChatWrapperByBos } from "./chatWrappers/createChatWrapperByBos.js";
export { LlamaModel, LlamaGrammar, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, LlamaContext, LlamaChatSession, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper, GeneralChatPromptWrapper, ChatMLChatPromptWrapper, FalconChatPromptWrapper, getChatWrapperByBos };
import { getReleaseInfo } from "./utils/getReleaseInfo.js";
export { LlamaModel, LlamaGrammar, LlamaJsonSchemaGrammar, LlamaJsonSchemaValidationError, LlamaGrammarEvaluationState, LlamaContext, LlamaChatSession, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper, GeneralChatPromptWrapper, ChatMLChatPromptWrapper, FalconChatPromptWrapper, getChatWrapperByBos, getReleaseInfo };
//# sourceMappingURL=index.js.map
export declare function cloneLlamaCppRepo(githubOwner: string, githubRepo: string, tag: string, useBundles?: boolean): Promise<void>;
export declare function getClonedLlamaCppRepoReleaseTag(): Promise<string | null>;

@@ -5,3 +5,3 @@ import simpleGit from "simple-git";

import fs from "fs-extra";
import { llamaCppDirectory } from "../config.js";
import { llamaCppDirectory, llamaCppDirectoryTagFilePath } from "../config.js";
import { getGitBundlePathForRelease } from "./gitReleaseBundles.js";

@@ -49,2 +49,3 @@ export async function cloneLlamaCppRepo(githubOwner, githubRepo, tag, useBundles = true) {

await fs.remove(llamaCppDirectory);
await fs.remove(llamaCppDirectoryTagFilePath);
console.error("Failed to clone git bundle, cloning from GitHub instead", err);

@@ -67,2 +68,14 @@ printCloneErrorHelp(String(err));

}
try {
const clonedLlamaCppRepoTagJson = {
tag
};
await fs.writeJson(llamaCppDirectoryTagFilePath, clonedLlamaCppRepoTagJson, {
spaces: 4
});
}
catch (err) {
console.error("Failed to write llama.cpp tag file", err);
throw err;
}
}

@@ -80,2 +93,14 @@ function printCloneErrorHelp(error) {

}
/**
 * Resolves the release tag recorded for the currently cloned llama.cpp repo.
 *
 * Reads the JSON tag file written when the repo was cloned and returns its
 * `tag` property.
 *
 * @returns {Promise<string | null>} the stored tag, or `null` when the tag
 * file does not exist or cannot be read/parsed.
 */
export async function getClonedLlamaCppRepoReleaseTag() {
    const tagFileExists = await fs.pathExists(llamaCppDirectoryTagFilePath);
    if (!tagFileExists)
        return null;

    let tagFileJson;
    try {
        tagFileJson = await fs.readJson(llamaCppDirectoryTagFilePath);
    }
    catch (err) {
        // An unreadable or corrupt tag file is treated the same as a missing one,
        // but the failure is logged so the cause is visible.
        console.error("Failed to read llama.cpp tag file", err);
        return null;
    }
    return tagFileJson.tag;
}
//# sourceMappingURL=cloneLlamaCppRepo.js.map
{
"release": "b1489"
"release": "b1492"
}
{
"name": "node-llama-cpp",
"version": "2.7.5",
"version": "2.8.0",
"description": "Run AI models locally on your machine with node.js bindings for llama.cpp. Force a JSON schema on the model output on the generation level",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet