🚀 Big News: Socket Acquires Coana to Bring Reachability Analysis to Every AppSec Team. Learn more
Socket
DemoInstallSign in
Socket

node-llama-cpp

Package Overview
Dependencies
Maintainers
1
Versions
112
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

node-llama-cpp - npm Package Compare versions

Comparing version

1.1.1

to
1.2.0

dist/chatWrappers/GeneralChatPromptWrapper.d.ts

1

dist/cli/commands/ChatCommand.d.ts

@@ -6,4 +6,5 @@ import { CommandModule } from "yargs";

systemPrompt: string;
wrapper: string;
};
export declare const ChatCommand: CommandModule<object, ChatCommand>;
export {};

33

dist/cli/commands/ChatCommand.js

@@ -7,2 +7,3 @@ import * as readline from "readline/promises";

import { LlamaChatPromptWrapper } from "../../chatWrappers/LlamaChatPromptWrapper.js";
import { GeneralChatPromptWrapper } from "../../chatWrappers/GeneralChatPromptWrapper.js";
export const ChatCommand = {

@@ -32,7 +33,14 @@ command: "chat",

group: "Optional:"
})
.option("wrapper", {
type: "string",
default: "general",
choices: ["general", "llama"],
description: "Chat wrapper to use",
group: "Optional:"
});
},
async handler({ model, systemInfo, systemPrompt }) {
async handler({ model, systemInfo, systemPrompt, wrapper }) {
try {
await RunChat({ model, systemInfo, systemPrompt });
await RunChat({ model, systemInfo, systemPrompt, wrapper });
}

@@ -45,5 +53,5 @@ catch (err) {

};
async function RunChat({ model: modelArg, systemInfo, systemPrompt }) {
const { LlamaChatSession } = await import("../../LlamaChatSession.js");
const { LlamaModel } = await import("../../LlamaModel.js");
async function RunChat({ model: modelArg, systemInfo, systemPrompt, wrapper }) {
const { LlamaChatSession } = await import("../../llamaEvaluator/LlamaChatSession.js");
const { LlamaModel } = await import("../../llamaEvaluator/LlamaModel.js");
const model = new LlamaModel({

@@ -53,6 +61,6 @@ modelPath: modelArg

const session = new LlamaChatSession({
model,
context: model.createContext(),
printLLamaSystemInfo: systemInfo,
systemPrompt,
promptWrapper: new LlamaChatPromptWrapper()
promptWrapper: createChatWrapper(wrapper)
});

@@ -81,3 +89,3 @@ await withOra({

await session.prompt(input, (chunk) => {
process.stdout.write(model.decode(Uint32Array.from([chunk])));
process.stdout.write(session.context.decode(Uint32Array.from(chunk)));
});

@@ -88,2 +96,11 @@ process.stdout.write(endColor);

}
function createChatWrapper(wrapper) {
switch (wrapper) {
case "general":
return new GeneralChatPromptWrapper();
case "llama":
return new LlamaChatPromptWrapper();
}
throw new Error("Unknown wrapper: " + wrapper);
}
//# sourceMappingURL=ChatCommand.js.map

@@ -1,3 +0,3 @@

import { LlamaChatSession } from "./LlamaChatSession.js";
import { LlamaModel } from "./LlamaModel.js";
import { LlamaChatSession } from "./llamaEvaluator/LlamaChatSession.js";
import { LlamaModel } from "./llamaEvaluator/LlamaModel.js";
import { AbortError } from "./AbortError.js";

@@ -7,2 +7,4 @@ import { ChatPromptWrapper } from "./ChatPromptWrapper.js";

import { LlamaChatPromptWrapper } from "./chatWrappers/LlamaChatPromptWrapper.js";
export { LlamaModel, LlamaChatSession, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper };
import { GeneralChatPromptWrapper } from "./chatWrappers/GeneralChatPromptWrapper.js";
import { LlamaContext } from "./llamaEvaluator/LlamaContext.js";
export { LlamaModel, LlamaContext, LlamaChatSession, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper, GeneralChatPromptWrapper };

@@ -1,3 +0,3 @@

import { LlamaChatSession } from "./LlamaChatSession.js";
import { LlamaModel } from "./LlamaModel.js";
import { LlamaChatSession } from "./llamaEvaluator/LlamaChatSession.js";
import { LlamaModel } from "./llamaEvaluator/LlamaModel.js";
import { AbortError } from "./AbortError.js";

@@ -7,3 +7,5 @@ import { ChatPromptWrapper } from "./ChatPromptWrapper.js";

import { LlamaChatPromptWrapper } from "./chatWrappers/LlamaChatPromptWrapper.js";
export { LlamaModel, LlamaChatSession, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper };
import { GeneralChatPromptWrapper } from "./chatWrappers/GeneralChatPromptWrapper.js";
import { LlamaContext } from "./llamaEvaluator/LlamaContext.js";
export { LlamaModel, LlamaContext, LlamaChatSession, AbortError, ChatPromptWrapper, EmptyChatPromptWrapper, LlamaChatPromptWrapper, GeneralChatPromptWrapper };
//# sourceMappingURL=index.js.map
{
"name": "node-llama-cpp",
"version": "1.1.1",
"version": "1.2.0",
"description": "node.js bindings for llama.cpp",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet