@brainstack/agent - npm package version comparison

Comparing version 1.0.0 to 1.0.1

This patch release threads an optional system-prompt parameter through the public API: ModelService.ask and the IModel._ask contract gain a second argument, and both bundled integrations now forward the caller-supplied system message (defaulting to 'You are a helpful assistant.') to their backends.

dist/abstraction.d.ts
 export interface IModel {
-    _ask(input: string): Promise<string | null>;
+    _ask(input: string, system?: string): Promise<string | null>;
 }

@@ -5,3 +5,3 @@ import { IModel } from './abstraction';

     constructor(_modelIntegration: IModel);
-    ask(question: string): Promise<string | null>;
+    ask(question: string, system?: string): Promise<string | null>;
 }

@@ -17,10 +17,10 @@ "use strict";

     }
-    ask(question) {
+    ask(question, system = 'You are a helpful assistant.') {
         return __awaiter(this, void 0, void 0, function* () {
             try {
-                const response = yield this.modelIntegration._ask(question);
+                const response = yield this.modelIntegration._ask(question, system);
                 return response;
             }
             catch (error) {
-                console.error("Error querying LLM:", error);
+                console.error('Error querying LLM:', error);
                 return "Sorry, I couldn't get a response at the moment.";

@@ -27,0 +27,0 @@ }
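For context, here is a minimal usage sketch (not part of the package) of the new call shape. It assumes ModelService is re-exported from the package entry point ("main": "dist/index.js", whose contents are not shown in this diff); the echo integration below is a hypothetical stand-in for a real IModel implementation.

import { ModelService } from '@brainstack/agent';

// Hypothetical stand-in: any object satisfying IModel._ask works.
const echoModel = {
  async _ask(input: string, system?: string): Promise<string | null> {
    return `[${system ?? 'default system prompt'}] ${input}`;
  },
};

async function demo() {
  const service = new ModelService(echoModel);

  // As in 1.0.0: no system argument, so the default is applied inside ask().
  console.log(await service.ask('What is an LLM?'));

  // New in 1.0.1: callers may override the system prompt per question.
  console.log(await service.ask('What is an LLM?', 'Answer in five words.'));
}

demo();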

@@ -6,3 +6,3 @@ import { IModel } from '../abstraction';

     constructor(apiKey: string, baseUrl: string);
-    _ask(input: string): Promise<string | null>;
+    _ask(input: string, content?: string): Promise<string | null>;
 }

@@ -22,3 +22,3 @@ "use strict";

     }
-    _ask(input) {
+    _ask(input, content = 'You are a helpful assistant.') {
         return __awaiter(this, void 0, void 0, function* () {

@@ -28,3 +28,3 @@ try {

             const body = {
-                messages: [{ role: 'system', content: 'You are a helpful assistant.' }, { role: 'user', content: input }],
+                messages: [{ role: 'system', content }, { role: 'user', content: input }],
                 max_tokens: 100,

@@ -31,0 +31,0 @@ temperature: 0.7

@@ -6,3 +6,3 @@ import { IModel } from '../abstraction';

     constructor(_apiKey: string, _model?: string);
-    _ask(input: string): Promise<string | null>;
+    _ask(input: string, content?: string): Promise<string | null>;
 }

@@ -21,7 +21,7 @@ "use strict";

     }
-    _ask(input) {
+    _ask(input, content = 'You are a helpful assistant.') {
         return __awaiter(this, void 0, void 0, function* () {
             try {
                 const response = yield this.model.chat.completions.create({
-                    messages: [{ role: 'user', content: input }],
+                    messages: [{ role: 'system', content }, { role: 'user', content: input }],
                     model: this.modelName,

@@ -28,0 +28,0 @@ });
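Two details worth noting in the compiled integrations above: before this release the axios-based integration hardcoded the default system message, while the OpenAI-based one sent no system message at all; as of 1.0.1 both build messages as [{ role: 'system', content }, { role: 'user', content: input }] from the caller-supplied (or defaulted) content. A third-party integration would follow the same updated contract; a hedged sketch with illustrative names:

// Mirrors the updated IModel contract from dist/abstraction.d.ts.
interface IModel {
  _ask(input: string, system?: string): Promise<string | null>;
}

// Illustrative only: logs the outgoing payload instead of calling a backend.
class DryRunModel implements IModel {
  async _ask(
    input: string,
    system: string = 'You are a helpful assistant.' // same default as the built-ins
  ): Promise<string | null> {
    const messages = [
      { role: 'system', content: system },
      { role: 'user', content: input },
    ];
    console.log('would send:', JSON.stringify(messages));
    return null; // a real integration would return the model's reply here
  }
}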

 {
     "name": "@brainstack/agent",
-    "version": "1.0.0",
+    "version": "1.0.1",
     "description": "Brainstack Model Agent Lib",

@@ -5,0 +5,0 @@ "main": "dist/index.js",

 export interface IModel {
-    _ask(input: string): Promise<string | null>;
+    _ask(input: string, system?: string): Promise<string | null>;
 }

 import { IModel } from './abstraction';
 export class ModelService {
     private modelIntegration: IModel;
-    constructor(_modelIntegration:IModel) {
+    constructor(_modelIntegration: IModel) {
         this.modelIntegration = _modelIntegration;
     }
-    async ask(question: string): Promise<string | null> {
+    async ask(
+        question: string,
+        system: string = 'You are a helpful assistant.'
+    ): Promise<string | null> {
         try {
-            const response = await this.modelIntegration._ask(question);
+            const response = await this.modelIntegration._ask(question, system);
             return response;
         } catch (error) {
-            console.error("Error querying LLM:", error);
+            console.error('Error querying LLM:', error);
             return "Sorry, I couldn't get a response at the moment.";

@@ -21,2 +23,1 @@ }

}

@@ -13,7 +13,7 @@ import axios from 'axios';

-    async _ask(input: string): Promise<string | null> {
+    async _ask(input: string, content: string = 'You are a helpful assistant.'): Promise<string | null> {
         try {
             const url = `${this.baseUrl}`;
             const body = {
-                messages: [{ role: 'system', content: 'You are a helpful assistant.' }, { role: 'user', content: input }],
+                messages: [{ role: 'system', content }, { role: 'user', content: input }],
                 max_tokens: 100,

@@ -20,0 +20,0 @@ temperature: 0.7

@@ -16,6 +16,6 @@ import { OpenAI } from 'openai';

-    async _ask(input: string): Promise<string | null> {
+    async _ask(input: string, content: string = 'You are a helpful assistant.'): Promise<string | null> {
         try {
             const response = await this.model.chat.completions.create({
-                messages: [{ role: 'user', content: input }],
+                messages: [{ role: 'system', content }, { role: 'user', content: input }],
                 model: this.modelName,

@@ -22,0 +22,0 @@ });
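Putting the pieces together, an end-to-end sketch under stated assumptions: the OpenAI-backed integration's export name (OpenAIIntegration here) and its re-export from the package entry point are guesses, since the diff only shows its constructor signature (_apiKey: string, _model?: string); check the package's index for the real names.

import { ModelService, OpenAIIntegration } from '@brainstack/agent';

async function main() {
  // Constructor shape taken from the .d.ts diff above; the model name is illustrative.
  const integration = new OpenAIIntegration(process.env.OPENAI_API_KEY ?? '', 'gpt-3.5-turbo');
  const service = new ModelService(integration);

  // The second argument is the optional system prompt added in 1.0.1.
  const answer = await service.ask(
    'Summarize the 1.0.1 changes.',
    'You are a terse release-notes writer.'
  );
  console.log(answer);
}

main();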
