@baseai/core - npm Package Compare versions

Comparing version 0.9.7 to 0.9.8-snapshot.0
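
At a glance, 0.9.8-snapshot.0 threads a user-supplied `config` through the SDK: `config.env` can override `process.env` (NODE_ENV and provider API keys) and `config.log` drives a new category-scoped `Logger` that replaces the ad-hoc `console.log` calls. A minimal usage sketch inferred from the compiled diff below; the overall options shape is an assumption, only the `config.env` and `config.log` fields are read verbatim by the code:

import { Pipe } from '@baseai/core';

// Hypothetical options object: `config.env` and `config.log` are the fields
// the compiled code below actually reads; the rest is illustrative.
const pipe = new Pipe({
  apiKey: 'lb_...', // Langbase API key (placeholder)
  name: 'my-pipe',
  model: { provider: 'OpenAI', name: 'gpt-4o-mini' },
  config: {
    env: {
      NODE_ENV: 'production',    // consulted by isProd(configEnv)
      OPENAI_API_KEY: 'sk-...'   // consulted by getLLMApiKey() before process.env
    },
    log: {
      isEnabled: true,           // master switch for the new Logger
      isEnabledInProd: false,    // logging stays off in prod unless true
      'pipe.run.response': false // per-category overrides, dot-scoped
    }
  }
});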


dist/index.d.ts

@@ -135,2 +135,3 @@ import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';

maxCalls?: number;
+ config?: any;
}

@@ -144,2 +145,5 @@ interface ChoiceGenerate {

declare class Pipe {
+ private console;
+ private config;
+ private configEnv;
private request;

@@ -146,0 +150,0 @@ private pipe;


dist/index.js

@@ -52,2 +52,87 @@ "use strict";

+ // src/utils/is-prod.ts
+ var FORCE_PROD = false;
+ var TEST_PROD_LOCALLY = FORCE_PROD;
+ function isProd(configEnv) {
+ var _a;
+ if (TEST_PROD_LOCALLY) return true;
+ const env = (_a = configEnv == null ? void 0 : configEnv.NODE_ENV) != null ? _a : process.env.NODE_ENV;
+ return env === "production";
+ }
+ function getApiUrl(configEnv) {
+ return isProd(configEnv) ? "https://api.langbase.com" : "http://localhost:9000";
+ }
+ // src/helpers/logger.ts
+ var Logger = class {
+ constructor(config) {
+ this.config = config;
+ }
+ log(category, value, logHeader) {
+ var _a, _b, _c, _d, _e;
+ if (isProd((_a = this.config) == null ? void 0 : _a.env) && !((_c = (_b = this.config) == null ? void 0 : _b.log) == null ? void 0 : _c.isEnabledInProd))
+ return;
+ if (!((_e = (_d = this.config) == null ? void 0 : _d.log) == null ? void 0 : _e.isEnabled)) return;
+ if (!this.shouldLog(category)) return;
+ console.log("");
+ if (logHeader) {
+ console.log(`======= ${logHeader} =======`);
+ }
+ console.log(`=\u276F ${category}`);
+ if (typeof value === "object" && value !== null) {
+ console.dir(value, { depth: null, colors: true });
+ } else if (value !== void 0) {
+ console.log(value);
+ }
+ }
+ shouldLog(category) {
+ var _a;
+ const logConfig = (_a = this.config) == null ? void 0 : _a.log;
+ if (!logConfig) return false;
+ const categoryParts = category.split(".");
+ while (categoryParts.length > 0) {
+ const currentCategory = categoryParts.join(".");
+ if (logConfig[currentCategory] === true) return true;
+ if (logConfig[currentCategory] === false) return false;
+ categoryParts.pop();
+ }
+ return true;
+ }
+ };
+ // src/utils/local-server-running.ts
+ async function isLocalServerRunning() {
+ try {
+ const endpoint = getApiUrl();
+ const response = await fetch(endpoint, {
+ mode: "no-cors",
+ cache: "no-cache"
+ // Prevents caching of the request
+ });
+ const portUseError = `
+ Port 9000 is already in use.
+ Terminate the process running on it.
+ Run "npx baseai@latest dev" in a new terminal to start the dev server.
+ `;
+ if (!response.ok) {
+ console.error(portUseError);
+ return false;
+ }
+ const res = await response.json();
+ if (!res.success) {
+ console.error(portUseError);
+ return false;
+ }
+ return true;
+ } catch (error) {
+ console.error(
+ `
+ BaseAI dev server is not running.
+ Please run "npx baseai dev" in a new terminal, in the root of this project.
+ `
+ );
+ return false;
+ }
+ }
// src/common/request.ts
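
The `shouldLog` walk in the added Logger resolves dotted categories from most specific to least: an explicit true/false at any level wins, otherwise lookup falls back to the parent scope, and finally defaults to logging. A standalone re-implementation of that lookup (mirrors the diff above; not the package export):

type LogConfig = Record<string, boolean>;

function shouldLog(logConfig: LogConfig, category: string): boolean {
  const parts = category.split('.');
  while (parts.length > 0) {
    const current = parts.join('.');
    if (logConfig[current] === true) return true;   // explicit allow wins
    if (logConfig[current] === false) return false; // explicit deny wins
    parts.pop();                                    // fall back to parent scope
  }
  return true; // no rule matched: log by default
}

// With cfg = { pipe: true, 'pipe.run.response': false }:
// shouldLog(cfg, 'pipe.run.response')    -> false (exact match)
// shouldLog(cfg, 'pipe.run.toolResults') -> true  (inherited from 'pipe')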

@@ -172,4 +257,5 @@ var import_streaming = require("openai/streaming");

var Request = class {
- constructor(config) {
- this.config = config;
+ constructor(props) {
+ this.props = props;
+ this.console = new Logger(this.props.config);
}

@@ -210,3 +296,3 @@ async send({ endpoint, ...options }) {

buildUrl({ endpoint }) {
- return `${this.config.baseUrl}${endpoint}`;
+ return `${this.props.baseUrl}${endpoint}`;
}

@@ -218,3 +304,3 @@ buildHeaders({

"Content-Type": "application/json",
- Authorization: `Bearer ${this.config.apiKey}`,
+ Authorization: `Bearer ${this.props.apiKey}`,
...headers

@@ -228,2 +314,8 @@ };

}) {
this.console.log("pipe.request", {
url,
method: options.method,
headers,
body: options.body
});
const resp = await fetch(url, {

@@ -233,3 +325,3 @@ method: options.method,

body: JSON.stringify(options.body),
- signal: AbortSignal.timeout(this.config.timeout || 3e4)
+ signal: AbortSignal.timeout(this.props.timeout || 3e4)
});

@@ -295,4 +387,2 @@ return resp;

async post(options) {
console.log("Request.post.options");
console.dir(options, { depth: null, colors: true });
return this.send({ ...options, method: "POST" });

@@ -701,22 +791,41 @@ }

// src/utils/get-llm-api-key.ts
- function getLLMApiKey(modelProvider) {
+ function getLLMApiKey({
+ modelProvider,
+ configEnv
+ }) {
+ const getEnv = (key) => {
+ let value;
+ if (configEnv && key in configEnv) {
+ value = configEnv[key];
+ } else {
+ value = process.env[key];
+ }
+ if (!value) {
+ throw new Error(
+ `Environment variable ${key} is not set or empty. Only needed in local dev environment.
+ Note: In production, add it to your keysets https://langbase.com/docs/features/keysets
+ `
+ );
+ }
+ return value;
+ };
switch (true) {
case modelProvider.includes(OPEN_AI):
- return process.env.OPENAI_API_KEY || "";
+ return getEnv("OPENAI_API_KEY");
case modelProvider === ANTHROPIC:
- return process.env.ANTHROPIC_API_KEY || "";
+ return getEnv("ANTHROPIC_API_KEY");
case modelProvider === TOGETHER_AI:
- return process.env.TOGETHER_API_KEY || "";
+ return getEnv("TOGETHER_API_KEY");
case modelProvider === GROQ:
- return process.env.GROQ_API_KEY || "";
+ return getEnv("GROQ_API_KEY");
case modelProvider === GOOGLE:
- return process.env.GOOGLE_API_KEY || "";
+ return getEnv("GOOGLE_API_KEY");
case modelProvider.includes(COHERE):
- return process.env.COHERE_API_KEY || "";
+ return getEnv("COHERE_API_KEY");
case modelProvider.includes(FIREWORKS_AI):
- return process.env.FIREWORKS_API_KEY || "";
+ return getEnv("FIREWORKS_API_KEY");
case modelProvider.includes(PERPLEXITY):
- return process.env.PERPLEXITY_API_KEY || "";
+ return getEnv("PERPLEXITY_API_KEY");
case modelProvider.includes(OLLAMA):
- return process.env.OLLAMA_API_KEY || "";
+ return getEnv("OLLAMA_API_KEY");
default:
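
The rewritten `getLLMApiKey` prefers keys from `config.env` over `process.env` and now throws instead of silently returning an empty string when a key is missing. The lookup order, extracted as a standalone sketch of the `getEnv` closure above:

function getEnv(
  configEnv: Record<string, string | undefined> | undefined,
  key: string
): string {
  // config.env wins over process.env when the key is present there.
  const value =
    configEnv && key in configEnv ? configEnv[key] : process.env[key];
  if (!value) {
    // New behavior: fail loudly in local dev instead of returning ''.
    throw new Error(`Environment variable ${key} is not set or empty.`);
  }
  return value;
}

// getEnv({ OPENAI_API_KEY: 'from-config' }, 'OPENAI_API_KEY') -> 'from-config'
// getEnv(undefined, 'OPENAI_API_KEY') -> process.env.OPENAI_API_KEY, or throws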

@@ -727,13 +836,2 @@ throw new Error(`Unsupported model provider: ${modelProvider}`);

- // src/utils/is-prod.ts
- var FORCE_PROD = false;
- var TEST_PROD_LOCALLY = FORCE_PROD;
- function isProd() {
- if (TEST_PROD_LOCALLY) return true;
- return process.env.NODE_ENV === "production";
- }
- function getApiUrl() {
- return isProd() ? "https://api.langbase.com" : "http://localhost:9000";
- }
// src/utils/to-old-pipe-format.ts

@@ -792,42 +890,14 @@ function toOldPipeFormat(newFormat) {

- // src/utils/local-server-running.ts
- async function isLocalServerRunning() {
- try {
- const endpoint = getApiUrl();
- const response = await fetch(endpoint, {
- mode: "no-cors",
- cache: "no-cache"
- // Prevents caching of the request
- });
- const portUseError = `
- Port 9000 is already in use.
- Terminate the process running on it.
- Run "npx baseai@latest dev" in a new terminal to start the dev server.
- `;
- if (!response.ok) {
- console.error(portUseError);
- return false;
- }
- const res = await response.json();
- if (!res.success) {
- console.error(portUseError);
- return false;
- }
- return true;
- } catch (error) {
- console.error(
- `
- BaseAI dev server is not running.
- Please run "npx baseai@latest dev" in a new terminal to start the dev server.
- `
- );
- return false;
- }
- }
// src/pipes/pipes.ts
var Pipe = class {
constructor(options) {
- const baseUrl = getApiUrl();
- this.request = new Request({ apiKey: options.apiKey, baseUrl });
+ var _a;
+ this.config = options == null ? void 0 : options.config;
+ this.console = new Logger(this.config);
+ this.configEnv = (_a = options == null ? void 0 : options.config) == null ? void 0 : _a.env;
+ this.request = new Request({
+ baseUrl: getApiUrl(this.configEnv),
+ apiKey: options.apiKey,
+ config: this.config
+ });
this.pipe = toOldPipeFormat(options);

@@ -883,7 +953,9 @@ delete this.pipe.apiKey;

async run(options) {
console.log("pipe.run", this.pipe.name, "RUN");
this.console.log("pipe", this.pipe.name, "PIPE RUN");
const endpoint = "/beta/pipes/run";
console.log("pipe.run.baseUrl.endpoint", getApiUrl() + endpoint);
console.log("pipe.run.options");
console.dir(options, { depth: null, colors: true });
this.console.log(
"pipe.run.baseUrl.endpoint",
getApiUrl(this.configEnv) + endpoint
);
this.console.log("pipe.runOptions", options);
const requestedStream = this.isStreamRequested(options);

@@ -897,4 +969,3 @@ const stream = this.hasTools ? false : requestedStream;

}
console.log("pipe.run.response");
console.dir(response, { depth: null, colors: true });
this.console.log("pipe.response", response);
if (stream) {

@@ -909,15 +980,16 @@ return response;

if (this.hasNoToolCalls(responseMessage)) {
console.log("No more tool calls. Returning final response.");
this.console.log(
"pipe.hasNoToolCalls",
"No more tool calls. Returning final response."
);
return currentResponse;
}
console.log("\npipe.run.response.toolCalls");
console.dir(responseMessage.tool_calls, {
depth: null,
colors: true
});
this.console.log(
"pipe.run.response.toolCalls",
responseMessage.tool_calls
);
const toolResults = await this.runTools(
responseMessage.tool_calls
);
console.log("\npipe.run.toolResults");
console.dir(toolResults, { depth: null, colors: true });
this.console.log("pipe.run.toolResults", toolResults);
messages = this.getMessagesToSend(

@@ -948,9 +1020,16 @@ messages,

async createRequest(endpoint, body) {
- const prodOptions = {
- endpoint,
- body: {
- ...body,
- name: this.pipe.name
- }
- };
+ const isProdEnv = isProd(this.configEnv);
+ if (isProdEnv) {
+ const prodOptions = {
+ endpoint,
+ body: {
+ ...body,
+ name: this.pipe.name
+ }
+ };
+ this.console.log("pipe.request.prodOptions", prodOptions);
+ return this.request.post(prodOptions);
+ }
+ const isServerRunning = await isLocalServerRunning();
+ if (!isServerRunning) return {};
const localOptions = {

@@ -961,11 +1040,10 @@ endpoint,

pipe: this.pipe,
- llmApiKey: getLLMApiKey(this.pipe.model.provider)
+ llmApiKey: getLLMApiKey({
+ modelProvider: this.pipe.model.provider,
+ configEnv: this.configEnv
+ })
}
};
- const isProdEnv = isProd();
- if (!isProdEnv) {
- const isServerRunning = await isLocalServerRunning();
- if (!isServerRunning) return {};
- }
- return this.request.post(isProdEnv ? prodOptions : localOptions);
+ this.console.log("pipe.request.localOptions", localOptions);
+ return this.request.post(localOptions);
}

@@ -972,0 +1050,0 @@ };
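
Net effect of the `createRequest` rework above: the prod/local branch is decided up front, the dev-server probe only runs outside production, and the provider key is resolved with `configEnv`. A condensed, dependency-injected sketch of the new control flow (in the package these values live on `this`):

async function createRequest(
  deps: {
    isProd(): boolean;
    isLocalServerRunning(): Promise<boolean>;
    post(options: { endpoint: string; body: unknown }): Promise<unknown>;
    pipe: { name: string; model: { provider: string } };
    llmApiKey(): string; // wraps getLLMApiKey({ modelProvider, configEnv })
  },
  endpoint: string,
  body: Record<string, unknown>
): Promise<unknown> {
  if (deps.isProd()) {
    // Production: post straight to api.langbase.com; keys come from keysets.
    return deps.post({ endpoint, body: { ...body, name: deps.pipe.name } });
  }
  // Local dev: the baseai dev server on localhost:9000 must be up.
  if (!(await deps.isLocalServerRunning())) return {};
  return deps.post({
    endpoint,
    body: { ...body, pipe: deps.pipe, llmApiKey: deps.llmApiKey() },
  });
}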


@@ -44,5 +44,7 @@ 'use client'

var TEST_PROD_LOCALLY = FORCE_PROD;
- function isProd() {
+ function isProd(configEnv) {
+ var _a;
if (TEST_PROD_LOCALLY) return true;
- return process.env.NODE_ENV === "production";
+ const env = (_a = configEnv == null ? void 0 : configEnv.NODE_ENV) != null ? _a : process.env.NODE_ENV;
+ return env === "production";
}

@@ -49,0 +51,0 @@
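
The same change lands in the client ('use client') bundle: `isProd` consults `configEnv.NODE_ENV` first and only then falls back to `process.env.NODE_ENV`. An un-minified equivalent of the compiled output above (the FORCE_PROD / TEST_PROD_LOCALLY short-circuit kept for fidelity):

const FORCE_PROD = false;
const TEST_PROD_LOCALLY = FORCE_PROD;

function isProd(configEnv?: { NODE_ENV?: string }): boolean {
  if (TEST_PROD_LOCALLY) return true; // local override for testing prod paths
  const env = configEnv?.NODE_ENV ?? process.env.NODE_ENV;
  return env === 'production';
}

// isProd({ NODE_ENV: 'production' }) -> true, regardless of process.env
// isProd()                           -> falls back to process.env.NODE_ENV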

{
"name": "@baseai/core",
"description": "The Web AI Framework's core - BaseAI.dev",
"version": "0.9.7",
"version": "0.9.8-snapshot.0",
"license": "Apache-2.0",

@@ -33,4 +33,4 @@ "sideEffects": false,

"vitest": "1.6.0",
"@baseai/eslint-config": "0.0.2",
"@baseai/tsconfig": "0.0.2"
"@baseai/tsconfig": "0.0.2",
"@baseai/eslint-config": "0.0.2"
},

@@ -37,0 +37,0 @@ "publishConfig": {

Sorry, the diffs of the remaining files are not supported yet.
