@baseai/core - npm Package Compare versions

Comparing version 0.9.15-snapshot.6 to 0.9.15-snapshot.7

dist/index.d.ts (29 lines changed)

@@ -89,5 +89,2 @@ import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';
-interface Runner extends ChatCompletionStream<null> {
-}
 interface Variable {

@@ -273,2 +270,26 @@ name: string;
-export { type Chunk, type ChunkStream, type Memory as MemoryI, type Message, type MessageRole, Pipe, type Pipe$1 as PipeI, type PipeOptions, type RunOptions, type RunOptionsStream, type RunResponse, type RunResponseStream, type Tool as ToolI, type Usage, type Variable, generateText, getTextContent, getTextDelta, isContent, isToolCall, isUnknown, printStreamToStdout, processChunk, streamText };
+interface Runner extends ChatCompletionStream<null> {
+}
+/**
+ * Converts a ReadableStream into a Runner.
+ *
+ * @param readableStream - The ReadableStream to convert.
+ * @returns The converted Runner.
+ */
+declare const fromReadableStream: (readableStream: ReadableStream) => Runner;
+/**
+ * Returns a runner for the given readable stream.
+ *
+ * @param readableStream - The readable stream to create a runner for.
+ * @returns A runner for the given readable stream.
+ */
+declare const getRunner: (readableStream: ReadableStream) => Runner;
+/**
+ * Retrieves the text part from a given ChunkStream.
+ *
+ * @param chunk - The ChunkStream object.
+ * @returns The text content of the first choice's delta, or an empty string if it doesn't exist.
+ */
+declare const getTextPart: (chunk: ChunkStream) => string;
+export { type Chunk, type ChunkStream, type Memory as MemoryI, type Message, type MessageRole, Pipe, type Pipe$1 as PipeI, type PipeOptions, type RunOptions, type RunOptionsStream, type RunResponse, type RunResponseStream, type Runner, type Tool as ToolI, type Usage, type Variable, fromReadableStream, generateText, getRunner, getTextContent, getTextDelta, getTextPart, isContent, isToolCall, isUnknown, printStreamToStdout, processChunk, streamText };
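
Taken together, these declarations add a small streaming helper surface. The sketch below is a minimal, hypothetical usage example (not taken from the package's docs): it assumes `stream` is a ReadableStream of chat-completion chunks, such as the stream returned by a streaming pipe run, and shows two ways the new helpers can consume it.

import { getRunner, getTextPart, type ChunkStream } from '@baseai/core';

// Event-based consumption: the runner emits 'content' with each text delta.
export async function printStreamedText(stream: ReadableStream): Promise<void> {
  const runner = getRunner(stream); // wraps the raw ReadableStream in a ChatCompletionStream-style runner
  runner.on('content', (delta: string) => process.stdout.write(delta));
  await runner.done();
}

// Chunk-based consumption: iterate the runner and pull text out with getTextPart.
export async function collectStreamedText(stream: ReadableStream): Promise<string> {
  let text = '';
  for await (const chunk of getRunner(stream)) {
    // Assumption: each iterated chunk is structurally compatible with ChunkStream.
    text += getTextPart(chunk as unknown as ChunkStream);
  }
  return text;
}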

dist/index.js (45 lines changed)

@@ -20,9 +20,12 @@ "use strict";
-// index.ts
-var core_exports = {};
-__export(core_exports, {
+// src/index.ts
+var src_exports = {};
+__export(src_exports, {
 Pipe: () => Pipe,
+fromReadableStream: () => fromReadableStream,
 generateText: () => generateText,
+getRunner: () => getRunner,
 getTextContent: () => getTextContent,
 getTextDelta: () => getTextDelta,
+getTextPart: () => getTextPart,
 isContent: () => isContent,

@@ -35,8 +38,21 @@ isToolCall,
 });
-module.exports = __toCommonJS(core_exports);
+module.exports = __toCommonJS(src_exports);
-// common/request.ts
+// src/helpers/stream.ts
+var import_ChatCompletionStream = require("openai/lib/ChatCompletionStream");
+var fromReadableStream = (readableStream) => {
+  return import_ChatCompletionStream.ChatCompletionStream.fromReadableStream(readableStream);
+};
+var getRunner = (readableStream) => {
+  return fromReadableStream(readableStream);
+};
+var getTextPart = (chunk) => {
+  var _a, _b;
+  return ((_b = (_a = chunk.choices[0]) == null ? void 0 : _a.delta) == null ? void 0 : _b.content) || "";
+};
+// src/common/request.ts
 var import_streaming = require("openai/streaming");
-// common/errors.ts
+// src/common/errors.ts
 var APIError = class _APIError extends Error {

@@ -155,3 +171,3 @@ constructor(status, error, message, headers) {
-// common/request.ts
+// src/common/request.ts
 var Request = class {

@@ -291,3 +307,3 @@ constructor(config) {
-// data/models.ts
+// src/data/models.ts
 var OPEN_AI = "OpenAI";

@@ -681,3 +697,3 @@ var ANTHROPIC = "Anthropic";
-// utils/get-llm-api-key.ts
+// src/utils/get-llm-api-key.ts
 function getLLMApiKey(modelProvider) {

@@ -708,3 +724,3 @@ switch (true) {
-// utils/is-prod.ts
+// src/utils/is-prod.ts
 var FORCE_PROD = false;

@@ -720,3 +736,3 @@ var TEST_PROD_LOCALLY = FORCE_PROD;
-// utils/to-old-pipe-format.ts
+// src/utils/to-old-pipe-format.ts
 function toOldPipeFormat(newFormat) {

@@ -774,3 +790,3 @@ const [providerString, modelName] = newFormat.model.split(":");
-// utils/local-server-running.ts
+// src/utils/local-server-running.ts
 async function isLocalServerRunning() {

@@ -810,3 +826,3 @@ try {
-// pipes/pipes.ts
+// src/pipes/pipes.ts
 var Pipe = class {

@@ -989,5 +1005,8 @@ constructor(options) {
 Pipe,
+fromReadableStream,
 generateText,
+getRunner,
 getTextContent,
 getTextDelta,
+getTextPart,
 isContent,
@@ -994,0 +1013,0 @@ isToolCall,
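
For readability, the compiled helpers in the stream hunk above correspond roughly to the following source-level TypeScript. This is a reconstruction for illustration only, not the package's actual src/helpers/stream.ts.

import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';

// Wrap a raw ReadableStream in OpenAI's ChatCompletionStream helper.
const fromReadableStream = (readableStream: ReadableStream) =>
  ChatCompletionStream.fromReadableStream(readableStream);

// getRunner is currently just an alias for fromReadableStream.
const getRunner = (readableStream: ReadableStream) => fromReadableStream(readableStream);

// getTextPart returns the first choice's delta content, or "" when absent.
const getTextPart = (chunk: { choices: Array<{ delta?: { content?: string | null } }> }) =>
  chunk.choices[0]?.delta?.content || '';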

package.json

 {
 "name": "@baseai/core",
 "description": "The Web AI Framework's core - BaseAI.dev",
-"version": "0.9.15-snapshot.6",
+"version": "0.9.15-snapshot.7",
 "license": "Apache-2.0",
@@ -6,0 +6,0 @@ "sideEffects": false,

Sorry, the diffs of four additional changed files are not supported yet.
