Huge News! Announcing our $40M Series B, led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@baseai/core

Package Overview
Dependencies
Maintainers
0
Versions
92
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@baseai/core - npm Package Compare versions

Comparing version 0.9.17 to 0.9.18-snapshot.0

25

dist/index.d.ts

@@ -293,3 +293,26 @@ import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';

declare const getTextPart: (chunk: ChunkStream) => string;
/**
* Handles the response stream from a given `Response` object.
*
* @param {Object} params - The parameters for handling the response stream.
* @param {Response} params.response - The API response to handle.
* @param {boolean} params.rawResponse - Optional flag to include raw response headers.
*
* @returns {Object} An object containing the processed stream, thread ID, and optionally raw response headers.
* @returns {ReadableStream<any>} return.stream - The readable stream created from the response.
* @returns {string | null} return.threadId - The thread ID extracted from the response headers.
* @returns {Object} [return.rawResponse] - Optional raw response headers.
* @returns {Record<string, string>} return.rawResponse.headers - The headers from the raw response.
*/
declare function handleResponseStream({ response, rawResponse, }: {
response: Response;
rawResponse?: boolean;
}): {
stream: any;
threadId: string | null;
rawResponse?: {
headers: Record<string, string>;
};
};
export { type Chunk, type ChunkStream, type Memory as MemoryI, type Message, type MessageRole, Pipe, type Pipe$1 as PipeI, type PipeOptions, type RunOptions, type RunOptionsStream, type RunResponse, type RunResponseStream, type Runner, type Tool as ToolI, type Usage, type Variable, fromReadableStream, generateText, getRunner, getTextContent, getTextDelta, getTextPart, isContent, isToolCall, isUnknown, printStreamToStdout, processChunk, streamText };
export { type Chunk, type ChunkStream, type Memory as MemoryI, type Message, type MessageRole, Pipe, type Pipe$1 as PipeI, type PipeOptions, type RunOptions, type RunOptionsStream, type RunResponse, type RunResponseStream, type Runner, type Tool as ToolI, type Usage, type Variable, fromReadableStream, generateText, getRunner, getTextContent, getTextDelta, getTextPart, handleResponseStream, isContent, isToolCall, isUnknown, printStreamToStdout, processChunk, streamText };

44

dist/index.js

@@ -30,2 +30,3 @@ "use strict";

getTextPart: () => getTextPart,
handleResponseStream: () => handleResponseStream,
isContent: () => isContent,

@@ -42,2 +43,3 @@ isToolCall: () => isToolCall,

var import_ChatCompletionStream = require("openai/lib/ChatCompletionStream");
var import_streaming = require("openai/streaming");
var fromReadableStream = (readableStream) => {

@@ -53,6 +55,21 @@ return import_ChatCompletionStream.ChatCompletionStream.fromReadableStream(readableStream);

};
/**
 * Turns an SSE `Response` into a readable stream plus thread metadata.
 *
 * @param {Object} params
 * @param {Response} params.response - The API response carrying an SSE body.
 * @param {boolean} [params.rawResponse] - When truthy, also expose raw headers.
 * @returns {{stream: any, threadId: string | null, rawResponse?: {headers: Record<string, string>}}}
 */
function handleResponseStream({ response, rawResponse }) {
  // The controller allows the SSE connection to be aborted by the consumer.
  const abort = new AbortController();
  const readable = import_streaming.Stream
    .fromSSEResponse(response, abort)
    .toReadableStream();
  const output = {
    stream: readable,
    // NOTE(review): "lb-thread-id" presumably identifies the conversation
    // thread server-side — confirm against the API contract.
    threadId: response.headers.get("lb-thread-id")
  };
  if (rawResponse) {
    // Materialize headers only when explicitly requested by the caller.
    output.rawResponse = {
      headers: Object.fromEntries(response.headers.entries())
    };
  }
  return output;
}
// src/common/request.ts
var import_streaming = require("openai/streaming");
// src/common/errors.ts

@@ -198,3 +215,3 @@ var APIError = class _APIError extends Error {

if ((_a = options.body) == null ? void 0 : _a.stream) {
return this.handleRunResponseStream({
return handleResponseStream({
response,

@@ -252,20 +269,2 @@ rawResponse: options.body.rawResponse

}
handleRunResponseStream({
response,
rawResponse
}) {
const controller = new AbortController();
const streamSSE = import_streaming.Stream.fromSSEResponse(response, controller);
const stream = streamSSE.toReadableStream();
const result = {
stream,
threadId: response.headers.get("lb-thread-id")
};
if (rawResponse) {
result.rawResponse = {
headers: Object.fromEntries(response.headers.entries())
};
}
return result;
}
async handleRunResponse({

@@ -1025,2 +1024,3 @@ response,

getTextPart,
handleResponseStream,
isContent,

@@ -1027,0 +1027,0 @@ isToolCall,

@@ -56,3 +56,26 @@ import { ChatCompletionStream } from 'openai/lib/ChatCompletionStream';

declare const getTextPart: (chunk: ChunkStream) => string;
/**
* Handles the response stream from a given `Response` object.
*
* @param {Object} params - The parameters for handling the response stream.
* @param {Response} params.response - The API response to handle.
* @param {boolean} params.rawResponse - Optional flag to include raw response headers.
*
* @returns {Object} An object containing the processed stream, thread ID, and optionally raw response headers.
* @returns {ReadableStream<any>} return.stream - The readable stream created from the response.
* @returns {string | null} return.threadId - The thread ID extracted from the response headers.
* @returns {Object} [return.rawResponse] - Optional raw response headers.
* @returns {Record<string, string>} return.rawResponse.headers - The headers from the raw response.
*/
declare function handleResponseStream({ response, rawResponse, }: {
response: Response;
rawResponse?: boolean;
}): {
stream: any;
threadId: string | null;
rawResponse?: {
headers: Record<string, string>;
};
};
export { type Runner, fromReadableStream, getRunner, getTextPart };
export { type Runner, fromReadableStream, getRunner, getTextPart, handleResponseStream };

@@ -25,3 +25,4 @@ "use strict";

getRunner: () => getRunner,
getTextPart: () => getTextPart
getTextPart: () => getTextPart,
handleResponseStream: () => handleResponseStream
});

@@ -32,2 +33,3 @@ module.exports = __toCommonJS(helpers_exports);

var import_ChatCompletionStream = require("openai/lib/ChatCompletionStream");
var import_streaming = require("openai/streaming");
var fromReadableStream = (readableStream) => {

@@ -43,2 +45,20 @@ return import_ChatCompletionStream.ChatCompletionStream.fromReadableStream(readableStream);

};
/**
 * Converts an SSE API `Response` into a ReadableStream and extracts the
 * thread id; optionally surfaces the raw headers.
 *
 * @param {{response: Response, rawResponse?: boolean}} params
 * @returns {{stream: any, threadId: string | null, rawResponse?: {headers: Record<string, string>}}}
 */
function handleResponseStream({ response, rawResponse }) {
  const abortController = new AbortController();
  const sseStream = import_streaming.Stream.fromSSEResponse(
    response,
    abortController
  );
  const result = {
    stream: sseStream.toReadableStream(),
    threadId: response.headers.get("lb-thread-id")
  };
  // Raw headers are opt-in to avoid copying them on every call.
  if (rawResponse) {
    const headers = Object.fromEntries(response.headers.entries());
    result.rawResponse = { headers };
  }
  return result;
}
// Annotate the CommonJS export names for ESM import in node:

@@ -48,4 +68,5 @@ 0 && (module.exports = {

getRunner,
getTextPart
getTextPart,
handleResponseStream
});
//# sourceMappingURL=index.js.map
{
"name": "@baseai/core",
"description": "The Web AI Framework's core - BaseAI.dev",
"version": "0.9.17",
"version": "0.9.18-snapshot.0",
"license": "Apache-2.0",

@@ -36,4 +36,4 @@ "sideEffects": false,

"vitest": "1.6.0",
"@baseai/eslint-config": "0.0.2",
"@baseai/tsconfig": "0.0.2"
"@baseai/tsconfig": "0.0.2",
"@baseai/eslint-config": "0.0.2"
},

@@ -40,0 +40,0 @@ "publishConfig": {

@@ -36,4 +36,23 @@ "use strict";

// src/common/request.ts
// src/helpers/stream.ts
var import_ChatCompletionStream = require("openai/lib/ChatCompletionStream");
var import_streaming = require("openai/streaming");
/**
 * Handles a streaming API response: wraps the SSE body in a ReadableStream,
 * pulls the thread id from the headers, and optionally includes the raw
 * header map when `rawResponse` is set.
 */
function handleResponseStream({ response, rawResponse }) {
  // AbortController lets downstream consumers cancel the SSE feed.
  const cancel = new AbortController();
  const stream = import_streaming.Stream
    .fromSSEResponse(response, cancel)
    .toReadableStream();
  const payload = {
    stream,
    threadId: response.headers.get("lb-thread-id")
  };
  if (rawResponse) {
    payload.rawResponse = {
      headers: Object.fromEntries(response.headers.entries())
    };
  }
  return payload;
}

@@ -180,3 +199,3 @@ // src/common/errors.ts

if ((_a = options.body) == null ? void 0 : _a.stream) {
return this.handleRunResponseStream({
return handleResponseStream({
response,

@@ -234,20 +253,2 @@ rawResponse: options.body.rawResponse

}
handleRunResponseStream({
response,
rawResponse
}) {
const controller = new AbortController();
const streamSSE = import_streaming.Stream.fromSSEResponse(response, controller);
const stream = streamSSE.toReadableStream();
const result = {
stream,
threadId: response.headers.get("lb-thread-id")
};
if (rawResponse) {
result.rawResponse = {
headers: Object.fromEntries(response.headers.entries())
};
}
return result;
}
async handleRunResponse({

@@ -254,0 +255,0 @@ response,

@@ -51,4 +51,5 @@ import React from 'react';

sendMessage: (content: string, options?: PipeRequestOptions) => Promise<void>;
setInput: React.Dispatch<React.SetStateAction<string>>;
};
export { usePipe };

@@ -34,2 +34,3 @@ 'use client'

var import_ChatCompletionStream = require("openai/lib/ChatCompletionStream");
var import_streaming = require("openai/streaming");
var fromReadableStream = (readableStream) => {

@@ -244,3 +245,4 @@ return import_ChatCompletionStream.ChatCompletionStream.fromReadableStream(readableStream);

threadId: threadIdRef.current,
sendMessage
sendMessage,
setInput
}),

@@ -247,0 +249,0 @@ [

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc