@axflow/models - npm Package Compare versions

Comparing version 0.0.1-beta.1 to 0.0.1-beta.2


dist/anthropic/completion.d.ts

@@ -50,2 +50,3 @@ declare namespace AnthropicCompletionTypes {

declare function stream(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<ReadableStream<AnthropicCompletionTypes.Chunk>>;
+declare function streamTokens(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<ReadableStream<string>>;
declare class AnthropicCompletion {

@@ -55,10 +56,5 @@ static run: typeof run;

static streamBytes: typeof streamBytes;
+static streamTokens: typeof streamTokens;
}
-declare class AnthropicCompletionDecoderStream extends TransformStream<Uint8Array, AnthropicCompletionTypes.Chunk> {
-private static EVENT_LINES_RE;
-private static parse;
-private static transformer;
-constructor();
-}
-export { AnthropicCompletion, AnthropicCompletionDecoderStream, AnthropicCompletionTypes };
+export { AnthropicCompletion, AnthropicCompletionTypes };

@@ -23,4 +23,3 @@ "use strict";

__export(completion_exports, {
-AnthropicCompletion: () => AnthropicCompletion,
-AnthropicCompletionDecoderStream: () => AnthropicCompletionDecoderStream
+AnthropicCompletion: () => AnthropicCompletion
});

@@ -62,6 +61,16 @@ module.exports = __toCommonJS(completion_exports);

}
+function noop(chunk) {
+return chunk;
+}
async function stream(request, options) {
const byteStream = await streamBytes(request, options);
-return byteStream.pipeThrough(new AnthropicCompletionDecoderStream());
+return byteStream.pipeThrough(new AnthropicCompletionDecoderStream(noop));
}
+function chunkToToken(chunk) {
+return chunk.event === "completion" ? chunk.data.completion : "";
+}
+async function streamTokens(request, options) {
+const byteStream = await streamBytes(request, options);
+return byteStream.pipeThrough(new AnthropicCompletionDecoderStream(chunkToToken));
+}
var AnthropicCompletion = class {

@@ -71,2 +80,3 @@ static run = run;

static streamBytes = streamBytes;
+static streamTokens = streamTokens;
};

@@ -92,3 +102,3 @@ var AnthropicCompletionDecoderStream = class _AnthropicCompletionDecoderStream extends TransformStream {

}
-static transformer() {
+static transformer(map) {
let buffer = [];

@@ -106,4 +116,7 @@ const decoder = new TextDecoder();

const event = _AnthropicCompletionDecoderStream.parse(buffer.join(""));
-if (event) {
-controller.enqueue(event);
+if (event && event.event === "error") {
+const error = event.data.error;
+controller.error(`${error.type}: ${error.message}`);
+} else if (event) {
+controller.enqueue(map(event));
}

@@ -114,4 +127,4 @@ buffer = [];

}
-constructor() {
-super({ transform: _AnthropicCompletionDecoderStream.transformer() });
+constructor(map) {
+super({ transform: _AnthropicCompletionDecoderStream.transformer(map) });
}

@@ -121,4 +134,3 @@ };

0 && (module.exports = {
-AnthropicCompletion,
-AnthropicCompletionDecoderStream
+AnthropicCompletion
});
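Net effect for consumers: the Anthropic module now exposes `streamTokens`, and its decoder surfaces Anthropic `error` events as stream errors rather than enqueuing them. A minimal consumption sketch under beta.2 (the model, prompt, and key handling below are illustrative assumptions, not taken from this diff):

```ts
import {AnthropicCompletion} from '@axflow/models/anthropic/completion';

// Illustrative request; streamTokens resolves to a ReadableStream<string>.
const tokenStream = await AnthropicCompletion.streamTokens(
  { model: 'claude-2', prompt: '\n\nHuman: Hello\n\nAssistant:', max_tokens_to_sample: 256 },
  { apiKey: process.env.ANTHROPIC_API_KEY! },
);

try {
  for await (const token of tokenStream) {
    process.stdout.write(token);
  }
} catch (err) {
  // As of beta.2, an `error` event errors the stream via
  // controller.error(`${error.type}: ${error.message}`).
  console.error('Stream failed:', err);
}
```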

@@ -63,2 +63,3 @@ type SharedRequestOptions = {

declare function stream(request: CohereGenerationTypes.Request, options: CohereGenerationTypes.RequestOptions): Promise<ReadableStream<CohereGenerationTypes.Chunk>>;
+declare function streamTokens(request: CohereGenerationTypes.Request, options: CohereGenerationTypes.RequestOptions): Promise<ReadableStream<string>>;
declare class CohereGeneration {

@@ -68,9 +69,5 @@ static run: typeof run;

static streamBytes: typeof streamBytes;
+static streamTokens: typeof streamTokens;
}
-declare class CohereGenerationDecoderStream extends TransformStream<Uint8Array, CohereGenerationTypes.Chunk> {
-private static parse;
-private static transformer;
-constructor();
-}
-export { CohereGeneration, CohereGenerationDecoderStream, CohereGenerationTypes };
+export { CohereGeneration, CohereGenerationTypes };

@@ -23,4 +23,3 @@ "use strict";

__export(generation_exports, {
-CohereGeneration: () => CohereGeneration,
-CohereGenerationDecoderStream: () => CohereGenerationDecoderStream
+CohereGeneration: () => CohereGeneration
});

@@ -65,6 +64,16 @@ module.exports = __toCommonJS(generation_exports);

}
+function noop(chunk) {
+return chunk;
+}
async function stream(request, options) {
const byteStream = await streamBytes(request, options);
-return byteStream.pipeThrough(new CohereGenerationDecoderStream());
+return byteStream.pipeThrough(new CohereGenerationDecoderStream(noop));
}
+function chunkToToken(chunk) {
+return chunk.text || "";
+}
+async function streamTokens(request, options) {
+const byteStream = await streamBytes(request, options);
+return byteStream.pipeThrough(new CohereGenerationDecoderStream(chunkToToken));
+}
var CohereGeneration = class {

@@ -74,2 +83,3 @@ static run = run;

static streamBytes = streamBytes;
+static streamTokens = streamTokens;
};

@@ -90,3 +100,3 @@ var CohereGenerationDecoderStream = class _CohereGenerationDecoderStream extends TransformStream {

}
-static transformer() {
+static transformer(map) {
let buffer = [];

@@ -104,3 +114,3 @@ const decoder = new TextDecoder();

if (event) {
-controller.enqueue(event);
+controller.enqueue(map(event));
}

@@ -111,4 +121,4 @@ buffer = [];

}
-constructor() {
-super({ transform: _CohereGenerationDecoderStream.transformer() });
+constructor(map) {
+super({ transform: _CohereGenerationDecoderStream.transformer(map) });
}

@@ -118,4 +128,3 @@ };

0 && (module.exports = {
-CohereGeneration,
-CohereGenerationDecoderStream
+CohereGeneration
});
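The Cohere module follows the same pattern: `stream` yields parsed chunk objects, and the new `streamTokens` applies `chunk.text || ''` for you. A hedged sketch of consuming the object stream (request fields are illustrative):

```ts
import {CohereGeneration} from '@axflow/models/cohere/generation';

const chunkStream = await CohereGeneration.stream(
  { prompt: 'Write a haiku about the sea', max_tokens: 60 }, // illustrative fields
  { apiKey: process.env.COHERE_API_KEY! },
);

// Each chunk exposes `text`, per the chunkToToken mapping above.
for await (const chunk of chunkStream) {
  process.stdout.write(chunk.text || '');
}
```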

@@ -78,2 +78,3 @@ type SharedRequestOptions = {

declare function stream(request: OpenAIChatTypes.Request, options: OpenAIChatTypes.RequestOptions): Promise<ReadableStream<OpenAIChatTypes.Chunk>>;
+declare function streamTokens(request: OpenAIChatTypes.Request, options: OpenAIChatTypes.RequestOptions): Promise<ReadableStream<string>>;
declare class OpenAIChat {

@@ -83,7 +84,5 @@ static run: typeof run;

static streamBytes: typeof streamBytes;
+static streamTokens: typeof streamTokens;
}
-declare class OpenAIChatDecoderStream extends TransformStream<Uint8Array, OpenAIChatTypes.Chunk> {
-constructor();
-}
-export { OpenAIChat, OpenAIChatDecoderStream, OpenAIChatTypes };
+export { OpenAIChat, OpenAIChatTypes };

@@ -23,4 +23,3 @@ "use strict";

__export(chat_exports, {
-OpenAIChat: () => OpenAIChat,
-OpenAIChatDecoderStream: () => OpenAIChatDecoderStream
+OpenAIChat: () => OpenAIChat
});

@@ -41,3 +40,3 @@ module.exports = __toCommonJS(chat_exports);

}
-function streamTransformer() {
+function streamTransformer(map) {
let buffer = [];

@@ -55,3 +54,3 @@ const decoder = new TextDecoder();

if (parsedChunk) {
-controller.enqueue(parsedChunk);
+controller.enqueue(map(parsedChunk));
}

@@ -104,6 +103,16 @@ buffer = [];

}
+function noop(chunk) {
+return chunk;
+}
async function stream(request, options) {
const byteStream = await streamBytes(request, options);
-return byteStream.pipeThrough(new OpenAIChatDecoderStream());
+return byteStream.pipeThrough(new OpenAIChatDecoderStream(noop));
}
+function chunkToToken(chunk) {
+return chunk.choices[0].delta.content || "";
+}
+async function streamTokens(request, options) {
+const byteStream = await streamBytes(request, options);
+return byteStream.pipeThrough(new OpenAIChatDecoderStream(chunkToToken));
+}
var OpenAIChat = class {

@@ -113,6 +122,7 @@ static run = run;

static streamBytes = streamBytes;
+static streamTokens = streamTokens;
};
var OpenAIChatDecoderStream = class extends TransformStream {
-constructor() {
-super({ transform: streamTransformer() });
+constructor(map) {
+super({ transform: streamTransformer(map) });
}

@@ -122,4 +132,3 @@ };

0 && (module.exports = {
-OpenAIChat,
-OpenAIChatDecoderStream
+OpenAIChat
});
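For OpenAI chat, the split is the same: `stream` produces chunk objects whose text lives at `choices[0].delta.content`, while `streamTokens` pre-extracts those strings. A brief sketch (message content and key handling are assumptions):

```ts
import {OpenAIChat} from '@axflow/models/openai/chat';

const request = {
  model: 'gpt-3.5-turbo',
  messages: [{ role: 'user' as const, content: 'Say hi' }],
};
const options = { apiKey: process.env.OPENAI_API_KEY! };

// Object stream: pull the delta out of each chunk yourself.
for await (const chunk of await OpenAIChat.stream(request, options)) {
  process.stdout.write(chunk.choices[0].delta.content || '');
}

// Token stream: the same text, already mapped to strings.
for await (const token of await OpenAIChat.streamTokens(request, options)) {
  process.stdout.write(token);
}
```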

@@ -60,2 +60,3 @@ type SharedRequestOptions = {

declare function stream(request: OpenAICompletionTypes.Request, options: OpenAICompletionTypes.RequestOptions): Promise<ReadableStream<OpenAICompletionTypes.Chunk>>;
+declare function streamTokens(request: OpenAICompletionTypes.Request, options: OpenAICompletionTypes.RequestOptions): Promise<ReadableStream<string>>;
declare class OpenAICompletion {

@@ -65,7 +66,5 @@ static run: typeof run;

static streamBytes: typeof streamBytes;
+static streamTokens: typeof streamTokens;
}
-declare class OpenAICompletionDecoderStream extends TransformStream<Uint8Array, OpenAICompletionTypes.Chunk> {
-constructor();
-}
-export { OpenAICompletion, OpenAICompletionDecoderStream, OpenAICompletionTypes };
+export { OpenAICompletion, OpenAICompletionTypes };

@@ -23,4 +23,3 @@ "use strict";

__export(completion_exports, {
-OpenAICompletion: () => OpenAICompletion,
-OpenAICompletionDecoderStream: () => OpenAICompletionDecoderStream
+OpenAICompletion: () => OpenAICompletion
});

@@ -41,3 +40,3 @@ module.exports = __toCommonJS(completion_exports);

}
-function streamTransformer() {
+function streamTransformer(map) {
let buffer = [];

@@ -55,3 +54,3 @@ const decoder = new TextDecoder();

if (parsedChunk) {
-controller.enqueue(parsedChunk);
+controller.enqueue(map(parsedChunk));
}

@@ -104,6 +103,16 @@ buffer = [];

}
+function noop(chunk) {
+return chunk;
+}
async function stream(request, options) {
const byteStream = await streamBytes(request, options);
-return byteStream.pipeThrough(new OpenAICompletionDecoderStream());
+return byteStream.pipeThrough(new OpenAICompletionDecoderStream(noop));
}
+function chunkToToken(chunk) {
+return chunk.choices[0].text || "";
+}
+async function streamTokens(request, options) {
+const byteStream = await streamBytes(request, options);
+return byteStream.pipeThrough(new OpenAICompletionDecoderStream(chunkToToken));
+}
var OpenAICompletion = class {

@@ -113,6 +122,7 @@ static run = run;

static streamBytes = streamBytes;
+static streamTokens = streamTokens;
};
var OpenAICompletionDecoderStream = class extends TransformStream {
-constructor() {
-super({ transform: streamTransformer() });
+constructor(map) {
+super({ transform: streamTransformer(map) });
}

@@ -122,4 +132,3 @@ };

0 && (module.exports = {
-OpenAICompletion,
-OpenAICompletionDecoderStream
+OpenAICompletion
});
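The completion module mirrors the chat module; the only difference is that a completion chunk's text lives at `choices[0].text` rather than `choices[0].delta.content`. A short sketch (model and prompt are illustrative):

```ts
import {OpenAICompletion} from '@axflow/models/openai/completion';

const tokens = await OpenAICompletion.streamTokens(
  { model: 'text-davinci-003', prompt: 'Once upon a time' }, // illustrative request
  { apiKey: process.env.OPENAI_API_KEY! },
);
for await (const token of tokens) process.stdout.write(token);
```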

@@ -23,5 +23,5 @@ import { MessageType, JSONValueType } from '@axflow/models/shared';

* If given an object, the object will be merged into the request body with the
-* new message and the message history, e.g., `{...body, message, history }`.
+* full set of messages, i.e., `{...body, messages }`.
*
-* By default, the request body is `{ message, history }`.
+* By default, the request body is `{ messages }`.
*/

@@ -28,0 +28,0 @@ body?: BodyType;

@@ -98,6 +98,8 @@ "use strict";

var DEFAULT_ACCESSOR = (value) => value;
-var DEFAULT_BODY = (message, history) => ({ message, history });
+var DEFAULT_BODY = (message, history) => ({
+messages: [...history, message]
+});
var DEFAULT_HEADERS = {};
function useChat(options) {
-options = options ?? {};
+options ??= {};
const [input, setInput] = (0, import_react.useState)(options.initialInput ?? "");

@@ -113,6 +115,6 @@ const [messages, _setMessages] = (0, import_react.useState)(options.initialMessages ?? []);

);
-const url = options.url || DEFAULT_URL;
-const accessor = options.accessor || DEFAULT_ACCESSOR;
-const body = options.body || DEFAULT_BODY;
-const headers = options.headers || DEFAULT_HEADERS;
+const url = options.url ?? DEFAULT_URL;
+const accessor = options.accessor ?? DEFAULT_ACCESSOR;
+const body = options.body ?? DEFAULT_BODY;
+const headers = options.headers ?? DEFAULT_HEADERS;
function append(message) {

@@ -119,0 +121,0 @@ stableAppend(message, messagesRef, setMessages, url, headers, body, accessor);
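The practical effect of the `DEFAULT_BODY` change: a `useChat` request that previously posted `{ message, history }` now posts a single `messages` array with the new message appended. A sketch of the two shapes, plus a custom `body` option that would restore the old one (the option signature here is inferred from the defaults above, so treat it as an assumption):

```ts
// beta.1 default request body:
//   { message: { role: 'user', content: 'hi' }, history: [/* prior messages */] }
// beta.2 default request body:
//   { messages: [/* prior messages */, { role: 'user', content: 'hi' }] }

// Hypothetical opt-out for backends that still expect the old shape:
const options = {
  body: (message: unknown, history: unknown[]) => ({ message, history }),
};
```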

@@ -99,3 +99,2 @@ declare class HttpError extends Error {

* @param options
-* @param options.map A function to map input chunks to output chunks. The return value must be either a JSON-serializable object or a Promise that resolves to a JSON-serializable object.
* @param options.data Additional data to prepend to the output stream.

@@ -105,3 +104,2 @@ * @returns A readable stream of newline-delimited JSON.

static encode<T = any>(stream: ReadableStream<T>, options?: {
-map?: (value: T) => JSONValueType | Promise<JSONValueType>;
data?: JSONValueType[];

@@ -120,30 +118,33 @@ }): ReadableStream<Uint8Array>;

/**
-* Returns a `Response` object that streams newline-delimited JSON objects.
-*
-* Example
-*
-* export async function POST(request: Request) {
-* const req = await request.json();
-* const stream = await OpenAIChat.stream(req, { apiKey: OPENAI_API_KEY });
-* return StreamingJsonResponse(stream, {
-* map: (chunk) => chunk.choices[0].delta.content ?? ''
-* });
-* }
-*
-* @see http://ndjson.org
-*
-* @param stream A readable stream of chunks to encode as newline-delimited JSON.
-* @param options
-* @param options.status HTTP response status.
-* @param options.statusText HTTP response status text.
-* @param options.headers HTTP response headers.
-* @param options.map A function to map input chunks to output chunks. The return value must be either a JSON-serializable object or a Promise that resolves to a JSON-serializable object.
-* @param options.data Additional data to prepend to the output stream.
-* @returns A `Response` object that streams newline-delimited JSON objects.
+* A subclass of `Response` that streams newline-delimited JSON.
*/
-declare function StreamingJsonResponse<T>(stream: ReadableStream<T>, options?: ResponseInit & {
-map?: (value: T) => JSONValueType | Promise<JSONValueType>;
-data?: JSONValueType[];
-}): Response;
+declare class StreamingJsonResponse<T> extends Response {
+/**
+* Create a `Response` object that streams newline-delimited JSON objects.
+*
+* Example
+*
+* export async function POST(request: Request) {
+* const req = await request.json();
+* const stream = await OpenAIChat.stream(req, { apiKey: OPENAI_API_KEY });
+* return new StreamingJsonResponse(stream, {
+* map: (chunk) => chunk.choices[0].delta.content ?? '',
+* data: [{ stream: "additional" }, { data: "here" }]
+* });
+* }
+*
+* @see http://ndjson.org
+*
+* @param stream A readable stream of chunks to encode as newline-delimited JSON.
+* @param options
+* @param options.status HTTP response status.
+* @param options.statusText HTTP response status text.
+* @param options.headers HTTP response headers.
+* @param options.data Additional data to prepend to the output stream.
+*/
+constructor(stream: ReadableStream<T>, options?: ResponseInit & {
+data?: JSONValueType[];
+});
+}
export { HttpError, JSONValueType, MessageType, NdJsonStream, NdJsonValueType, POST, StreamToIterable, StreamingJsonResponse, isHttpError };

@@ -79,5 +79,2 @@ "use strict";

}
-function identity(value) {
-return value;
-}
var NdJsonStream = class {

@@ -127,3 +124,2 @@ /**

* @param options
-* @param options.map A function to map input chunks to output chunks. The return value must be either a JSON-serializable object or a Promise that resolves to a JSON-serializable object.
* @param options.data Additional data to prepend to the output stream.

@@ -133,5 +129,3 @@ * @returns A readable stream of newline-delimited JSON.

static encode(stream, options) {
-options = options || {};
-const map = options.map || identity;
-const data = options.data || [];
+const data = options?.data ?? [];
const encoder = new TextEncoder();

@@ -149,4 +143,3 @@ function serialize(obj) {

},
-async transform(chunk, controller) {
-const value = await Promise.resolve(map(chunk));
+async transform(value, controller) {
controller.enqueue(serialize({ type: "chunk", value }));

@@ -186,11 +179,38 @@ }

};
-function StreamingJsonResponse(stream, options) {
-options = options ?? {};
-const ndjson = NdJsonStream.encode(stream, { map: options.map, data: options.data });
-return new Response(ndjson, {
-status: options.status,
-statusText: options.statusText,
-headers: { ...options.headers, ...NdJsonStream.headers }
-});
-}
+var StreamingJsonResponse = class extends Response {
+/**
+* Create a `Response` object that streams newline-delimited JSON objects.
+*
+* Example
+*
+* export async function POST(request: Request) {
+* const req = await request.json();
+* const stream = await OpenAIChat.stream(req, { apiKey: OPENAI_API_KEY });
+* return new StreamingJsonResponse(stream, {
+* map: (chunk) => chunk.choices[0].delta.content ?? '',
+* data: [{ stream: "additional" }, { data: "here" }]
+* });
+* }
+*
+* @see http://ndjson.org
+*
+* @param stream A readable stream of chunks to encode as newline-delimited JSON.
+* @param options
+* @param options.status HTTP response status.
+* @param options.statusText HTTP response status text.
+* @param options.headers HTTP response headers.
+* @param options.data Additional data to prepend to the output stream.
+*/
+constructor(stream, options) {
+options ??= {};
+const ndjson = NdJsonStream.encode(stream, {
+data: options.data
+});
+super(ndjson, {
+status: options.status,
+statusText: options.statusText,
+headers: { ...options.headers, ...NdJsonStream.headers }
+});
+}
+};
// Annotate the CommonJS export names for ESM import in node:

@@ -197,0 +217,0 @@ 0 && (module.exports = {
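Because `map` was removed from both `NdJsonStream.encode` and the `StreamingJsonResponse` options, chunks should be mapped before encoding, e.g. by streaming tokens upstream. A hedged sketch of a route handler under that assumption (env var and request shape are illustrative):

```ts
import {OpenAIChat} from '@axflow/models/openai/chat';
import {StreamingJsonResponse} from '@axflow/models/shared';

export async function POST(request: Request) {
  const { messages } = await request.json();
  // streamTokens already yields strings, so no `map` option is needed.
  const stream = await OpenAIChat.streamTokens(
    { model: 'gpt-3.5-turbo', messages },
    { apiKey: process.env.OPENAI_API_KEY! },
  );
  return new StreamingJsonResponse(stream, {
    data: [{ note: 'prepended before the chunk stream' }], // optional, per the new API
  });
}
```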

{
"name": "@axflow/models",
"version": "0.0.1-beta.1",
"version": "0.0.1-beta.2",
"description": "Zero-dependency module to run, stream, and render results across the most popular LLMs and embedding models",

@@ -142,3 +142,3 @@ "author": "Axilla (https://axilla.io)",

},
"gitHead": "58f70b0f492bbcc1a577527687fd956e5b6fa4ab"
"gitHead": "6e7c2a345c3db43f64f750783abeb85d31ee6885"
}

@@ -11,3 +11,4 @@ # @axflow/models

-* Zero-dependency, lightweight package to consume all the most popular LLMs, embedding models, and more
+* Zero-dependency, modular package to consume all the most popular LLMs, embedding models, and more
* Comes with a set of React hooks for easily creating robust completion and chat components
* Built exclusively on modern web standards such as `fetch` and the stream APIs

@@ -66,2 +67,28 @@ * First-class streaming support with both low-level byte streams or higher-level JavaScript objects

+For models that support streaming, there is a convenience method for streaming only the string tokens.
+```ts
+import {OpenAIChat} from '@axflow/models/openai/chat';
+const tokenStream = await OpenAIChat.streamTokens(
+{
+model: 'gpt-4',
+messages: [{ role: 'user', content: 'What is the Eiffel tower?' }],
+},
+{
+apiKey: '<openai api key>',
+},
+);
+// Example stdout output:
+//
+// The Eiffel Tower is a renowned wrought-iron landmark located in Paris, France, known globally as a symbol of romance and elegance.
+//
+for await (const token of tokenStream) {
+process.stdout.write(token);
+}
+process.stdout.write("\n");
+```
## Next.js edge proxy example

@@ -120,3 +147,3 @@

```ts
-import {OpenAIChat, OpenAIChatDecoderStream} from '@axflow/models/openai/chat';
+import {OpenAIChat} from '@axflow/models/openai/chat';
import type {OpenAIChatTypes} from '@axflow/models/openai/chat';

@@ -127,2 +154,3 @@

OpenAIChat.streamBytes(/* args */)
+OpenAIChat.streamTokens(/* args */)
```

@@ -133,3 +161,3 @@

```ts
-import {OpenAICompletion, OpenAICompletionDecoderStream} from '@axflow/models/openai/completion';
+import {OpenAICompletion} from '@axflow/models/openai/completion';
import type {OpenAICompletionTypes} from '@axflow/models/openai/completion';

@@ -140,2 +168,3 @@

OpenAICompletion.streamBytes(/* args */)
+OpenAICompletion.streamTokens(/* args */)
```

@@ -155,3 +184,3 @@

```ts
-import {CohereGeneration, CohereGenerationDecoderStream} from '@axflow/models/cohere/generation';
+import {CohereGeneration} from '@axflow/models/cohere/generation';
import type {CohereGenerationTypes} from '@axflow/models/cohere/generation';

@@ -162,2 +191,3 @@

CohereGeneration.streamBytes(/* args */)
+CohereGeneration.streamTokens(/* args */)
```

@@ -177,3 +207,3 @@

```ts
-import {AnthropicCompletion, AnthropicCompletionDecoderStream} from '@axflow/models/anthropic/completion';
+import {AnthropicCompletion} from '@axflow/models/anthropic/completion';
import type {AnthropicCompletionTypes} from '@axflow/models/anthropic/completion';

@@ -184,4 +214,14 @@

AnthropicCompletion.streamBytes(/* args */)
+AnthropicCompletion.streamTokens(/* args */)
```
+### @axflow/models/react
+```ts
+import {useChat} from '@axflow/models/react';
+import type {UseChatOptionsType, UseChatResultType} from '@axflow/models/shared';
+```
+`useChat` is a React hook that makes building chat components a breeze.
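A hedged sketch of the hook in a component. The diff shows `input`/`setInput` state and an `append(message)` helper, but the exact return shape lives in `UseChatResultType`, which this comparison does not display, so the destructured names below are assumptions:

```tsx
import {useChat} from '@axflow/models/react';

function Chat() {
  // Assumed return shape; consult UseChatResultType for the real one.
  const { input, setInput, messages, append } = useChat({ url: '/api/chat' });
  return (
    <div>
      {messages.map((m, i) => (
        <p key={i}>{m.content}</p>
      ))}
      <input value={input} onChange={(e) => setInput(e.target.value)} />
      <button onClick={() => append({ role: 'user', content: input })}>Send</button>
    </div>
  );
}
```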
### @axflow/models/shared

@@ -188,0 +228,0 @@

