@axflow/models
Comparing version 0.0.1-alpha.4 to 0.0.1-alpha.5
@@ -1,2 +0,2 @@
-declare namespace AnthropicTypes {
+declare namespace AnthropicCompletionTypes {
export type Request = {
@@ -47,5 +47,5 @@ model: string;
}
-declare function run(request: AnthropicTypes.Request, options: AnthropicTypes.RequestOptions): Promise<AnthropicTypes.Response>;
-declare function streamBytes(request: AnthropicTypes.Request, options: AnthropicTypes.RequestOptions): Promise<ReadableStream<Uint8Array>>;
-declare function stream(request: AnthropicTypes.Request, options: AnthropicTypes.RequestOptions): Promise<ReadableStream<AnthropicTypes.Chunk>>;
+declare function run(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<AnthropicCompletionTypes.Response>;
+declare function streamBytes(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<ReadableStream<Uint8Array>>;
+declare function stream(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<ReadableStream<AnthropicCompletionTypes.Chunk>>;
declare class AnthropicCompletion {
@@ -56,3 +56,9 @@ static run: typeof run;
}
+declare class AnthropicCompletionDecoderStream extends TransformStream<Uint8Array, AnthropicCompletionTypes.Chunk> {
+private static EVENT_LINES_RE;
+private static parse;
+private static transformer;
+constructor();
+}
-export { AnthropicCompletion, AnthropicTypes, run, stream, streamBytes };
+export { AnthropicCompletion, AnthropicCompletionDecoderStream, AnthropicCompletionTypes };
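In these declarations the `AnthropicTypes` namespace is renamed to `AnthropicCompletionTypes`, a new `AnthropicCompletionDecoderStream` class is declared, and the standalone `run`/`stream`/`streamBytes` exports are dropped in favor of the statics on `AnthropicCompletion`. A minimal sketch of how the renamed exports might be used, assuming the import path shown in the README and an Anthropic-style request shape (`model`, `prompt`, `max_tokens_to_sample`) that this truncated diff does not confirm:

```ts
import { AnthropicCompletion } from '@axflow/models/anthropic/completion';
import type { AnthropicCompletionTypes } from '@axflow/models/anthropic/completion';

// The field names below are assumptions; the full Request type is truncated
// in the declaration diff above.
const request: AnthropicCompletionTypes.Request = {
  model: 'claude-2',
  prompt: '\n\nHuman: Hello\n\nAssistant:',
  max_tokens_to_sample: 256,
};

async function complete(): Promise<AnthropicCompletionTypes.Response> {
  // run() resolves with a single parsed response rather than a stream.
  return AnthropicCompletion.run(request, { apiKey: process.env.ANTHROPIC_API_KEY! });
}
```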
@@ -24,5 +24,3 @@ "use strict";
AnthropicCompletion: () => AnthropicCompletion,
-run: () => run,
-stream: () => stream,
-streamBytes: () => streamBytes
+AnthropicCompletionDecoderStream: () => AnthropicCompletionDecoderStream
});
@@ -66,3 +64,3 @@ module.exports = __toCommonJS(completion_exports);
const byteStream = await streamBytes(request, options);
-return byteStream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventDecoderStream());
+return byteStream.pipeThrough(new AnthropicCompletionDecoderStream());
}
@@ -74,3 +72,3 @@ var AnthropicCompletion = class {
};
-var EventDecoderStream = class _EventDecoderStream extends TransformStream {
+var AnthropicCompletionDecoderStream = class _AnthropicCompletionDecoderStream extends TransformStream {
static EVENT_LINES_RE = /^event:\s*(\w+)\r\ndata:\s*(.+)$/;
@@ -82,3 +80,3 @@ static parse(lines) {
}
-const match = lines.match(_EventDecoderStream.EVENT_LINES_RE);
+const match = lines.match(_AnthropicCompletionDecoderStream.EVENT_LINES_RE);
try {
@@ -97,3 +95,5 @@ const event = match[1];
let buffer = [];
-return (chunk, controller) => {
+const decoder = new TextDecoder();
+return (bytes, controller) => {
+const chunk = decoder.decode(bytes);
for (let i = 0, len = chunk.length; i < len; ++i) {
@@ -106,3 +106,3 @@ const bufferLen = buffer.length;
}
-const event = _EventDecoderStream.parse(buffer.join(""));
+const event = _AnthropicCompletionDecoderStream.parse(buffer.join(""));
if (event) {
@@ -116,3 +116,3 @@ controller.enqueue(event);
constructor() {
-super({ transform: _EventDecoderStream.transformer() });
+super({ transform: _AnthropicCompletionDecoderStream.transformer() });
}
@@ -123,5 +123,3 @@ };
AnthropicCompletion,
-run,
-stream,
-streamBytes
+AnthropicCompletionDecoderStream
});
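The compiled module shows the substantive change behind the rename: the transformer now creates its own `TextDecoder` and decodes each incoming `Uint8Array` (`const chunk = decoder.decode(bytes)`), so `stream()` pipes the byte stream through a single `AnthropicCompletionDecoderStream` instead of chaining `TextDecoderStream` into the old `EventDecoderStream`. A sketch of consuming the parsed chunk stream, with the same hedged request shape as above:

```ts
import { AnthropicCompletion } from '@axflow/models/anthropic/completion';
import type { AnthropicCompletionTypes } from '@axflow/models/anthropic/completion';

async function logChunks(request: AnthropicCompletionTypes.Request) {
  // stream() returns a ReadableStream of parsed chunks decoded directly
  // from the upstream bytes; the apiKey option name is assumed from the
  // README's OpenAI example.
  const chunkStream = await AnthropicCompletion.stream(request, {
    apiKey: process.env.ANTHROPIC_API_KEY!,
  });

  const reader = chunkStream.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // an AnthropicCompletionTypes.Chunk
  }
}
```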
@@ -33,2 +33,2 @@ type SharedRequestOptions = {
-export { CohereEmbedding, CohereEmbeddingTypes, run };
+export { CohereEmbedding, CohereEmbeddingTypes };
@@ -23,4 +23,3 @@ "use strict";
__export(embedding_exports, {
-CohereEmbedding: () => CohereEmbedding,
-run: () => run
+CohereEmbedding: () => CohereEmbedding
});
@@ -58,4 +57,3 @@ module.exports = __toCommonJS(embedding_exports);
0 && (module.exports = {
-CohereEmbedding,
-run
+CohereEmbedding
});
@@ -68,3 +68,8 @@ type SharedRequestOptions = {
}
+declare class CohereGenerationDecoderStream extends TransformStream<Uint8Array, CohereGenerationTypes.Chunk> {
+private static parse;
+private static transformer;
+constructor();
+}
-export { CohereGeneration, CohereGenerationTypes, run, stream, streamBytes };
+export { CohereGeneration, CohereGenerationDecoderStream, CohereGenerationTypes };
@@ -24,5 +24,3 @@ "use strict";
CohereGeneration: () => CohereGeneration,
-run: () => run,
-stream: () => stream,
-streamBytes: () => streamBytes
+CohereGenerationDecoderStream: () => CohereGenerationDecoderStream
});
@@ -69,3 +67,3 @@ module.exports = __toCommonJS(generation_exports);
const byteStream = await streamBytes(request, options);
-return byteStream.pipeThrough(new TextDecoderStream()).pipeThrough(new EventDecoderStream());
+return byteStream.pipeThrough(new CohereGenerationDecoderStream());
}
@@ -77,3 +75,3 @@ var CohereGeneration = class {
};
-var EventDecoderStream = class _EventDecoderStream extends TransformStream {
+var CohereGenerationDecoderStream = class _CohereGenerationDecoderStream extends TransformStream {
static parse(line) {
@@ -94,3 +92,5 @@ line = line.trim();
let buffer = [];
-return (chunk, controller) => {
+const decoder = new TextDecoder();
+return (bytes, controller) => {
+const chunk = decoder.decode(bytes);
for (let i = 0, len = chunk.length; i < len; ++i) {
@@ -102,3 +102,3 @@ const isEventSeparator = chunk[i] === "\n";
}
-const event = _EventDecoderStream.parse(buffer.join(""));
+const event = _CohereGenerationDecoderStream.parse(buffer.join(""));
if (event) {
@@ -112,3 +112,3 @@ controller.enqueue(event);
constructor() {
-super({ transform: _EventDecoderStream.transformer() });
+super({ transform: _CohereGenerationDecoderStream.transformer() });
}
@@ -119,5 +119,3 @@ };
CohereGeneration,
-run,
-stream,
-streamBytes
+CohereGenerationDecoderStream
});
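The Cohere generation module gets the same treatment: its transformer now decodes bytes itself before splitting on newlines, and the decoder class is exported with the type `TransformStream<Uint8Array, CohereGenerationTypes.Chunk>`. That typing means it can also be applied to a streaming response body obtained outside the library. A sketch under that assumption; the fetch handling below is illustrative only and not taken from this diff:

```ts
import { CohereGenerationDecoderStream } from '@axflow/models/cohere/generation';
import type { CohereGenerationTypes } from '@axflow/models/cohere/generation';

// Decode an already-fetched streaming response into parsed chunks.
async function decode(response: Response) {
  if (!response.body) throw new Error('Response has no body to decode');

  const chunks: ReadableStream<CohereGenerationTypes.Chunk> =
    response.body.pipeThrough(new CohereGenerationDecoderStream());

  const reader = chunks.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value);
  }
}
```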
@@ -83,3 +83,6 @@ type SharedRequestOptions = {
}
+declare class OpenAIChatDecoderStream extends TransformStream<Uint8Array, OpenAIChatTypes.Chunk> {
+constructor();
+}
-export { OpenAIChat, OpenAIChatTypes, run, stream, streamBytes };
+export { OpenAIChat, OpenAIChatDecoderStream, OpenAIChatTypes };
@@ -24,5 +24,3 @@ "use strict";
OpenAIChat: () => OpenAIChat,
-run: () => run,
-stream: () => stream,
-streamBytes: () => streamBytes
+OpenAIChatDecoderStream: () => OpenAIChatDecoderStream
});
@@ -45,3 +43,5 @@ module.exports = __toCommonJS(chat_exports);
let buffer = [];
-return (chunk, controller) => {
+const decoder = new TextDecoder();
+return (bytes, controller) => {
+const chunk = decoder.decode(bytes);
for (let i = 0, len = chunk.length; i < len; ++i) {
@@ -105,6 +105,3 @@ const isChunkSeparator = chunk[i] === "\n" && buffer[buffer.length - 1] === "\n";
const byteStream = await streamBytes(request, options);
-const chunkTransformerStream = new TransformStream({
-transform: streamTransformer()
-});
-return byteStream.pipeThrough(new TextDecoderStream()).pipeThrough(chunkTransformerStream);
+return byteStream.pipeThrough(new OpenAIChatDecoderStream());
}
@@ -116,8 +113,11 @@ var OpenAIChat = class {
};
+var OpenAIChatDecoderStream = class extends TransformStream {
+constructor() {
+super({ transform: streamTransformer() });
+}
+};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
OpenAIChat,
-run,
-stream,
-streamBytes
+OpenAIChatDecoderStream
});
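For OpenAI chat, the inline `TransformStream({ transform: streamTransformer() })` that `stream()` previously built is replaced by the exported `OpenAIChatDecoderStream` class, and `stream()` now pipes the raw bytes straight through it. A sketch of a server route that forwards the byte stream to the browser, in the spirit of the README example; the edge-style `POST(request: Request)` handler signature is an assumption about the hosting framework, not something this package defines:

```ts
import { OpenAIChat } from '@axflow/models/openai/chat';
import type { OpenAIChatTypes } from '@axflow/models/openai/chat';

// Hypothetical edge-style route handler. The API key stays on the server;
// the client only ever sees the streamed bytes.
export async function POST(request: Request) {
  const chatRequest: OpenAIChatTypes.Request = await request.json();

  const byteStream = await OpenAIChat.streamBytes(chatRequest, {
    apiKey: process.env.OPENAI_API_KEY!,
  });

  return new Response(byteStream);
}
```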
@@ -65,3 +65,6 @@ type SharedRequestOptions = {
}
+declare class OpenAICompletionDecoderStream extends TransformStream<Uint8Array, OpenAICompletionTypes.Chunk> {
+constructor();
+}
-export { OpenAICompletion, OpenAICompletionTypes, run, stream, streamBytes };
+export { OpenAICompletion, OpenAICompletionDecoderStream, OpenAICompletionTypes };
@@ -24,5 +24,3 @@ "use strict";
OpenAICompletion: () => OpenAICompletion,
-run: () => run,
-stream: () => stream,
-streamBytes: () => streamBytes
+OpenAICompletionDecoderStream: () => OpenAICompletionDecoderStream
});
@@ -45,3 +43,5 @@ module.exports = __toCommonJS(completion_exports);
let buffer = [];
-return (chunk, controller) => {
+const decoder = new TextDecoder();
+return (bytes, controller) => {
+const chunk = decoder.decode(bytes);
for (let i = 0, len = chunk.length; i < len; ++i) {
@@ -105,6 +105,3 @@ const isChunkSeparator = chunk[i] === "\n" && buffer[buffer.length - 1] === "\n";
const byteStream = await streamBytes(request, options);
-const chunkTransformerStream = new TransformStream({
-transform: streamTransformer()
-});
-return byteStream.pipeThrough(new TextDecoderStream()).pipeThrough(chunkTransformerStream);
+return byteStream.pipeThrough(new OpenAICompletionDecoderStream());
}
@@ -116,8 +113,11 @@ var OpenAICompletion = class {
};
+var OpenAICompletionDecoderStream = class extends TransformStream {
+constructor() {
+super({ transform: streamTransformer() });
+}
+};
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
OpenAICompletion,
-run,
-stream,
-streamBytes
+OpenAICompletionDecoderStream
});
@@ -34,2 +34,2 @@ type SharedRequestOptions = {
-export { OpenAIEmbedding, OpenAIEmbeddingTypes, run };
+export { OpenAIEmbedding, OpenAIEmbeddingTypes };
@@ -23,4 +23,3 @@ "use strict";
__export(embedding_exports, {
-OpenAIEmbedding: () => OpenAIEmbedding,
-run: () => run
+OpenAIEmbedding: () => OpenAIEmbedding
});
@@ -58,4 +57,3 @@ module.exports = __toCommonJS(embedding_exports);
0 && (module.exports = {
-OpenAIEmbedding,
-run
+OpenAIEmbedding
});
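Both embedding modules simply drop the standalone `run` export; the static `run` on each class remains the entry point (the README's `OpenAIEmbedding.run(/* args */)` and `CohereEmbedding.run(/* args */)` snippets below are unchanged). A sketch of calling them; the request field names are assumptions, since the `Request` types are truncated in the declaration diffs:

```ts
import { OpenAIEmbedding } from '@axflow/models/openai/embedding';
import { CohereEmbedding } from '@axflow/models/cohere/embedding';

async function embed(text: string) {
  // Assumed field names for each provider's embedding request.
  const openai = await OpenAIEmbedding.run(
    { model: 'text-embedding-ada-002', input: text },
    { apiKey: process.env.OPENAI_API_KEY! },
  );

  const cohere = await CohereEmbedding.run(
    { model: 'embed-english-v2.0', texts: [text] },
    { apiKey: process.env.COHERE_API_KEY! },
  );

  return { openai, cohere };
}
```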
{
"name": "@axflow/models",
-"version": "0.0.1-alpha.4",
+"version": "0.0.1-alpha.5",
"description": "Zero-dependency module to run, stream, and render results across the most popular LLMs and embedding models",
@@ -123,3 +123,3 @@ "author": "Axilla (https://axilla.io)",
},
-"gitHead": "ad74308aa99d91c2b2736415cfe4ed497b13dac2"
+"gitHead": "df6d224ca97e68b5c0e2856a00f5a4a5329fe372"
}
@@ -81,4 +81,3 @@ # @axflow/models
-// Byte stream is more efficient here because we do not parse the stream and
-// re-encode it, but rather pass the bytes directly through to the client.
+// We'll stream the bytes from OpenAI directly to the client
const stream = await OpenAIChat.streamBytes(chatRequest, {
@@ -94,2 +93,4 @@ apiKey: process.env.OPENAI_API_KEY!,
+*DO NOT expose api keys to your frontend.*
```ts
@@ -119,3 +120,4 @@ import { OpenAIChat } from '@axflow/models/openai/chat';
```ts
-import {OpenAIChat} from '@axflow/models/openai/chat';
+import {OpenAIChat, OpenAIChatDecoderStream} from '@axflow/models/openai/chat';
import type {OpenAIChatTypes} from '@axflow/models/openai/chat';
@@ -130,3 +132,4 @@ OpenAIChat.run(/* args */)
```ts
-import {OpenAICompletion} from '@axflow/models/openai/completion';
+import {OpenAICompletion, OpenAICompletionDecoderStream} from '@axflow/models/openai/completion';
import type {OpenAICompletionTypes} from '@axflow/models/openai/completion';
@@ -142,2 +145,3 @@ OpenAICompletion.run(/* args */)
import {OpenAIEmbedding} from '@axflow/models/openai/embedding';
import type {OpenAIEmbeddingTypes} from '@axflow/models/openai/embedding';
@@ -150,3 +154,4 @@ OpenAIEmbedding.run(/* args */)
```ts
-import {CohereGeneration} from '@axflow/models/cohere/generation';
+import {CohereGeneration, CohereGenerationDecoderStream} from '@axflow/models/cohere/generation';
import type {CohereGenerationTypes} from '@axflow/models/cohere/generation';
@@ -162,2 +167,3 @@ CohereGeneration.run(/* args */)
import {CohereEmbedding} from '@axflow/models/cohere/embedding';
import type {CohereEmbeddingTypes} from '@axflow/models/cohere/embedding';
@@ -170,3 +176,4 @@ CohereEmbedding.run(/* args */)
```ts
-import {AnthropicCompletion} from '@axflow/models/anthropic/completion';
+import {AnthropicCompletion, AnthropicCompletionDecoderStream} from '@axflow/models/anthropic/completion';
import type {AnthropicCompletionTypes} from '@axflow/models/anthropic/completion';
@@ -173,0 +180,0 @@ AnthropicCompletion.run(/* args */)
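The README changes replace the comment about byte-stream efficiency with a plainer note that the bytes are streamed straight to the client, add the warning not to expose API keys to the frontend, and add each new `*DecoderStream` class to the import examples. Since the decoder streams are now exported, a browser could parse the forwarded bytes itself. A purely illustrative sketch pairing with the route handler sketched earlier; the `/api/chat` path and the request body fields are assumptions:

```ts
import { OpenAIChatDecoderStream } from '@axflow/models/openai/chat';

// Hypothetical browser-side consumer for a server route that forwards the
// OpenAI byte stream unmodified.
async function readCompletion(prompt: string) {
  const response = await fetch('/api/chat', {
    method: 'POST',
    body: JSON.stringify({
      model: 'gpt-3.5-turbo',
      messages: [{ role: 'user', content: prompt }],
    }),
  });
  if (!response.body) throw new Error('No response body');

  const reader = response.body.pipeThrough(new OpenAIChatDecoderStream()).getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    console.log(value); // a parsed OpenAIChatTypes.Chunk
  }
}
```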
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package.