@axflow/models - npm Package Compare versions

Comparing version 0.0.1-beta.2 to 0.0.1-beta.3


dist/anthropic/completion.d.ts

@@ -47,6 +47,64 @@ declare namespace AnthropicCompletionTypes {

}
/**
* Run a completion against the Anthropic API.
*
* @see https://docs.anthropic.com/claude/reference/complete_post
*
* @param request The request body sent to Anthropic. See Anthropic's documentation for /v1/complete for supported parameters.
* @param options
* @param options.apiKey Anthropic API key.
* @param options.apiUrl The url of the Anthropic (or compatible) API. Defaults to https://api.anthropic.com/v1/complete.
* @param options.version The Anthropic API version. Defaults to 2023-06-01. Note that older versions are not currently supported.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns Anthropic completion. See Anthropic's documentation for /v1/complete.
*/
declare function run(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<AnthropicCompletionTypes.Response>;
/**
* Run a streaming completion against the Anthropic API. The resulting stream is the raw unmodified bytes from the API.
*
* @see https://docs.anthropic.com/claude/reference/complete_post
* @see https://docs.anthropic.com/claude/reference/streaming
*
* @param request The request body sent to Anthropic. See Anthropic's documentation for /v1/complete for supported parameters.
* @param options
* @param options.apiKey Anthropic API key.
* @param options.apiUrl The url of the Anthropic (or compatible) API. Defaults to https://api.anthropic.com/v1/complete.
* @param options.version The Anthropic API version. Defaults to 2023-06-01. Note that older versions are not currently supported.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of bytes directly from the API.
*/
declare function streamBytes(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<ReadableStream<Uint8Array>>;
/**
* Run a streaming completion against the Anthropic API. The resulting stream is the parsed stream data as JavaScript objects.
*
* @see https://docs.anthropic.com/claude/reference/complete_post
* @see https://docs.anthropic.com/claude/reference/streaming
*
* @param request The request body sent to Anthropic. See Anthropic's documentation for /v1/complete for supported parameters.
* @param options
* @param options.apiKey Anthropic API key.
* @param options.apiUrl The url of the Anthropic (or compatible) API. Defaults to https://api.anthropic.com/v1/complete.
* @param options.version The Anthropic API version. Defaults to 2023-06-01. Note that older versions are not currently supported.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of objects representing each chunk from the API.
*/
declare function stream(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<ReadableStream<AnthropicCompletionTypes.Chunk>>;
/**
* Run a streaming completion against the Anthropic API. The resulting stream emits only the string tokens.
*
* @see https://docs.anthropic.com/claude/reference/complete_post
* @see https://docs.anthropic.com/claude/reference/streaming
*
* @param request The request body sent to Anthropic. See Anthropic's documentation for /v1/complete for supported parameters.
* @param options
* @param options.apiKey Anthropic API key.
* @param options.apiUrl The url of the Anthropic (or compatible) API. Defaults to https://api.anthropic.com/v1/complete.
* @param options.version The Anthropic API version. Defaults to 2023-06-01. Note that older versions are not currently supported.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of tokens from the API.
*/
declare function streamTokens(request: AnthropicCompletionTypes.Request, options: AnthropicCompletionTypes.RequestOptions): Promise<ReadableStream<string>>;
/**
* An object that encapsulates methods for calling the Anthropic Completion API.
*/
declare class AnthropicCompletion {

@@ -53,0 +111,0 @@ static run: typeof run;
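To make the new Anthropic surface concrete, here is a minimal usage sketch. The import subpath is inferred from the `dist/anthropic/completion.d.ts` path above, and the request fields (`model`, `prompt`, `max_tokens_to_sample`) come from Anthropic's /v1/complete API rather than from this diff.

```ts
// Usage sketch for the Anthropic completion module above.
// The import subpath is inferred from dist/anthropic/completion.d.ts and is an assumption.
import { AnthropicCompletion } from '@axflow/models/anthropic/completion';

// Request fields follow Anthropic's /v1/complete API; they are not defined in this diff.
const stream = await AnthropicCompletion.streamTokens(
  {
    model: 'claude-2',
    prompt: '\n\nHuman: Write a haiku about TypeScript.\n\nAssistant:',
    max_tokens_to_sample: 256,
  },
  {
    apiKey: process.env.ANTHROPIC_API_KEY!,
    // version defaults to '2023-06-01' per the JSDoc above.
  },
);

// streamTokens resolves to a ReadableStream<string>; read it with a reader.
const reader = stream.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  process.stdout.write(value);
}
```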

dist/cohere/embedding.d.ts

@@ -28,3 +28,18 @@ type SharedRequestOptions = {

}
/**
* Calculate text embeddings using the Cohere API.
*
* @see https://docs.cohere.com/reference/embed
*
* @param request The request body sent to Cohere. See Cohere's documentation for /v1/embed for supported parameters.
* @param options
* @param options.apiKey Cohere API key.
* @param options.apiUrl The url of the Cohere (or compatible) API. Defaults to https://api.cohere.ai/v1/embed.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns An object consisting of the text embeddings and other metadata. See Cohere's documentation for /v1/embed.
*/
declare function run(request: CohereEmbeddingTypes.Request, options: CohereEmbeddingTypes.RequestOptions): Promise<CohereEmbeddingTypes.Response>;
/**
* An object that encapsulates methods for calling the Cohere Embed API.
*/
declare class CohereEmbedding {

@@ -31,0 +46,0 @@ static run: typeof run;
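A similar sketch for the Cohere Embed declarations above, which expose only `run`. The import subpath is assumed from the `CohereEmbeddingTypes` names, and the request fields (`texts`, `model`) follow Cohere's /v1/embed API.

```ts
// Usage sketch for the Cohere Embed module above; the import subpath is an assumption.
import { CohereEmbedding } from '@axflow/models/cohere/embedding';

// Request fields follow Cohere's /v1/embed API; they are not defined in this diff.
const response = await CohereEmbedding.run(
  {
    texts: ['Hello, world', 'Zero-dependency SDKs keep bundles small'],
    model: 'embed-english-v2.0',
  },
  { apiKey: process.env.COHERE_API_KEY! },
);

// The response shape mirrors Cohere's /v1/embed payload (embeddings plus metadata).
console.log(response);
```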

dist/cohere/generation.d.ts

@@ -60,6 +60,57 @@ type SharedRequestOptions = {

}
/**
* Run a generation against the Cohere API.
*
* @see https://docs.cohere.com/reference/generate
*
* @param request The request body sent to Cohere. See Cohere's documentation for /v1/generate for supported parameters.
* @param options
* @param options.apiKey Cohere API key.
* @param options.apiUrl The url of the Cohere (or compatible) API. Defaults to https://api.cohere.ai/v1/generate.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns Cohere completion. See Cohere's documentation for /v1/generate.
*/
declare function run(request: CohereGenerationTypes.Request, options: CohereGenerationTypes.RequestOptions): Promise<CohereGenerationTypes.Response>;
/**
* Run a streaming generation against the Cohere API. The resulting stream is the raw unmodified bytes from the API.
*
* @see https://docs.cohere.com/reference/generate
*
* @param request The request body sent to Cohere. See Cohere's documentation for /v1/generate for supported parameters.
* @param options
* @param options.apiKey Cohere API key.
* @param options.apiUrl The url of the Cohere (or compatible) API. Defaults to https://api.cohere.ai/v1/generate.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of bytes directly from the API.
*/
declare function streamBytes(request: CohereGenerationTypes.Request, options: CohereGenerationTypes.RequestOptions): Promise<ReadableStream<Uint8Array>>;
/**
* Run a streaming generation against the Cohere API. The resulting stream is the parsed stream data as JavaScript objects.
*
* @see https://docs.cohere.com/reference/generate
*
* @param request The request body sent to Cohere. See Cohere's documentation for /v1/generate for supported parameters.
* @param options
* @param options.apiKey Cohere API key.
* @param options.apiUrl The url of the Cohere (or compatible) API. Defaults to https://api.cohere.ai/v1/generate.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of objects representing each chunk from the API.
*/
declare function stream(request: CohereGenerationTypes.Request, options: CohereGenerationTypes.RequestOptions): Promise<ReadableStream<CohereGenerationTypes.Chunk>>;
/**
* Run a streaming generation against the Cohere API. The resulting stream emits only the string tokens.
*
* @see https://docs.cohere.com/reference/generate
*
* @param request The request body sent to Cohere. See Cohere's documentation for /v1/generate for supported parameters.
* @param options
* @param options.apiKey Cohere API key.
* @param options.apiUrl The url of the Cohere (or compatible) API. Defaults to https://api.cohere.ai/v1/generate.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of tokens from the API.
*/
declare function streamTokens(request: CohereGenerationTypes.Request, options: CohereGenerationTypes.RequestOptions): Promise<ReadableStream<string>>;
/**
* An object that encapsulates methods for calling the Cohere Generate API.
*/
declare class CohereGeneration {

@@ -66,0 +117,0 @@ static run: typeof run;
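For the Cohere Generate module above, `stream` yields parsed chunk objects rather than raw bytes. A hedged sketch, assuming the subpath import and using request fields (`prompt`, `max_tokens`) from Cohere's /v1/generate API:

```ts
// Usage sketch for the Cohere Generate module above; the import subpath is an assumption.
import { CohereGeneration } from '@axflow/models/cohere/generation';

// Request fields follow Cohere's /v1/generate API; they are not defined in this diff.
const stream = await CohereGeneration.stream(
  {
    prompt: 'Write a tagline for a zero-dependency LLM SDK.',
    max_tokens: 100,
  },
  { apiKey: process.env.COHERE_API_KEY! },
);

// Each value is a parsed CohereGenerationTypes.Chunk object rather than raw bytes.
const reader = stream.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  console.log(value);
}
```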

dist/openai/chat.d.ts

@@ -75,6 +75,57 @@ type SharedRequestOptions = {

}
/**
* Run a chat completion against the OpenAI API.
*
* @see https://platform.openai.com/docs/api-reference/chat
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/chat/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/chat/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns OpenAI chat completion. See OpenAI's documentation for /v1/chat/completions.
*/
declare function run(request: OpenAIChatTypes.Request, options: OpenAIChatTypes.RequestOptions): Promise<OpenAIChatTypes.Response>;
/**
* Run a streaming chat completion against the OpenAI API. The resulting stream is the raw unmodified bytes from the API.
*
* @see https://platform.openai.com/docs/api-reference/chat
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/chat/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/chat/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of bytes directly from the API.
*/
declare function streamBytes(request: OpenAIChatTypes.Request, options: OpenAIChatTypes.RequestOptions): Promise<ReadableStream<Uint8Array>>;
/**
* Run a streaming chat completion against the OpenAI API. The resulting stream is the parsed stream data as JavaScript objects.
*
* @see https://platform.openai.com/docs/api-reference/chat
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/chat/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/chat/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of objects representing each chunk from the API.
*/
declare function stream(request: OpenAIChatTypes.Request, options: OpenAIChatTypes.RequestOptions): Promise<ReadableStream<OpenAIChatTypes.Chunk>>;
/**
* Run a streaming chat completion against the OpenAI API. The resulting stream emits only the string tokens.
*
* @see https://platform.openai.com/docs/api-reference/chat
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/chat/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/chat/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of tokens from the API.
*/
declare function streamTokens(request: OpenAIChatTypes.Request, options: OpenAIChatTypes.RequestOptions): Promise<ReadableStream<string>>;
/**
* An object that encapsulates methods for calling the OpenAI Chat Completion API.
*/
declare class OpenAIChat {

@@ -81,0 +132,0 @@ static run: typeof run;
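The `@axflow/models/openai/chat` subpath is confirmed by the README example further down in this diff; the request fields follow OpenAI's /v1/chat/completions API. A minimal non-streaming sketch using `run`:

```ts
// Usage sketch for the OpenAI chat module above.
// The import subpath matches the README example further down in this diff.
import { OpenAIChat } from '@axflow/models/openai/chat';

// Request fields follow OpenAI's /v1/chat/completions API; they are not defined in this diff.
const response = await OpenAIChat.run(
  {
    model: 'gpt-4',
    messages: [{ role: 'user', content: 'Say hello in five words.' }],
  },
  { apiKey: process.env.OPENAI_API_KEY! },
);

// The response shape mirrors OpenAI's chat completion payload.
console.log(response);
```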

dist/openai/completion.d.ts

@@ -57,6 +57,57 @@ type SharedRequestOptions = {

}
/**
* Run a completion against the OpenAI API.
*
* @see https://platform.openai.com/docs/api-reference/completions
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns OpenAI completion. See OpenAI's documentation for /v1/completions.
*/
declare function run(request: OpenAICompletionTypes.Request, options: OpenAICompletionTypes.RequestOptions): Promise<OpenAICompletionTypes.Response>;
/**
* Run a streaming completion against the OpenAI API. The resulting stream is the raw unmodified bytes from the API.
*
* @see https://platform.openai.com/docs/api-reference/completions
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of bytes directly from the API.
*/
declare function streamBytes(request: OpenAICompletionTypes.Request, options: OpenAICompletionTypes.RequestOptions): Promise<ReadableStream<Uint8Array>>;
/**
* Run a streaming completion against the OpenAI API. The resulting stream is the parsed stream data as JavaScript objects.
*
* @see https://platform.openai.com/docs/api-reference/completions
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of objects representing each chunk from the API.
*/
declare function stream(request: OpenAICompletionTypes.Request, options: OpenAICompletionTypes.RequestOptions): Promise<ReadableStream<OpenAICompletionTypes.Chunk>>;
/**
* Run a streaming completion against the OpenAI API. The resulting stream emits only the string tokens.
*
* @see https://platform.openai.com/docs/api-reference/completions
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/completions for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/completions.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns A stream of tokens from the API.
*/
declare function streamTokens(request: OpenAICompletionTypes.Request, options: OpenAICompletionTypes.RequestOptions): Promise<ReadableStream<string>>;
/**
* An object that encapsulates methods for calling the OpenAI Completion API.
*/
declare class OpenAICompletion {

@@ -63,0 +114,0 @@ static run: typeof run;
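For the OpenAI completion module above, a sketch of `streamTokens`, which emits only the string tokens. The import subpath is assumed from the type names; the request fields (`model`, `prompt`, `max_tokens`) follow OpenAI's /v1/completions API.

```ts
// Usage sketch for the OpenAI completion module above; the import subpath is an assumption.
import { OpenAICompletion } from '@axflow/models/openai/completion';

// Request fields follow OpenAI's /v1/completions API; they are not defined in this diff.
const stream = await OpenAICompletion.streamTokens(
  {
    model: 'gpt-3.5-turbo-instruct',
    prompt: 'Once upon a time',
    max_tokens: 64,
  },
  { apiKey: process.env.OPENAI_API_KEY! },
);

// Accumulate the emitted string tokens into the final completion text.
let text = '';
const reader = stream.getReader();
while (true) {
  const { done, value } = await reader.read();
  if (done) break;
  text += value;
}
console.log(text);
```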

dist/openai/embedding.d.ts

@@ -29,2 +29,14 @@ type SharedRequestOptions = {

}
/**
* Calculate text embeddings using the OpenAI API.
*
* @see https://platform.openai.com/docs/api-reference/embeddings
*
* @param request The request body sent to OpenAI. See OpenAI's documentation for /v1/embeddings for supported parameters.
* @param options
* @param options.apiKey OpenAI API key.
* @param options.apiUrl The url of the OpenAI (or compatible) API. Defaults to https://api.openai.com/v1/embeddings.
* @param options.fetch A custom implementation of fetch. Defaults to globalThis.fetch.
* @returns An object consisting of the text embeddings and other metadata. See OpenAI's documentation for /v1/embeddings.
*/
declare function run(request: OpenAIEmbeddingTypes.Request, options: OpenAIEmbeddingTypes.RequestOptions): Promise<OpenAIEmbeddingTypes.Response>;

@@ -31,0 +43,0 @@ declare class OpenAIEmbedding {
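Finally, a sketch of the OpenAI embeddings `run`. The import subpath is assumed from the `OpenAIEmbeddingTypes` names; the request fields (`model`, `input`) follow OpenAI's /v1/embeddings API.

```ts
// Usage sketch for the OpenAI embeddings module above; the import subpath is an assumption.
import { OpenAIEmbedding } from '@axflow/models/openai/embedding';

// Request fields follow OpenAI's /v1/embeddings API; they are not defined in this diff.
const response = await OpenAIEmbedding.run(
  {
    model: 'text-embedding-ada-002',
    input: 'Zero-dependency SDK for LLMs and embedding models',
  },
  { apiKey: process.env.OPENAI_API_KEY! },
);

// The response shape mirrors OpenAI's /v1/embeddings payload (embedding vectors plus usage).
console.log(response);
```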


package.json
  {
    "name": "@axflow/models",
-   "version": "0.0.1-beta.2",
-   "description": "Zero-dependency module to run, stream, and render results across the most popular LLMs and embedding models",
+   "version": "0.0.1-beta.3",
+   "description": "Zero-dependency, modular SDK for integrating LLMs and embedding models into your application",
    "author": "Axilla (https://axilla.io)",

@@ -142,3 +142,3 @@ "homepage": "https://github.com/axilla-io/ax/tree/main/packages/models#readme",

    },
-   "gitHead": "6e7c2a345c3db43f64f750783abeb85d31ee6885"
+   "gitHead": "89aee5d61842162b57239f6eb8dc8c6e3516ced0"
  }
# @axflow/models
- Zero-dependency module to run, stream, and render results across the most popular LLMs and embedding models.
+ Zero-dependency, modular SDK for integrating LLMs and embedding models into your application.

@@ -92,5 +92,51 @@ ```

## `useChat` hook for dead simple UI integration
We've made building chat and completion UIs trivial. It doesn't get any easier than this 🚀
```ts
///////////////////
// On the server //
///////////////////

import { OpenAIChat } from '@axflow/models/openai/chat';
import { StreamingJsonResponse, type MessageType } from '@axflow/models/shared';

export const runtime = 'edge';

export async function POST(request: Request) {
  const { messages } = await request.json();

  const stream = await OpenAIChat.streamTokens(
    {
      model: 'gpt-4',
      messages: messages.map((msg: MessageType) => ({ role: msg.role, content: msg.content })),
    },
    {
      apiKey: process.env.OPENAI_API_KEY!,
    },
  );

  return new StreamingJsonResponse(stream);
}

///////////////////
// On the client //
///////////////////

import { useChat } from '@axflow/models/react';

function ChatComponent() {
  const { input, messages, onChange, onSubmit } = useChat();

  return (
    <>
      <Messages messages={messages} />
      <Form input={input} onChange={onChange} onSubmit={onSubmit} />
    </>
  );
}
```
## Next.js edge proxy example
- The server intercepts the request on the edge, adds the proper API key, and forwards the byte stream back to the client.
+ Sometimes you just want to create a proxy to the underlying LLM API. In this example, the server intercepts the request on the edge, adds the proper API key, and forwards the byte stream back to the client.

@@ -97,0 +143,0 @@ *Note this pattern works exactly the same with our other models that support streaming, like Cohere and Anthropic.*
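The proxy code itself isn't part of this diff, but the description maps directly onto the `streamBytes` functions declared above. A hedged sketch of what such a route handler might look like (the handler shape and response headers are assumptions, not taken from the README):

```ts
// Hypothetical Next.js edge route handler; not copied from the package README.
import { OpenAIChat } from '@axflow/models/openai/chat';

export const runtime = 'edge';

export async function POST(request: Request) {
  // The client supplies the full chat request; the server only attaches the API key.
  const chatRequest = await request.json();

  // streamBytes returns the raw, unmodified bytes from the OpenAI API.
  const stream = await OpenAIChat.streamBytes(chatRequest, {
    apiKey: process.env.OPENAI_API_KEY!,
  });

  // Forward the byte stream straight back to the client.
  return new Response(stream, {
    headers: { 'content-type': 'text/event-stream' }, // assumed SSE content type
  });
}
```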

