@axflow/models - npm package version comparison

Comparing version 0.0.15 to 0.0.16


dist/react/index.d.ts

@@ -1,3 +0,12 @@

import { MessageType, JSONValueType } from '@axflow/models/shared';
import { MessageType, FunctionType, JSONValueType } from '@axflow/models/shared';
interface AccessorType<T = any> {
(value: T): string | null | undefined;
}
interface FunctionCallAccessorType<T = any> {
(value: T): {
name?: string;
arguments?: string;
} | null | undefined;
}
type BodyType = Record<string, JSONValueType> | ((message: MessageType, history: MessageType[]) => JSONValueType);

@@ -39,6 +48,46 @@ /**

*
* For example, if this hook is used to stream an OpenAI-compatible API response,
* the following option can be defined to interpret the response content:
*
* import { useChat } from '@axflow/models/react';
* import type { OpenAIChatTypes } from '@axflow/models/openai/chat';
*
* const { ... } = useChat({
* accessor: (value: OpenAIChatTypes.Chunk) => {
* return value.choices[0].delta.content;
* }
* });
*
* By default, it assumes the value from the API is the message text itself.
*/
accessor?: (value: any) => string;
accessor?: AccessorType;
/**
* An accessor used to pluck out a function call for LLMs that support it. This
* feature was built to support OpenAI functions, but it can be used for any model
* that supports a concept of functions.
*
* This is used to return the function call object which will then be populated
* on the assistant message's `functionCall` property. A function call object
* consists of a `name` property (the function name) and an `arguments` property
* (the function arguments), both of which are strings. The `arguments` property
* is encoded as JSON.
*
* For example, if this hook is used to stream an OpenAI-compatible API response
* using functions, the following options can be defined to interpret the response:
*
* import { useChat } from '@axflow/models/react';
* import type { OpenAIChatTypes } from '@axflow/models/openai/chat';
*
* const { ... } = useChat({
* accessor: (value: OpenAIChatTypes.Chunk) => {
* return value.choices[0].delta.content;
* },
*
* functionCallAccessor: (value: OpenAIChatTypes.Chunk) => {
* return value.choices[0].delta.function_call;
* }
* });
*/
functionCallAccessor?: FunctionCallAccessorType;
/**
* Initial message input. Defaults to empty string.

@@ -52,2 +101,10 @@ */

/**
* Initial set of available functions for the user's next message.
*
* This is primarily intended for OpenAI's functions feature.
*
* @see https://platform.openai.com/docs/api-reference/chat/create
*/
initialFunctions?: FunctionType[];
/**
* Callback to handle errors should they arise.

@@ -101,2 +158,18 @@ *

/**
* List of available functions to send along with the next user message.
*
* This is primarily intended for OpenAI's functions feature.
*
* @see https://platform.openai.com/docs/api-reference/chat/create
*/
functions: FunctionType[];
/**
* Update list of functions for the next user message.
*
* This is primarily intended for OpenAI's functions feature.
*
* @see https://platform.openai.com/docs/api-reference/chat/create
*/
setFunctions: (functions: FunctionType[]) => void;
/**
* If a request is in progress, this will be `true`.

@@ -103,0 +176,0 @@ *
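The additions above (`FunctionCallAccessorType`, `initialFunctions`, and the `functions`/`setFunctions` return values) suggest a usage pattern along the following lines. This is a minimal sketch assembled from the type declarations and doc comments in this diff; the component and the `get_weather` function definition are illustrative, not part of the package.

import { useChat } from '@axflow/models/react';
import type { OpenAIChatTypes } from '@axflow/models/openai/chat';

function Chat() {
  const { input, messages, functions, setFunctions, onChange, onSubmit } = useChat({
    // Pluck the streamed text out of each OpenAI-compatible chunk.
    accessor: (value: OpenAIChatTypes.Chunk) => value.choices[0].delta.content,
    // Pluck the streamed function call (name/arguments fragments) out of each chunk.
    functionCallAccessor: (value: OpenAIChatTypes.Chunk) => value.choices[0].delta.function_call,
    // Functions made available to the model for the next user message (illustrative definition).
    initialFunctions: [
      {
        name: 'get_weather',
        description: 'Look up the current weather for a city',
        parameters: {
          type: 'object',
          properties: { city: { type: 'string' } },
          required: ['city'],
        },
      },
    ],
  });

  // Render `messages` here. A message produced via a function call has an empty
  // `content` string and a populated `functionCall: { name, arguments }` instead.
  // After each request the hook clears `functions`; call `setFunctions([...])`
  // to offer functions again on a subsequent message.
}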

@@ -33,3 +33,3 @@ "use strict";

}
async function handleStreamingResponse(response, messagesRef, setMessages, accessor, onNewMessage) {
async function handleStreamingResponse(response, messagesRef, setMessages, accessor, functionCallAccessor, onNewMessage) {
const responseBody = response.body;

@@ -62,13 +62,30 @@ if (responseBody === null) {

const chunkContent = accessor(chunk.value);
const chunkFunctionCall = functionCallAccessor(chunk.value);
if (!id) {
id = uuid();
messages = messages.concat({
const message = {
id,
role: "assistant",
content: chunkContent,
content: chunkContent ?? "",
created: Date.now()
});
};
if (chunkFunctionCall) {
message.functionCall = {
name: chunkFunctionCall.name ?? "",
arguments: chunkFunctionCall.arguments ?? ""
};
}
messages = messages.concat(message);
} else {
messages = messages.map((msg) => {
return msg.id !== id ? msg : { ...msg, content: msg.content + chunkContent };
if (msg.id !== id) {
return msg;
}
const content = msg.content + (chunkContent ?? "");
if (!chunkFunctionCall) {
return { ...msg, content };
}
const name = msg.functionCall.name + (chunkFunctionCall.name ?? "");
const args = msg.functionCall.arguments + (chunkFunctionCall.arguments ?? "");
return { ...msg, content, functionCall: { name, arguments: args } };
});

@@ -82,11 +99,18 @@ }

}
async function handleJsonResponse(response, messagesRef, setMessages, accessor, onNewMessage) {
async function handleJsonResponse(response, messagesRef, setMessages, accessor, functionCallAccessor, onNewMessage) {
const responseBody = await response.json();
const content = accessor(responseBody);
const functionCall = functionCallAccessor(responseBody);
const newMessage = {
id: uuid(),
role: "assistant",
content,
content: content ?? "",
created: Date.now()
};
if (functionCall) {
newMessage.functionCall = {
name: functionCall.name ?? "",
arguments: functionCall.arguments ?? ""
};
}
const messages = messagesRef.current.concat(newMessage);

@@ -96,3 +120,3 @@ setMessages(messages);

}
async function request(prepare, messagesRef, setMessages, url, headers, accessor, loadingRef, setLoading, setError, onError, onNewMessage) {
async function request(prepare, messagesRef, setMessages, url, headers, accessor, functionCallAccessor, loadingRef, setLoading, setError, onError, onNewMessage, onSuccess) {
if (loadingRef.current) {

@@ -113,3 +137,4 @@ return;

const handler = isStreaming ? handleStreamingResponse : handleJsonResponse;
await handler(response, messagesRef, setMessages, accessor, onNewMessage);
await handler(response, messagesRef, setMessages, accessor, functionCallAccessor, onNewMessage);
onSuccess();
} catch (error) {

@@ -122,3 +147,3 @@ setError(error);

}
async function stableAppend(message, messagesRef, setMessages, url, headers, body, accessor, loadingRef, setLoading, setError, onError, onNewMessage) {
async function stableAppend(message, messagesRef, setMessages, url, headers, body, accessor, functionCallAccessor, loadingRef, setLoading, setError, onError, onNewMessage, setFunctions) {
function prepare() {

@@ -138,2 +163,3 @@ const history = messagesRef.current;

accessor,
functionCallAccessor,
loadingRef,

@@ -143,6 +169,8 @@ setLoading,

onError,
onNewMessage
onNewMessage,
() => setFunctions([])
// Clear functions after each request (similar to clearing user input)
);
}
async function stableReload(messagesRef, setMessages, url, headers, body, accessor, loadingRef, setLoading, setError, onError, onNewMessage) {
async function stableReload(messagesRef, setMessages, url, headers, body, accessor, functionCallAccessor, loadingRef, setLoading, setError, onError, onNewMessage) {
function prepare() {

@@ -175,2 +203,3 @@ const messages = messagesRef.current;

accessor,
functionCallAccessor,
loadingRef,

@@ -180,7 +209,14 @@ setLoading,

onError,
onNewMessage
onNewMessage,
() => {
}
);
}
var DEFAULT_URL = "/api/chat";
var DEFAULT_ACCESSOR = (value) => value;
var DEFAULT_ACCESSOR = (value) => {
return typeof value === "string" ? value : void 0;
};
var DEFAULT_FUNCTION_CALL_ACCESSOR = (_value) => {
return void 0;
};
var DEFAULT_BODY = (message, history) => ({

@@ -204,2 +240,4 @@ messages: [...history, message]

const messagesRef = (0, import_react.useRef)(initialMessages);
const initialFunctions = options.initialFunctions ?? [];
const [functions, setFunctions] = (0, import_react.useState)(initialFunctions);
const [loading, _setLoading] = (0, import_react.useState)(false);

@@ -210,2 +248,3 @@ const loadingRef = (0, import_react.useRef)(false);

const accessor = options.accessor ?? DEFAULT_ACCESSOR;
const functionCallAccessor = options.functionCallAccessor ?? DEFAULT_FUNCTION_CALL_ACCESSOR;
const body = options.body ?? DEFAULT_BODY;

@@ -248,2 +287,5 @@ const headers = options.headers ?? DEFAULT_HEADERS;

};
if (functions.length > 0) {
newMessage.functions = functions;
}
stableAppend(

@@ -257,2 +299,3 @@ newMessage,

accessor,
functionCallAccessor,
loadingRef,

@@ -262,3 +305,4 @@ setLoading,

onError,
onNewMessage
onNewMessage,
setFunctions
);

@@ -275,2 +319,3 @@ setInput("");

accessor,
functionCallAccessor,
loadingRef,

@@ -283,3 +328,15 @@ setLoading,

}
return { input, setInput, messages, setMessages, loading, error, onChange, onSubmit, reload };
return {
input,
setInput,
messages,
setMessages,
functions,
setFunctions,
loading,
error,
onChange,
onSubmit,
reload
};
}

@@ -286,0 +343,0 @@ // Annotate the CommonJS export names for ESM import in node:
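In the compiled hook above, the streaming branch concatenates both the text content and any function-call fragments as chunks arrive. Below is a stripped-down sketch of that accumulation step; the names are illustrative, and the real `handleStreamingResponse` additionally assigns ids and timestamps and updates React state.

type FunctionCallDelta = { name?: string; arguments?: string } | null | undefined;

type PartialAssistantMessage = {
  content: string;
  functionCall?: { name: string; arguments: string };
};

// Fold one streamed chunk into the assistant message being built up:
// text is appended, and function-call name/arguments fragments are
// concatenated in the order they stream in.
function foldChunk(
  message: PartialAssistantMessage,
  chunkContent: string | null | undefined,
  chunkFunctionCall: FunctionCallDelta,
): PartialAssistantMessage {
  const content = message.content + (chunkContent ?? '');
  if (!chunkFunctionCall) {
    return { ...message, content };
  }
  const name = (message.functionCall?.name ?? '') + (chunkFunctionCall.name ?? '');
  const args = (message.functionCall?.arguments ?? '') + (chunkFunctionCall.arguments ?? '');
  return { ...message, content, functionCall: { name, arguments: args } };
}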

@@ -15,8 +15,51 @@ declare class HttpError extends Error {

} | Array<JSONValueType>;
type FunctionType = {
name: string;
description?: string;
parameters: JSONValueType;
};
type MessageType = {
/**
* Can be any unique string.
*
* For example, the `useChat` hook uses UUIDs because of their native availability in both Node and browsers.
*
* @see https://developer.mozilla.org/en-US/docs/Web/API/Crypto/randomUUID
*/
id: string;
/**
* Specifies who this message is from.
*/
role: 'user' | 'assistant' | 'system';
data?: JSONValueType[];
/**
* The content of the message. If the message was a function call from the assistant,
* then this field will be an empty string and the `functionCall` field will be populated.
*/
content: string;
/**
* The time this message was created, expressed as milliseconds since Epoch.
*
* @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/now
*/
created: number;
/**
* Any additional data to associate with the message.
*/
data?: JSONValueType[];
/**
* If using OpenAI functions, the functions available to the assistant can be defined here.
*
* @see https://platform.openai.com/docs/api-reference/chat/create
*/
functions?: FunctionType[];
/**
* If using OpenAI functions and the assistant responds with a function call,
* this field will be populated with the function invocation information.
*
* @see https://platform.openai.com/docs/api-reference/chat/object
*/
functionCall?: {
name: string;
arguments: string;
};
};

@@ -160,2 +203,2 @@

export { HttpError, IterableToStream, JSONValueType, MessageType, NdJsonStream, NdJsonValueType, POST, StreamToIterable, StreamingJsonResponse, isHttpError };
export { FunctionType, HttpError, IterableToStream, JSONValueType, MessageType, NdJsonStream, NdJsonValueType, POST, StreamToIterable, StreamingJsonResponse, isHttpError };
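With the new `functionCall` field on `MessageType`, an assistant message representing an OpenAI-style function call would look roughly like the following. This is a hand-written example based on the type declaration above; the id and argument values are made up.

import type { MessageType } from '@axflow/models/shared';

// Per the doc comments above, a function-call message carries an empty
// `content` string and a JSON-encoded `arguments` string.
const assistantFunctionCall: MessageType = {
  id: 'c0ffee00-0000-4000-8000-000000000000',
  role: 'assistant',
  content: '',
  created: Date.now(),
  functionCall: {
    name: 'get_weather',
    arguments: '{"city":"San Francisco"}',
  },
};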


package.json
{
"name": "@axflow/models",
"version": "0.0.15",
"version": "0.0.16",
"description": "Zero-dependency, modular SDK for building robust natural language applications",

@@ -199,3 +199,3 @@ "author": "Axflow (https://axflow.dev)",

},
"gitHead": "e3a7d387dd29e2db277e4fad890ffc4c2be686f0"
"gitHead": "7135ff716f0b2845379f6fc97e430759d0b067ec"
}

