@ai-sdk/react
Comparing version 1.1.6 to 1.1.7
# @ai-sdk/react
## 1.1.7
### Patch Changes
- 0d2d9bf: fix (ui): empty submits (with allowEmptySubmit) create user messages
- 0d2d9bf: fix (ui): single assistant message with multiple tool steps
- Updated dependencies [0d2d9bf]
  - @ai-sdk/ui-utils@1.1.7
## 1.1.6
@@ -4,0 +13,0 @@
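Both 1.1.7 fixes show up in the bundled-output diff further down: empty submits with allowEmptySubmit now append a user message, and the multi-step continuation check now tracks tool-invocation steps instead of counting trailing assistant messages. A minimal sketch of how the first fix surfaces in application code, assuming the documented useChat API; /api/chat and useEmptySubmitChat are placeholder names:

```ts
// Sketch only, not part of the package diff. Assumes the documented useChat
// hook from @ai-sdk/react and a chat route at the placeholder path /api/chat.
import { useChat } from '@ai-sdk/react';

export function useEmptySubmitChat() {
  const { messages, handleSubmit } = useChat({ api: '/api/chat' });

  // Before 1.1.7, an empty submit with allowEmptySubmit: true reused the
  // existing history without appending anything; as of 1.1.7 the submit
  // always appends a (possibly empty) user message, matching the change to
  // handleSubmit in the bundled output below.
  const submitEmpty = () => handleSubmit(undefined, { allowEmptySubmit: true });

  return { messages, submitEmpty };
}
```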
@@ -136,3 +136,4 @@ import { Message, CreateMessage, AssistantStatus, UseAssistantOptions, ChatRequestOptions, JSONValue, UseChatOptions, RequestOptions, UseCompletionOptions, Schema, DeepPartial } from '@ai-sdk/ui-utils';
/**
Maximum number of sequential LLM calls (steps), e.g. when you use tool calls. Must be at least 1.
Maximum number of sequential LLM calls (steps), e.g. when you use tool calls.
Must be at least 1.
@@ -139,0 +140,0 @@ A maximum number is required to prevent infinite loops in the case of misconfigured tools.
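The reflowed doc comment above belongs to the maxSteps option of useChat (the same option is checked as maxSteps > 1 in the bundled code below). A hedged usage sketch; the endpoint path is a placeholder:

```ts
// Sketch only, not part of the package diff. maxSteps caps the number of
// sequential LLM calls (tool-call round trips) per user turn; it must be at
// least 1 and exists to stop infinite loops when a tool is misconfigured.
import { useChat } from '@ai-sdk/react';

export function useToolChat() {
  return useChat({
    api: '/api/chat', // placeholder endpoint
    maxSteps: 5,      // allow up to five automatic steps per user turn
  });
}
```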
@@ -216,65 +216,2 @@ "use strict";
// src/use-chat.ts
var processResponseStream = async (api, chatRequest, mutate, mutateStreamData, existingDataRef, extraMetadataRef, messagesRef, abortControllerRef, generateId2, streamProtocol, onFinish, onResponse, onToolCall, sendExtraMessageFields, experimental_prepareRequestBody, fetch2, keepLastMessageOnError, id) => {
var _a;
const previousMessages = messagesRef.current;
mutate(chatRequest.messages, false);
const constructedMessagesPayload = sendExtraMessageFields ? chatRequest.messages : chatRequest.messages.map(
({
role,
content,
experimental_attachments,
data,
annotations,
toolInvocations
}) => ({
role,
content,
...experimental_attachments !== void 0 && {
experimental_attachments
},
...data !== void 0 && { data },
...annotations !== void 0 && { annotations },
...toolInvocations !== void 0 && { toolInvocations }
})
);
const existingData = existingDataRef.current;
return await (0, import_ui_utils2.callChatApi)({
api,
body: (_a = experimental_prepareRequestBody == null ? void 0 : experimental_prepareRequestBody({
id,
messages: chatRequest.messages,
requestData: chatRequest.data,
requestBody: chatRequest.body
})) != null ? _a : {
id,
messages: constructedMessagesPayload,
data: chatRequest.data,
...extraMetadataRef.current.body,
...chatRequest.body
},
streamProtocol,
credentials: extraMetadataRef.current.credentials,
headers: {
...extraMetadataRef.current.headers,
...chatRequest.headers
},
abortController: () => abortControllerRef.current,
restoreMessagesOnFailure() {
if (!keepLastMessageOnError) {
mutate(previousMessages, false);
}
},
onResponse,
onUpdate(merged, data) {
mutate([...chatRequest.messages, ...merged], false);
if (data == null ? void 0 : data.length) {
mutateStreamData([...existingData != null ? existingData : [], ...data], false);
}
},
onToolCall,
onFinish,
generateId: generateId2,
fetch: fetch2
});
};
function useChat({
@@ -339,3 +276,7 @@ api = "/api/chat",
async (chatRequest) => {
const messageCount = messagesRef.current.length;
var _a, _b, _c;
const messageCount = chatRequest.messages.length;
const maxStep = (0, import_ui_utils2.extractMaxToolInvocationStep)(
(_a = chatRequest.messages[chatRequest.messages.length - 1]) == null ? void 0 : _a.toolInvocations
);
try {
@@ -346,23 +287,80 @@ mutateLoading(true);
abortControllerRef.current = abortController;
await processResponseStream(
const throttledMutate = throttle(mutate, throttleWaitMs);
const throttledMutateStreamData = throttle(
mutateStreamData,
throttleWaitMs
);
const previousMessages = messagesRef.current;
throttledMutate(chatRequest.messages, false);
const constructedMessagesPayload = sendExtraMessageFields ? chatRequest.messages : chatRequest.messages.map(
({
role,
content,
experimental_attachments,
data,
annotations,
toolInvocations
}) => ({
role,
content,
...experimental_attachments !== void 0 && {
experimental_attachments
},
...data !== void 0 && { data },
...annotations !== void 0 && { annotations },
...toolInvocations !== void 0 && { toolInvocations }
})
);
const existingData = streamDataRef.current;
await (0, import_ui_utils2.callChatApi)({
api,
chatRequest,
// throttle streamed ui updates:
throttle(mutate, throttleWaitMs),
throttle(mutateStreamData, throttleWaitMs),
streamDataRef,
extraMetadataRef,
messagesRef,
abortControllerRef,
generateId2,
body: (_b = experimental_prepareRequestBody == null ? void 0 : experimental_prepareRequestBody({
id: chatId,
messages: chatRequest.messages,
requestData: chatRequest.data,
requestBody: chatRequest.body
})) != null ? _b : {
id: chatId,
messages: constructedMessagesPayload,
data: chatRequest.data,
...extraMetadataRef.current.body,
...chatRequest.body
},
streamProtocol,
onFinish,
credentials: extraMetadataRef.current.credentials,
headers: {
...extraMetadataRef.current.headers,
...chatRequest.headers
},
abortController: () => abortControllerRef.current,
restoreMessagesOnFailure() {
if (!keepLastMessageOnError) {
throttledMutate(previousMessages, false);
}
},
onResponse,
onUpdate({ message, data, replaceLastMessage }) {
throttledMutate(
[
...replaceLastMessage ? chatRequest.messages.slice(
0,
chatRequest.messages.length - 1
) : chatRequest.messages,
message
],
false
);
if (data == null ? void 0 : data.length) {
throttledMutateStreamData(
[...existingData != null ? existingData : [], ...data],
false
);
}
},
onToolCall,
sendExtraMessageFields,
experimental_prepareRequestBody,
fetch2,
keepLastMessageOnError,
chatId
);
onFinish,
generateId: generateId2,
fetch: fetch2,
lastMessage: chatRequest.messages[chatRequest.messages.length - 1]
});
abortControllerRef.current = null;
@@ -384,8 +382,10 @@ } catch (err) {
if (
// ensure we actually have new messages (to prevent infinite loops in case of errors):
messages2.length > messageCount && // ensure there is a last message:
lastMessage != null && // check if the feature is enabled:
// ensure there is a last message:
lastMessage != null && // ensure we actually have new steps (to prevent infinite loops in case of errors):
(messages2.length > messageCount || (0, import_ui_utils2.extractMaxToolInvocationStep)(lastMessage.toolInvocations) !== maxStep) && // check if the feature is enabled:
maxSteps > 1 && // check that next step is possible:
isAssistantMessageWithCompletedToolCalls(lastMessage) && // limit the number of automatic steps:
countTrailingAssistantMessages(messages2) < maxSteps
isAssistantMessageWithCompletedToolCalls(lastMessage) && // check that assistant has not answered yet:
!lastMessage.content && // empty string or undefined
// limit the number of automatic steps:
((_c = (0, import_ui_utils2.extractMaxToolInvocationStep)(lastMessage.toolInvocations)) != null ? _c : 0) < maxSteps
) {
@@ -499,3 +499,3 @@ await triggerRequest({ messages: messages2 });
);
const messages2 = !input && !attachmentsForRequest.length && options.allowEmptySubmit ? messagesRef.current : messagesRef.current.concat({
const messages2 = messagesRef.current.concat({
id: generateId2(),
@@ -565,15 +565,4 @@ createdAt: /* @__PURE__ */ new Date(),
function isAssistantMessageWithCompletedToolCalls(message) {
return message.role === "assistant" && message.toolInvocations && message.toolInvocations.length > 0 && message.toolInvocations.every((toolInvocation) => "result" in toolInvocation);
return message.role === "assistant" && message.toolInvocations != null && message.toolInvocations.length > 0 && message.toolInvocations.every((toolInvocation) => "result" in toolInvocation);
}
function countTrailingAssistantMessages(messages) {
let count = 0;
for (let i = messages.length - 1; i >= 0; i--) {
if (messages[i].role === "assistant") {
count++;
} else {
break;
}
}
return count;
}
@@ -580,0 +569,0 @@ // src/use-completion.ts
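The removed countTrailingAssistantMessages helper counted whole assistant messages, which broke once a single assistant message could carry several tool steps (the second 1.1.7 fix). The replacement condition derives the current step from the last message's tool invocations via extractMaxToolInvocationStep and only continues while the assistant has produced no text. A simplified restatement, not the bundled source; shouldTriggerNextStep and its parameter names are illustrative:

```ts
// Simplified restatement of the new continuation check; not the bundled source.
import type { Message } from '@ai-sdk/ui-utils';

// Mirrors isAssistantMessageWithCompletedToolCalls from the diff above.
function hasCompletedToolCalls(message: Message): boolean {
  return (
    message.role === 'assistant' &&
    message.toolInvocations != null &&
    message.toolInvocations.length > 0 &&
    message.toolInvocations.every((invocation) => 'result' in invocation)
  );
}

function shouldTriggerNextStep(options: {
  lastMessage: Message | undefined;
  messageCountBefore: number;     // chatRequest.messages.length at request start
  messageCountAfter: number;      // message count after the response finished
  stepBefore: number | undefined; // extractMaxToolInvocationStep(...) at request start
  stepAfter: number | undefined;  // extractMaxToolInvocationStep(...) afterwards
  maxSteps: number;
}): boolean {
  const { lastMessage, messageCountBefore, messageCountAfter, stepBefore, stepAfter, maxSteps } = options;
  return (
    lastMessage != null &&
    // new messages or new tool steps, to prevent infinite loops on errors:
    (messageCountAfter > messageCountBefore || stepAfter !== stepBefore) &&
    maxSteps > 1 &&
    hasCompletedToolCalls(lastMessage) &&
    // the assistant has not answered with text yet (empty string or undefined):
    !lastMessage.content &&
    // limit the number of automatic steps:
    (stepAfter ?? 0) < maxSteps
  );
}
```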
{
"name": "@ai-sdk/react",
"version": "1.1.6",
"version": "1.1.7",
"license": "Apache-2.0",
@@ -23,3 +23,3 @@ "sideEffects": false,
"@ai-sdk/provider-utils": "2.1.5",
"@ai-sdk/ui-utils": "1.1.6",
"@ai-sdk/ui-utils": "1.1.7",
"swr": "^2.2.5",
@@ -26,0 +26,0 @@ "throttleit": "2.1.0"
+ Added @ai-sdk/ui-utils@1.1.7 (transitive)
- Removed @ai-sdk/ui-utils@1.1.6 (transitive)
Updated @ai-sdk/ui-utils@1.1.7