
@ai-sdk/solid



@ai-sdk/solid - npm package version comparison

Comparing version 1.0.0-canary.0 to 1.0.0-canary.1. The diff below removes the legacy function/tool-calling path (`processChatStream` and `experimental_onFunctionCall`) and the deprecated per-request `options` field from the compiled chat helpers, and bumps the `@ai-sdk/ui-utils` dependency to the matching canary.


./dist/index.js

@@ -32,3 +32,3 @@ "use strict";
  var import_store = require("solid-js/store");
- var getStreamedResponse = async (api, chatRequest, mutate, setStreamData, streamData, extraMetadata, messagesRef, abortController, generateId, streamProtocol, onFinish, onResponse, onToolCall, sendExtraMessageFields, fetch, keepLastMessageOnError) => {
+ var processStreamedResponse = async (api, chatRequest, mutate, setStreamData, streamData, extraMetadata, messagesRef, abortController, generateId, streamProtocol, onFinish, onResponse, onToolCall, sendExtraMessageFields, fetch, keepLastMessageOnError) => {
  var _a;

@@ -39,6 +39,5 @@ const previousMessages = messagesRef;
  const constructedMessagesPayload = sendExtraMessageFields ? chatRequest.messages : chatRequest.messages.map(
- ({ role, content, name, data, annotations, toolInvocations }) => ({
+ ({ role, content, data, annotations, toolInvocations }) => ({
  role,
  content,
- ...name !== void 0 && { name },
  ...data !== void 0 && { data },

@@ -134,3 +133,3 @@ ...annotations !== void 0 && { annotations },
  const triggerRequest = async (chatRequest) => {
- var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i, _j;
+ var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z;
  const messageCount = messagesRef.length;

@@ -141,31 +140,21 @@ try {
  abortController = new AbortController();
- await (0, import_ui_utils.processChatStream)({
- getStreamedResponse: () => {
- var _a3, _b3, _c3, _d3, _e3, _f3, _g3, _h3, _i2, _j2, _k, _l, _m, _n, _o, _p, _q, _r;
- return getStreamedResponse(
- api(),
- chatRequest,
- mutate,
- setStreamData,
- streamData,
- extraMetadata,
- messagesRef,
- abortController,
- generateId(),
- // streamMode is deprecated, use streamProtocol instead:
- ((_e3 = (_b3 = (_a3 = useChatOptions()).streamProtocol) == null ? void 0 : _b3.call(_a3)) != null ? _e3 : ((_d3 = (_c3 = useChatOptions()).streamMode) == null ? void 0 : _d3.call(_c3)) === "text") ? "text" : void 0,
- (_g3 = (_f3 = useChatOptions()).onFinish) == null ? void 0 : _g3.call(_f3),
- (_i2 = (_h3 = useChatOptions()).onResponse) == null ? void 0 : _i2.call(_h3),
- (_k = (_j2 = useChatOptions()).onToolCall) == null ? void 0 : _k.call(_j2),
- (_m = (_l = useChatOptions()).sendExtraMessageFields) == null ? void 0 : _m.call(_l),
- (_o = (_n = useChatOptions()).fetch) == null ? void 0 : _o.call(_n),
- (_r = (_q = (_p = useChatOptions()).keepLastMessageOnError) == null ? void 0 : _q.call(_p)) != null ? _r : false
- );
- },
- experimental_onFunctionCall: (_b2 = (_a2 = useChatOptions()).experimental_onFunctionCall) == null ? void 0 : _b2.call(_a2),
- updateChatRequest(newChatRequest) {
- chatRequest = newChatRequest;
- },
- getCurrentMessages: () => messagesRef
- });
+ await processStreamedResponse(
+ api(),
+ chatRequest,
+ mutate,
+ setStreamData,
+ streamData,
+ extraMetadata,
+ messagesRef,
+ abortController,
+ generateId(),
+ // streamMode is deprecated, use streamProtocol instead:
+ ((_e2 = (_b2 = (_a2 = useChatOptions()).streamProtocol) == null ? void 0 : _b2.call(_a2)) != null ? _e2 : ((_d2 = (_c2 = useChatOptions()).streamMode) == null ? void 0 : _d2.call(_c2)) === "text") ? "text" : void 0,
+ (_g2 = (_f2 = useChatOptions()).onFinish) == null ? void 0 : _g2.call(_f2),
+ (_i = (_h2 = useChatOptions()).onResponse) == null ? void 0 : _i.call(_h2),
+ (_k = (_j = useChatOptions()).onToolCall) == null ? void 0 : _k.call(_j),
+ (_m = (_l = useChatOptions()).sendExtraMessageFields) == null ? void 0 : _m.call(_l),
+ (_o = (_n = useChatOptions()).fetch) == null ? void 0 : _o.call(_n),
+ (_r = (_q = (_p = useChatOptions()).keepLastMessageOnError) == null ? void 0 : _q.call(_p)) != null ? _r : false
+ );
  abortController = null;

@@ -177,3 +166,3 @@ } catch (err) {
  }
- const onError = (_d2 = (_c2 = useChatOptions()).onError) == null ? void 0 : _d2.call(_c2);
+ const onError = (_t = (_s = useChatOptions()).onError) == null ? void 0 : _t.call(_s);
  if (onError && err instanceof Error) {

@@ -186,3 +175,3 @@ onError(err);
  }
- const maxSteps = (_j = (_f2 = (_e2 = useChatOptions()).maxSteps) == null ? void 0 : _f2.call(_e2)) != null ? _j : ((_i = (_h2 = (_g2 = useChatOptions()).maxToolRoundtrips) == null ? void 0 : _h2.call(_g2)) != null ? _i : 0) + 1;
+ const maxSteps = (_z = (_v = (_u = useChatOptions()).maxSteps) == null ? void 0 : _v.call(_u)) != null ? _z : ((_y = (_x = (_w = useChatOptions()).maxToolRoundtrips) == null ? void 0 : _x.call(_w)) != null ? _y : 0) + 1;
  const messages2 = messagesRef;

@@ -201,21 +190,14 @@ const lastMessage = messages2[messages2.length - 1];
  };
- const append = async (message, { options, data, headers, body } = {}) => {
+ const append = async (message, { data, headers, body } = {}) => {
  if (!message.id) {
  message.id = generateId()();
  }
- const requestOptions = {
- headers: headers != null ? headers : options == null ? void 0 : options.headers,
- body: body != null ? body : options == null ? void 0 : options.body
- };
- const chatRequest = {
+ return triggerRequest({
  messages: messagesRef.concat(message),
- options: requestOptions,
- headers: requestOptions.headers,
- body: requestOptions.body,
+ headers,
+ body,
  data
- };
- return triggerRequest(chatRequest);
+ });
  };
  const reload = async ({
- options,
  data,

@@ -225,27 +207,12 @@ headers,
  } = {}) => {
- if (messagesRef.length === 0)
+ if (messagesRef.length === 0) {
  return null;
- const requestOptions = {
- headers: headers != null ? headers : options == null ? void 0 : options.headers,
- body: body != null ? body : options == null ? void 0 : options.body
- };
+ }
  const lastMessage = messagesRef[messagesRef.length - 1];
- if (lastMessage.role === "assistant") {
- const chatRequest2 = {
- messages: messagesRef.slice(0, -1),
- options: requestOptions,
- headers: requestOptions.headers,
- body: requestOptions.body,
- data
- };
- return triggerRequest(chatRequest2);
- }
- const chatRequest = {
- messages: messagesRef,
- options: requestOptions,
- headers: requestOptions.headers,
- body: requestOptions.body,
+ return triggerRequest({
+ messages: lastMessage.role === "assistant" ? messagesRef.slice(0, -1) : messagesRef,
+ headers,
+ body,
  data
- };
- return triggerRequest(chatRequest);
+ });
  };

@@ -275,3 +242,3 @@ const stop = () => {
  const handleSubmit = (event, options = {}, metadata) => {
- var _a2, _b2, _c2, _d2, _e2;
+ var _a2;
  (_a2 = event == null ? void 0 : event.preventDefault) == null ? void 0 : _a2.call(event);

@@ -287,7 +254,3 @@ const inputValue = input();
  }
- const requestOptions = {
- headers: (_c2 = options.headers) != null ? _c2 : (_b2 = options.options) == null ? void 0 : _b2.headers,
- body: (_e2 = options.body) != null ? _e2 : (_d2 = options.options) == null ? void 0 : _d2.body
- };
- const chatRequest = {
+ triggerRequest({
  messages: !inputValue && options.allowEmptySubmit ? messagesRef : messagesRef.concat({

@@ -299,8 +262,6 @@ id: generateId()(),
  }),
- options: requestOptions,
- body: requestOptions.body,
- headers: requestOptions.headers,
+ headers: options.headers,
+ body: options.body,
  data: options.data
- };
- triggerRequest(chatRequest);
+ });
  setInput("");

@@ -307,0 +268,0 @@ };
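
Taken together, the hunks above remove the deprecated per-request `options` field: `append`, `reload`, and `handleSubmit` now pass top-level `headers`, `body`, and `data` straight to `triggerRequest`. A minimal caller-side sketch of the new shape, assuming the package's usual `useChat` entry point; the endpoint, token, and body values are illustrative:

```js
import { useChat } from "@ai-sdk/solid";

// Hypothetical setup: "/api/chat" and the header/body values are placeholders.
const { append } = useChat({ api: "/api/chat" });

async function sendWithRequestOptions() {
  // 1.0.0-canary.0 also accepted { options: { headers, body } } as a fallback;
  // in 1.0.0-canary.1 only these top-level fields are read.
  await append(
    { role: "user", content: "Hello" },
    {
      headers: { Authorization: "Bearer <token>" },
      body: { sessionId: "abc123" },
      data: { source: "docs-example" },
    }
  );
}
```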

# @ai-sdk/solid
## 1.0.0-canary.1
### Major Changes
- 8bf5756: chore: remove legacy function/tool calling
### Patch Changes
- Updated dependencies [8bf5756]
  - @ai-sdk/ui-utils@1.0.0-canary.1
## 1.0.0-canary.0
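
The `8bf5756` major change corresponds to the dist diff above dropping `processChatStream` and the `experimental_onFunctionCall` option; client-side tool handling continues through the `onToolCall` option that the new `processStreamedResponse` call still forwards. A hedged migration sketch, assuming `onToolCall` receives a `{ toolCall }` object and may return the tool result, as in the other AI SDK UI packages:

```js
import { useChat } from "@ai-sdk/solid";

// Hypothetical replacement for code that used the removed
// experimental_onFunctionCall option. The { toolCall } shape and the
// return-value-as-result behavior are assumptions based on the AI SDK UI
// packages, not confirmed by this diff.
const chat = useChat({
  api: "/api/chat",
  onToolCall: async ({ toolCall }) => {
    if (toolCall.toolName === "getLocation") {
      // Returning a value reports the client-side tool result to the chat.
      return { city: "Berlin" };
    }
  },
});
```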


package.json

  {
  "name": "@ai-sdk/solid",
- "version": "1.0.0-canary.0",
+ "version": "1.0.0-canary.1",
  "license": "Apache-2.0",

@@ -23,3 +23,3 @@ "sideEffects": false,

"@ai-sdk/provider-utils": "1.0.22",
"@ai-sdk/ui-utils": "1.0.0-canary.0"
"@ai-sdk/ui-utils": "1.0.0-canary.1"
},

@@ -26,0 +26,0 @@ "devDependencies": {

