Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@ai-sdk/solid

Package Overview
Dependencies
Maintainers
2
Versions
59
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@ai-sdk/solid - npm Package Compare versions

Comparing version 1.0.0-canary.1 to 1.0.0-canary.2

36

./dist/index.js

@@ -32,3 +32,3 @@ "use strict";

var import_store = require("solid-js/store");
var processStreamedResponse = async (api, chatRequest, mutate, setStreamData, streamData, extraMetadata, messagesRef, abortController, generateId, streamProtocol, onFinish, onResponse, onToolCall, sendExtraMessageFields, fetch, keepLastMessageOnError) => {
var processStreamedResponse = async (api, chatRequest, mutate, setStreamData, streamData, extraMetadata, messagesRef, abortController, generateId, streamProtocol = "data", onFinish, onResponse, onToolCall, sendExtraMessageFields, fetch, keepLastMessageOnError) => {
var _a;

@@ -132,3 +132,3 @@ const previousMessages = messagesRef;

const triggerRequest = async (chatRequest) => {
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z;
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t;
const messageCount = messagesRef.length;

@@ -149,10 +149,9 @@ try {

generateId(),
// streamMode is deprecated, use streamProtocol instead:
((_e2 = (_b2 = (_a2 = useChatOptions()).streamProtocol) == null ? void 0 : _b2.call(_a2)) != null ? _e2 : ((_d2 = (_c2 = useChatOptions()).streamMode) == null ? void 0 : _d2.call(_c2)) === "text") ? "text" : void 0,
(_g2 = (_f2 = useChatOptions()).onFinish) == null ? void 0 : _g2.call(_f2),
(_i = (_h2 = useChatOptions()).onResponse) == null ? void 0 : _i.call(_h2),
(_k = (_j = useChatOptions()).onToolCall) == null ? void 0 : _k.call(_j),
(_m = (_l = useChatOptions()).sendExtraMessageFields) == null ? void 0 : _m.call(_l),
(_o = (_n = useChatOptions()).fetch) == null ? void 0 : _o.call(_n),
(_r = (_q = (_p = useChatOptions()).keepLastMessageOnError) == null ? void 0 : _q.call(_p)) != null ? _r : false
(_b2 = (_a2 = useChatOptions()).streamProtocol) == null ? void 0 : _b2.call(_a2),
(_d2 = (_c2 = useChatOptions()).onFinish) == null ? void 0 : _d2.call(_c2),
(_f2 = (_e2 = useChatOptions()).onResponse) == null ? void 0 : _f2.call(_e2),
(_h2 = (_g2 = useChatOptions()).onToolCall) == null ? void 0 : _h2.call(_g2),
(_j = (_i = useChatOptions()).sendExtraMessageFields) == null ? void 0 : _j.call(_i),
(_l = (_k = useChatOptions()).fetch) == null ? void 0 : _l.call(_k),
(_o = (_n = (_m = useChatOptions()).keepLastMessageOnError) == null ? void 0 : _n.call(_m)) != null ? _o : false
);

@@ -165,3 +164,3 @@ abortController = null;

}
const onError = (_t = (_s = useChatOptions()).onError) == null ? void 0 : _t.call(_s);
const onError = (_q = (_p = useChatOptions()).onError) == null ? void 0 : _q.call(_p);
if (onError && err instanceof Error) {

@@ -174,3 +173,3 @@ onError(err);

}
const maxSteps = (_z = (_v = (_u = useChatOptions()).maxSteps) == null ? void 0 : _v.call(_u)) != null ? _z : ((_y = (_x = (_w = useChatOptions()).maxToolRoundtrips) == null ? void 0 : _x.call(_w)) != null ? _y : 0) + 1;
const maxSteps = (_t = (_s = (_r = useChatOptions()).maxSteps) == null ? void 0 : _s.call(_r)) != null ? _t : 1;
const messages2 = messagesRef;

@@ -385,3 +384,3 @@ const lastMessage = messages2[messages2.length - 1];

const complete = async (prompt, options) => {
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i2, _j, _k, _l, _m, _n, _o, _p;
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i2, _j, _k, _l, _m;
const existingData = (_a2 = streamData()) != null ? _a2 : [];

@@ -397,4 +396,3 @@ return (0, import_ui_utils2.callCompletionApi)({

},
// streamMode is deprecated, use streamProtocol instead:
streamProtocol: ((_h2 = (_e2 = (_d2 = useCompletionOptions()).streamProtocol) == null ? void 0 : _e2.call(_d2)) != null ? _h2 : ((_g2 = (_f2 = useCompletionOptions()).streamMode) == null ? void 0 : _g2.call(_f2)) === "text") ? "text" : void 0,
streamProtocol: (_e2 = (_d2 = useCompletionOptions()).streamProtocol) == null ? void 0 : _e2.call(_d2),
setCompletion: mutate,

@@ -404,9 +402,9 @@ setLoading: setIsLoading,

setAbortController,
onResponse: (_j = (_i2 = useCompletionOptions()).onResponse) == null ? void 0 : _j.call(_i2),
onFinish: (_l = (_k = useCompletionOptions()).onFinish) == null ? void 0 : _l.call(_k),
onError: (_n = (_m = useCompletionOptions()).onError) == null ? void 0 : _n.call(_m),
onResponse: (_g2 = (_f2 = useCompletionOptions()).onResponse) == null ? void 0 : _g2.call(_f2),
onFinish: (_i2 = (_h2 = useCompletionOptions()).onFinish) == null ? void 0 : _i2.call(_h2),
onError: (_k = (_j = useCompletionOptions()).onError) == null ? void 0 : _k.call(_j),
onData: (data) => {
setStreamData([...existingData, ...data != null ? data : []]);
},
fetch: (_p = (_o = useCompletionOptions()).fetch) == null ? void 0 : _p.call(_o)
fetch: (_m = (_l = useCompletionOptions()).fetch) == null ? void 0 : _m.call(_l)
});

@@ -413,0 +411,0 @@ };

# @ai-sdk/solid
## 1.0.0-canary.2
### Major Changes
- e117b54: chore (ui): remove deprecated useChat roundtrip options
- 7814c4b: chore (ui): remove streamMode setting from useChat & useCompletion
### Patch Changes
- Updated dependencies [b469a7e]
- Updated dependencies [7814c4b]
- Updated dependencies [db46ce5]
- @ai-sdk/provider-utils@2.0.0-canary.0
- @ai-sdk/ui-utils@1.0.0-canary.2
## 1.0.0-canary.1

@@ -4,0 +19,0 @@

@@ -58,17 +58,2 @@ import { FetchFunction } from '@ai-sdk/provider-utils';

/**
Maximum number of automatic roundtrips for tool calls.
An automatic tool call roundtrip is a call to the server with the
tool call results when all tool calls in the last assistant
message have results.
A maximum number is required to prevent infinite loops in the
case of misconfigured tools.
By default, it's set to 0, which will disable the feature.
@deprecated Use `maxSteps` instead (which is `maxToolRoundtrips` + 1).
*/
maxToolRoundtrips?: number;
/**
Maximum number of sequential LLM calls (steps), e.g. when you use tool calls. Must be at least 1.

@@ -75,0 +60,0 @@

@@ -32,3 +32,3 @@ "use strict";

var import_store = require("solid-js/store");
var processStreamedResponse = async (api, chatRequest, mutate, setStreamData, streamData, extraMetadata, messagesRef, abortController, generateId, streamProtocol, onFinish, onResponse, onToolCall, sendExtraMessageFields, fetch, keepLastMessageOnError) => {
var processStreamedResponse = async (api, chatRequest, mutate, setStreamData, streamData, extraMetadata, messagesRef, abortController, generateId, streamProtocol = "data", onFinish, onResponse, onToolCall, sendExtraMessageFields, fetch, keepLastMessageOnError) => {
var _a;

@@ -132,3 +132,3 @@ const previousMessages = messagesRef;

const triggerRequest = async (chatRequest) => {
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z;
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i, _j, _k, _l, _m, _n, _o, _p, _q, _r, _s, _t;
const messageCount = messagesRef.length;

@@ -149,10 +149,9 @@ try {

generateId(),
// streamMode is deprecated, use streamProtocol instead:
((_e2 = (_b2 = (_a2 = useChatOptions()).streamProtocol) == null ? void 0 : _b2.call(_a2)) != null ? _e2 : ((_d2 = (_c2 = useChatOptions()).streamMode) == null ? void 0 : _d2.call(_c2)) === "text") ? "text" : void 0,
(_g2 = (_f2 = useChatOptions()).onFinish) == null ? void 0 : _g2.call(_f2),
(_i = (_h2 = useChatOptions()).onResponse) == null ? void 0 : _i.call(_h2),
(_k = (_j = useChatOptions()).onToolCall) == null ? void 0 : _k.call(_j),
(_m = (_l = useChatOptions()).sendExtraMessageFields) == null ? void 0 : _m.call(_l),
(_o = (_n = useChatOptions()).fetch) == null ? void 0 : _o.call(_n),
(_r = (_q = (_p = useChatOptions()).keepLastMessageOnError) == null ? void 0 : _q.call(_p)) != null ? _r : false
(_b2 = (_a2 = useChatOptions()).streamProtocol) == null ? void 0 : _b2.call(_a2),
(_d2 = (_c2 = useChatOptions()).onFinish) == null ? void 0 : _d2.call(_c2),
(_f2 = (_e2 = useChatOptions()).onResponse) == null ? void 0 : _f2.call(_e2),
(_h2 = (_g2 = useChatOptions()).onToolCall) == null ? void 0 : _h2.call(_g2),
(_j = (_i = useChatOptions()).sendExtraMessageFields) == null ? void 0 : _j.call(_i),
(_l = (_k = useChatOptions()).fetch) == null ? void 0 : _l.call(_k),
(_o = (_n = (_m = useChatOptions()).keepLastMessageOnError) == null ? void 0 : _n.call(_m)) != null ? _o : false
);

@@ -165,3 +164,3 @@ abortController = null;

}
const onError = (_t = (_s = useChatOptions()).onError) == null ? void 0 : _t.call(_s);
const onError = (_q = (_p = useChatOptions()).onError) == null ? void 0 : _q.call(_p);
if (onError && err instanceof Error) {

@@ -174,3 +173,3 @@ onError(err);

}
const maxSteps = (_z = (_v = (_u = useChatOptions()).maxSteps) == null ? void 0 : _v.call(_u)) != null ? _z : ((_y = (_x = (_w = useChatOptions()).maxToolRoundtrips) == null ? void 0 : _x.call(_w)) != null ? _y : 0) + 1;
const maxSteps = (_t = (_s = (_r = useChatOptions()).maxSteps) == null ? void 0 : _s.call(_r)) != null ? _t : 1;
const messages2 = messagesRef;

@@ -385,3 +384,3 @@ const lastMessage = messages2[messages2.length - 1];

const complete = async (prompt, options) => {
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i2, _j, _k, _l, _m, _n, _o, _p;
var _a2, _b2, _c2, _d2, _e2, _f2, _g2, _h2, _i2, _j, _k, _l, _m;
const existingData = (_a2 = streamData()) != null ? _a2 : [];

@@ -397,4 +396,3 @@ return (0, import_ui_utils2.callCompletionApi)({

},
// streamMode is deprecated, use streamProtocol instead:
streamProtocol: ((_h2 = (_e2 = (_d2 = useCompletionOptions()).streamProtocol) == null ? void 0 : _e2.call(_d2)) != null ? _h2 : ((_g2 = (_f2 = useCompletionOptions()).streamMode) == null ? void 0 : _g2.call(_f2)) === "text") ? "text" : void 0,
streamProtocol: (_e2 = (_d2 = useCompletionOptions()).streamProtocol) == null ? void 0 : _e2.call(_d2),
setCompletion: mutate,

@@ -404,9 +402,9 @@ setLoading: setIsLoading,

setAbortController,
onResponse: (_j = (_i2 = useCompletionOptions()).onResponse) == null ? void 0 : _j.call(_i2),
onFinish: (_l = (_k = useCompletionOptions()).onFinish) == null ? void 0 : _l.call(_k),
onError: (_n = (_m = useCompletionOptions()).onError) == null ? void 0 : _n.call(_m),
onResponse: (_g2 = (_f2 = useCompletionOptions()).onResponse) == null ? void 0 : _g2.call(_f2),
onFinish: (_i2 = (_h2 = useCompletionOptions()).onFinish) == null ? void 0 : _i2.call(_h2),
onError: (_k = (_j = useCompletionOptions()).onError) == null ? void 0 : _k.call(_j),
onData: (data) => {
setStreamData([...existingData, ...data != null ? data : []]);
},
fetch: (_p = (_o = useCompletionOptions()).fetch) == null ? void 0 : _p.call(_o)
fetch: (_m = (_l = useCompletionOptions()).fetch) == null ? void 0 : _m.call(_l)
});

@@ -413,0 +411,0 @@ };

{
"name": "@ai-sdk/solid",
"version": "1.0.0-canary.1",
"version": "1.0.0-canary.2",
"license": "Apache-2.0",

@@ -22,4 +22,4 @@ "sideEffects": false,

"dependencies": {
"@ai-sdk/provider-utils": "1.0.22",
"@ai-sdk/ui-utils": "1.0.0-canary.1"
"@ai-sdk/provider-utils": "2.0.0-canary.0",
"@ai-sdk/ui-utils": "1.0.0-canary.2"
},

@@ -26,0 +26,0 @@ "devDependencies": {

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc