@reduxjs/toolkit
Comparing version 2.2.8 to 2.3.0
@@ -174,4 +174,97 @@ import * as _reduxjs_toolkit_query from '@reduxjs/toolkit/query'; | ||
type TypedUseLazyQuerySubscription<ResultType, QueryArg, BaseQuery extends BaseQueryFn> = UseLazyQuerySubscription<QueryDefinition<QueryArg, BaseQuery, string, ResultType, string>>; | ||
/** | ||
* @internal | ||
*/ | ||
type QueryStateSelector<R extends Record<string, any>, D extends QueryDefinition<any, any, any, any>> = (state: UseQueryStateDefaultResult<D>) => R; | ||
/** | ||
* Provides a way to define a strongly-typed version of | ||
* {@linkcode QueryStateSelector} for use with a specific query. | ||
* This is useful for scenarios where you want to create a "pre-typed" | ||
* {@linkcode UseQueryStateOptions.selectFromResult | selectFromResult} | ||
* function. | ||
* | ||
* @example | ||
* <caption>#### __Create a strongly-typed `selectFromResult` selector function__</caption> | ||
* | ||
* ```tsx | ||
* import type { TypedQueryStateSelector } from '@reduxjs/toolkit/query/react' | ||
* import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react' | ||
* | ||
* type Post = { | ||
* id: number | ||
* title: string | ||
* } | ||
* | ||
* type PostsApiResponse = { | ||
* posts: Post[] | ||
* total: number | ||
* skip: number | ||
* limit: number | ||
* } | ||
* | ||
* type QueryArgument = number | undefined | ||
* | ||
* type BaseQueryFunction = ReturnType<typeof fetchBaseQuery> | ||
* | ||
* type SelectedResult = Pick<PostsApiResponse, 'posts'> | ||
* | ||
* const postsApiSlice = createApi({ | ||
* baseQuery: fetchBaseQuery({ baseUrl: 'https://dummyjson.com/posts' }), | ||
* reducerPath: 'postsApi', | ||
* tagTypes: ['Posts'], | ||
* endpoints: (build) => ({ | ||
* getPosts: build.query<PostsApiResponse, QueryArgument>({ | ||
* query: (limit = 5) => `?limit=${limit}&select=title`, | ||
* }), | ||
* }), | ||
* }) | ||
* | ||
* const { useGetPostsQuery } = postsApiSlice | ||
* | ||
* function PostById({ id }: { id: number }) { | ||
* const { post } = useGetPostsQuery(undefined, { | ||
* selectFromResult: (state) => ({ | ||
* post: state.data?.posts.find((post) => post.id === id), | ||
* }), | ||
* }) | ||
* | ||
* return <li>{post?.title}</li> | ||
* } | ||
* | ||
* const EMPTY_ARRAY: Post[] = [] | ||
* | ||
* const typedSelectFromResult: TypedQueryStateSelector< | ||
* PostsApiResponse, | ||
* QueryArgument, | ||
* BaseQueryFunction, | ||
* SelectedResult | ||
* > = (state) => ({ posts: state.data?.posts ?? EMPTY_ARRAY }) | ||
* | ||
* function PostsList() { | ||
* const { posts } = useGetPostsQuery(undefined, { | ||
* selectFromResult: typedSelectFromResult, | ||
* }) | ||
* | ||
* return ( | ||
* <div> | ||
* <ul> | ||
* {posts.map((post) => ( | ||
* <PostById key={post.id} id={post.id} /> | ||
* ))} | ||
* </ul> | ||
* </div> | ||
* ) | ||
* } | ||
* ``` | ||
* | ||
* @template ResultType - The type of the result `data` returned by the query. | ||
* @template QueryArgumentType - The type of the argument passed into the query. | ||
* @template BaseQueryFunctionType - The type of the base query function being used. | ||
* @template SelectedResultType - The type of the selected result returned by the __`selectFromResult`__ function. | ||
* | ||
* @since 2.7.9 | ||
* @public | ||
*/ | ||
type TypedQueryStateSelector<ResultType, QueryArgumentType, BaseQueryFunctionType extends BaseQueryFn, SelectedResultType extends Record<string, any> = UseQueryStateDefaultResult<QueryDefinition<QueryArgumentType, BaseQueryFunctionType, string, ResultType, string>>> = QueryStateSelector<SelectedResultType, QueryDefinition<QueryArgumentType, BaseQueryFunctionType, string, ResultType, string>>; | ||
/** | ||
* A React hook that reads the request status and cached data from the Redux store. The component will re-render as the loading status changes and the data becomes available. | ||
@@ -567,2 +660,2 @@ * | ||
export { ApiProvider, type TypedLazyQueryTrigger, type TypedMutationTrigger, type TypedUseLazyQuery, type TypedUseLazyQuerySubscription, type TypedUseMutation, type TypedUseMutationResult, type TypedUseQuery, type TypedUseQueryHookResult, type TypedUseQueryState, type TypedUseQueryStateOptions, type TypedUseQueryStateResult, type TypedUseQuerySubscription, type TypedUseQuerySubscriptionResult, createApi, reactHooksModule }; | ||
export { ApiProvider, type TypedLazyQueryTrigger, type TypedMutationTrigger, type TypedQueryStateSelector, type TypedUseLazyQuery, type TypedUseLazyQuerySubscription, type TypedUseMutation, type TypedUseMutationResult, type TypedUseQuery, type TypedUseQueryHookResult, type TypedUseQueryState, type TypedUseQueryStateOptions, type TypedUseQueryStateResult, type TypedUseQuerySubscription, type TypedUseQuerySubscriptionResult, createApi, reactHooksModule }; |
{ | ||
"name": "@reduxjs/toolkit", | ||
"version": "2.2.8", | ||
"version": "2.3.0", | ||
"description": "The official, opinionated, batteries-included toolset for efficient Redux development", | ||
@@ -5,0 +5,0 @@ "author": "Mark Erikson <mark@isquaredsoftware.com>", |
@@ -66,2 +66,5 @@ import type { ThunkDispatch } from '@reduxjs/toolkit' | ||
/** | ||
* @public | ||
*/ | ||
export type BaseQueryResult<BaseQuery extends BaseQueryFn> = | ||
@@ -74,2 +77,5 @@ UnwrapPromise<ReturnType<BaseQuery>> extends infer Unwrapped | ||
/** | ||
* @public | ||
*/ | ||
export type BaseQueryMeta<BaseQuery extends BaseQueryFn> = UnwrapPromise< | ||
@@ -79,2 +85,5 @@ ReturnType<BaseQuery> | ||
/** | ||
* @public | ||
*/ | ||
export type BaseQueryError<BaseQuery extends BaseQueryFn> = Exclude< | ||
@@ -85,6 +94,12 @@ UnwrapPromise<ReturnType<BaseQuery>>, | ||
/** | ||
* @public | ||
*/ | ||
export type BaseQueryArg<T extends (arg: any, ...args: any[]) => any> = | ||
T extends (arg: infer A, ...args: any[]) => any ? A : any | ||
/** | ||
* @public | ||
*/ | ||
export type BaseQueryExtraOptions<BaseQuery extends BaseQueryFn> = | ||
Parameters<BaseQuery>[2] |
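The helper types above are now tagged `@public`, and the export hunk later in this diff re-exports them from `@reduxjs/toolkit/query`. A minimal sketch of how they can be used to derive types from an existing base query — the `fetchBaseQuery` instance and base URL are purely illustrative:

```ts
import { fetchBaseQuery } from '@reduxjs/toolkit/query'
import type {
  BaseQueryArg,
  BaseQueryError,
  BaseQueryMeta,
  BaseQueryResult,
} from '@reduxjs/toolkit/query'

const baseQuery = fetchBaseQuery({ baseUrl: 'https://example.com/api' })

// Argument accepted by the base query: `string | FetchArgs` for fetchBaseQuery.
type Arg = BaseQueryArg<typeof baseQuery>
// Error shape produced on failure: `FetchBaseQueryError` for fetchBaseQuery.
type Error_ = BaseQueryError<typeof baseQuery>
// Meta attached to each result: `FetchBaseQueryMeta` for fetchBaseQuery.
type Meta = BaseQueryMeta<typeof baseQuery>
// Successful `data` payload: `unknown` here, since fetchBaseQuery leaves the
// response type to each endpoint definition.
type Data = BaseQueryResult<typeof baseQuery>
```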
@@ -47,3 +47,4 @@ import type { QueryDefinition } from '../../endpointDefinitions' | ||
}) => { | ||
const { removeQueryResult, unsubscribeQueryResult } = api.internalActions | ||
const { removeQueryResult, unsubscribeQueryResult, cacheEntriesUpserted } = | ||
api.internalActions | ||
@@ -54,2 +55,3 @@ const canTriggerUnsubscribe = isAnyOf( | ||
queryThunk.rejected, | ||
cacheEntriesUpserted.match, | ||
) | ||
@@ -71,12 +73,23 @@ | ||
const state = mwApi.getState()[reducerPath] | ||
const { queryCacheKey } = unsubscribeQueryResult.match(action) | ||
? action.payload | ||
: action.meta.arg | ||
let queryCacheKeys: QueryCacheKey[] | ||
handleUnsubscribe( | ||
queryCacheKey, | ||
state.queries[queryCacheKey]?.endpointName, | ||
mwApi, | ||
state.config, | ||
) | ||
if (cacheEntriesUpserted.match(action)) { | ||
queryCacheKeys = action.payload.map( | ||
(entry) => entry.queryDescription.queryCacheKey, | ||
) | ||
} else { | ||
const { queryCacheKey } = unsubscribeQueryResult.match(action) | ||
? action.payload | ||
: action.meta.arg | ||
queryCacheKeys = [queryCacheKey] | ||
} | ||
for (const queryCacheKey of queryCacheKeys) { | ||
handleUnsubscribe( | ||
queryCacheKey, | ||
state.queries[queryCacheKey]?.endpointName, | ||
mwApi, | ||
state.config, | ||
) | ||
} | ||
} | ||
@@ -138,2 +151,3 @@ | ||
} | ||
currentRemovalTimeouts[queryCacheKey] = setTimeout(() => { | ||
@@ -140,0 +154,0 @@ if (!anySubscriptionsRemainingForKey(queryCacheKey)) { |
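The cache-collection middleware now routes entries from the new `cacheEntriesUpserted` action through the same `handleUnsubscribe` path as ordinary queries. In practice, an upserted entry with no subscribers gets a removal timer, so it only lives for `keepUnusedDataFor`. A hedged sketch of that behavior — the endpoint, URL, and timing value are assumptions for illustration:

```ts
import { configureStore } from '@reduxjs/toolkit'
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query'

type Post = { id: number; title: string }

const api = createApi({
  baseQuery: fetchBaseQuery({ baseUrl: 'https://example.com/api' }),
  keepUnusedDataFor: 30, // seconds an unsubscribed entry stays cached
  endpoints: (build) => ({
    getPost: build.query<Post, number>({ query: (id) => `posts/${id}` }),
  }),
})

const store = configureStore({
  reducer: { [api.reducerPath]: api.reducer },
  middleware: (getDefaultMiddleware) =>
    getDefaultMiddleware().concat(api.middleware),
})

// The upserted entry has no subscribers, so the middleware schedules its
// removal: it stays in the cache for `keepUnusedDataFor` seconds unless a
// hook or `initiate()` call subscribes to `getPost(1)` in the meantime.
store.dispatch(
  api.util.upsertQueryEntries([
    { endpointName: 'getPost', arg: 1, value: { id: 1, title: 'Hello' } },
  ]),
)
```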
@@ -186,2 +186,26 @@ import type { ThunkDispatch, UnknownAction } from '@reduxjs/toolkit' | ||
function resolveLifecycleEntry( | ||
cacheKey: string, | ||
data: unknown, | ||
meta: unknown, | ||
) { | ||
const lifecycle = lifecycleMap[cacheKey] | ||
if (lifecycle?.valueResolved) { | ||
lifecycle.valueResolved({ | ||
data, | ||
meta, | ||
}) | ||
delete lifecycle.valueResolved | ||
} | ||
} | ||
function removeLifecycleEntry(cacheKey: string) { | ||
const lifecycle = lifecycleMap[cacheKey] | ||
if (lifecycle) { | ||
delete lifecycleMap[cacheKey] | ||
lifecycle.cacheEntryRemoved() | ||
} | ||
} | ||
const handler: ApiMiddlewareInternalHandler = ( | ||
@@ -194,13 +218,33 @@ action, | ||
if (queryThunk.pending.match(action)) { | ||
function checkForNewCacheKey( | ||
endpointName: string, | ||
cacheKey: string, | ||
requestId: string, | ||
originalArgs: unknown, | ||
) { | ||
const oldState = stateBefore[reducerPath].queries[cacheKey] | ||
const state = mwApi.getState()[reducerPath].queries[cacheKey] | ||
if (!oldState && state) { | ||
handleNewKey( | ||
action.meta.arg.endpointName, | ||
action.meta.arg.originalArgs, | ||
cacheKey, | ||
mwApi, | ||
handleNewKey(endpointName, originalArgs, cacheKey, mwApi, requestId) | ||
} | ||
} | ||
if (queryThunk.pending.match(action)) { | ||
checkForNewCacheKey( | ||
action.meta.arg.endpointName, | ||
cacheKey, | ||
action.meta.requestId, | ||
action.meta.arg.originalArgs, | ||
) | ||
} else if (api.internalActions.cacheEntriesUpserted.match(action)) { | ||
for (const { queryDescription, value } of action.payload) { | ||
const { endpointName, originalArgs, queryCacheKey } = queryDescription | ||
checkForNewCacheKey( | ||
endpointName, | ||
queryCacheKey, | ||
action.meta.requestId, | ||
originalArgs, | ||
) | ||
resolveLifecycleEntry(queryCacheKey, value, {}) | ||
} | ||
@@ -219,10 +263,3 @@ } else if (mutationThunk.pending.match(action)) { | ||
} else if (isFulfilledThunk(action)) { | ||
const lifecycle = lifecycleMap[cacheKey] | ||
if (lifecycle?.valueResolved) { | ||
lifecycle.valueResolved({ | ||
data: action.payload, | ||
meta: action.meta.baseQueryMeta, | ||
}) | ||
delete lifecycle.valueResolved | ||
} | ||
resolveLifecycleEntry(cacheKey, action.payload, action.meta.baseQueryMeta) | ||
} else if ( | ||
@@ -232,11 +269,6 @@ api.internalActions.removeQueryResult.match(action) || | ||
) { | ||
const lifecycle = lifecycleMap[cacheKey] | ||
if (lifecycle) { | ||
delete lifecycleMap[cacheKey] | ||
lifecycle.cacheEntryRemoved() | ||
} | ||
removeLifecycleEntry(cacheKey) | ||
} else if (api.util.resetApiState.match(action)) { | ||
for (const [cacheKey, lifecycle] of Object.entries(lifecycleMap)) { | ||
delete lifecycleMap[cacheKey] | ||
lifecycle.cacheEntryRemoved() | ||
for (const cacheKey of Object.keys(lifecycleMap)) { | ||
removeLifecycleEntry(cacheKey) | ||
} | ||
@@ -243,0 +275,0 @@ } |
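Per the lifecycle-middleware changes above, upserted entries also go through `handleNewKey` and immediately resolve the entry's `cacheDataLoaded` promise with the upserted value and an empty `meta`. A sketch of what that means for an endpoint's `onCacheEntryAdded` — the endpoint shape and URL are illustrative; dispatching `api.util.upsertQueryEntries` as in the previous sketch starts this lifecycle without any network request:

```ts
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query'

type Post = { id: number; title: string }

const api = createApi({
  baseQuery: fetchBaseQuery({ baseUrl: 'https://example.com/api' }),
  endpoints: (build) => ({
    getPost: build.query<Post, number>({
      query: (id) => `posts/${id}`,
      async onCacheEntryAdded(arg, { cacheDataLoaded, cacheEntryRemoved }) {
        try {
          // For upserted entries this resolves immediately: `data` is the
          // upserted value and `meta` is an empty object.
          const { data } = await cacheDataLoaded
          console.log('cache entry created for post', arg, data.title)
        } catch {
          // cacheDataLoaded rejects if the entry is removed before data arrives
        }
        // Still resolves when the entry is later evicted or the api is reset.
        await cacheEntryRemoved
      },
    }),
  }),
})
```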
@@ -11,2 +11,4 @@ import type { Action, PayloadAction, UnknownAction } from '@reduxjs/toolkit' | ||
prepareAutoBatched, | ||
SHOULD_AUTOBATCH, | ||
nanoid, | ||
} from './rtkImports' | ||
@@ -25,11 +27,20 @@ import type { | ||
ConfigState, | ||
QueryKeys, | ||
} from './apiState' | ||
import { QueryStatus } from './apiState' | ||
import type { MutationThunk, QueryThunk, RejectedAction } from './buildThunks' | ||
import type { | ||
MutationThunk, | ||
QueryThunk, | ||
QueryThunkArg, | ||
RejectedAction, | ||
} from './buildThunks' | ||
import { calculateProvidedByThunk } from './buildThunks' | ||
import type { | ||
AssertTagTypes, | ||
DefinitionType, | ||
EndpointDefinitions, | ||
FullTagDescription, | ||
QueryArgFrom, | ||
QueryDefinition, | ||
ResultTypeFrom, | ||
} from '../endpointDefinitions' | ||
@@ -47,3 +58,46 @@ import type { Patch } from 'immer' | ||
import { isUpsertQuery } from './buildInitiate' | ||
import type { InternalSerializeQueryArgs } from '../defaultSerializeQueryArgs' | ||
/** | ||
* A typesafe single entry to be upserted into the cache | ||
*/ | ||
export type NormalizedQueryUpsertEntry< | ||
Definitions extends EndpointDefinitions, | ||
EndpointName extends QueryKeys<Definitions>, | ||
> = { | ||
endpointName: EndpointName | ||
arg: QueryArgFrom<Definitions[EndpointName]> | ||
value: ResultTypeFrom<Definitions[EndpointName]> | ||
} | ||
/** | ||
* The internal version that is not typesafe since we can't carry the generics through `createSlice` | ||
*/ | ||
type NormalizedQueryUpsertEntryPayload = { | ||
endpointName: string | ||
arg: unknown | ||
value: unknown | ||
} | ||
export type ProcessedQueryUpsertEntry = { | ||
queryDescription: QueryThunkArg | ||
value: unknown | ||
} | ||
/** | ||
* A typesafe representation of a util action creator that accepts cache entry descriptions to upsert | ||
*/ | ||
export type UpsertEntries<Definitions extends EndpointDefinitions> = < | ||
EndpointNames extends Array<QueryKeys<Definitions>>, | ||
>( | ||
entries: [ | ||
...{ | ||
[I in keyof EndpointNames]: NormalizedQueryUpsertEntry< | ||
Definitions, | ||
EndpointNames[I] | ||
> | ||
}, | ||
], | ||
) => PayloadAction<NormalizedQueryUpsertEntryPayload[]> | ||
function updateQuerySubstateIfExists( | ||
@@ -98,2 +152,3 @@ state: QueryState<any>, | ||
mutationThunk, | ||
serializeQueryArgs, | ||
context: { | ||
@@ -111,2 +166,3 @@ endpointDefinitions: definitions, | ||
mutationThunk: MutationThunk | ||
serializeQueryArgs: InternalSerializeQueryArgs | ||
context: ApiContext<EndpointDefinitions> | ||
@@ -120,2 +176,96 @@ assertTagType: AssertTagTypes | ||
const resetApiState = createAction(`${reducerPath}/resetApiState`) | ||
function writePendingCacheEntry( | ||
draft: QueryState<any>, | ||
arg: QueryThunkArg, | ||
upserting: boolean, | ||
meta: { | ||
arg: QueryThunkArg | ||
requestId: string | ||
// requestStatus: 'pending' | ||
} & { startedTimeStamp: number }, | ||
) { | ||
draft[arg.queryCacheKey] ??= { | ||
status: QueryStatus.uninitialized, | ||
endpointName: arg.endpointName, | ||
} | ||
updateQuerySubstateIfExists(draft, arg.queryCacheKey, (substate) => { | ||
substate.status = QueryStatus.pending | ||
substate.requestId = | ||
upserting && substate.requestId | ||
? // for `upsertQuery` **updates**, keep the current `requestId` | ||
substate.requestId | ||
: // for normal queries or `upsertQuery` **inserts** always update the `requestId` | ||
meta.requestId | ||
if (arg.originalArgs !== undefined) { | ||
substate.originalArgs = arg.originalArgs | ||
} | ||
substate.startedTimeStamp = meta.startedTimeStamp | ||
}) | ||
} | ||
function writeFulfilledCacheEntry( | ||
draft: QueryState<any>, | ||
meta: { | ||
arg: QueryThunkArg | ||
requestId: string | ||
// requestStatus: 'fulfilled' | ||
} & { | ||
fulfilledTimeStamp: number | ||
baseQueryMeta: unknown | ||
// RTK_autoBatch: true | ||
}, | ||
payload: unknown, | ||
) { | ||
updateQuerySubstateIfExists(draft, meta.arg.queryCacheKey, (substate) => { | ||
if (substate.requestId !== meta.requestId && !isUpsertQuery(meta.arg)) | ||
return | ||
const { merge } = definitions[meta.arg.endpointName] as QueryDefinition< | ||
any, | ||
any, | ||
any, | ||
any | ||
> | ||
substate.status = QueryStatus.fulfilled | ||
if (merge) { | ||
if (substate.data !== undefined) { | ||
const { fulfilledTimeStamp, arg, baseQueryMeta, requestId } = meta | ||
// There's existing cache data. Let the user merge it in themselves. | ||
// We're already inside an Immer-powered reducer, and the user could just mutate `substate.data` | ||
// themselves inside of `merge()`. But, they might also want to return a new value. | ||
// Try to let Immer figure that part out, save the result, and assign it to `substate.data`. | ||
let newData = createNextState(substate.data, (draftSubstateData) => { | ||
// As usual with Immer, you can mutate _or_ return inside here, but not both | ||
return merge(draftSubstateData, payload, { | ||
arg: arg.originalArgs, | ||
baseQueryMeta, | ||
fulfilledTimeStamp, | ||
requestId, | ||
}) | ||
}) | ||
substate.data = newData | ||
} else { | ||
// Presumably a fresh request. Just cache the response data. | ||
substate.data = payload | ||
} | ||
} else { | ||
// Assign or safely update the cache data. | ||
substate.data = | ||
definitions[meta.arg.endpointName].structuralSharing ?? true | ||
? copyWithStructuralSharing( | ||
isDraft(substate.data) | ||
? original(substate.data) | ||
: substate.data, | ||
payload, | ||
) | ||
: payload | ||
} | ||
delete substate.error | ||
substate.fulfilledTimeStamp = meta.fulfilledTimeStamp | ||
}) | ||
} | ||
const querySlice = createSlice({ | ||
@@ -136,2 +286,65 @@ name: `${reducerPath}/queries`, | ||
}, | ||
cacheEntriesUpserted: { | ||
reducer( | ||
draft, | ||
action: PayloadAction< | ||
ProcessedQueryUpsertEntry[], | ||
string, | ||
{ | ||
RTK_autoBatch: boolean | ||
requestId: string | ||
timestamp: number | ||
} | ||
>, | ||
) { | ||
for (const entry of action.payload) { | ||
const { queryDescription: arg, value } = entry | ||
writePendingCacheEntry(draft, arg, true, { | ||
arg, | ||
requestId: action.meta.requestId, | ||
startedTimeStamp: action.meta.timestamp, | ||
}) | ||
writeFulfilledCacheEntry( | ||
draft, | ||
{ | ||
arg, | ||
requestId: action.meta.requestId, | ||
fulfilledTimeStamp: action.meta.timestamp, | ||
baseQueryMeta: {}, | ||
}, | ||
value, | ||
) | ||
} | ||
}, | ||
prepare: (payload: NormalizedQueryUpsertEntryPayload[]) => { | ||
const queryDescriptions: ProcessedQueryUpsertEntry[] = payload.map( | ||
(entry) => { | ||
const { endpointName, arg, value } = entry | ||
const endpointDefinition = definitions[endpointName] | ||
const queryDescription: QueryThunkArg = { | ||
type: 'query', | ||
endpointName: endpointName, | ||
originalArgs: entry.arg, | ||
queryCacheKey: serializeQueryArgs({ | ||
queryArgs: arg, | ||
endpointDefinition, | ||
endpointName, | ||
}), | ||
} | ||
return { queryDescription, value } | ||
}, | ||
) | ||
const result = { | ||
payload: queryDescriptions, | ||
meta: { | ||
[SHOULD_AUTOBATCH]: true, | ||
requestId: nanoid(), | ||
timestamp: Date.now(), | ||
}, | ||
} | ||
return result | ||
}, | ||
}, | ||
queryResultPatched: { | ||
@@ -159,79 +372,6 @@ reducer( | ||
const upserting = isUpsertQuery(arg) | ||
draft[arg.queryCacheKey] ??= { | ||
status: QueryStatus.uninitialized, | ||
endpointName: arg.endpointName, | ||
} | ||
updateQuerySubstateIfExists(draft, arg.queryCacheKey, (substate) => { | ||
substate.status = QueryStatus.pending | ||
substate.requestId = | ||
upserting && substate.requestId | ||
? // for `upsertQuery` **updates**, keep the current `requestId` | ||
substate.requestId | ||
: // for normal queries or `upsertQuery` **inserts** always update the `requestId` | ||
meta.requestId | ||
if (arg.originalArgs !== undefined) { | ||
substate.originalArgs = arg.originalArgs | ||
} | ||
substate.startedTimeStamp = meta.startedTimeStamp | ||
}) | ||
writePendingCacheEntry(draft, arg, upserting, meta) | ||
}) | ||
.addCase(queryThunk.fulfilled, (draft, { meta, payload }) => { | ||
updateQuerySubstateIfExists( | ||
draft, | ||
meta.arg.queryCacheKey, | ||
(substate) => { | ||
if ( | ||
substate.requestId !== meta.requestId && | ||
!isUpsertQuery(meta.arg) | ||
) | ||
return | ||
const { merge } = definitions[ | ||
meta.arg.endpointName | ||
] as QueryDefinition<any, any, any, any> | ||
substate.status = QueryStatus.fulfilled | ||
if (merge) { | ||
if (substate.data !== undefined) { | ||
const { fulfilledTimeStamp, arg, baseQueryMeta, requestId } = | ||
meta | ||
// There's existing cache data. Let the user merge it in themselves. | ||
// We're already inside an Immer-powered reducer, and the user could just mutate `substate.data` | ||
// themselves inside of `merge()`. But, they might also want to return a new value. | ||
// Try to let Immer figure that part out, save the result, and assign it to `substate.data`. | ||
let newData = createNextState( | ||
substate.data, | ||
(draftSubstateData) => { | ||
// As usual with Immer, you can mutate _or_ return inside here, but not both | ||
return merge(draftSubstateData, payload, { | ||
arg: arg.originalArgs, | ||
baseQueryMeta, | ||
fulfilledTimeStamp, | ||
requestId, | ||
}) | ||
}, | ||
) | ||
substate.data = newData | ||
} else { | ||
// Presumably a fresh request. Just cache the response data. | ||
substate.data = payload | ||
} | ||
} else { | ||
// Assign or safely update the cache data. | ||
substate.data = | ||
definitions[meta.arg.endpointName].structuralSharing ?? true | ||
? copyWithStructuralSharing( | ||
isDraft(substate.data) | ||
? original(substate.data) | ||
: substate.data, | ||
payload, | ||
) | ||
: payload | ||
} | ||
delete substate.error | ||
substate.fulfilledTimeStamp = meta.fulfilledTimeStamp | ||
}, | ||
) | ||
writeFulfilledCacheEntry(draft, meta, payload) | ||
}) | ||
@@ -238,0 +378,0 @@ .addCase( |
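Taken together, the slice changes add a `cacheEntriesUpserted` action whose `prepare` callback serializes each `{ endpointName, arg, value }` entry into a cache key, and whose reducer writes a pending and then fulfilled substate for every entry in a single autobatched dispatch. A sketch of the intended usage pattern — prefilling per-item cache entries from a list response (the endpoints and `Post` shape are assumptions for the example):

```ts
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query'

type Post = { id: number; title: string }

const api = createApi({
  baseQuery: fetchBaseQuery({ baseUrl: 'https://example.com/api' }),
  endpoints: (build) => ({
    getPosts: build.query<Post[], void>({
      query: () => 'posts',
      async onQueryStarted(_arg, { dispatch, queryFulfilled }) {
        const { data: posts } = await queryFulfilled
        // One autobatched dispatch writes a fulfilled cache entry per post,
        // so a later `getPost(id)` subscription can render from the cache.
        dispatch(
          api.util.upsertQueryEntries(
            posts.map((post) => ({
              endpointName: 'getPost' as const,
              arg: post.id,
              value: post,
            })),
          ),
        )
      },
    }),
    getPost: build.query<Post, number>({
      query: (id) => `posts/${id}`,
    }),
  }),
})
```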
@@ -159,3 +159,3 @@ import type { | ||
endpointName: EndpointName, | ||
args: QueryArgFrom<Definitions[EndpointName]>, | ||
arg: QueryArgFrom<Definitions[EndpointName]>, | ||
patches: readonly Patch[], | ||
@@ -170,3 +170,3 @@ updateProvided?: boolean, | ||
endpointName: EndpointName, | ||
args: QueryArgFrom<Definitions[EndpointName]>, | ||
arg: QueryArgFrom<Definitions[EndpointName]>, | ||
updateRecipe: Recipe<ResultTypeFrom<Definitions[EndpointName]>>, | ||
@@ -181,3 +181,3 @@ updateProvided?: boolean, | ||
endpointName: EndpointName, | ||
args: QueryArgFrom<Definitions[EndpointName]>, | ||
arg: QueryArgFrom<Definitions[EndpointName]>, | ||
value: ResultTypeFrom<Definitions[EndpointName]>, | ||
@@ -235,7 +235,7 @@ ) => ThunkAction< | ||
const patchQueryData: PatchQueryDataThunk<EndpointDefinitions, State> = | ||
(endpointName, args, patches, updateProvided) => (dispatch, getState) => { | ||
(endpointName, arg, patches, updateProvided) => (dispatch, getState) => { | ||
const endpointDefinition = endpointDefinitions[endpointName] | ||
const queryCacheKey = serializeQueryArgs({ | ||
queryArgs: args, | ||
queryArgs: arg, | ||
endpointDefinition, | ||
@@ -253,3 +253,3 @@ endpointName, | ||
const newValue = api.endpoints[endpointName].select(args)( | ||
const newValue = api.endpoints[endpointName].select(arg)( | ||
// Work around TS 4.1 mismatch | ||
@@ -263,3 +263,3 @@ getState() as RootState<any, any, any>, | ||
undefined, | ||
args, | ||
arg, | ||
{}, | ||
@@ -275,7 +275,7 @@ assertTagType, | ||
const updateQueryData: UpdateQueryDataThunk<EndpointDefinitions, State> = | ||
(endpointName, args, updateRecipe, updateProvided = true) => | ||
(endpointName, arg, updateRecipe, updateProvided = true) => | ||
(dispatch, getState) => { | ||
const endpointDefinition = api.endpoints[endpointName] | ||
const currentState = endpointDefinition.select(args)( | ||
const currentState = endpointDefinition.select(arg)( | ||
// Work around TS 4.1 mismatch | ||
@@ -292,3 +292,3 @@ getState() as RootState<any, any, any>, | ||
endpointName, | ||
args, | ||
arg, | ||
ret.inversePatches, | ||
@@ -328,8 +328,3 @@ updateProvided, | ||
dispatch( | ||
api.util.patchQueryData( | ||
endpointName, | ||
args, | ||
ret.patches, | ||
updateProvided, | ||
), | ||
api.util.patchQueryData(endpointName, arg, ret.patches, updateProvided), | ||
) | ||
@@ -341,3 +336,3 @@ | ||
const upsertQueryData: UpsertQueryDataThunk<Definitions, State> = | ||
(endpointName, args, value) => (dispatch) => { | ||
(endpointName, arg, value) => (dispatch) => { | ||
return dispatch( | ||
@@ -349,3 +344,3 @@ ( | ||
> | ||
).initiate(args, { | ||
).initiate(arg, { | ||
subscribe: false, | ||
@@ -352,0 +347,0 @@ forceRefetch: true, |
@@ -50,3 +50,3 @@ /** | ||
import { buildSelectors } from './buildSelectors' | ||
import type { SliceActions } from './buildSlice' | ||
import type { SliceActions, UpsertEntries } from './buildSlice' | ||
import { buildSlice } from './buildSlice' | ||
@@ -154,3 +154,3 @@ import type { | ||
* A thunk that (if dispatched) will return a specific running query, identified | ||
* by `endpointName` and `args`. | ||
* by `endpointName` and `arg`. | ||
* If that query is not running, dispatching the thunk will result in `undefined`. | ||
@@ -165,3 +165,3 @@ * | ||
endpointName: EndpointName, | ||
args: QueryArgFrom<Definitions[EndpointName]>, | ||
arg: QueryArgFrom<Definitions[EndpointName]>, | ||
): ThunkWithReturnValue< | ||
@@ -243,5 +243,5 @@ | QueryActionCreatorResult< | ||
* | ||
* This is typically used as the first step in implementing optimistic updates. The generated `inversePatches` can be used to revert the updates by calling `dispatch(patchQueryData(endpointName, args, inversePatches))`. Alternatively, the `undo` method can be called directly to achieve the same effect. | ||
* This is typically used as the first step in implementing optimistic updates. The generated `inversePatches` can be used to revert the updates by calling `dispatch(patchQueryData(endpointName, arg, inversePatches))`. Alternatively, the `undo` method can be called directly to achieve the same effect. | ||
* | ||
* Note that the first two arguments (`endpointName` and `args`) are used to determine which existing cache entry to update. If no existing cache entry is found, the `updateRecipe` callback will not run. | ||
* Note that the first two arguments (`endpointName` and `arg`) are used to determine which existing cache entry to update. If no existing cache entry is found, the `updateRecipe` callback will not run. | ||
* | ||
@@ -327,2 +327,5 @@ * @example | ||
resetApiState: SliceActions['resetApiState'] | ||
upsertQueryEntries: UpsertEntries<Definitions> | ||
/** | ||
@@ -535,2 +538,3 @@ * A Redux action creator that can be used to manually invalidate cache tags for [automated re-fetching](../../usage/automated-refetching.mdx). | ||
mutationThunk, | ||
serializeQueryArgs, | ||
reducerPath, | ||
@@ -554,2 +558,3 @@ assertTagType, | ||
resetApiState: sliceActions.resetApiState, | ||
upsertQueryEntries: sliceActions.cacheEntriesUpserted as any, | ||
}) | ||
@@ -556,0 +561,0 @@ safeAssign(api.internalActions, sliceActions) |
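The `patchQueryData`/`updateQueryData` utilities documented above are the building blocks for optimistic updates: `updateQueryData` returns the generated `patches`/`inversePatches` plus an `undo` helper. A sketch of the usual pattern, with the endpoints and response shapes invented for illustration:

```ts
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query'

type Post = { id: number; title: string }

const api = createApi({
  baseQuery: fetchBaseQuery({ baseUrl: 'https://example.com/api' }),
  endpoints: (build) => ({
    getPost: build.query<Post, number>({
      query: (id) => `posts/${id}`,
    }),
    updatePost: build.mutation<Post, Pick<Post, 'id'> & Partial<Post>>({
      query: ({ id, ...patch }) => ({
        url: `posts/${id}`,
        method: 'PATCH',
        body: patch,
      }),
      async onQueryStarted({ id, ...patch }, { dispatch, queryFulfilled }) {
        // Optimistically patch the cached `getPost` entry for this id.
        const patchResult = dispatch(
          api.util.updateQueryData('getPost', id, (draft) => {
            Object.assign(draft, patch)
          }),
        )
        try {
          await queryFulfilled
        } catch {
          // On failure, `undo` dispatches `patchQueryData` with the generated
          // `inversePatches` to roll the cache entry back.
          patchResult.undo()
        }
      },
    }),
  }),
})
```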
@@ -95,3 +95,3 @@ import type { Api, ApiContext, Module, ModuleName } from './apiTypes' | ||
*/ | ||
serializeQueryArgs?: SerializeQueryArgs<BaseQueryArg<BaseQuery>> | ||
serializeQueryArgs?: SerializeQueryArgs<unknown> | ||
/** | ||
@@ -98,0 +98,0 @@ * Endpoints are just a set of operations that you want to perform against your server. You define them as an object using the builder syntax. There are two basic endpoint types: [`query`](../../rtk-query/usage/queries) and [`mutation`](../../rtk-query/usage/mutations). |
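With the option widened to `SerializeQueryArgs<unknown>`, a root-level `serializeQueryArgs` now receives query arguments typed as `unknown` (endpoints are free to accept arguments unrelated to the base query's own argument type), so the callback narrows them itself. A hedged sketch — the caching rule and endpoint here are invented for illustration:

```ts
import {
  createApi,
  defaultSerializeQueryArgs,
  fetchBaseQuery,
} from '@reduxjs/toolkit/query'

const api = createApi({
  baseQuery: fetchBaseQuery({ baseUrl: 'https://example.com/api' }),
  // `queryArgs` arrives as `unknown`, so the callback narrows it before use.
  serializeQueryArgs: ({ queryArgs, endpointDefinition, endpointName }) => {
    if (
      typeof queryArgs === 'object' &&
      queryArgs !== null &&
      'id' in queryArgs
    ) {
      // Invented rule: cache purely by id whenever the argument carries one.
      return `${endpointName}(${String((queryArgs as { id: unknown }).id)})`
    }
    return defaultSerializeQueryArgs({
      queryArgs,
      endpointDefinition,
      endpointName,
    })
  },
  endpoints: (build) => ({
    getPost: build.query<
      { id: number; title: string },
      { id: number; verbose?: boolean }
    >({
      query: ({ id }) => `posts/${id}`,
    }),
  }),
})
```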
@@ -234,2 +234,6 @@ import type { Api } from '@reduxjs/toolkit/query' | ||
export type TagDescription<TagType> = TagType | FullTagDescription<TagType> | ||
/** | ||
* @public | ||
*/ | ||
export type ResultDescription< | ||
@@ -236,0 +240,0 @@ TagTypes extends string, |
@@ -115,3 +115,3 @@ import { joinUrls } from './utils' | ||
'getState' | 'extra' | 'endpoint' | 'type' | 'forced' | ||
>, | ||
> & { arg: string | FetchArgs; extraOptions: unknown }, | ||
) => MaybePromise<Headers | void> | ||
@@ -168,5 +168,5 @@ fetchFn?: ( | ||
* | ||
* @param {(headers: Headers, api: { getState: () => unknown; extra: unknown; endpoint: string; type: 'query' | 'mutation'; forced: boolean; }) => Headers} prepareHeaders | ||
* @param {(headers: Headers, api: { getState: () => unknown; arg: string | FetchArgs; extra: unknown; endpoint: string; type: 'query' | 'mutation'; forced: boolean; }) => Headers} prepareHeaders | ||
* An optional function that can be used to inject headers on requests. | ||
* Provides a Headers object, as well as most of the `BaseQueryApi` (`dispatch` is not available). | ||
* Provides a Headers object, most of the `BaseQueryApi` (`dispatch` is not available), and the arg passed into the query function. | ||
* Useful for setting authentication or headers that need to be set conditionally. | ||
@@ -193,2 +193,3 @@ * | ||
*/ | ||
export function fetchBaseQuery({ | ||
@@ -218,3 +219,3 @@ baseUrl, | ||
} | ||
return async (arg, api) => { | ||
return async (arg, api, extraOptions) => { | ||
const { getState, extra, endpoint, forced, type } = api | ||
@@ -232,3 +233,4 @@ let meta: FetchBaseQueryMeta | undefined | ||
let abortController: AbortController | undefined, signal = api.signal | ||
let abortController: AbortController | undefined, | ||
signal = api.signal | ||
if (timeout) { | ||
@@ -250,2 +252,3 @@ abortController = new AbortController() | ||
getState, | ||
arg, | ||
extra, | ||
@@ -255,2 +258,3 @@ endpoint, | ||
type, | ||
extraOptions, | ||
})) || headers | ||
@@ -307,3 +311,6 @@ | ||
if (timeoutId) clearTimeout(timeoutId) | ||
abortController?.signal.removeEventListener('abort', abortController.abort) | ||
abortController?.signal.removeEventListener( | ||
'abort', | ||
abortController.abort, | ||
) | ||
} | ||
@@ -310,0 +317,0 @@ const responseClone = response.clone() |
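Following the `fetchBaseQuery` changes above, `prepareHeaders` now also receives the query `arg` (the string or `FetchArgs` handed to the base query) and the endpoint's `extraOptions` in its second parameter. A sketch of how that might be used — the header names and the `traced` flag are assumptions, not part of the library:

```ts
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query'

const baseQuery = fetchBaseQuery({
  baseUrl: 'https://example.com/api',
  prepareHeaders: (headers, { endpoint, arg, extraOptions }) => {
    // `arg` is what was handed to the base query: a string URL or FetchArgs.
    const url = typeof arg === 'string' ? arg : arg.url
    // Invented rule: tag every non-public request with its endpoint name.
    if (!url.startsWith('public/')) {
      headers.set('X-Endpoint', endpoint)
    }
    // `extraOptions` carries the endpoint's extraOptions, if it defined any.
    if ((extraOptions as { traced?: boolean } | undefined)?.traced) {
      headers.set('X-Trace-Enabled', 'true')
    }
    return headers
  },
})

const api = createApi({
  baseQuery,
  endpoints: (build) => ({
    getProfile: build.query<{ name: string }, void>({
      query: () => 'profile',
      extraOptions: { traced: true },
    }),
  }),
})
```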
@@ -18,5 +18,10 @@ // This must remain here so that the `mangleErrors.cjs` build script | ||
BaseQueryApi, | ||
BaseQueryArg, | ||
BaseQueryEnhancer, | ||
BaseQueryError, | ||
BaseQueryExtraOptions, | ||
BaseQueryFn, | ||
QueryReturnValue | ||
BaseQueryMeta, | ||
BaseQueryResult, | ||
QueryReturnValue, | ||
} from './baseQueryTypes' | ||
@@ -38,2 +43,3 @@ export type { | ||
OverrideResultType, | ||
ResultDescription, | ||
TagTypesFromApi, | ||
@@ -40,0 +46,0 @@ UpdateDefinitions, |
@@ -330,2 +330,5 @@ import type { | ||
/** | ||
* @internal | ||
*/ | ||
export type QueryStateSelector< | ||
@@ -337,2 +340,115 @@ R extends Record<string, any>, | ||
/** | ||
* Provides a way to define a strongly-typed version of | ||
* {@linkcode QueryStateSelector} for use with a specific query. | ||
* This is useful for scenarios where you want to create a "pre-typed" | ||
* {@linkcode UseQueryStateOptions.selectFromResult | selectFromResult} | ||
* function. | ||
* | ||
* @example | ||
* <caption>#### __Create a strongly-typed `selectFromResult` selector function__</caption> | ||
* | ||
* ```tsx | ||
* import type { TypedQueryStateSelector } from '@reduxjs/toolkit/query/react' | ||
* import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react' | ||
* | ||
* type Post = { | ||
* id: number | ||
* title: string | ||
* } | ||
* | ||
* type PostsApiResponse = { | ||
* posts: Post[] | ||
* total: number | ||
* skip: number | ||
* limit: number | ||
* } | ||
* | ||
* type QueryArgument = number | undefined | ||
* | ||
* type BaseQueryFunction = ReturnType<typeof fetchBaseQuery> | ||
* | ||
* type SelectedResult = Pick<PostsApiResponse, 'posts'> | ||
* | ||
* const postsApiSlice = createApi({ | ||
* baseQuery: fetchBaseQuery({ baseUrl: 'https://dummyjson.com/posts' }), | ||
* reducerPath: 'postsApi', | ||
* tagTypes: ['Posts'], | ||
* endpoints: (build) => ({ | ||
* getPosts: build.query<PostsApiResponse, QueryArgument>({ | ||
* query: (limit = 5) => `?limit=${limit}&select=title`, | ||
* }), | ||
* }), | ||
* }) | ||
* | ||
* const { useGetPostsQuery } = postsApiSlice | ||
* | ||
* function PostById({ id }: { id: number }) { | ||
* const { post } = useGetPostsQuery(undefined, { | ||
* selectFromResult: (state) => ({ | ||
* post: state.data?.posts.find((post) => post.id === id), | ||
* }), | ||
* }) | ||
* | ||
* return <li>{post?.title}</li> | ||
* } | ||
* | ||
* const EMPTY_ARRAY: Post[] = [] | ||
* | ||
* const typedSelectFromResult: TypedQueryStateSelector< | ||
* PostsApiResponse, | ||
* QueryArgument, | ||
* BaseQueryFunction, | ||
* SelectedResult | ||
* > = (state) => ({ posts: state.data?.posts ?? EMPTY_ARRAY }) | ||
* | ||
* function PostsList() { | ||
* const { posts } = useGetPostsQuery(undefined, { | ||
* selectFromResult: typedSelectFromResult, | ||
* }) | ||
* | ||
* return ( | ||
* <div> | ||
* <ul> | ||
* {posts.map((post) => ( | ||
* <PostById key={post.id} id={post.id} /> | ||
* ))} | ||
* </ul> | ||
* </div> | ||
* ) | ||
* } | ||
* ``` | ||
* | ||
* @template ResultType - The type of the result `data` returned by the query. | ||
* @template QueryArgumentType - The type of the argument passed into the query. | ||
* @template BaseQueryFunctionType - The type of the base query function being used. | ||
* @template SelectedResultType - The type of the selected result returned by the __`selectFromResult`__ function. | ||
* | ||
* @since 2.7.9 | ||
* @public | ||
*/ | ||
export type TypedQueryStateSelector< | ||
ResultType, | ||
QueryArgumentType, | ||
BaseQueryFunctionType extends BaseQueryFn, | ||
SelectedResultType extends Record<string, any> = UseQueryStateDefaultResult< | ||
QueryDefinition< | ||
QueryArgumentType, | ||
BaseQueryFunctionType, | ||
string, | ||
ResultType, | ||
string | ||
> | ||
>, | ||
> = QueryStateSelector< | ||
SelectedResultType, | ||
QueryDefinition< | ||
QueryArgumentType, | ||
BaseQueryFunctionType, | ||
string, | ||
ResultType, | ||
string | ||
> | ||
> | ||
/** | ||
* A React hook that reads the request status and cached data from the Redux store. The component will re-render as the loading status changes and the data becomes available. | ||
@@ -339,0 +455,0 @@ * |
@@ -25,2 +25,3 @@ // This must remain here so that the `mangleErrors.cjs` build script | ||
TypedMutationTrigger, | ||
TypedQueryStateSelector, | ||
TypedUseQueryState, | ||
@@ -27,0 +28,0 @@ TypedUseQuery, |