@frontegg/redux-store
Advanced tools
Comparing version 2.1.1-alpha.786143057 to 2.2.0-alpha.788357573
@@ -1,360 +0,3 @@ | ||
import { c as createAction, i as auditsStoreName, b as createSlice, t as takeEvery$1, s as select$1, p as put, d as call, f as all } from '../redux-saga-effects.esm-6ff02caf.js'; | ||
export { i as auditsStoreName } from '../redux-saga-effects.esm-6ff02caf.js'; | ||
import { api } from '@frontegg/rest-api'; | ||
/**
 * Builds a prepared case-reducer that shallow-merges the action payload into
 * the sub-state stored under `key`, leaving sibling top-level keys untouched.
 */
const typeReducerForKey = (key) => ({
  prepare: (payload) => ({ payload }),
  reducer: (state, { payload }) => ({
    ...state,
    [key]: { ...state[key], ...payload },
  }),
});
// Initial audit-logs table state: first page of 20 rows, no sort/filter
// applied, no rows yet, and no CSV/PDF export in flight.
const auditLogsState = {
  loading: true,
  pageOffset: 0,
  pageSize: 20,
  logs: [],
  sort: [],
  filter: [],
  totalPages: 0,
  isDownloadingCsv: false,
  isDownloadingPdf: false,
};
// Case reducers scoped to the `auditLogsState` sub-tree.
const reducers = {
  setAuditLogsState: typeReducerForKey('auditLogsState'),
  resetAuditLogsState: (state) => ({ ...state, auditLogsState }),
};
// Saga-triggering action creators for the audit-logs table.
const actions = {
  exportAuditsCsv: createAction(`${auditsStoreName}/exportAuditsCsv`),
  exportAuditsPdf: createAction(`${auditsStoreName}/exportAuditsPdf`),
  loadAuditLogs: createAction(`${auditsStoreName}/loadAuditLogs`, (payload) => ({ payload })),
};
// Column metadata starts out loading until fetched (columns appear later).
const auditsMetadataState = {
  loading: true,
};
// Case reducers scoped to the `auditsMetadataState` sub-tree.
const reducers$1 = {
  setAuditsMetadataState: typeReducerForKey('auditsMetadataState'),
  resetAuditsMetadataState: (state) => ({ ...state, auditsMetadataState }),
};
// Saga-triggering action creator for column metadata.
const actions$1 = {
  loadAuditsMetadata: createAction(`${auditsStoreName}/loadAuditsMetadata`),
};
// Root initial state for the audits slice.
const initialState = {
  auditLogsState,
  auditsMetadataState,
};
// Slice combining generic resetState/setState with both sub-tree reducers.
const { reducer, actions: sliceActions } = createSlice({
  name: auditsStoreName,
  initialState,
  reducers: {
    resetState: (state) => ({ ...state, ...initialState }),
    setState: (state, { payload }) => ({ ...state, ...payload }),
    ...reducers,
    ...reducers$1,
  },
});
// Single action map exposed to consumers (slice + saga actions).
const actions$2 = { ...sliceActions, ...actions, ...actions$1 };
/**
 * Picks one of six canned demo audit events uniformly at random.
 * Returns a fresh object on every call so callers may safely spread/mutate it.
 */
const randomAction = () => {
  const demoEvents = [
    {
      action: 'Updated profile',
      description: 'User updated profile details',
      severity: 'Info',
    },
    {
      action: 'User logged in',
      severity: 'Info',
      description: 'User logged in to the product',
    },
    {
      action: 'User failed to login',
      severity: 'High',
      description: 'User failed to login with password',
    },
    {
      action: 'Added user',
      severity: 'Info',
      description: 'Added user david+535@frontegg.com to tenant',
    },
    {
      action: 'Assigned roles',
      severity: 'Info',
      description: 'Assigned 0 roles for user',
    },
    {
      action: 'Removed user david+108@frontegg.com from account',
      severity: 'Info',
      description: 'Removed access from the account for user david+108@frontegg.com',
    },
  ];
  return { ...demoEvents[Math.floor(Math.random() * demoEvents.length)] };
};
// Ten demo audit rows (with randomized action text) used by the preview sagas
// in place of a real API response; `total` intentionally exceeds the row count
// so pagination is exercised.
const auditLogsDataDemo = {
  total: 20,
  data: Array.from({ length: 10 }, (_, i) => ({
    ip: '198.143.51.1',
    email: 'user@frontegg.com',
    ...randomAction(),
    json: { key1: 'value1', key2: 'value2', key3: 'value3', key4: 'value4' },
    tenantId: 'my-tenant-id',
    vendorId: 'my-vendor-id',
    userAgent: 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36',
    createdAt: `2021-04-07 10:${50 - i}:40.201`,
  })),
};
// Demo column metadata for the audits table, mirroring the live
// metadata API's shape. Key sets intentionally differ per column
// (e.g. `json` has no chosen/selected, some lack showInMoreInfo).
const auditLogsMetadataDemo = {
  properties: [
    {
      name: 'createdAt',
      displayName: 'Time',
      type: 'Timestamp',
      showInTable: true,
      sortable: true,
      filterable: true,
      showInMoreInfo: 'Always',
      isPredefined: false,
      chosen: false,
      selected: false,
    },
    {
      name: 'email',
      displayName: 'User',
      type: 'Email',
      showInTable: true,
      sortable: true,
      filterable: true,
      isPredefined: true,
      chosen: false,
      selected: false,
    },
    {
      name: 'action',
      displayName: 'Action',
      type: 'AlphaNumeric',
      showInTable: true,
      sortable: true,
      filterable: true,
      showInMoreInfo: 'Always',
      isPredefined: true,
      chosen: false,
      selected: false,
    },
    {
      name: 'description',
      displayName: 'Description',
      type: 'AlphaNumeric',
      showInTable: true,
      sortable: false,
      filterable: false,
      isPredefined: true,
      chosen: false,
      selected: false,
    },
    {
      name: 'ip',
      displayName: 'IP Address',
      type: 'IpAddress',
      showInTable: true,
      sortable: true,
      filterable: true,
      showInMoreInfo: 'Always',
      isPredefined: true,
      chosen: false,
      selected: false,
    },
    {
      name: 'severity',
      displayName: 'Severity',
      type: 'Severity',
      showInTable: false,
      sortable: true,
      filterable: true,
      showInMoreInfo: 'Always',
      isPredefined: true,
      chosen: false,
      selected: false,
    },
    {
      name: 'userAgent',
      displayName: 'User Agent',
      type: 'AlphaNumeric',
      showInTable: false,
      sortable: true,
      filterable: true,
      isPredefined: true,
      chosen: false,
      selected: false,
    },
    {
      name: 'json',
      displayName: 'Json Data',
      type: 'Json',
      showInTable: false,
      sortable: true,
      filterable: true,
      showInMoreInfo: 'Always',
      isPredefined: false,
    },
  ],
};
// Saga-effect selectors for the two audits sub-states.
const select = () => select$1((state) => state[auditsStoreName].auditLogsState);
const selectMetadata = () => select$1((state) => state[auditsStoreName].auditsMetadataState);
/**
 * Shared export flow for CSV and PDF: reads the current sort/filter state,
 * flips the given `isDownloading*` flag on around the API call, logs any
 * failure without rethrowing, and always clears the flag afterwards.
 *
 * @param {string} endpoint       REST endpoint suffix ('csv/v2' or 'pdf')
 * @param {string} outputFileName downloaded file name
 * @param {string} downloadingKey state flag to toggle while exporting
 */
function* exportAudits(endpoint, outputFileName, downloadingKey) {
  const state = yield select();
  const { columns } = yield selectMetadata();
  try {
    // Flatten sort entries into sortBy/sortDirection (last entry wins, as before).
    const sortParams = state.sort.reduce(
      (params, s) => ({ ...params, sortBy: s.id, sortDirection: s.desc ? 'desc' : 'asc' }),
      {}
    );
    // One query param per filter column, URI-encoded.
    const filterParams = state.filter.reduce(
      (params, f) => ({ ...params, [f.id]: encodeURIComponent(f.value) }),
      {}
    );
    yield put(actions$2.setAuditLogsState({ [downloadingKey]: true }));
    yield api.audits.exportAudits({
      endpoint,
      headerProps: columns,
      offset: 0,
      outputFileName,
      ...sortParams,
      ...filterParams,
    });
  } catch (e) {
    // Best-effort: surface in console only; the flag reset below still runs.
    console.error('failed to export audits - ', e);
  }
  yield put(actions$2.setAuditLogsState({ [downloadingKey]: false }));
}
// Exports the current audit view as CSV.
function* exportAuditsCsv() {
  yield* exportAudits('csv/v2', `audits.csv`, 'isDownloadingCsv');
}
// Exports the current audit view as PDF.
function* exportAuditsPdf() {
  yield* exportAudits('pdf', `audits.pdf`, 'isDownloadingPdf');
}
/**
 * Loads one page of audit logs. `payload` may override pageSize, pageOffset,
 * filter and sort; anything omitted falls back to the current state. Triggers
 * a metadata load when columns are missing. Invokes payload.callback(true)
 * on success or payload.callback(null, error) on failure.
 */
function* loadAuditLogs({ payload }) {
  yield put(actions$2.setAuditLogsState({ loading: !(payload && payload.silentLoading), error: null }));
  const state = yield select();
  const { columns } = yield selectMetadata();
  try {
    // `!= null` matches both null and undefined, like the original nullish checks.
    const pageSize = payload.pageSize != null ? payload.pageSize : state.pageSize;
    const pageOffset = payload.pageOffset != null ? payload.pageOffset : state.pageOffset;
    const filter = payload.filter != null ? payload.filter : state.filter;
    const sort = payload.sort != null ? payload.sort : state.sort;
    yield put(actions$2.setAuditLogsState({ pageSize, pageOffset, filter, sort }));
    const sortParams = sort.reduce(
      (params, s) => ({ ...params, sortBy: s.id, sortDirection: s.desc ? 'desc' : 'asc' }),
      {}
    );
    const filterParams = filter.reduce(
      (params, f) => ({ ...params, [f.id]: encodeURIComponent(f.value) }),
      {}
    );
    if (!columns) {
      yield put(actions$2.loadAuditsMetadata());
    }
    const { data, total } = yield call(api.audits.getAudits, {
      offset: pageOffset,
      count: pageSize,
      ...sortParams,
      ...filterParams,
    });
    yield put(actions$2.setAuditLogsState({
      loading: false,
      logs: data,
      // Fix: a partial trailing page is still a page. The previous
      // `+(total / pageSize).toFixed(0)` rounded to nearest and undercounted
      // (e.g. total=21, pageSize=20 -> 1 instead of 2).
      totalPages: Math.ceil(total / pageSize),
    }));
    if (payload && payload.callback) {
      payload.callback(true);
    }
  } catch (e) {
    yield put(actions$2.setAuditLogsState({ loading: false, error: e.message }));
    if (payload && payload.callback) {
      payload.callback(null, e);
    }
  }
}
// Registers the live audit-log watchers (exports + page loads).
function* auditLogsSagas() {
  const watchers = [
    [actions$2.exportAuditsCsv, exportAuditsCsv],
    [actions$2.exportAuditsPdf, exportAuditsPdf],
    [actions$2.loadAuditLogs, loadAuditLogs],
  ];
  for (const [action, worker] of watchers) {
    yield takeEvery$1(action, worker);
  }
}
/********************************* | ||
* Preview Sagas | ||
*********************************/ | ||
/**
 * Preview variant of loadAuditLogs: identical state flow but serves the
 * bundled demo rows instead of calling the API.
 */
function* loadAuditLogsMock({ payload }) {
  yield put(actions$2.setAuditLogsState({ loading: !(payload && payload.silentLoading), error: null }));
  const state = yield select();
  const { columns } = yield selectMetadata();
  try {
    const pageSize = payload.pageSize != null ? payload.pageSize : state.pageSize;
    const pageOffset = payload.pageOffset != null ? payload.pageOffset : state.pageOffset;
    const filter = payload.filter != null ? payload.filter : state.filter;
    const sort = payload.sort != null ? payload.sort : state.sort;
    yield put(actions$2.setAuditLogsState({ pageSize, pageOffset, filter, sort }));
    if (!columns) {
      yield put(actions$2.loadAuditsMetadata());
    }
    const { data, total } = auditLogsDataDemo;
    yield put(actions$2.setAuditLogsState({
      loading: false,
      logs: data,
      // Fix: ceil instead of round-to-nearest so a partial trailing page
      // counts as a page (matches the corrected live loader).
      totalPages: Math.ceil(total / pageSize),
    }));
    if (payload && payload.callback) {
      payload.callback(true);
    }
  } catch (e) {
    yield put(actions$2.setAuditLogsState({ loading: false, error: e.message }));
    if (payload && payload.callback) {
      payload.callback(null, e);
    }
  }
}
// Preview watcher: serves demo rows for loadAuditLogs instead of hitting the API.
function* auditLogsSagasMock() {
  yield takeEvery$1(actions$2.loadAuditLogs, loadAuditLogsMock);
}
/**
 * Fetches the audits-table column metadata and stores it under
 * `auditsMetadataState.columns`; on failure the raw error object is stored.
 */
function* loadAuditsMetadata() {
  yield put(actions$2.setAuditsMetadataState({ loading: true, error: null }));
  try {
    const { properties } = yield call(api.metadata.getAuditsMetadata);
    yield put(actions$2.setAuditsMetadataState({ columns: properties, loading: false }));
  } catch (err) {
    yield put(actions$2.setAuditsMetadataState({ error: err, loading: false }));
  }
}
// Registers the live metadata watcher.
function* auditsMetadataSagas() {
  yield takeEvery$1(actions$2.loadAuditsMetadata, loadAuditsMetadata);
}
/********************************* | ||
* Preview Sagas | ||
*********************************/ | ||
/**
 * Preview variant of loadAuditsMetadata: serves the bundled demo column
 * metadata instead of calling the API.
 */
function* loadAuditsMetadataMock() {
  yield put(actions$2.setAuditsMetadataState({ loading: true, error: null }));
  try {
    const { properties } = auditLogsMetadataDemo;
    yield put(actions$2.setAuditsMetadataState({ columns: properties, loading: false }));
  } catch (e) {
    yield put(actions$2.setAuditsMetadataState({ error: e, loading: false }));
  }
}
function* auditsMetadataSagasMock() {
  // Fix: watch loadAuditsMetadata (not loadAuditLogs) so the
  // loadAuditsMetadata action dispatched by loadAuditLogsMock is actually
  // handled in preview mode, mirroring the live auditsMetadataSagas watcher.
  yield takeEvery$1(actions$2.loadAuditsMetadata, loadAuditsMetadataMock);
}
// Root saga (live): runs the log and metadata watchers in parallel.
function* sagas() {
  yield all([call(auditLogsSagas), call(auditsMetadataSagas)]);
}
// Root saga (preview/demo): same shape, mock workers.
function* mockSagas() {
  yield all([call(auditLogsSagasMock), call(auditsMetadataSagasMock)]);
}
// export store | ||
var auditsStore = { | ||
sagas, | ||
mockSagas, | ||
storeName: auditsStoreName, | ||
initialState, | ||
reducer, | ||
actions: actions$2, | ||
}; | ||
export default auditsStore; | ||
export { actions as auditLogsActions, reducers as auditLogsReducers, auditLogsState, actions$2 as auditsActions, initialState as auditsInitialState, actions$1 as auditsMetadataActions, reducers$1 as auditsMetadataReducers, auditsMetadataState, mockSagas as auditsMockSagas, reducer as auditsReducers, sagas as auditsSagas }; | ||
export { ax as auditLogsActions, aw as auditLogsReducers, av as auditLogsState, as as auditsActions, at as auditsInitialState, aA as auditsMetadataActions, az as auditsMetadataReducers, ay as auditsMetadataState, aq as auditsMockSagas, ar as auditsReducers, ap as auditsSagas, au as auditsStoreName, ai as default } from '../index-f62cda5d.js'; | ||
import '@frontegg/rest-api'; | ||
//# sourceMappingURL=index.js.map |
@@ -68,2 +68,3 @@ import { reducer, actions } from './reducer'; | ||
loadSecurityPolicy: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
loadPublicSecurityPolicy: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
loadSecurityPolicyMfa: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
@@ -163,2 +164,7 @@ saveSecurityPolicyMfa: import("@reduxjs/toolkit").ActionCreatorWithPreparedPayload<[import("../interfaces").WithCallback<import("@frontegg/rest-api").ISaveSecurityPolicyMfa, import("@frontegg/rest-api").ISecurityPolicyMfa>], import("../interfaces").WithCallback<import("@frontegg/rest-api").ISaveSecurityPolicyMfa, import("@frontegg/rest-api").ISecurityPolicyMfa>, string, never, never>; | ||
}>, string, never, never>; | ||
setSecurityPolicyPublicState: import("@reduxjs/toolkit").ActionCreatorWithPreparedPayload<[Partial<import("../interfaces").WithStatus & { | ||
policy?: import("@frontegg/rest-api").IVendorConfig | undefined; | ||
}>], Partial<import("../interfaces").WithStatus & { | ||
policy?: import("@frontegg/rest-api").IVendorConfig | undefined; | ||
}>, string, never, never>; | ||
setSecurityPolicyMfaState: import("@reduxjs/toolkit").ActionCreatorWithPreparedPayload<[Partial<import("../interfaces").WithStatus & { | ||
@@ -165,0 +171,0 @@ policy?: import("@frontegg/rest-api").ISecurityPolicyMfa | undefined; |
@@ -0,4 +1,5 @@ | ||
import { ILoginResponse } from '@frontegg/rest-api'; | ||
import { LoginStep } from './interfaces'; | ||
export declare function afterAuthNavigation(): Generator<import("redux-saga/effects").SelectEffect | import("redux-saga/effects").CallEffect<true>, void, { | ||
routes: any; | ||
onRedirectTo: any; | ||
}>; | ||
@@ -9,9 +10,22 @@ export declare function refreshMetadata(): Generator<import("redux-saga/effects").CallEffect<import("@frontegg/rest-api").ISamlMetadata> | import("redux-saga/effects").PutEffect<{ | ||
}>, void, unknown>; | ||
export declare const isMfaRequired: (user: ILoginResponse) => boolean; | ||
export declare const getMfaRequiredState: (user: any) => { | ||
loginState: { | ||
mfaToken: any; | ||
mfaRequired: any; | ||
loading: boolean; | ||
error: undefined; | ||
step: LoginStep.loginWithTwoFactor | LoginStep.forceTwoFactor; | ||
tenantsLoading: boolean; | ||
tenants: never[]; | ||
}; | ||
user: undefined; | ||
isAuthenticated: boolean; | ||
}; | ||
export declare function refreshToken(): Generator<import("redux-saga/effects").SelectEffect | Generator<import("redux-saga/effects").SelectEffect | import("redux-saga/effects").CallEffect<true>, void, { | ||
routes: any; | ||
onRedirectTo: any; | ||
}> | import("redux-saga/effects").PutEffect<{ | ||
payload: Partial<import("../interfaces").AuthState>; | ||
type: string; | ||
}> | import("redux-saga/effects").CallEffect<import("@frontegg/rest-api").ILoginResponse> | import("redux-saga/effects").PutEffect<{ | ||
}> | import("redux-saga/effects").CallEffect<ILoginResponse> | import("redux-saga/effects").PutEffect<{ | ||
payload: { | ||
@@ -23,5 +37,4 @@ callback?: ((data: import("@frontegg/rest-api").ITenantsResponse[] | null, error?: string | undefined) => void) | undefined; | ||
routes: any; | ||
onRedirectTo: any; | ||
}>; | ||
export declare function loginSagas(): Generator<import("redux-saga/effects").ForkEffect<never>, void, unknown>; | ||
export declare function loginSagasMock(): Generator<import("redux-saga/effects").ForkEffect<never>, void, unknown>; |
@@ -42,2 +42,3 @@ import { AuthState, User } from './interfaces'; | ||
loadSecurityPolicy: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
loadPublicSecurityPolicy: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
loadSecurityPolicyMfa: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
@@ -137,2 +138,7 @@ saveSecurityPolicyMfa: import("@reduxjs/toolkit").ActionCreatorWithPreparedPayload<[import("../interfaces").WithCallback<import("@frontegg/rest-api").ISaveSecurityPolicyMfa, import("@frontegg/rest-api").ISecurityPolicyMfa>], import("../interfaces").WithCallback<import("@frontegg/rest-api").ISaveSecurityPolicyMfa, import("@frontegg/rest-api").ISecurityPolicyMfa>, string, never, never>; | ||
}>, string, never, never>; | ||
setSecurityPolicyPublicState: import("@reduxjs/toolkit").ActionCreatorWithPreparedPayload<[Partial<import("../interfaces").WithStatus & { | ||
policy?: import("@frontegg/rest-api").IVendorConfig | undefined; | ||
}>], Partial<import("../interfaces").WithStatus & { | ||
policy?: import("@frontegg/rest-api").IVendorConfig | undefined; | ||
}>, string, never, never>; | ||
setSecurityPolicyMfaState: import("@reduxjs/toolkit").ActionCreatorWithPreparedPayload<[Partial<import("../interfaces").WithStatus & { | ||
@@ -139,0 +145,0 @@ policy?: import("@frontegg/rest-api").ISecurityPolicyMfa | undefined; |
@@ -1,2 +0,2 @@ | ||
import { CaptchaPolicyState, GlobalPolicyState, LockoutPolicyState, MfaPolicyState, PasswordPolicyState, SaveSecurityPolicyLockoutPayload, SaveSecurityPolicyMfaPayload, SecurityPolicyState, PasswordHistoryPolicyState, SaveSecurityPolicyPasswordHistoryPayload } from './interfaces'; | ||
import { CaptchaPolicyState, GlobalPolicyState, PublicPolicyState, LockoutPolicyState, MfaPolicyState, PasswordPolicyState, SaveSecurityPolicyLockoutPayload, SaveSecurityPolicyMfaPayload, SecurityPolicyState, PasswordHistoryPolicyState, SaveSecurityPolicyPasswordHistoryPayload } from './interfaces'; | ||
declare const securityPolicyState: SecurityPolicyState; | ||
@@ -82,2 +82,44 @@ declare const reducers: { | ||
}; | ||
setSecurityPolicyPublicState: { | ||
prepare: (payload: Partial<import("../../interfaces").WithStatus & { | ||
policy?: import("@frontegg/rest-api").IVendorConfig | undefined; | ||
}>) => { | ||
payload: Partial<import("../../interfaces").WithStatus & { | ||
policy?: import("@frontegg/rest-api").IVendorConfig | undefined; | ||
}>; | ||
}; | ||
reducer: (state: import("..").AuthState, { payload }: { | ||
payload: Partial<import("../../interfaces").WithStatus & { | ||
policy?: import("@frontegg/rest-api").IVendorConfig | undefined; | ||
}>; | ||
type: string; | ||
}) => { | ||
onRedirectTo: (path: string, opts?: import("@frontegg/rest-api").RedirectOptions | undefined) => void; | ||
error?: any; | ||
isAuthenticated: boolean; | ||
isLoading: boolean; | ||
keepSessionAlive?: boolean | undefined; | ||
user?: import("..").User | null | undefined; | ||
isSSOAuth: boolean; | ||
ssoACS?: string | undefined; | ||
loginState: import("..").LoginState; | ||
activateState: import("..").ActivateAccountState; | ||
acceptInvitationState: import("..").AcceptInvitationState; | ||
forgotPasswordState: import("..").ForgotPasswordState; | ||
ssoState: import("..").SSOState; | ||
profileState: import("..").ProfileState; | ||
mfaState: import("..").MFAState; | ||
teamState: import("..").TeamState; | ||
socialLoginState: import("..").SocialLoginState; | ||
signUpState: import("..").SignUpState; | ||
apiTokensState: import("..").ApiTokensState; | ||
securityPolicyState: SecurityPolicyState; | ||
accountSettingsState: import("..").AccountSettingsState; | ||
tenantsState: import("..").TenantsState; | ||
rolesState: import("..").RolesState; | ||
routes: import("..").AuthPageRoutes; | ||
header?: any; | ||
loaderComponent?: any; | ||
}; | ||
}; | ||
setSecurityPolicyMfaState: { | ||
@@ -324,2 +366,3 @@ prepare: (payload: Partial<import("../../interfaces").WithStatus & { | ||
loadSecurityPolicy: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
loadPublicSecurityPolicy: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
loadSecurityPolicyMfa: import("@reduxjs/toolkit").ActionCreatorWithoutPayload<string>; | ||
@@ -341,2 +384,3 @@ saveSecurityPolicyMfa: import("@reduxjs/toolkit").ActionCreatorWithPreparedPayload<[import("../../interfaces").WithCallback<import("@frontegg/rest-api").ISaveSecurityPolicyMfa, import("@frontegg/rest-api").ISecurityPolicyMfa>], import("../../interfaces").WithCallback<import("@frontegg/rest-api").ISaveSecurityPolicyMfa, import("@frontegg/rest-api").ISecurityPolicyMfa>, string, never, never>; | ||
setSecurityPolicyGlobalState: (state: Partial<GlobalPolicyState>) => void; | ||
setSecurityPolicyPublicState: (state: Partial<PublicPolicyState>) => void; | ||
setSecurityPolicyMfaState: (state: Partial<MfaPolicyState>) => void; | ||
@@ -349,2 +393,3 @@ setSecurityPolicyLockoutState: (state: Partial<LockoutPolicyState>) => void; | ||
loadSecurityPolicy: () => void; | ||
loadPublicSecurityPolicy: () => void; | ||
loadVendorPasswordConfig: () => void; | ||
@@ -351,0 +396,0 @@ loadSecurityPolicyMfa: () => void; |
import { WithCallback, WithStatus } from '../../interfaces'; | ||
import { ISaveSecurityPolicyLockout, ISaveSecurityPolicyMfa, ISaveSecurityPolicyPasswordHistory, ISecurityPolicy, ISecurityPolicyCaptcha, ISecurityPolicyLockout, ISecurityPolicyMfa, ISecurityPolicyPasswordHistory, ISecurityPolicyPasswordConfig } from '@frontegg/rest-api'; | ||
import { ISaveSecurityPolicyLockout, ISaveSecurityPolicyMfa, ISaveSecurityPolicyPasswordHistory, ISecurityPolicy, ISecurityPolicyCaptcha, ISecurityPolicyLockout, ISecurityPolicyMfa, ISecurityPolicyPasswordHistory, ISecurityPolicyPasswordConfig, IVendorConfig } from '@frontegg/rest-api'; | ||
declare type PolicyState<T> = WithStatus & { | ||
@@ -7,2 +7,3 @@ policy?: T; | ||
export declare type GlobalPolicyState = PolicyState<ISecurityPolicy>; | ||
export declare type PublicPolicyState = PolicyState<IVendorConfig>; | ||
export declare type MfaPolicyState = PolicyState<ISecurityPolicyMfa>; | ||
@@ -15,2 +16,3 @@ export declare type LockoutPolicyState = PolicyState<ISecurityPolicyLockout>; | ||
globalPolicy: GlobalPolicyState; | ||
publicPolicy: PublicPolicyState; | ||
mfaPolicy: MfaPolicyState; | ||
@@ -17,0 +19,0 @@ lockoutPolicy: LockoutPolicyState; |
@@ -1,7 +0,5 @@ | ||
export { f as all, i as auditsStoreName, a as authStoreName, ao as bindActionCreators, d as call, an as combineReducers, am as configureStore, g as delay, al as getDefaultMiddleware, p as put, s as select, t as takeEvery, h as takeLatest } from './redux-saga-effects.esm-6ff02caf.js'; | ||
export { AcceptInvitationStep, ActivateAccountStep, ApiStateKeys, ForgotPasswordStep, LoginStep, MFAStep, SamlVendors, SignUpStage, TeamStateKeys, acceptInvitationActions, acceptInvitationReducers, acceptInvitationState, accountSettingsActions, accountSettingsReducers, accountSettingsState, activateAccountActions, activateAccountReducers, activateState, apiTokensActions, apiTokensReducers, apiTokensState, default as auth, authActions, authInitialState, authMockSagas, authReducers, authSagas, forgotPasswordActions, forgotPasswordReducers, forgotPasswordState, loginActions, loginReducers, loginState, mfaActions, mfaReducers, mfaState, profileActions, profileReducers, profileState, rolesActions, rolesReducers, rolesState, securityPolicyActions, securityPolicyReducers, securityPolicyState, signUpActions, signUpReducers, signUpState, socialLoginState, socialLoginsActions, socialLoginsReducer, ssoActions, ssoReducers, ssoState, teamActions, teamReducers, teamState, tenantsActions, tenantsReducers, tenantsState } from './auth/index.js'; | ||
export { ak as all, ax as auditLogsActions, aw as auditLogsReducers, av as auditLogsState, ai as audits, as as auditsActions, at as auditsInitialState, aA as auditsMetadataActions, az as auditsMetadataReducers, ay as auditsMetadataState, aq as auditsMockSagas, ar as auditsReducers, ap as auditsSagas, au as auditsStoreName, a as authStoreName, al as bindActionCreators, d as call, aj as combineReducers, ah as configureStore, an as delay, ag as getDefaultMiddleware, p as put, s as select, t as takeEvery, am as takeLatest } from './index-f62cda5d.js'; | ||
export { h as AcceptInvitationStep, A as ActivateAccountStep, Q as ApiStateKeys, F as ForgotPasswordStep, L as LoginStep, M as MFAStep, S as SamlVendors, K as SignUpStage, T as TeamStateKeys, n as acceptInvitationActions, k as acceptInvitationReducers, j as acceptInvitationState, $ as accountSettingsActions, _ as accountSettingsReducers, Z as accountSettingsState, g as activateAccountActions, f as activateAccountReducers, e as activateState, V as apiTokensActions, U as apiTokensReducers, R as apiTokensState, a as auth, b as authActions, i as authInitialState, m as authMockSagas, r as authReducers, s as authSagas, a6 as createFronteggStore, a7 as createSagaMiddleware, q as forgotPasswordActions, p as forgotPasswordReducers, o as forgotPasswordState, d as loginActions, c as loginReducers, l as loginState, y as mfaActions, x as mfaReducers, w as mfaState, C as profileActions, B as profileReducers, z as profileState, a5 as rolesActions, a4 as rolesReducers, a3 as rolesState, Y as securityPolicyActions, X as securityPolicyReducers, W as securityPolicyState, P as signUpActions, O as signUpReducers, N as signUpState, H as socialLoginState, J as socialLoginsActions, I as socialLoginsReducer, v as ssoActions, u as ssoReducers, t as ssoState, G as teamActions, E as teamReducers, D as teamState, a2 as tenantsActions, a1 as tenantsReducers, a0 as tenantsState } from './index-ac9d1e59.js'; | ||
import 'tslib'; | ||
import '@frontegg/rest-api'; | ||
export { auditLogsActions, auditLogsReducers, auditLogsState, default as audits, auditsActions, auditsInitialState, auditsMetadataActions, auditsMetadataReducers, auditsMetadataState, auditsMockSagas, auditsReducers, auditsSagas } from './audits/index.js'; | ||
export { createFronteggStore, createSagaMiddleware } from './toolkit/index.js'; | ||
//# sourceMappingURL=index.js.map |
{ | ||
"name": "@frontegg/redux-store", | ||
"libName": "FronteggReduxStore", | ||
"version": "2.1.1-alpha.786143057", | ||
"version": "2.2.0-alpha.788357573", | ||
"author": "Frontegg LTD", | ||
@@ -9,5 +9,5 @@ "module": "index.esm.js", | ||
"dependencies": { | ||
"@frontegg/rest-api": "2.1.1-alpha.786143057" | ||
"@frontegg/rest-api": "2.2.0-alpha.788357573" | ||
}, | ||
"gitHead": "ac9d6cba7935ae5a39b8ef024fd3a6f8804c7d21", | ||
"gitHead": "7e5d75899e0ade1a11b2c856cda21d0772dda711", | ||
"main": "index.js", | ||
@@ -14,0 +14,0 @@ "es2015": "index.es.js", |
1472
toolkit/index.js
@@ -1,1471 +0,5 @@ | ||
import { T as TAKE, P as PUT, A as ALL, R as RACE, C as CALL, j as CPS, k as FORK, J as JOIN, l as CANCEL, S as SELECT, m as ACTION_CHANNEL, n as CANCELLED$1, o as FLUSH, G as GET_CONTEXT, q as SET_CONTEXT, u as check, M as MULTICAST, v as notUndef, w as CHANNEL_END_TYPE, x as MATCH, y as once, z as SAGA_ACTION, B as kTrue, D as promise, E as iterator, H as getMetaInfo, I as undef, K as array$1, L as createAllStyleChildCallbacks, N as SELF_CANCELLATION, O as createEmptyArray, Q as assignWithSymbols, U as asyncIteratorSymbol, V as shouldCancel, W as func, X as TASK_CANCEL, Y as shouldTerminate, Z as IO, _ as noop, $ as channel$1, a0 as _extends, a1 as createSetContextWarning, a2 as object, a3 as string$1, a4 as stringableFunc, a5 as symbol$1, a6 as expanding, a7 as buffer, a8 as remove, a9 as CANCEL$1, aa as makeIterator, ab as TERMINATE, ac as TASK, ad as compose, ae as internalErr, af as shouldComplete, ag as logError, ah as wrapSagaDispatch, ai as flatMap, aj as identity, ak as getLocation, al as getDefaultMiddleware, b as createSlice, am as configureStore, an as combineReducers, f as all, d as call } from '../redux-saga-effects.esm-6ff02caf.js'; | ||
export { f as all, ao as bindActionCreators, d as call, an as combineReducers, am as configureStore, g as delay, al as getDefaultMiddleware, p as put, s as select, t as takeEvery, h as takeLatest } from '../redux-saga-effects.esm-6ff02caf.js'; | ||
import authStore from '../auth/index.js'; | ||
export { ak as all, al as bindActionCreators, d as call, aj as combineReducers, ah as configureStore, an as delay, ag as getDefaultMiddleware, p as put, s as select, t as takeEvery, am as takeLatest } from '../index-f62cda5d.js'; | ||
export { a6 as createFronteggStore, a7 as createSagaMiddleware } from '../index-ac9d1e59.js'; | ||
import 'tslib'; | ||
import { ContextHolder } from '@frontegg/rest-api'; | ||
import auditsStore from '../audits/index.js'; | ||
/**
 * Returns a shallow copy of `source` omitting the own enumerable keys listed
 * in `excluded`. A nullish source yields an empty object.
 */
function _objectWithoutPropertiesLoose(source, excluded) {
  if (source == null) return {};
  const target = {};
  for (const key of Object.keys(source)) {
    if (excluded.indexOf(key) < 0) {
      target[key] = source[key];
    }
  }
  return target;
}
/**
 * Creates a deferred: a promise paired with its externally exposed
 * resolve/reject functions ({ promise, resolve, reject }).
 */
function deferred() {
  const def = {};
  def.promise = new Promise((resolve, reject) => {
    def.resolve = resolve;
    def.reject = reject;
  });
  return def;
}
var queue = []; | ||
/** | ||
Variable to hold a counting semaphore | ||
- Incrementing adds a lock and puts the scheduler in a `suspended` state (if it's not | ||
already suspended) | ||
- Decrementing releases a lock. Zero locks puts the scheduler in a `released` state. This | ||
triggers flushing the queued tasks. | ||
**/ | ||
var semaphore = 0; | ||
/** | ||
Executes a task 'atomically'. Tasks scheduled during this execution will be queued | ||
and flushed after this task has finished (assuming the scheduler endup in a released | ||
state). | ||
**/ | ||
function exec(task) { | ||
try { | ||
suspend(); | ||
task(); | ||
} finally { | ||
release(); | ||
} | ||
} | ||
/** | ||
Executes or queues a task depending on the state of the scheduler (`suspended` or `released`) | ||
**/ | ||
function asap(task) { | ||
queue.push(task); | ||
if (!semaphore) { | ||
suspend(); | ||
flush(); | ||
} | ||
} | ||
/** | ||
* Puts the scheduler in a `suspended` state and executes a task immediately. | ||
*/ | ||
function immediately(task) { | ||
try { | ||
suspend(); | ||
return task(); | ||
} finally { | ||
flush(); | ||
} | ||
} | ||
/**
  Adds one lock, putting the scheduler in a `suspended` state: scheduled
  tasks are queued until the scheduler is released again.
**/
function suspend() {
  semaphore += 1;
}
/**
  Removes one lock; when zero locks remain the scheduler is `released`.
**/
function release() {
  semaphore -= 1;
}
/**
  Releases the current lock and, if no other locks remain, drains the task
  queue, executing each queued task atomically via exec().
**/
function flush() {
  release();
  while (semaphore === 0) {
    var task = queue.shift();
    if (task === undefined) {
      break;
    }
    exec(task);
  }
}
/**
 * Matcher factory for an array of patterns: the produced matcher accepts an
 * action when any of the sub-patterns matches it.
 */
var array = function array(patterns) {
  var matchesInput = function (input) {
    var matchesPattern = function (p) {
      return matcher(p)(input);
    };
    return patterns.some(matchesPattern);
  };
  return matchesInput;
};
/**
 * Matcher factory for a predicate function: the produced matcher simply
 * delegates the action to the predicate.
 */
var predicate = function predicate(_predicate) {
  var applyPredicate = function (input) {
    return _predicate(input);
  };
  return applyPredicate;
};
/**
 * Matcher factory for a string(-able) pattern: the produced matcher accepts
 * an action whose `type` strictly equals String(pattern).
 */
var string = function string(pattern) {
  var matchesType = function (input) {
    var expected = String(pattern);
    return input.type === expected;
  };
  return matchesType;
};
/**
 * Matcher factory for a symbol pattern: the produced matcher accepts an
 * action whose `type` is strictly that symbol.
 */
var symbol = function symbol(pattern) {
  var matchesSymbol = function (input) {
    return input.type === pattern;
  };
  return matchesSymbol;
};
// Matcher factory for the '*' wildcard pattern: matches every action.
var wildcard = function wildcard() {
  return kTrue;
};
/**
 * Resolves a take() pattern to a matcher function.
 * Supported patterns: '*' (wildcard), string, array of patterns, function
 * with a meaningful toString (matched as string), plain predicate function,
 * or symbol.
 * @throws {Error} when the pattern has an unsupported shape
 */
function matcher(pattern) {
  var matcherCreator;
  if (pattern === '*') {
    matcherCreator = wildcard;
  } else if (string$1(pattern)) {
    matcherCreator = string;
  } else if (array$1(pattern)) {
    matcherCreator = array;
  } else if (stringableFunc(pattern)) {
    matcherCreator = string;
  } else if (func(pattern)) {
    matcherCreator = predicate;
  } else if (symbol$1(pattern)) {
    matcherCreator = symbol;
  } else {
    matcherCreator = null;
  }
  if (matcherCreator === null) {
    throw new Error("invalid pattern: " + pattern);
  }
  return matcherCreator(pattern);
}
// Sentinel action signalling that a channel has ended (been closed).
var END = {
  type: CHANNEL_END_TYPE
};
// True when `a` is an END-like action (carries the channel-end type).
var isEnd = function isEnd(a) {
  return a && a.type === CHANNEL_END_TYPE;
};
// Error / dev-check messages used by the channel factories below.
var CLOSED_CHANNEL_WITH_TAKERS = 'Cannot have a closed channel with pending takers';
var INVALID_BUFFER = 'invalid buffer passed to channel factory function';
var UNDEFINED_INPUT_ERROR = "Saga or channel was provided with an undefined action\nHints:\n - check that your Action Creator returns a non-undefined value\n - if the Saga was started using runSaga, check that your subscribe source provides the action to its listeners";
/**
 * Creates a buffered FIFO channel: `put` delivers to the oldest pending taker
 * or stores into the buffer, `take` consumes from the buffer or registers a
 * pending taker, `flush` drains the buffer, `close` ends the channel and
 * notifies all pending takers with END.
 * @param {Object} [buffer$1] - buffer implementation; defaults to expanding()
 * @returns {{take: Function, put: Function, flush: Function, close: Function}}
 */
function channel(buffer$1) {
  if (buffer$1 === void 0) {
    buffer$1 = expanding();
  }
  var closed = false;
  var takers = [];
  if (process.env.NODE_ENV !== 'production') {
    check(buffer$1, buffer, INVALID_BUFFER);
  }
  // Dev-only invariants: a closed channel never has pending takers, and
  // pending takers and a non-empty buffer are mutually exclusive.
  function checkForbiddenStates() {
    if (closed && takers.length) {
      throw internalErr(CLOSED_CHANNEL_WITH_TAKERS);
    }
    if (takers.length && !buffer$1.isEmpty()) {
      throw internalErr('Cannot have pending takers with non empty buffer');
    }
  }
  // Delivers `input` to the oldest pending taker, or buffers it; no-op once closed.
  function put(input) {
    if (process.env.NODE_ENV !== 'production') {
      checkForbiddenStates();
      check(input, notUndef, UNDEFINED_INPUT_ERROR);
    }
    if (closed) {
      return;
    }
    if (takers.length === 0) {
      return buffer$1.put(input);
    }
    var cb = takers.shift();
    cb(input);
  }
  // Hands a buffered message to `cb`, or registers it as a pending taker;
  // a closed, drained channel immediately delivers END.
  function take(cb) {
    if (process.env.NODE_ENV !== 'production') {
      checkForbiddenStates();
      check(cb, func, "channel.take's callback must be a function");
    }
    if (closed && buffer$1.isEmpty()) {
      cb(END);
    } else if (!buffer$1.isEmpty()) {
      cb(buffer$1.take());
    } else {
      takers.push(cb);
      // lets the taker unregister itself (e.g. when its task is cancelled)
      cb.cancel = function () {
        remove(takers, cb);
      };
    }
  }
  // Empties the whole buffer into `cb`; END when closed and already drained.
  function flush(cb) {
    if (process.env.NODE_ENV !== 'production') {
      checkForbiddenStates();
      check(cb, func, "channel.flush' callback must be a function");
    }
    if (closed && buffer$1.isEmpty()) {
      cb(END);
      return;
    }
    cb(buffer$1.flush());
  }
  // Closes the channel and notifies every pending taker with END; idempotent.
  function close() {
    if (process.env.NODE_ENV !== 'production') {
      checkForbiddenStates();
    }
    if (closed) {
      return;
    }
    closed = true;
    var arr = takers;
    takers = [];
    for (var i = 0, len = arr.length; i < len; i++) {
      var taker = arr[i];
      taker(END);
    }
  }
  return {
    take: take,
    put: put,
    flush: flush,
    close: close
  };
}
/**
 * Creates an unbuffered multicast channel: every put is offered to ALL takers
 * registered at put time whose matcher accepts the action. Takers are
 * one-shot (their cancel runs right before invocation), and the taker list is
 * copied before mutation so an in-flight put iterates a stable snapshot.
 * @returns {Object} channel with [MULTICAST] flag, put, take and close
 */
function multicastChannel() {
  var _ref;
  var closed = false;
  var currentTakers = [];
  var nextTakers = currentTakers;
  // Dev-only invariant: a closed channel must have no pending takers.
  function checkForbiddenStates() {
    if (closed && nextTakers.length) {
      throw internalErr(CLOSED_CHANNEL_WITH_TAKERS);
    }
  }
  // Copy-on-write: clone the taker list before mutating it while a put may
  // still be iterating the current snapshot.
  var ensureCanMutateNextTakers = function ensureCanMutateNextTakers() {
    if (nextTakers !== currentTakers) {
      return;
    }
    nextTakers = currentTakers.slice();
  };
  // Closes the channel and notifies every registered taker with END.
  var close = function close() {
    if (process.env.NODE_ENV !== 'production') {
      checkForbiddenStates();
    }
    closed = true;
    var takers = currentTakers = nextTakers;
    nextTakers = [];
    takers.forEach(function (taker) {
      taker(END);
    });
  };
  return _ref = {}, _ref[MULTICAST] = true, _ref.put = function put(input) {
    if (process.env.NODE_ENV !== 'production') {
      checkForbiddenStates();
      check(input, notUndef, UNDEFINED_INPUT_ERROR);
    }
    if (closed) {
      return;
    }
    // an END input closes the channel instead of being delivered as an action
    if (isEnd(input)) {
      close();
      return;
    }
    var takers = currentTakers = nextTakers;
    for (var i = 0, len = takers.length; i < len; i++) {
      var taker = takers[i];
      // deliver only to matching takers; cancel first so each taker fires once
      if (taker[MATCH](input)) {
        taker.cancel();
        taker(input);
      }
    }
  }, _ref.take = function take(cb, matcher) {
    if (matcher === void 0) {
      matcher = wildcard;
    }
    if (process.env.NODE_ENV !== 'production') {
      checkForbiddenStates();
    }
    if (closed) {
      cb(END);
      return;
    }
    cb[MATCH] = matcher;
    ensureCanMutateNextTakers();
    nextTakers.push(cb);
    cb.cancel = once(function () {
      ensureCanMutateNextTakers();
      remove(nextTakers, cb);
    });
  }, _ref.close = close, _ref;
}
/**
 * Creates the standard saga channel: a multicast channel whose put() defers
 * ordinary actions through the asap() scheduler, while actions flagged with
 * SAGA_ACTION are delivered synchronously.
 */
function stdChannel() {
  var chan = multicastChannel();
  var basePut = chan.put;
  chan.put = function (input) {
    if (input[SAGA_ACTION]) {
      basePut(input);
    } else {
      asap(function () {
        basePut(input);
      });
    }
  };
  return chan;
}
// Task lifecycle statuses (see newTask/proc below).
var RUNNING = 0; // still executing
var CANCELLED = 1; // cancelled before completion
var ABORTED = 2; // ended with an uncaught error
var DONE = 3; // completed normally
/**
 * Bridges a promise into the effect-callback protocol: fulfillment calls
 * cb(value) and rejection calls cb(error, true). If the promise exposes a
 * [CANCEL$1] function it becomes the callback's cancel handler.
 */
function resolvePromise(promise, cb) {
  var onCancel = promise[CANCEL$1];
  if (func(onCancel)) {
    cb.cancel = onCancel;
  }
  var onRejected = function (error) {
    cb(error, true);
  };
  promise.then(cb, onRejected);
}
// Monotonically increasing id shared by every saga/effect in this module.
var current = 0;
var nextSagaId = (function () {
  return ++current;
});
// Populated further below as a dispatch table of effect runners.
var _effectRunnerMap;
/**
 * Returns meta info for a task iterator: saga iterators carry their own name
 * in `meta`; for anything else the metadata is derived from the function.
 */
function getIteratorMetaInfo(iterator, fn) {
  if (!iterator.isSagaIterator) {
    return getMetaInfo(fn);
  }
  return {
    name: iterator.meta.name
  };
}
/**
 * Calls fn.apply(context, args) and normalizes the outcome into an iterator:
 * - a returned iterator (generator result) is passed through as-is;
 * - any other value becomes a one-shot iterator that first yields the value
 *   (kept pending only while it is a promise — see redux-saga #1573) and
 *   then completes;
 * - a synchronous throw becomes an iterator that rethrows on first next(),
 *   so detached forks fail as tasks instead of bubbling up (see #152, #441).
 */
function createTaskIterator(_ref) {
  var context = _ref.context,
    fn = _ref.fn,
    args = _ref.args;
  // catch synchronous failures; see #152 and #441
  try {
    var result = fn.apply(context, args);
    if (iterator(result)) {
      // a generator function already returned an iterator
      return result;
    }
    var resolved = false;
    return makeIterator(function next(arg) {
      if (resolved) {
        return {
          value: arg,
          done: true
        };
      }
      resolved = true;
      // Only promises returned from fork will be interpreted. See #1573
      return {
        value: result,
        done: !promise(result)
      };
    });
  } catch (err) {
    // surface the synchronous failure as a failed task rather than throwing
    return makeIterator(function () {
      throw err;
    });
  }
}
/**
 * Runs a put effect. The dispatch is scheduled through asap() so a put
 * performed while another saga holds the scheduler lock executes atomically
 * (nested puts run only after this one terminates). With `resolve`
 * (putResolve) a promise returned by the dispatch is awaited before
 * continuing. Put effects are not cancellable.
 */
function runPutEffect(env, _ref2, cb) {
  var channel = _ref2.channel,
    action = _ref2.action,
    resolve = _ref2.resolve;
  asap(function () {
    var result;
    try {
      var dispatchTo = channel ? channel.put : env.dispatch;
      result = dispatchTo(action);
    } catch (error) {
      cb(error, true);
      return;
    }
    if (resolve && promise(result)) {
      resolvePromise(result, cb);
    } else {
      cb(result);
    }
  });
}
/**
 * Runs a take effect: registers a one-shot taker on the target channel
 * (defaults to env.channel). A channel END terminates the taking saga unless
 * `maybe` (takeMaybe) is set, in which case END is delivered like an action.
 * Cancellation is delegated to the channel taker's own cancel.
 */
function runTakeEffect(env, _ref3, cb) {
  var _ref3$channel = _ref3.channel,
    channel = _ref3$channel === void 0 ? env.channel : _ref3$channel,
    pattern = _ref3.pattern,
    maybe = _ref3.maybe;
  var takeCb = function takeCb(input) {
    if (input instanceof Error) {
      cb(input, true);
    } else if (isEnd(input) && !maybe) {
      // plain take(): END terminates the saga instead of being delivered
      cb(TERMINATE);
    } else {
      cb(input);
    }
  };
  try {
    channel.take(takeCb, notUndef(pattern) ? matcher(pattern) : null);
  } catch (err) {
    cb(err, true);
    return;
  }
  cb.cancel = takeCb.cancel;
}
/**
 * Runs a call effect: invokes fn(...args) and resolves the result — promises
 * are awaited, iterators are driven as a child saga sharing the caller's task
 * context, and plain values complete synchronously. Synchronous throws are
 * reported through cb(error, true).
 */
function runCallEffect(env, _ref4, cb, _ref5) {
  var context = _ref4.context,
    fn = _ref4.fn,
    args = _ref4.args;
  var task = _ref5.task;
  // catch synchronous failures; see #152
  try {
    var result = fn.apply(context, args);
    if (promise(result)) {
      resolvePromise(result, cb);
    } else if (iterator(result)) {
      // drive the returned iterator as a (non-root) child of the current task
      proc(env, result, task.context, current, getMetaInfo(fn),
      /* isRoot */
      false, cb);
    } else {
      cb(result);
    }
  } catch (error) {
    cb(error, true);
  }
}
/**
 * Runs a cps effect: invokes a node-style function fn(...args, callback).
 * The supplied callback maps (err, res) onto the saga protocol, and any
 * cancel handler the callee attaches to it is propagated to the effect's cb.
 */
function runCPSEffect(env, _ref6, cb) {
  var context = _ref6.context,
    fn = _ref6.fn,
    args = _ref6.args;
  // CPS (node-style) functions can define their own cancellation logic by
  // setting a `cancel` field on the callback we hand them.
  // catch synchronous failures; see #152
  try {
    var nodeCallback = function nodeCallback(err, res) {
      if (undef(err)) {
        cb(res);
      } else {
        cb(err, true);
      }
    };
    fn.apply(context, args.concat(nodeCallback));
    if (nodeCallback.cancel) {
      cb.cancel = nodeCallback.cancel;
    }
  } catch (error) {
    cb(error, true);
  }
}
/**
 * Runs a fork/spawn effect: starts fn as a child task with the scheduler
 * suspended (immediately), so the child begins atomically. Detached children
 * (spawn) are handed back as-is; attached children join the parent's fork
 * queue, and a child that aborts synchronously aborts the parent queue
 * instead of resolving. Fork effects are not cancellable.
 */
function runForkEffect(env, _ref7, cb, _ref8) {
  var context = _ref7.context,
    fn = _ref7.fn,
    args = _ref7.args,
    detached = _ref7.detached;
  var parent = _ref8.task;
  var taskIterator = createTaskIterator({
    context: context,
    fn: fn,
    args: args
  });
  var meta = getIteratorMetaInfo(taskIterator, fn);
  immediately(function () {
    var child = proc(env, taskIterator, parent.context, current, meta, detached, undefined);
    if (detached) {
      cb(child);
      return;
    }
    if (child.isRunning()) {
      parent.queue.addTask(child);
      cb(child);
    } else if (child.isAborted()) {
      parent.queue.abort(child.error());
    } else {
      cb(child);
    }
  });
}
/**
 * Runs a join effect for a task or an array of tasks. Joining a running task
 * registers the caller as a joiner (resumed when that task ends); joining a
 * finished task resolves immediately with its result, or fails with its
 * error when it aborted.
 */
function runJoinEffect(env, taskOrTasks, cb, _ref9) {
  var task = _ref9.task;
  function joinSingleTask(taskToJoin, joinCb) {
    if (!taskToJoin.isRunning()) {
      if (taskToJoin.isAborted()) {
        joinCb(taskToJoin.error(), true);
      } else {
        joinCb(taskToJoin.result());
      }
      return;
    }
    var joiner = {
      task: task,
      cb: joinCb
    };
    joinCb.cancel = function () {
      // unregister only while the joined task is still pending
      if (taskToJoin.isRunning()) remove(taskToJoin.joiners, joiner);
    };
    taskToJoin.joiners.push(joiner);
  }
  if (!array$1(taskOrTasks)) {
    joinSingleTask(taskOrTasks, cb);
    return;
  }
  if (taskOrTasks.length === 0) {
    cb([]);
    return;
  }
  var childCallbacks = createAllStyleChildCallbacks(taskOrTasks, cb);
  taskOrTasks.forEach(function (t, i) {
    joinSingleTask(t, childCallbacks[i]);
  });
}
/** Cancels `taskToCancel` if (and only if) it is still running. */
function cancelSingleTask(taskToCancel) {
  if (!taskToCancel.isRunning()) {
    return;
  }
  taskToCancel.cancel();
}
/**
 * Runs a cancel effect: cancels the given task, each task of an array, or —
 * for the SELF_CANCELLATION sentinel — the calling task itself. Completes
 * synchronously; cancel effects are not cancellable.
 */
function runCancelEffect(env, taskOrTasks, cb, _ref10) {
  var task = _ref10.task;
  if (taskOrTasks === SELF_CANCELLATION) {
    cancelSingleTask(task);
  } else if (array$1(taskOrTasks)) {
    taskOrTasks.forEach(function (t) {
      cancelSingleTask(t);
    });
  } else {
    cancelSingleTask(taskOrTasks);
  }
  cb();
}
/**
 * Runs an all effect over an array or object of sub-effects: every entry is
 * digested in parallel and the effect completes when all of them complete.
 * An empty input completes immediately with [] or {}.
 */
function runAllEffect(env, effects, cb, _ref11) {
  var digestEffect = _ref11.digestEffect;
  var effectId = current;
  var keys = Object.keys(effects);
  if (keys.length === 0) {
    cb(array$1(effects) ? [] : {});
    return;
  }
  var childCallbacks = createAllStyleChildCallbacks(effects, cb);
  keys.forEach(function (key) {
    return digestEffect(effects[key], effectId, childCallbacks[key], key);
  });
}
/**
 * Runs a race effect: digests every sub-effect and completes with the first
 * one to settle, cancelling all the others (race auto-cancellation). Errors
 * and terminating results propagate immediately instead of resolving the
 * race with a keyed response.
 */
function runRaceEffect(env, effects, cb, _ref12) {
  var digestEffect = _ref12.digestEffect;
  var effectId = current;
  var keys = Object.keys(effects);
  var response = array$1(effects) ? createEmptyArray(keys.length) : {};
  var childCbs = {};
  var completed = false;
  var makeChildCb = function (key) {
    var childCb = function childCb(res, isErr) {
      if (completed) {
        return;
      }
      if (isErr || shouldComplete(res)) {
        // Race Auto cancellation: errors / END win and propagate as-is
        cb.cancel();
        cb(res, isErr);
      } else {
        // normal winner: cancel the losers and resolve with { key: res }
        cb.cancel();
        completed = true;
        response[key] = res;
        cb(response);
      }
    };
    childCb.cancel = noop;
    return childCb;
  };
  for (var i = 0; i < keys.length; i++) {
    childCbs[keys[i]] = makeChildCb(keys[i]);
  }
  cb.cancel = function () {
    // prevents unnecessary cancellation
    if (completed) {
      return;
    }
    completed = true;
    for (var j = 0; j < keys.length; j++) {
      childCbs[keys[j]].cancel();
    }
  };
  for (var k = 0; k < keys.length && !completed; k++) {
    digestEffect(effects[keys[k]], effectId, childCbs[keys[k]], keys[k]);
  }
}
/**
 * Runs a select effect: applies `selector` to the current store state (plus
 * any extra args) and completes with the derived value; a throwing selector
 * fails the effect with cb(error, true).
 */
function runSelectEffect(env, _ref13, cb) {
  var selector = _ref13.selector,
    args = _ref13.args;
  try {
    var storeState = env.getState();
    var selected = selector.apply(void 0, [storeState].concat(args));
    cb(selected);
  } catch (error) {
    cb(error, true);
  }
}
/**
 * Runs an actionChannel effect: creates a buffered channel fed by actions
 * from env.channel that match `pattern`. The taker re-registers itself after
 * every non-END action, and closing the created channel also cancels the
 * taker.
 */
function runChannelEffect(env, _ref14, cb) {
  var pattern = _ref14.pattern,
    buffer = _ref14.buffer;
  var chan = channel(buffer);
  var match = matcher(pattern);
  var taker = function taker(action) {
    if (!isEnd(action)) {
      // keep listening for further matching actions
      env.channel.take(taker, match);
    }
    chan.put(action);
  };
  var originalClose = chan.close;
  chan.close = function () {
    taker.cancel();
    originalClose();
  };
  env.channel.take(taker, match);
  cb(chan);
}
/** Runs a cancelled() effect: reports whether the current task was cancelled. */
function runCancelledEffect(env, data, cb, _ref15) {
  cb(_ref15.task.isCancelled());
}
// Runs a flush effect: hands the channel's buffered content to cb.
function runFlushEffect(env, channel, cb) {
  channel.flush(cb);
}
/** Runs a getContext effect: reads `prop` from the current task's context. */
function runGetContextEffect(env, prop, cb, _ref16) {
  cb(_ref16.task.context[prop]);
}
/** Runs a setContext effect: merges `props` into the task's context, then completes. */
function runSetContextEffect(env, props, cb, _ref17) {
  assignWithSymbols(_ref17.task.context, props);
  cb();
}
var effectRunnerMap = (_effectRunnerMap = {}, _effectRunnerMap[TAKE] = runTakeEffect, _effectRunnerMap[PUT] = runPutEffect, _effectRunnerMap[ALL] = runAllEffect, _effectRunnerMap[RACE] = runRaceEffect, _effectRunnerMap[CALL] = runCallEffect, _effectRunnerMap[CPS] = runCPSEffect, _effectRunnerMap[FORK] = runForkEffect, _effectRunnerMap[JOIN] = runJoinEffect, _effectRunnerMap[CANCEL] = runCancelEffect, _effectRunnerMap[SELECT] = runSelectEffect, _effectRunnerMap[ACTION_CHANNEL] = runChannelEffect, _effectRunnerMap[CANCELLED$1] = runCancelledEffect, _effectRunnerMap[FLUSH] = runFlushEffect, _effectRunnerMap[GET_CONTEXT] = runGetContextEffect, _effectRunnerMap[SET_CONTEXT] = runSetContextEffect, _effectRunnerMap); | ||
/**
  Used to track a parent task and its forks.
  In the fork model, forked tasks are attached by default to their parent.
  The whole model is an execution tree: the main task is the current
  Generator's own flow, and the parent task aggregates the main task plus all
  of its forked tasks. Semantics of the parent task:
  - It completes once all its forks have either completed or been cancelled
  - If it is cancelled, all forks are cancelled as well
  - It aborts when an uncaught error bubbles up from any fork
  - On completion its return value is the one returned by the main task
**/
function forkQueue(mainTask, onAbort, cont) {
  var tasks = [];
  var result;
  var completed = false;
  // Aborts the whole queue: notify, cancel every pending fork, fail the parent.
  function abort(err) {
    onAbort();
    cancelAll();
    cont(err, true);
  }
  // Attaches a fork; its continuation removes it from the queue and either
  // aborts the parent (on error) or completes the parent once the queue drains.
  function addTask(task) {
    tasks.push(task);
    task.cont = function (res, isErr) {
      if (completed) {
        return;
      }
      remove(tasks, task);
      task.cont = noop;
      if (isErr) {
        abort(res);
        return;
      }
      // only the main task's result becomes the parent's result
      if (task === mainTask) {
        result = res;
      }
      if (!tasks.length) {
        completed = true;
        cont(result);
      }
    };
  }
  // Cancels every pending fork exactly once; their continuations are muted first.
  function cancelAll() {
    if (completed) {
      return;
    }
    completed = true;
    tasks.forEach(function (t) {
      t.cont = noop;
      t.cancel();
    });
    tasks = [];
  }
  addTask(mainTask);
  return {
    addTask: addTask,
    cancelAll: cancelAll,
    abort: abort,
    getTasks: function getTasks() {
      return tasks;
    }
  };
}
// there can be only a single saga error created at any given moment
/** Formats a "fileName?lineNumber" location string for saga stack output. */
function formatLocation(fileName, lineNumber) {
  var separator = "?";
  return fileName + separator + lineNumber;
}
/**
 * Renders an effect's source location as "code fileName?lineNumber", or the
 * empty string when no location metadata is attached to the effect.
 */
function effectLocationAsString(effect) {
  var location = getLocation(effect);
  if (!location) {
    return '';
  }
  return location.code + " " + formatLocation(location.fileName, location.lineNumber);
}
/**
 * Renders a saga's name, followed by "fileName?lineNumber" when its meta
 * carries a location.
 */
function sagaLocationAsString(sagaMeta) {
  var name = sagaMeta.name,
    location = sagaMeta.location;
  if (!location) {
    return name;
  }
  return name + " " + formatLocation(location.fileName, location.lineNumber);
}
/**
 * Collects the names of tasks cancelled due to the error from every stack
 * frame and joins them into a readable list, or '' when none were cancelled.
 */
function cancelledTasksAsString(sagaStack) {
  var cancelledTasks = flatMap(function (i) {
    return i.cancelledTasks;
  }, sagaStack);
  if (cancelledTasks.length === 0) {
    return '';
  }
  return ['Tasks cancelled due to error:'].concat(cancelledTasks).join('\n');
}
// Module-level singleton state for the saga error stack assembled while an
// error bubbles up through nested tasks (frames are pushed in newTask's end).
var crashedEffect = null;
var sagaStack = [];
// Pushes a stack frame, tagging it with the effect that crashed (if any).
var addSagaFrame = function addSagaFrame(frame) {
  frame.crashedEffect = crashedEffect;
  sagaStack.push(frame);
};
// Resets the singleton after the stack has been reported.
var clear = function clear() {
  crashedEffect = null;
  sagaStack.length = 0;
}; // this sets crashed effect for the soon-to-be-reported saga frame
// this slightly stretches the singleton nature of this module into wrong direction
// as it's even less obvious what's the data flow here, but it is what it is for now
var setCrashedEffect = function setCrashedEffect(effect) {
  crashedEffect = effect;
};
/**
  Renders the collected saga stack into a multi-line report string.
  @returns {string}
  @example
  The above error occurred in task errorInPutSaga {pathToFile}
   when executing effect put({type: 'REDUCER_ACTION_ERROR_IN_PUT'}) {pathToFile}
      created by fetchSaga {pathToFile}
      created by rootSaga {pathToFile}
*/
var toString = function toString() {
  var firstSaga = sagaStack[0];
  var otherSagas = sagaStack.slice(1);
  var crashedEffectLocation = null;
  if (firstSaga.crashedEffect) {
    crashedEffectLocation = effectLocationAsString(firstSaga.crashedEffect);
  }
  var errorMessage = "The above error occurred in task " + sagaLocationAsString(firstSaga.meta);
  if (crashedEffectLocation) {
    errorMessage += " \n when executing effect " + crashedEffectLocation;
  }
  var lines = [errorMessage];
  otherSagas.forEach(function (s) {
    lines.push(" created by " + sagaLocationAsString(s.meta));
  });
  lines.push(cancelledTasksAsString(sagaStack));
  return lines.join('\n');
};
/**
 * Creates a task descriptor aggregating a main task and its fork queue.
 * Tracks status (RUNNING/CANCELLED/ABORTED/DONE), result/error, joiners, and
 * a lazily-created promise view (toPromise). `end` is both the fork queue's
 * continuation and the path through which errors/results propagate to the
 * task's own continuation and joiners.
 */
function newTask(env, mainTask, parentContext, parentEffectId, meta, isRoot, cont) {
  var _task;
  if (cont === void 0) {
    cont = noop;
  }
  var status = RUNNING;
  var taskResult;
  var taskError;
  var deferredEnd = null; // created on first toPromise() call
  var cancelledDueToErrorTasks = [];
  var context = Object.create(parentContext);
  // onAbort records the names of still-pending forks for the error report
  var queue = forkQueue(mainTask, function onAbort() {
    cancelledDueToErrorTasks.push.apply(cancelledDueToErrorTasks, queue.getTasks().map(function (t) {
      return t.meta.name;
    }));
  }, end);
  /**
   This may be called by a parent generator to trigger/propagate cancellation
   cancel all pending tasks (including the main task), then end the current task.
   Cancellation propagates down to the whole execution tree held by this Parent task
   It's also propagated to all joiners of this task and their execution tree/joiners
   Cancellation is noop for terminated/Cancelled tasks tasks
  **/
  function cancel() {
    if (status === RUNNING) {
      // Setting status to CANCELLED does not necessarily mean that the task/iterators are stopped
      // effects in the iterator's finally block will still be executed
      status = CANCELLED;
      queue.cancelAll(); // Ending with a TASK_CANCEL will propagate the Cancellation to all joiners
      end(TASK_CANCEL, false);
    }
  }
  // Finalizes the task with a result or error, settles the promise view,
  // reports root aborts to env.onError, and resumes continuation + joiners.
  function end(result, isErr) {
    if (!isErr) {
      // The status here may be RUNNING or CANCELLED
      // If the status is CANCELLED, then we do not need to change it here
      if (result === TASK_CANCEL) {
        status = CANCELLED;
      } else if (status !== CANCELLED) {
        status = DONE;
      }
      taskResult = result;
      deferredEnd && deferredEnd.resolve(result);
    } else {
      status = ABORTED;
      addSagaFrame({
        meta: meta,
        cancelledTasks: cancelledDueToErrorTasks
      });
      if (task.isRoot) {
        var sagaStack = toString(); // we've dumped the saga stack to string and are passing it to user's code
        // we know that it won't be needed anymore and we need to clear it
        clear();
        env.onError(result, {
          sagaStack: sagaStack
        });
      }
      taskError = result;
      deferredEnd && deferredEnd.reject(result);
    }
    task.cont(result, isErr);
    task.joiners.forEach(function (joiner) {
      joiner.cb(result, isErr);
    });
    task.joiners = null;
  }
  // Merges props (including symbol keys) into this task's context.
  function setContext(props) {
    if (process.env.NODE_ENV !== 'production') {
      check(props, object, createSetContextWarning('task', props));
    }
    assignWithSymbols(context, props);
  }
  // Lazily creates a promise that settles with the task's outcome; settles
  // immediately when the task has already finished.
  function toPromise() {
    if (deferredEnd) {
      return deferredEnd.promise;
    }
    deferredEnd = deferred();
    if (status === ABORTED) {
      deferredEnd.reject(taskError);
    } else if (status !== RUNNING) {
      deferredEnd.resolve(taskResult);
    }
    return deferredEnd.promise;
  }
  // Public task object (tagged with the TASK symbol).
  var task = (_task = {}, _task[TASK] = true, _task.id = parentEffectId, _task.meta = meta, _task.isRoot = isRoot, _task.context = context, _task.joiners = [], _task.queue = queue, _task.cancel = cancel, _task.cont = cont, _task.end = end, _task.setContext = setContext, _task.toPromise = toPromise, _task.isRunning = function isRunning() {
    return status === RUNNING;
  }, _task.isCancelled = function isCancelled() {
    return status === CANCELLED || status === RUNNING && mainTask.status === CANCELLED;
  }, _task.isAborted = function isAborted() {
    return status === ABORTED;
  }, _task.result = function result() {
    return taskResult;
  }, _task.error = function error() {
    return taskError;
  }, _task);
  return task;
}
/**
 * Drives a saga iterator to completion: creates the task descriptor, then
 * repeatedly resumes the generator via `next`, digesting each yielded effect
 * through the effect-runner machinery until the generator terminates, throws,
 * or is cancelled. Returns the task descriptor.
 */
function proc(env, iterator$1, parentContext, parentEffectId, meta, isRoot, cont) {
  if (process.env.NODE_ENV !== 'production' && iterator$1[asyncIteratorSymbol]) {
    throw new Error("redux-saga doesn't support async generators, please use only regular ones");
  }
  var finalRunEffect = env.finalizeRunEffect(runEffect);
  /**
    Tracks the current effect cancellation
    Each time the generator progresses. calling runEffect will set a new value
    on it. It allows propagating cancellation to child effects
  **/
  next.cancel = noop;
  /** Creates a main task to track the main flow */
  var mainTask = {
    meta: meta,
    cancel: cancelMain,
    status: RUNNING
  };
  /**
    Creates a new task descriptor for this generator.
    A task is the aggregation of it's mainTask and all it's forked tasks.
  **/
  var task = newTask(env, mainTask, parentContext, parentEffectId, meta, isRoot, cont);
  var executingContext = {
    task: task,
    digestEffect: digestEffect
  };
  /**
    cancellation of the main task. We'll simply resume the Generator with a TASK_CANCEL
  **/
  function cancelMain() {
    if (mainTask.status === RUNNING) {
      mainTask.status = CANCELLED;
      next(TASK_CANCEL);
    }
  }
  /**
    attaches cancellation logic to this task's continuation
    this will permit cancellation to propagate down the call chain
  **/
  if (cont) {
    cont.cancel = task.cancel;
  } // kicks up the generator
  next(); // then return the task descriptor to the caller
  return task;
  /**
   * This is the generator driver
   * It's a recursive async/continuation function which calls itself
   * until the generator terminates or throws
   * @param {internal commands(TASK_CANCEL | TERMINATE) | any} arg - value, generator will be resumed with.
   * @param {boolean} isErr - the flag shows if effect finished with an error
   *
   * receives either (command | effect result, false) or (any thrown thing, true)
   */
  function next(arg, isErr) {
    try {
      var result;
      if (isErr) {
        result = iterator$1.throw(arg); // user handled the error, we can clear bookkept values
        clear();
      } else if (shouldCancel(arg)) {
        /**
          getting TASK_CANCEL automatically cancels the main task
          We can get this value here
          - By cancelling the parent task manually
          - By joining a Cancelled task
        **/
        mainTask.status = CANCELLED;
        /**
          Cancels the current effect; this will propagate the cancellation down to any called tasks
        **/
        next.cancel();
        /**
          If this Generator has a `return` method then invokes it
          This will jump to the finally block
        **/
        result = func(iterator$1.return) ? iterator$1.return(TASK_CANCEL) : {
          done: true,
          value: TASK_CANCEL
        };
      } else if (shouldTerminate(arg)) {
        // We get TERMINATE flag, i.e. by taking from a channel that ended using `take` (and not `takem` used to trap End of channels)
        result = func(iterator$1.return) ? iterator$1.return() : {
          done: true
        };
      } else {
        result = iterator$1.next(arg);
      }
      if (!result.done) {
        // yielded an effect: run it, resuming next() once it settles
        digestEffect(result.value, parentEffectId, next);
      } else {
        /**
          This Generator has ended, terminate the main task and notify the fork queue
        **/
        if (mainTask.status !== CANCELLED) {
          mainTask.status = DONE;
        }
        mainTask.cont(result.value);
      }
    } catch (error) {
      if (mainTask.status === CANCELLED) {
        // errors thrown while already cancelled are not recoverable here
        throw error;
      }
      mainTask.status = ABORTED;
      mainTask.cont(error, true);
    }
  }
  // Resolves one yielded value: promises are awaited, iterators become child
  // sagas, declarative effects are dispatched to their runner, anything else
  // is returned to the generator as-is.
  function runEffect(effect, effectId, currCb) {
    /**
      each effect runner must attach its own logic of cancellation to the provided callback
      it allows this generator to propagate cancellation downward.
      ATTENTION! effect runners must setup the cancel logic by setting cb.cancel = [cancelMethod]
      And the setup must occur before calling the callback
      This is a sort of inversion of control: called async functions are responsible
      of completing the flow by calling the provided continuation; while caller functions
      are responsible for aborting the current flow by calling the attached cancel function
      Library users can attach their own cancellation logic to promises by defining a
      promise[CANCEL] method in their returned promises
      ATTENTION! calling cancel must have no effect on an already completed or cancelled effect
    **/
    if (promise(effect)) {
      resolvePromise(effect, currCb);
    } else if (iterator(effect)) {
      // resolve iterator
      proc(env, effect, task.context, effectId, meta,
      /* isRoot */
      false, currCb);
    } else if (effect && effect[IO]) {
      var effectRunner = effectRunnerMap[effect.type];
      effectRunner(env, effect.payload, currCb, executingContext);
    } else {
      // anything else returned as is
      currCb(effect);
    }
  }
  // Wraps runEffect with monitoring + mutually-exclusive complete/cancel
  // bookkeeping for a single effect occurrence.
  function digestEffect(effect, parentEffectId, cb, label) {
    if (label === void 0) {
      label = '';
    }
    var effectId = nextSagaId();
    env.sagaMonitor && env.sagaMonitor.effectTriggered({
      effectId: effectId,
      parentEffectId: parentEffectId,
      label: label,
      effect: effect
    });
    /**
      completion callback and cancel callback are mutually exclusive
      We can't cancel an already completed effect
      And We can't complete an already cancelled effectId
    **/
    var effectSettled; // Completion callback passed to the appropriate effect runner
    function currCb(res, isErr) {
      if (effectSettled) {
        return;
      }
      effectSettled = true;
      cb.cancel = noop; // defensive measure
      if (env.sagaMonitor) {
        if (isErr) {
          env.sagaMonitor.effectRejected(effectId, res);
        } else {
          env.sagaMonitor.effectResolved(effectId, res);
        }
      }
      if (isErr) {
        setCrashedEffect(effect);
      }
      cb(res, isErr);
    } // tracks down the current cancel
    currCb.cancel = noop; // setup cancellation logic on the parent cb
    cb.cancel = function () {
      // prevents cancelling an already completed effect
      if (effectSettled) {
        return;
      }
      effectSettled = true;
      currCb.cancel(); // propagates cancel downward
      currCb.cancel = noop; // defensive measure
      env.sagaMonitor && env.sagaMonitor.effectCancelled(effectId);
    };
    finalRunEffect(effect, effectId, currCb);
  }
}
// runSaga call signature, embedded in dev-mode validation error messages.
var RUN_SAGA_SIGNATURE = 'runSaga(options, saga, ...args)';
var NON_GENERATOR_ERR = RUN_SAGA_SIGNATURE + ": saga argument must be a Generator function!";
/**
 * Starts a saga outside the middleware pipeline.
 *
 * Validates its arguments in non-production builds, normalizes the provided
 * saga monitor, builds the runtime environment object, and hands the saga's
 * iterator to `proc` as a root task. Returns the Task produced by `proc`.
 *
 * @param {Object} options - channel, dispatch, getState, context, sagaMonitor,
 *   effectMiddlewares, onError (all optional except as checked below).
 * @param {GeneratorFunction} saga - the root saga to run.
 * @param {...*} args - forwarded to `saga` when creating its iterator.
 * @returns {Task} the root task handle.
 */
function runSaga(options, saga, ...args) {
  const {
    channel = stdChannel(),
    dispatch,
    getState,
    context = {},
    sagaMonitor,
    effectMiddlewares,
    onError = logError,
  } = options;

  if (process.env.NODE_ENV !== 'production') {
    check(saga, func, NON_GENERATOR_ERR);
  }

  const iterator$1 = saga(...args);

  if (process.env.NODE_ENV !== 'production') {
    check(iterator$1, iterator, NON_GENERATOR_ERR);
  }

  const effectId = nextSagaId();

  if (sagaMonitor) {
    // Monitors are expected to implement the full interface; default any
    // missing hooks to no-ops so the runtime can invoke them unconditionally.
    const monitorHooks = [
      'rootSagaStarted',
      'effectTriggered',
      'effectResolved',
      'effectRejected',
      'effectCancelled',
      'actionDispatched',
    ];
    monitorHooks.forEach((hook) => {
      sagaMonitor[hook] = sagaMonitor[hook] || noop;
    });
    sagaMonitor.rootSagaStarted({ effectId, saga, args });
  }

  if (process.env.NODE_ENV !== 'production') {
    if (notUndef(dispatch)) {
      check(dispatch, func, 'dispatch must be a function');
    }
    if (notUndef(getState)) {
      check(getState, func, 'getState must be a function');
    }
    if (notUndef(effectMiddlewares)) {
      const MIDDLEWARE_TYPE_ERROR = 'effectMiddlewares must be an array of functions';
      check(effectMiddlewares, array$1, MIDDLEWARE_TYPE_ERROR);
      effectMiddlewares.forEach((effectMiddleware) => check(effectMiddleware, func, MIDDLEWARE_TYPE_ERROR));
    }
    check(onError, func, 'onError passed to the redux-saga is not a function!');
  }

  // When effect middlewares are supplied, compose them once and wrap the
  // effect runner with the resulting chain; otherwise run effects directly.
  let finalizeRunEffect = identity;
  if (effectMiddlewares) {
    const middleware = compose(...effectMiddlewares);
    finalizeRunEffect = (runEffect) => (effect, effectId, currCb) => {
      const plainRunEffect = (eff) => runEffect(eff, effectId, currCb);
      return middleware(plainRunEffect)(effect);
    };
  }

  const env = {
    channel,
    dispatch: wrapSagaDispatch(dispatch),
    getState,
    sagaMonitor,
    onError,
    finalizeRunEffect,
  };

  return immediately(() => {
    const task = proc(env, iterator$1, context, effectId, getMetaInfo(saga),
    /* isRoot */
    true, undefined);
    if (sagaMonitor) {
      sagaMonitor.effectResolved(effectId, task);
    }
    return task;
  });
}
/**
 * Creates the redux-saga middleware.
 *
 * The returned middleware forwards every action to the reducers first, then
 * publishes it on the saga channel. It also exposes `run` (start a saga via
 * the bound `runSaga`) and `setContext` (merge props into the saga context).
 *
 * @param {Object} [middlewareOptions] - context, channel, sagaMonitor, plus
 *   any extra options forwarded verbatim to `runSaga`.
 * @returns {Function} the saga middleware, with `run` and `setContext` attached.
 */
function sagaMiddlewareFactory(middlewareOptions = {}) {
  const { context = {}, channel = stdChannel(), sagaMonitor } = middlewareOptions;
  // Everything except the keys handled above is passed straight to runSaga.
  const options = _objectWithoutPropertiesLoose(middlewareOptions, ['context', 'channel', 'sagaMonitor']);
  let boundRunSaga;

  if (process.env.NODE_ENV !== 'production') {
    check(channel, channel$1, 'options.channel passed to the Saga middleware is not a channel');
  }

  function sagaMiddleware({ getState, dispatch }) {
    // Capture the store's dispatch/getState so sagas started later see them.
    boundRunSaga = runSaga.bind(null, Object.assign({}, options, {
      context,
      channel,
      dispatch,
      getState,
      sagaMonitor,
    }));
    return (next) => (action) => {
      if (sagaMonitor && sagaMonitor.actionDispatched) {
        sagaMonitor.actionDispatched(action);
      }
      const result = next(action); // hit reducers
      channel.put(action);
      return result;
    };
  }

  sagaMiddleware.run = (...args) => {
    if (process.env.NODE_ENV !== 'production' && !boundRunSaga) {
      throw new Error('Before running a Saga, you must mount the Saga middleware on the Store using applyMiddleware');
    }
    return boundRunSaga(...args);
  };

  sagaMiddleware.setContext = (props) => {
    if (process.env.NODE_ENV !== 'production') {
      check(props, object, createSetContextWarning('sagaMiddleware', props));
    }
    assignWithSymbols(context, props);
  };

  return sagaMiddleware;
}
// Single saga-middleware instance shared by every store created in this module.
const sagaMiddleware = sagaMiddlewareFactory();
// Redux Toolkit defaults with thunk and the dev-time state checks disabled,
// followed by the saga middleware.
const middleware = getDefaultMiddleware({
  thunk: false,
  immutableCheck: false,
  serializableCheck: false,
}).concat(sagaMiddleware);
// Root slice state: holds the Frontegg context object (initially unset).
const initialState = {
  context: undefined,
};
const { reducer: rootReducer } = createSlice({
  name: 'root',
  initialState,
  reducers: {
    setContext: {
      prepare: (context) => ({ payload: context }),
      reducer: (state, { payload }) => {
        // Mirror the new context into the global ContextHolder so that
        // non-redux consumers observe the same value.
        ContextHolder.setContext(payload);
        return Object.assign({}, state, { context: payload });
      },
    },
  },
});
/**
 * Creates the Frontegg redux store, or returns the one already attached to
 * the holder. On the client the holder defaults to `window`; on the server a
 * holder object must be supplied explicitly so stores are not shared.
 *
 * @param {Object} rootInitialState - initial `root` slice state (its `context`
 *   is also pushed into ContextHolder).
 * @param {Object} [storeHolder] - object that owns the `store` reference;
 *   required when rendering server-side.
 * @param {boolean} [previewMode=false] - when true, runs the mock sagas
 *   instead of the real ones.
 * @returns {Store} the (possibly pre-existing) redux store.
 */
const createFronteggStore = (rootInitialState, storeHolder, previewMode = false) => {
  const isSSR = typeof window === 'undefined';
  if (isSSR && storeHolder == null) {
    throw Error('createFronteggStore(initState, storeHolder) failed, storeHolder must not be null in Server-Side rendering');
  }
  const holder = storeHolder || window;

  if (!holder.store) {
    ContextHolder.setContext(rootInitialState.context);

    const preloadedState = {
      root: Object.assign({}, rootInitialState),
      [authStore.storeName]: authStore.initialState,
      [auditsStore.storeName]: auditsStore.initialState,
    };
    const reducer = combineReducers({
      root: rootReducer,
      [authStore.storeName]: authStore.reducer,
      [auditsStore.storeName]: auditsStore.reducer,
    });
    holder.store = configureStore({ middleware, preloadedState, reducer });

    // Start either the mock sagas (preview mode) or the real ones.
    const rootSaga = previewMode
      ? function* () {
          yield all([call(authStore.mockSagas), call(auditsStore.mockSagas)]);
        }
      : function* () {
          yield all([call(authStore.sagas), call(auditsStore.sagas)]);
        };
    sagaMiddleware.run(rootSaga);
  }
  return holder.store;
};
export { createFronteggStore, sagaMiddlewareFactory as createSagaMiddleware }; | ||
import '@frontegg/rest-api'; | ||
//# sourceMappingURL=index.js.map |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
2065328
88
18541
103
2
+ Added@frontegg/rest-api@2.2.0-alpha.788357573(transitive)
- Removed@frontegg/rest-api@2.1.1-alpha.786143057(transitive)