ngx-cacheable
Comparing version 1.0.0 to 1.0.1
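The headline change in 1.0.1 is cache busting: the spec below wires a shared Subject through the new @CacheBuster decorator and the new cacheBusterObserver option on @Cacheable, the Cacheable decorator additionally caches the in-flight observable, and the package now re-exports both decorators from its dist build. A minimal usage sketch modeled on the spec's Service class; the UserService name, the http client and the endpoint paths are hypothetical stand-ins, not part of this diff:

```ts
import { Subject } from 'rxjs/Subject';
import { Cacheable, CacheBuster } from 'ngx-cacheable';

// one notifier shared between the method that owns the cache and the one that busts it
const cacheBusterNotifier = new Subject<void>();

class UserService {
  constructor(private http: any) {} // stand-in for Angular's HttpClient

  // responses are cached until cacheBusterNotifier emits
  @Cacheable({
    cacheBusterObserver: cacheBusterNotifier.asObservable()
  })
  getUser(id: string) {
    return this.http.get(`/users/${id}`); // hypothetical call
  }

  // once the save call emits, the notifier fires and the getUser cache is cleared
  @CacheBuster({
    cacheBusterNotifier: cacheBusterNotifier
  })
  saveUser(user: any) {
    return this.http.post('/users', user); // hypothetical call
  }
}
```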
@@ -6,9 +6,17 @@ import { Observable } from 'rxjs/Observable'; | ||
import { mapTo, startWith } from 'rxjs/operators'; | ||
import { Subject } from 'rxjs/Subject'; | ||
import { CacheBuster } from './cache-buster.decorator'; | ||
import { Cacheable } from './cacheable.decorator'; | ||
const cacheBusterNotifier = new Subject<void>(); | ||
class Service { | ||
public mockServiceCall(parameter) { | ||
mockServiceCall(parameter) { | ||
return timer(1000).pipe(mapTo({ payload: parameter })); | ||
} | ||
mockSaveServiceCall() { | ||
return timer(1000).pipe(mapTo('SAVED')); | ||
} | ||
@Cacheable() | ||
@@ -18,2 +26,8 @@ getData(parameter: string) { | ||
} | ||
@Cacheable() | ||
getDataAndReturnCachedStream(parameter: string) { | ||
return this.mockServiceCall(parameter); | ||
} | ||
@Cacheable({ | ||
@@ -25,2 +39,3 @@ async: true | ||
} | ||
@Cacheable({ | ||
@@ -47,2 +62,3 @@ maxAge: 7500 | ||
} | ||
@Cacheable({ | ||
@@ -55,2 +71,3 @@ maxAge: 7500, | ||
} | ||
@Cacheable({ | ||
@@ -82,4 +99,18 @@ maxAge: 7500, | ||
} | ||
@CacheBuster({ | ||
cacheBusterNotifier: cacheBusterNotifier | ||
}) | ||
saveDataAndCacheBust() { | ||
return this.mockSaveServiceCall(); | ||
} | ||
@Cacheable({ | ||
cacheBusterObserver: cacheBusterNotifier.asObservable() | ||
}) | ||
getDataWithCacheBusting(parameter: string) { | ||
return this.mockServiceCall(parameter); | ||
} | ||
} | ||
describe('CacheDecorator', () => { | ||
describe('CacheableDecorator', () => { | ||
let service: Service = null; | ||
@@ -97,92 +128,120 @@ let mockServiceCallSpy: jasmine.Spy = null; | ||
it('return cached data for 5 unique requests all available for 7500ms WITH slidingExpiration on', () => { | ||
jasmine.clock().mockDate(); | ||
/** | ||
* do not use async await when using jasmine.clock() | ||
* we can mitigate this but will make our tests slower | ||
* https://www.google.bg/search?q=jasmine.clock.install+%2B+async+await&oq=jasmine.clock.install+%2B+async+await&aqs=chrome..69i57.4240j0j7&sourceid=chrome&ie=UTF-8 | ||
*/ | ||
it('return cached data up until a new parameter is passed and the cache is busted', () => { | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getData('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
const cachedResponse = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse).toEqual({ payload: 'test' }); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
const parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach((param) => service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe()); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
const cachedResponse2 = _timedStreamAsyncAwait(service.getData('test2')); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* allow for the mock request to complete | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
jasmine.clock().tick(1000); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
const cachedResponse3 = _timedStreamAsyncAwait(service.getData('test3'), 1000); | ||
/** | ||
* pass through time to just before the cache expires | ||
* service call is made and waited out | ||
*/ | ||
jasmine.clock().tick(7500); | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
/** | ||
* re-call just with test2 so we renew its expiration | ||
* this should NOT return cached response, since the currently cached one should be 'test3' | ||
*/ | ||
service.getDataWithMaxCacheCountAndSlidingExpiration('test2').subscribe(); | ||
const cachedResponse4 = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return the cached observable up until it completes or errors', () => { | ||
/** | ||
* expire ALL caches except the test2 one | ||
* call the service endpoint five hundred times with the same parameter | ||
* but the service should only be called once, since the observable will be cached | ||
*/ | ||
jasmine.clock().tick(1); | ||
const cachedResponse = _timedStreamAsyncAwait( | ||
combineLatest( | ||
parameters.map((param) => service.getDataWithMaxCacheCountAndSlidingExpiration(param).pipe(startWith(null))) | ||
) | ||
); | ||
for (let i = 0; i < 500; i++) { | ||
service.getDataAndReturnCachedStream('test'); | ||
} | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
/** | ||
* no cache for 4 payloads, so 4 more calls to the service will be made | ||
* return the response | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(9); | ||
expect(cachedResponse).toEqual([null, { payload: 'test2' }, null, null, null]); | ||
jasmine.clock().uninstall(); | ||
jasmine.clock().tick(1000); | ||
/** | ||
* call again.. | ||
*/ | ||
service.getDataAndReturnCachedStream('test'); | ||
/** | ||
* service call count should still be 1, since we are returning from cache now | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
}); | ||
it('return cached data for 5 unique requests all available for 7500ms', () => { | ||
jasmine.clock().mockDate(); | ||
it('with async:true return cached data ASYNCHRONOUSLY up until a new parameter is passed and the cache is busted', () => { | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getDataAsync('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
const cachedResponseTry1 = _timedStreamAsyncAwait(service.getDataAsync('test')); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
* async cache hasn't resolved yet | ||
* we need to wait a tick out first | ||
*/ | ||
const parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach((param) => service.getDataWithMaxCacheCountAndExpiration(param).subscribe()); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
expect(cachedResponseTry1).toEqual(null); | ||
/** | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
const cachedResponseTry2 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponseTry2).toEqual({ payload: 'test' }); | ||
/** | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
jasmine.clock().tick(1000); | ||
const cachedResponse2 = _timedStreamAsyncAwait( | ||
forkJoin(parameters.map((param) => service.getDataWithMaxCacheCountAndExpiration(param))) | ||
); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
/** | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
const cachedResponse2 = _timedStreamAsyncAwait(service.getDataAsync('test2'), 1); | ||
expect(cachedResponse2).toEqual(null); | ||
expect(cachedResponse2).toEqual([ | ||
{ payload: 'test1' }, | ||
{ payload: 'test2' }, | ||
{ payload: 'test3' }, | ||
{ payload: 'test4' }, | ||
{ payload: 'test5' } | ||
]); | ||
/** | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
const cachedResponse3 = _timedStreamAsyncAwait(service.getDataAsync('test3'), 1000); | ||
/** | ||
* expire caches | ||
* service call is made and waited out | ||
*/ | ||
jasmine.clock().tick(7501); | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
const cachedResponse3 = _timedStreamAsyncAwait(service.getDataWithMaxCacheCountAndExpiration('test1')); | ||
expect(cachedResponse3).toEqual(null); | ||
/** | ||
* by now, no cache exists for the 'test1' parameter, so 1 more call will be made to the service | ||
* this should return cached response, since the currently cached one should be 'test3' | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(6); | ||
const cachedResponse4 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return cached data up until the maxAge period has passed and then bail out to data source', () => { | ||
/** | ||
* do not use async await when using jasmine.clock() | ||
* we can mitigate this but will make our tests slower | ||
* https://www.google.bg/search?q=jasmine.clock.install+%2B+async+await&oq=jasmine.clock.install+%2B+async+await&aqs=chrome..69i57.4240j0j7&sourceid=chrome&ie=UTF-8 | ||
*/ | ||
jasmine.clock().mockDate(); | ||
let asyncFreshData = null; | ||
service.getDataWithExpiration('test').subscribe((data) => { | ||
asyncFreshData = data; | ||
}); | ||
jasmine.clock().tick(1000); | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithExpiration('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
@@ -217,14 +276,6 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
}); | ||
it('return cached data up until the maxAge period but renew the expiration if called within the period', () => { | ||
/** | ||
* do not use async await when using jasmine.clock() | ||
* we can mitigate this but will make our tests slower | ||
* https://www.google.bg/search?q=jasmine.clock.install+%2B+async+await&oq=jasmine.clock.install+%2B+async+await&aqs=chrome..69i57.4240j0j7&sourceid=chrome&ie=UTF-8 | ||
*/ | ||
jasmine.clock().mockDate(); | ||
let asyncFreshData = null; | ||
service.getDataWithSlidingExpiration('test').subscribe((data) => { | ||
asyncFreshData = data; | ||
}); | ||
jasmine.clock().tick(1000); | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithSlidingExpiration('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
@@ -274,109 +325,2 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
it('return cached data up until new parameters are passed WITH a custom resolver function', () => { | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test1'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test1' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalled(); | ||
const asyncFreshData2 = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test2'), 1000); | ||
expect(asyncFreshData2).toEqual({ payload: 'test2' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
const cachedResponse = _timedStreamAsyncAwait( | ||
service.getDataWithCustomCacheResolver('test3', { straightToLastCache: true }) | ||
); | ||
expect(cachedResponse).toEqual({ payload: 'test2' }); | ||
/** | ||
* call count still 2, since we rerouted directly to cache | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
_timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3')); | ||
/** no cache rerouting -> bail to service call -> increment call counter */ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(3); | ||
}); | ||
it('return cached data ASYNCHRONOUSLY up until a new parameter is passed and the cache is busted', () => { | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getDataAsync('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
const cachedResponseTry1 = _timedStreamAsyncAwait(service.getDataAsync('test')); | ||
/** | ||
* async cache hasn't resolved yet | ||
* we need to wait a tick out first | ||
*/ | ||
expect(cachedResponseTry1).toEqual(null); | ||
/** | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
const cachedResponseTry2 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponseTry2).toEqual({ payload: 'test' }); | ||
/** | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
/** | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
const cachedResponse2 = _timedStreamAsyncAwait(service.getDataAsync('test2'), 1); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
const cachedResponse3 = _timedStreamAsyncAwait(service.getDataAsync('test3'), 1000); | ||
/** | ||
* service call is made and waited out | ||
*/ | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
/** | ||
* this should return cached response, since the currently cached one should be 'test3' | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
const cachedResponse4 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return cached data up until a new parameter is passed and the cache is busted', () => { | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getData('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
const cachedResponse = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse).toEqual({ payload: 'test' }); | ||
/** | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
const cachedResponse2 = _timedStreamAsyncAwait(service.getData('test2')); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
const cachedResponse3 = _timedStreamAsyncAwait(service.getData('test3'), 1000); | ||
/** | ||
* service call is made and waited out | ||
*/ | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
/** | ||
* this should return cached response, since the currently cached one should be 'test3' | ||
*/ | ||
const cachedResponse4 = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return cached data for 5 unique requests, then should bail to data source', () => { | ||
@@ -462,2 +406,102 @@ /** | ||
it('return cached data for 5 unique requests all available for 7500ms', () => { | ||
jasmine.clock().mockDate(); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
*/ | ||
const parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach((param) => service.getDataWithMaxCacheCountAndExpiration(param).subscribe()); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
jasmine.clock().tick(1000); | ||
const cachedResponse2 = _timedStreamAsyncAwait( | ||
forkJoin(parameters.map((param) => service.getDataWithMaxCacheCountAndExpiration(param))) | ||
); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
expect(cachedResponse2).toEqual([ | ||
{ payload: 'test1' }, | ||
{ payload: 'test2' }, | ||
{ payload: 'test3' }, | ||
{ payload: 'test4' }, | ||
{ payload: 'test5' } | ||
]); | ||
/** | ||
* expire caches | ||
*/ | ||
jasmine.clock().tick(7501); | ||
const cachedResponse3 = _timedStreamAsyncAwait(service.getDataWithMaxCacheCountAndExpiration('test1')); | ||
expect(cachedResponse3).toEqual(null); | ||
/** | ||
* by now, no cache exists for the 'test1' parameter, so 1 more call will be made to the service | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(6); | ||
}); | ||
it('return cached data for 5 unique requests all available for 7500ms WITH slidingExpiration on', () => { | ||
jasmine.clock().mockDate(); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
*/ | ||
const parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach((param) => service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe()); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
/** | ||
* allow for the mock request to complete | ||
*/ | ||
jasmine.clock().tick(1000); | ||
/** | ||
* pass through time to just before the cache expires | ||
*/ | ||
jasmine.clock().tick(7500); | ||
/** | ||
* re-call just with test2 so we renew its expiration | ||
*/ | ||
service.getDataWithMaxCacheCountAndSlidingExpiration('test2').subscribe(); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
/** | ||
* expire ALL caches except the test2 one | ||
*/ | ||
jasmine.clock().tick(1); | ||
const cachedResponse = _timedStreamAsyncAwait( | ||
combineLatest( | ||
parameters.map((param) => service.getDataWithMaxCacheCountAndSlidingExpiration(param).pipe(startWith(null))) | ||
) | ||
); | ||
/** | ||
* no cache for 4 payloads, so 4 more calls to the service will be made | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(9); | ||
expect(cachedResponse).toEqual([null, { payload: 'test2' }, null, null, null]); | ||
jasmine.clock().uninstall(); | ||
}); | ||
it('return cached data up until new parameters are passed WITH a custom resolver function', () => { | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test1'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test1' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalled(); | ||
const asyncFreshData2 = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test2'), 1000); | ||
expect(asyncFreshData2).toEqual({ payload: 'test2' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
const cachedResponse = _timedStreamAsyncAwait( | ||
service.getDataWithCustomCacheResolver('test3', { straightToLastCache: true }) | ||
); | ||
expect(cachedResponse).toEqual({ payload: 'test2' }); | ||
/** | ||
* call count still 2, since we rerouted directly to cache | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
_timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3')); | ||
/** no cache rerouting -> bail to service call -> increment call counter */ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(3); | ||
}); | ||
it('only cache data when a specific response is returned, otherwise it should bail to service call', () => { | ||
@@ -493,2 +537,36 @@ const asyncData = _timedStreamAsyncAwait(service.getDataWithCustomCacheDecider('test1'), 1000); | ||
}); | ||
it('cache data until the cacheBusterNotifier has emitted', () => { | ||
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
const cachedResponse = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test')); | ||
expect(cachedResponse).toEqual({ payload: 'test' }); | ||
/** | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
/** | ||
* make the save call | ||
* after 1 second the cache busting subject will emit and the cache for getDataWithCacheBusting('test') will be cleared | ||
*/ | ||
expect(_timedStreamAsyncAwait(service.saveDataAndCacheBust(), 1000)).toEqual('SAVED'); | ||
const cachedResponse2 = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test')); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* call count has incremented due to the actual method call (instead of cache) | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
/** | ||
* pass through 1s of time | ||
*/ | ||
jasmine.clock().tick(1000); | ||
/** | ||
* synchronous cached response should now be returned | ||
*/ | ||
expect(_timedStreamAsyncAwait(service.getDataWithCacheBusting('test'))).toEqual({ payload: 'test' }); | ||
}); | ||
}); | ||
@@ -495,0 +573,0 @@ |
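The spec relies throughout on a _timedStreamAsyncAwait helper whose body falls outside the hunks shown here. Judging from how it is called, it plausibly subscribes synchronously, optionally advances the mocked jasmine clock, and returns whatever has been emitted so far; the sketch below is a reconstruction under those assumptions, not the package's actual helper:

```ts
import { Observable } from 'rxjs/Observable';

function _timedStreamAsyncAwait<T>(stream$: Observable<T>, skipTime?: number): T {
  let response: T = null;
  // capture whatever the stream emits synchronously (or after the clock is advanced)
  stream$.subscribe(data => {
    response = data;
  });
  if (skipTime) {
    // assumes jasmine.clock() has been installed by the suite's setup
    jasmine.clock().tick(skipTime);
  }
  return response;
}
```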
import { Observable } from 'rxjs/Observable'; | ||
import { of } from 'rxjs/observable/of'; | ||
import { delay, tap } from 'rxjs/operators'; | ||
import { delay, finalize, shareReplay, tap } from 'rxjs/operators'; | ||
const DEFAULT_CACHE_RESOLVER = (oldParams:Array<any>, newParams:Array<any>) => | ||
const DEFAULT_CACHE_RESOLVER = (oldParams, newParams) => | ||
JSON.stringify(oldParams) === JSON.stringify(newParams); | ||
@@ -13,6 +13,6 @@ | ||
export type IShouldCacheDecider = (response: any) => boolean; | ||
export type ICacheable = (...args:Array<any>) => Observable<any>; | ||
interface ICachePair { | ||
type ICacheable = (...args) => Observable<any>; | ||
interface ICachePair<T> { | ||
parameters: any; | ||
response: any; | ||
response: T; | ||
created: Date; | ||
@@ -22,2 +22,7 @@ } | ||
/** | ||
* pass an Observable upon whose emission all caches will be busted | ||
*/ | ||
cacheBusterObserver?: Observable<void>; | ||
/** | ||
* @description request cache resolver which gets passed the old and new parameters and, based on those, | ||
@@ -52,9 +57,3 @@ * will figure out if we need to bail out of cache or not | ||
} | ||
export const Cacheable: (( | ||
cacheConfig?: ICacheConfig | ||
) => ( | ||
target: Object, | ||
propertyKey: string, | ||
propertyDescriptor: TypedPropertyDescriptor<ICacheable> | ||
) => TypedPropertyDescriptor<ICacheable>) = _cacheConfig => { | ||
export function Cacheable(_cacheConfig?: ICacheConfig) { | ||
return function( | ||
@@ -67,4 +66,14 @@ _target: Object, | ||
if (propertyDescriptor && propertyDescriptor.value) { | ||
const _cachePairs: Array<ICachePair> = []; | ||
const _cachePairs: Array<ICachePair<any>> = []; | ||
const _observableCachePairs: Array<ICachePair<Observable<any>>> = []; | ||
const cacheConfig = _cacheConfig ? _cacheConfig : {}; | ||
if (cacheConfig.cacheBusterObserver) { | ||
/** | ||
* subscribe to the cacheBusterObserver and upon emission, clear all caches | ||
*/ | ||
cacheConfig.cacheBusterObserver.subscribe(_ => { | ||
_cachePairs.length = 0; | ||
_observableCachePairs.length = 0; | ||
}); | ||
} | ||
cacheConfig.cacheResolver = cacheConfig.cacheResolver | ||
@@ -75,6 +84,9 @@ ? cacheConfig.cacheResolver | ||
/* use function instead of an arrow function to keep context of invocation */ | ||
(propertyDescriptor.value as any) = function(...parameters:Array<any>) { | ||
(propertyDescriptor.value as any) = function(...parameters) { | ||
let _foundCachePair = _cachePairs.find(cp => | ||
cacheConfig.cacheResolver(cp.parameters, parameters) | ||
); | ||
const _foundObservableCachePair = _observableCachePairs.find(cp => | ||
cacheConfig.cacheResolver(cp.parameters, parameters) | ||
); | ||
/** | ||
@@ -88,5 +100,11 @@ * check if maxAge is passed and cache has actually expired | ||
) { | ||
/** | ||
* cache duration has expired - remove it from the cachePairs array | ||
*/ | ||
_cachePairs.splice(_cachePairs.indexOf(_foundCachePair, 1)); | ||
_foundCachePair = null; | ||
} else if (_cacheConfig.slidingExpiration) { | ||
/** | ||
* renew cache duration | ||
*/ | ||
_foundCachePair.created = new Date(); | ||
@@ -99,4 +117,19 @@ } | ||
return cacheConfig.async ? cached$.pipe(delay(0)) : cached$; | ||
} else if (_foundObservableCachePair) { | ||
return _foundObservableCachePair.response; | ||
} else { | ||
return (_oldMethod.call(this, ...parameters) as Observable<any>).pipe( | ||
const response$ = (_oldMethod.call(this, ...parameters) as Observable< | ||
any | ||
>).pipe( | ||
finalize(() => { | ||
/** | ||
* if there has been an observable cache pair for these parameters, when it completes or errors, remove it | ||
*/ | ||
const _observableCachePairToRemove = _observableCachePairs.find( | ||
cp => cacheConfig.cacheResolver(cp.parameters, parameters) | ||
); | ||
_observableCachePairs.splice( | ||
_observableCachePairs.indexOf(_observableCachePairToRemove, 1) | ||
); | ||
}), | ||
tap(response => { | ||
@@ -126,4 +159,17 @@ /** | ||
} | ||
}) | ||
}), | ||
/** | ||
* replay cached observable, so we don't enter finalize and tap for every cached observable subscription | ||
*/ | ||
shareReplay() | ||
); | ||
/** | ||
* cache the stream | ||
*/ | ||
_observableCachePairs.push({ | ||
parameters: parameters, | ||
response: response$, | ||
created: new Date() | ||
}); | ||
return response$; | ||
} | ||
@@ -134,2 +180,2 @@ }; | ||
}; | ||
}; | ||
} |
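Besides the value cache, the decorator now keeps the in-flight stream in _observableCachePairs, removes it in finalize once it completes or errors, and multicasts it with shareReplay() so calls made while the request is pending share a single execution (the "five hundred calls, one service hit" test above exercises exactly this). A stripped-down sketch of that pattern in isolation, with hypothetical names and a single cache slot:

```ts
import { Observable } from 'rxjs/Observable';
import { of } from 'rxjs/observable/of';
import { delay, finalize, shareReplay } from 'rxjs/operators';

// single cached in-flight stream (the decorator keeps one per distinct parameter set)
let inFlight$: Observable<string> = null;

function getData(): Observable<string> {
  if (inFlight$) {
    // a caller arriving while the request is still pending gets the same stream
    return inFlight$;
  }
  inFlight$ = of('response').pipe(
    delay(1000),                        // stand-in for a slow service call
    finalize(() => (inFlight$ = null)), // forget the stream once it completes or errors
    shareReplay()                       // subscribers share one execution and replay its value
  );
  return inFlight$;
}

getData().subscribe(console.log);
getData().subscribe(console.log); // reuses the cached in-flight observable; the source runs once
```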
import { Observable } from 'rxjs/Observable'; | ||
export declare type ICacheRequestResolver = (oldParameters: Array<any>, newParameters: Array<any>) => boolean; | ||
export declare type IShouldCacheDecider = (response: any) => boolean; | ||
export declare type ICacheable = (...args: Array<any>) => Observable<any>; | ||
export interface ICacheConfig { | ||
/** | ||
* pass an Observable upon whose emission all caches will be busted | ||
*/ | ||
cacheBusterObserver?: Observable<void>; | ||
/** | ||
* @description request cache resolver which gets passed the old and new parameters and, based on those, | ||
@@ -36,2 +39,2 @@ * will figure out if we need to bail out of cache or not | ||
} | ||
export declare const Cacheable: ((cacheConfig?: ICacheConfig) => (target: Object, propertyKey: string, propertyDescriptor: TypedPropertyDescriptor<ICacheable>) => TypedPropertyDescriptor<ICacheable>); | ||
export declare function Cacheable(_cacheConfig?: ICacheConfig): (_target: Object, _propertyKey: string, propertyDescriptor: TypedPropertyDescriptor<(...args: any[]) => Observable<any>>) => TypedPropertyDescriptor<(...args: any[]) => Observable<any>>; |
@@ -8,3 +8,3 @@ "use strict"; | ||
}; | ||
exports.Cacheable = function (_cacheConfig) { | ||
function Cacheable(_cacheConfig) { | ||
return function (_target, _propertyKey, propertyDescriptor) { | ||
@@ -14,3 +14,13 @@ var _oldMethod = propertyDescriptor.value; | ||
var _cachePairs_1 = []; | ||
var _observableCachePairs_1 = []; | ||
var cacheConfig_1 = _cacheConfig ? _cacheConfig : {}; | ||
if (cacheConfig_1.cacheBusterObserver) { | ||
/** | ||
* subscribe to the cacheBusterObserver and upon emission, clear all caches | ||
*/ | ||
cacheConfig_1.cacheBusterObserver.subscribe(function (_) { | ||
_cachePairs_1.length = 0; | ||
_observableCachePairs_1.length = 0; | ||
}); | ||
} | ||
cacheConfig_1.cacheResolver = cacheConfig_1.cacheResolver | ||
@@ -28,2 +38,5 @@ ? cacheConfig_1.cacheResolver | ||
}); | ||
var _foundObservableCachePair = _observableCachePairs_1.find(function (cp) { | ||
return cacheConfig_1.cacheResolver(cp.parameters, parameters); | ||
}); | ||
/** | ||
@@ -35,2 +48,5 @@ * check if maxAge is passed and cache has actually expired | ||
cacheConfig_1.maxAge) { | ||
/** | ||
* cache duration has expired - remove it from the cachePairs array | ||
*/ | ||
_cachePairs_1.splice(_cachePairs_1.indexOf(_foundCachePair, 1)); | ||
@@ -40,2 +56,5 @@ _foundCachePair = null; | ||
else if (_cacheConfig.slidingExpiration) { | ||
/** | ||
* renew cache duration | ||
*/ | ||
_foundCachePair.created = new Date(); | ||
@@ -48,5 +67,14 @@ } | ||
} | ||
else if (_foundObservableCachePair) { | ||
return _foundObservableCachePair.response; | ||
} | ||
else { | ||
return _oldMethod.call.apply(_oldMethod, [this].concat(parameters)).pipe(operators_1.tap(function (response) { | ||
var response$ = _oldMethod.call.apply(_oldMethod, [this].concat(parameters)).pipe(operators_1.finalize(function () { | ||
/** | ||
* if there has been an observable cache pair for these parameters, when it completes or errors, remove it | ||
*/ | ||
var _observableCachePairToRemove = _observableCachePairs_1.find(function (cp) { return cacheConfig_1.cacheResolver(cp.parameters, parameters); }); | ||
_observableCachePairs_1.splice(_observableCachePairs_1.indexOf(_observableCachePairToRemove, 1)); | ||
}), operators_1.tap(function (response) { | ||
/** | ||
* if no maxCacheCount has been passed | ||
@@ -70,3 +98,16 @@ * if maxCacheCount has not been passed, just shift the cachePair to make room for the new one | ||
} | ||
})); | ||
}), | ||
/** | ||
* replay cached observable, so we don't enter finalize and tap for every cached observable subscription | ||
*/ | ||
operators_1.shareReplay()); | ||
/** | ||
* cache the stream | ||
*/ | ||
_observableCachePairs_1.push({ | ||
parameters: parameters, | ||
response: response$, | ||
created: new Date() | ||
}); | ||
return response$; | ||
} | ||
@@ -77,2 +118,4 @@ }; | ||
}; | ||
}; | ||
} | ||
exports.Cacheable = Cacheable; | ||
//# sourceMappingURL=cacheable.decorator.js.map |
@@ -49,3 +49,5 @@ "use strict"; | ||
var operators_1 = require("rxjs/operators"); | ||
var cacheable_decorator_1 = require("./cacheable.decorator"); | ||
var Subject_1 = require("rxjs/Subject"); | ||
var _1 = require("./"); | ||
var cacheBusterNotifier = new Subject_1.Subject(); | ||
var Service = /** @class */ (function () { | ||
@@ -57,5 +59,11 @@ function Service() { | ||
}; | ||
Service.prototype.mockSaveServiceCall = function () { | ||
return timer_1.timer(1000).pipe(operators_1.mapTo('SAVED')); | ||
}; | ||
Service.prototype.getData = function (parameter) { | ||
return this.mockServiceCall(parameter); | ||
}; | ||
Service.prototype.getDataAndReturnCachedStream = function (parameter) { | ||
return this.mockServiceCall(parameter); | ||
}; | ||
Service.prototype.getDataAsync = function (parameter) { | ||
@@ -85,7 +93,16 @@ return this.mockServiceCall(parameter); | ||
}; | ||
Service.prototype.saveDataAndCacheBust = function () { | ||
return this.mockSaveServiceCall(); | ||
}; | ||
Service.prototype.getDataWithCacheBusting = function (parameter) { | ||
return this.mockServiceCall(parameter); | ||
}; | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable() | ||
_1.Cacheable() | ||
], Service.prototype, "getData", null); | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable() | ||
], Service.prototype, "getDataAndReturnCachedStream", null); | ||
__decorate([ | ||
_1.Cacheable({ | ||
async: true | ||
@@ -95,3 +112,3 @@ }) | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable({ | ||
maxAge: 7500 | ||
@@ -101,3 +118,3 @@ }) | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable({ | ||
maxAge: 7500, | ||
@@ -108,3 +125,3 @@ slidingExpiration: true | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable({ | ||
maxCacheCount: 5 | ||
@@ -114,3 +131,3 @@ }) | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable({ | ||
maxAge: 7500, | ||
@@ -121,3 +138,3 @@ maxCacheCount: 5 | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable({ | ||
maxAge: 7500, | ||
@@ -129,3 +146,3 @@ maxCacheCount: 5, | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable({ | ||
cacheResolver: function (_oldParameters, newParameters) { | ||
@@ -137,3 +154,3 @@ return newParameters.find(function (param) { return !!param.straightToLastCache; }); | ||
__decorate([ | ||
cacheable_decorator_1.Cacheable({ | ||
_1.Cacheable({ | ||
shouldCacheDecider: function (response) { | ||
@@ -144,5 +161,15 @@ return response.payload === 'test'; | ||
], Service.prototype, "getDataWithCustomCacheDecider", null); | ||
__decorate([ | ||
_1.CacheBuster({ | ||
cacheBusterNotifier: cacheBusterNotifier | ||
}) | ||
], Service.prototype, "saveDataAndCacheBust", null); | ||
__decorate([ | ||
_1.Cacheable({ | ||
cacheBusterObserver: cacheBusterNotifier.asObservable() | ||
}) | ||
], Service.prototype, "getDataWithCacheBusting", null); | ||
return Service; | ||
}()); | ||
describe('CacheDecorator', function () { | ||
describe('CacheableDecorator', function () { | ||
var service = null; | ||
@@ -158,76 +185,101 @@ var mockServiceCallSpy = null; | ||
}); | ||
it('return cached data for 5 unique requests all available for 7500ms WITH slidingExpiration on', function () { | ||
jasmine.clock().mockDate(); | ||
/** | ||
* do not use async await when using jasmine.clock() | ||
* we can mitigate this but will make our tests slower | ||
* https://www.google.bg/search?q=jasmine.clock.install+%2B+async+await&oq=jasmine.clock.install+%2B+async+await&aqs=chrome..69i57.4240j0j7&sourceid=chrome&ie=UTF-8 | ||
*/ | ||
it('return cached data up until a new parameter is passed and the cache is busted', function () { | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getData('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
var cachedResponse = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse).toEqual({ payload: 'test' }); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
var parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach(function (param) { return service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe(); }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
var cachedResponse2 = _timedStreamAsyncAwait(service.getData('test2')); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* allow for the mock request to complete | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
jasmine.clock().tick(1000); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
var cachedResponse3 = _timedStreamAsyncAwait(service.getData('test3'), 1000); | ||
/** | ||
* pass through time to just before the cache expires | ||
* service call is made and waited out | ||
*/ | ||
jasmine.clock().tick(7500); | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
/** | ||
* re-call just with test2 so we renew its expiration | ||
* this should NOT return cached response, since the currently cached one should be 'test3' | ||
*/ | ||
service.getDataWithMaxCacheCountAndSlidingExpiration('test2').subscribe(); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
var cachedResponse4 = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return the cached observable up until it completes or errors', function () { | ||
/** | ||
* expire ALL caches except the test2 one | ||
* call the service endpoint five hundred times with the same parameter | ||
* but the service should only be called once, since the observable will be cached | ||
*/ | ||
jasmine.clock().tick(1); | ||
var cachedResponse = _timedStreamAsyncAwait(combineLatest_1.combineLatest(parameters.map(function (param) { return service.getDataWithMaxCacheCountAndSlidingExpiration(param).pipe(operators_1.startWith(null)); }))); | ||
for (var i = 0; i < 500; i++) { | ||
service.getDataAndReturnCachedStream('test'); | ||
} | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
/** | ||
* no cache for 4 payloads, so 4 more calls to the service will be made | ||
* return the response | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(9); | ||
expect(cachedResponse).toEqual([null, { payload: 'test2' }, null, null, null]); | ||
jasmine.clock().uninstall(); | ||
jasmine.clock().tick(1000); | ||
/** | ||
* call again.. | ||
*/ | ||
service.getDataAndReturnCachedStream('test'); | ||
/** | ||
* service call count should still be 1, since we are returning from cache now | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
}); | ||
it('return cached data for 5 unique requests all available for 7500ms', function () { | ||
jasmine.clock().mockDate(); | ||
it('with async:true return cached data ASYNCHRONOUSLY up until a new parameter is passed and the cache is busted', function () { | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getDataAsync('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
var cachedResponseTry1 = _timedStreamAsyncAwait(service.getDataAsync('test')); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
* async cache hasn't resolved yet | ||
* we need to wait a tick out first | ||
*/ | ||
var parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach(function (param) { return service.getDataWithMaxCacheCountAndExpiration(param).subscribe(); }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
jasmine.clock().tick(1000); | ||
var cachedResponse2 = _timedStreamAsyncAwait(forkJoin_1.forkJoin(parameters.map(function (param) { return service.getDataWithMaxCacheCountAndExpiration(param); }))); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
expect(cachedResponse2).toEqual([ | ||
{ payload: 'test1' }, | ||
{ payload: 'test2' }, | ||
{ payload: 'test3' }, | ||
{ payload: 'test4' }, | ||
{ payload: 'test5' } | ||
]); | ||
expect(cachedResponseTry1).toEqual(null); | ||
/** | ||
* expire caches | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
jasmine.clock().tick(7501); | ||
var cachedResponse3 = _timedStreamAsyncAwait(service.getDataWithMaxCacheCountAndExpiration('test1')); | ||
expect(cachedResponse3).toEqual(null); | ||
var cachedResponseTry2 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponseTry2).toEqual({ payload: 'test' }); | ||
/** | ||
* by now, no cache exists for the 'test1' parameter, so 1 more call will be made to the service | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(6); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
/** | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
var cachedResponse2 = _timedStreamAsyncAwait(service.getDataAsync('test2'), 1); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
var cachedResponse3 = _timedStreamAsyncAwait(service.getDataAsync('test3'), 1000); | ||
/** | ||
* service call is made and waited out | ||
*/ | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
/** | ||
* this should return cached response, since the currently cached one should be 'test3' | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
var cachedResponse4 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return cached data up until the maxAge period has passed and then bail out to data source', function () { | ||
/** | ||
* do not use async await when using jasmine.clock() | ||
* we can mitigate this but will make our tests slower | ||
* https://www.google.bg/search?q=jasmine.clock.install+%2B+async+await&oq=jasmine.clock.install+%2B+async+await&aqs=chrome..69i57.4240j0j7&sourceid=chrome&ie=UTF-8 | ||
*/ | ||
jasmine.clock().mockDate(); | ||
var asyncFreshData = null; | ||
service.getDataWithExpiration('test').subscribe(function (data) { | ||
asyncFreshData = data; | ||
}); | ||
jasmine.clock().tick(1000); | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getDataWithExpiration('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
@@ -259,13 +311,4 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
it('return cached data up until the maxAge period but renew the expiration if called within the period', function () { | ||
/** | ||
* do not use async await when using jasmine.clock() | ||
* we can mitigate this but will make our tests slower | ||
* https://www.google.bg/search?q=jasmine.clock.install+%2B+async+await&oq=jasmine.clock.install+%2B+async+await&aqs=chrome..69i57.4240j0j7&sourceid=chrome&ie=UTF-8 | ||
*/ | ||
jasmine.clock().mockDate(); | ||
var asyncFreshData = null; | ||
service.getDataWithSlidingExpiration('test').subscribe(function (data) { | ||
asyncFreshData = data; | ||
}); | ||
jasmine.clock().tick(1000); | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getDataWithSlidingExpiration('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
@@ -308,88 +351,2 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
}); | ||
it('return cached data up until new parameters are passed WITH a custom resolver function', function () { | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test1'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test1' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalled(); | ||
var asyncFreshData2 = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test2'), 1000); | ||
expect(asyncFreshData2).toEqual({ payload: 'test2' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
var cachedResponse = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3', { straightToLastCache: true })); | ||
expect(cachedResponse).toEqual({ payload: 'test2' }); | ||
/** | ||
* call count still 2, since we rerouted directly to cache | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
_timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3')); | ||
/** no cache rerouting -> bail to service call -> increment call counter */ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(3); | ||
}); | ||
it('return cached data ASYNCHRONOUSLY up until a new parameter is passed and the cache is busted', function () { | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getDataAsync('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
var cachedResponseTry1 = _timedStreamAsyncAwait(service.getDataAsync('test')); | ||
/** | ||
* async cache hasn't resolved yet | ||
* we need to wait a tick out first | ||
*/ | ||
expect(cachedResponseTry1).toEqual(null); | ||
/** | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
var cachedResponseTry2 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponseTry2).toEqual({ payload: 'test' }); | ||
/** | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
/** | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
var cachedResponse2 = _timedStreamAsyncAwait(service.getDataAsync('test2'), 1); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
var cachedResponse3 = _timedStreamAsyncAwait(service.getDataAsync('test3'), 1000); | ||
/** | ||
* service call is made and waited out | ||
*/ | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
/** | ||
* this should return cached response, since the currently cached one should be 'test3' | ||
* 1 millisecond delay added, so the async cache resolves | ||
*/ | ||
var cachedResponse4 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return cached data up until a new parameter is passed and the cache is busted', function () { | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getData('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
var cachedResponse = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse).toEqual({ payload: 'test' }); | ||
/** | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
var cachedResponse2 = _timedStreamAsyncAwait(service.getData('test2')); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* no cache for 'test2', but service call was made so the spy counter is incremented | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
var cachedResponse3 = _timedStreamAsyncAwait(service.getData('test3'), 1000); | ||
/** | ||
* service call is made and waited out | ||
*/ | ||
expect(cachedResponse3).toEqual({ payload: 'test3' }); | ||
/** | ||
* this should return cached response, since the currently cached one should be 'test3' | ||
*/ | ||
var cachedResponse4 = _timedStreamAsyncAwait(service.getData('test')); | ||
expect(cachedResponse4).toEqual(null); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(4); | ||
}); | ||
it('return cached data for 5 unique requests, then should bail to data source', function () { | ||
@@ -461,2 +418,81 @@ /** | ||
}); | ||
it('return cached data for 5 unique requests all available for 7500ms', function () { | ||
jasmine.clock().mockDate(); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
*/ | ||
var parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach(function (param) { return service.getDataWithMaxCacheCountAndExpiration(param).subscribe(); }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
jasmine.clock().tick(1000); | ||
var cachedResponse2 = _timedStreamAsyncAwait(forkJoin_1.forkJoin(parameters.map(function (param) { return service.getDataWithMaxCacheCountAndExpiration(param); }))); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
expect(cachedResponse2).toEqual([ | ||
{ payload: 'test1' }, | ||
{ payload: 'test2' }, | ||
{ payload: 'test3' }, | ||
{ payload: 'test4' }, | ||
{ payload: 'test5' } | ||
]); | ||
/** | ||
* expire caches | ||
*/ | ||
jasmine.clock().tick(7501); | ||
var cachedResponse3 = _timedStreamAsyncAwait(service.getDataWithMaxCacheCountAndExpiration('test1')); | ||
expect(cachedResponse3).toEqual(null); | ||
/** | ||
* by now, no cache exists for the 'test1' parameter, so 1 more call will be made to the service | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(6); | ||
}); | ||
it('return cached data for 5 unique requests all available for 7500ms WITH slidingExpiration on', function () { | ||
jasmine.clock().mockDate(); | ||
/** | ||
* call the same endpoint with 5 different parameters and cache all 5 responses, based on the maxCacheCount parameter | ||
*/ | ||
var parameters = ['test1', 'test2', 'test3', 'test4', 'test5']; | ||
parameters.forEach(function (param) { return service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe(); }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
/** | ||
* allow for the mock request to complete | ||
*/ | ||
jasmine.clock().tick(1000); | ||
/** | ||
* pass through time to just before the cache expires | ||
*/ | ||
jasmine.clock().tick(7500); | ||
/** | ||
* re-call just with test2 so we renew its expiration | ||
*/ | ||
service.getDataWithMaxCacheCountAndSlidingExpiration('test2').subscribe(); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5); | ||
/** | ||
* expire ALL caches except the test2 one | ||
*/ | ||
jasmine.clock().tick(1); | ||
var cachedResponse = _timedStreamAsyncAwait(combineLatest_1.combineLatest(parameters.map(function (param) { return service.getDataWithMaxCacheCountAndSlidingExpiration(param).pipe(operators_1.startWith(null)); }))); | ||
/** | ||
* no cache for 4 payloads, so 4 more calls to the service will be made | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(9); | ||
expect(cachedResponse).toEqual([null, { payload: 'test2' }, null, null, null]); | ||
jasmine.clock().uninstall(); | ||
}); | ||
it('return cached data up until new parameters are passed WITH a custom resolver function', function () { | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test1'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test1' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalled(); | ||
var asyncFreshData2 = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test2'), 1000); | ||
expect(asyncFreshData2).toEqual({ payload: 'test2' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
var cachedResponse = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3', { straightToLastCache: true })); | ||
expect(cachedResponse).toEqual({ payload: 'test2' }); | ||
/** | ||
* call count still 2, since we rerouted directly to cache | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
_timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3')); | ||
/** no cache rerouting -> bail to service call -> increment call counter */ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(3); | ||
}); | ||
it('only cache data when a specific response is returned, otherwise it should bail to service call', function () { | ||
@@ -488,2 +524,32 @@ var asyncData = _timedStreamAsyncAwait(service.getDataWithCustomCacheDecider('test1'), 1000); | ||
}); | ||
it('cache data until the cacheBusterNotifier has emitted', function () { | ||
var asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test'), 1000); | ||
expect(asyncFreshData).toEqual({ payload: 'test' }); | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
var cachedResponse = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test')); | ||
expect(cachedResponse).toEqual({ payload: 'test' }); | ||
/** | ||
* response acquired from cache, so no incrementation on the service spy call counter is expected here | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1); | ||
/** | ||
* make the save call | ||
* after 1 second the cache busting subject will emit and the cache for getDataWithCacheBusting('test') will be cleared | ||
*/ | ||
expect(_timedStreamAsyncAwait(service.saveDataAndCacheBust(), 1000)).toEqual('SAVED'); | ||
var cachedResponse2 = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test')); | ||
expect(cachedResponse2).toEqual(null); | ||
/** | ||
* call count has incremented due to the actual method call (instead of cache) | ||
*/ | ||
expect(mockServiceCallSpy).toHaveBeenCalledTimes(2); | ||
/** | ||
* pass through 1s of time | ||
*/ | ||
jasmine.clock().tick(1000); | ||
/** | ||
* synchronous cached response should now be returned | ||
*/ | ||
expect(_timedStreamAsyncAwait(service.getDataWithCacheBusting('test'))).toEqual({ payload: 'test' }); | ||
}); | ||
}); | ||
@@ -505,1 +571,2 @@ function _timedStreamAsyncAwait(stream$, skipTime) { | ||
} | ||
//# sourceMappingURL=cacheable.decorator.spec.js.map |
export * from './cacheable.decorator'; | ||
export * from './cache-buster.decorator'; |
@@ -7,1 +7,3 @@ "use strict"; | ||
__export(require("./cacheable.decorator")); | ||
__export(require("./cache-buster.decorator")); | ||
//# sourceMappingURL=index.js.map |
@@ -1,1 +0,2 @@ | ||
export * from './cacheable.decorator'; | ||
export * from './cacheable.decorator'; | ||
export * from './cache-buster.decorator'; |
module.exports = function(config) { | ||
config.set({ | ||
frameworks: ["jasmine", "karma-typescript"], | ||
browsers: ["Chrome"], | ||
browsers: ["ChromeHeadlessNoSandbox"], | ||
reporters: ["progress", "karma-typescript"], | ||
files: ["./cacheable.decorator.ts", "./cacheable.decorator.spec.ts"], | ||
files: ["./cacheable.decorator.ts", "./cache-buster.decorator.ts", "./cacheable.decorator.spec.ts"], | ||
preprocessors: { | ||
@@ -12,4 +12,12 @@ "**/*.ts": "karma-typescript" | ||
tsconfig: "./tsconfig.json" | ||
} | ||
}, | ||
// you can define custom flags | ||
customLaunchers: { | ||
ChromeHeadlessNoSandbox: { | ||
base: "ChromeHeadless", | ||
flags: ["--no-sandbox"] | ||
} | ||
}, | ||
singleRun: true | ||
}); | ||
}; |
{ | ||
"name": "ngx-cacheable", | ||
"version": "1.0.0", | ||
"version": "1.0.1", | ||
"description": "Rx Observable cache decorator", | ||
"main": "index.js", | ||
"main": "dist/index.js", | ||
"types" : "dist/index.d.ts", | ||
"scripts": { | ||
@@ -7,0 +8,0 @@ "test": "karma start", |
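With the index now re-exporting both decorators and main/types pointing at the dist build, consumers import everything from the package root:

```ts
import { Cacheable, CacheBuster } from 'ngx-cacheable';
```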
@@ -0,1 +1,2 @@ | ||
[![Build Status](https://travis-ci.org/angelnikolov/ngx-cacheable.svg?branch=master)](https://travis-ci.org/angelnikolov/ngx-cacheable) | ||
# ngx-cacheable |