ngx-cacheable (1 maintainer, 66 versions)

ngx-cacheable - npm Package Compare versions

Comparing version 1.0.1 to 1.0.2


cache-buster.decorator.ts

@@ -1,4 +0,3 @@

import { Observable } from 'rxjs/Observable';
import { Observable, Subject } from 'rxjs';
import { tap } from 'rxjs/operators';
import { Subject } from 'rxjs/Subject';

@@ -11,6 +10,10 @@ type ICacheable = (...args) => Observable<any>;

*/
cacheBusterNotifier?: Subject<void>;
cacheBusterNotifier?: Subject<any>;
}
export function CacheBuster(_cacheBusterConfig?: ICacheBusterConfig) {
return function(_target: Object, _propertyKey: string, propertyDescriptor: TypedPropertyDescriptor<ICacheable>) {
return function(
_target: Object,
_propertyKey: string,
propertyDescriptor: TypedPropertyDescriptor<ICacheable>
) {
const _oldMethod = propertyDescriptor.value;

@@ -17,0 +20,0 @@ if (propertyDescriptor && propertyDescriptor.value) {

@@ -1,13 +0,7 @@

import { Observable } from 'rxjs/Observable';
import { combineLatest } from 'rxjs/observable/combineLatest';
import { forkJoin } from 'rxjs/observable/forkJoin';
import { timer } from 'rxjs/observable/timer';
import { combineLatest, forkJoin, Observable, Subject, timer } from 'rxjs';
import { mapTo, startWith } from 'rxjs/operators';
import { Subject } from 'rxjs/Subject';
import { CacheBuster } from './cache-buster.decorator';
import { Cacheable } from './cacheable.decorator';
const cacheBusterNotifier = new Subject<void>();
const cacheBusterNotifier = new Subject();
class Service {

@@ -79,6 +73,9 @@ mockServiceCall(parameter) {

cacheResolver: (_oldParameters, newParameters) => {
return newParameters.find((param) => !!param.straightToLastCache);
return newParameters.find(param => !!param.straightToLastCache);
}
})
getDataWithCustomCacheResolver(parameter: string, _cacheRerouterParameter?: { straightToLastCache: boolean }) {
getDataWithCustomCacheResolver(
parameter: string,
_cacheRerouterParameter?: { straightToLastCache: boolean }
) {
return this.mockServiceCall(parameter);

@@ -130,3 +127,6 @@ }

it('return cached data up until a new parameter is passed and the cache is busted', () => {
const asyncFreshData = _timedStreamAsyncAwait(service.getData('test'), 1000);
const asyncFreshData = _timedStreamAsyncAwait(
service.getData('test'),
1000
);
expect(asyncFreshData).toEqual({ payload: 'test' });

@@ -150,3 +150,6 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(1);

const cachedResponse3 = _timedStreamAsyncAwait(service.getData('test3'), 1000);
const cachedResponse3 = _timedStreamAsyncAwait(
service.getData('test3'),
1000
);

@@ -192,7 +195,12 @@ /**

it('with async:true return cached data ASYNCHRONOUSLY up until a new parameter is passed and the cache is busted', () => {
const asyncFreshData = _timedStreamAsyncAwait(service.getDataAsync('test'), 1000);
const asyncFreshData = _timedStreamAsyncAwait(
service.getDataAsync('test'),
1000
);
expect(asyncFreshData).toEqual({ payload: 'test' });
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1);
const cachedResponseTry1 = _timedStreamAsyncAwait(service.getDataAsync('test'));
const cachedResponseTry1 = _timedStreamAsyncAwait(
service.getDataAsync('test')
);
/**

@@ -206,3 +214,6 @@ * async cache hasn't resolved yet

*/
const cachedResponseTry2 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1);
const cachedResponseTry2 = _timedStreamAsyncAwait(
service.getDataAsync('test'),
1
);
expect(cachedResponseTry2).toEqual({ payload: 'test' });

@@ -217,3 +228,6 @@ /**

*/
const cachedResponse2 = _timedStreamAsyncAwait(service.getDataAsync('test2'), 1);
const cachedResponse2 = _timedStreamAsyncAwait(
service.getDataAsync('test2'),
1
);
expect(cachedResponse2).toEqual(null);

@@ -226,3 +240,6 @@

const cachedResponse3 = _timedStreamAsyncAwait(service.getDataAsync('test3'), 1000);
const cachedResponse3 = _timedStreamAsyncAwait(
service.getDataAsync('test3'),
1000
);

@@ -238,3 +255,6 @@ /**

*/
const cachedResponse4 = _timedStreamAsyncAwait(service.getDataAsync('test'), 1);
const cachedResponse4 = _timedStreamAsyncAwait(
service.getDataAsync('test'),
1
);
expect(cachedResponse4).toEqual(null);

@@ -246,3 +266,6 @@

jasmine.clock().mockDate();
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithExpiration('test'), 1000);
const asyncFreshData = _timedStreamAsyncAwait(
service.getDataWithExpiration('test'),
1000
);

@@ -252,3 +275,5 @@ expect(asyncFreshData).toEqual({ payload: 'test' });

const cachedResponse = _timedStreamAsyncAwait(service.getDataWithExpiration('test'));
const cachedResponse = _timedStreamAsyncAwait(
service.getDataWithExpiration('test')
);
/**

@@ -268,3 +293,5 @@ * service shouldn't be called and we should route directly to cache

*/
const cachedResponse2 = _timedStreamAsyncAwait(service.getDataWithExpiration('test'));
const cachedResponse2 = _timedStreamAsyncAwait(
service.getDataWithExpiration('test')
);
expect(cachedResponse2).toEqual(null);

@@ -274,3 +301,3 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(2);

let asyncFreshDataAfterCacheBust = null;
service.getDataWithExpiration('test').subscribe((data) => {
service.getDataWithExpiration('test').subscribe(data => {
asyncFreshDataAfterCacheBust = data;

@@ -284,7 +311,12 @@ });

jasmine.clock().mockDate();
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithSlidingExpiration('test'), 1000);
const asyncFreshData = _timedStreamAsyncAwait(
service.getDataWithSlidingExpiration('test'),
1000
);
expect(asyncFreshData).toEqual({ payload: 'test' });
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1);
const cachedResponse = _timedStreamAsyncAwait(service.getDataWithSlidingExpiration('test'));
const cachedResponse = _timedStreamAsyncAwait(
service.getDataWithSlidingExpiration('test')
);
expect(cachedResponse).toEqual({ payload: 'test' });

@@ -310,3 +342,5 @@ /**

const cachedResponse2 = _timedStreamAsyncAwait(service.getDataWithSlidingExpiration('test'));
const cachedResponse2 = _timedStreamAsyncAwait(
service.getDataWithSlidingExpiration('test')
);
expect(cachedResponse2).toEqual({ payload: 'test' });

@@ -323,3 +357,5 @@ /**

const cachedResponse3 = _timedStreamAsyncAwait(service.getDataWithSlidingExpiration('test'));
const cachedResponse3 = _timedStreamAsyncAwait(
service.getDataWithSlidingExpiration('test')
);
/**

@@ -337,3 +373,5 @@ * cached has expired, request hasn't returned yet but still - the service was called

const parameters = ['test1', 'test2', 'test3', 'test4', 'test5'];
parameters.forEach(async (param) => _timedStreamAsyncAwait(service.getDataWithMaxCacheCount(param), 1000));
parameters.forEach(async param =>
_timedStreamAsyncAwait(service.getDataWithMaxCacheCount(param), 1000)
);
/**

@@ -344,3 +382,5 @@ * data for all endpoints should be available through cache by now

const cachedResponse = _timedStreamAsyncAwait(service.getDataWithMaxCacheCount('test1'));
const cachedResponse = _timedStreamAsyncAwait(
service.getDataWithMaxCacheCount('test1')
);
expect(cachedResponse).toEqual({ payload: 'test1' });

@@ -354,3 +394,3 @@ /** call count still 5 */

const cachedResponseAll = _timedStreamAsyncAwait(
forkJoin(parameters.map((param) => service.getDataWithMaxCacheCount(param)))
forkJoin(parameters.map(param => service.getDataWithMaxCacheCount(param)))
);

@@ -368,3 +408,6 @@

const asyncData = _timedStreamAsyncAwait(service.getDataWithMaxCacheCount('test6'), 1000);
const asyncData = _timedStreamAsyncAwait(
service.getDataWithMaxCacheCount('test6'),
1000
);

@@ -384,3 +427,5 @@ expect(asyncData).toEqual({ payload: 'test6' });

const cachedResponseAll2 = _timedStreamAsyncAwait(
forkJoin(newParameters.map((param) => service.getDataWithMaxCacheCount(param))),
forkJoin(
newParameters.map(param => service.getDataWithMaxCacheCount(param))
),
1000

@@ -402,3 +447,6 @@ );

*/
const nonCachedResponse = _timedStreamAsyncAwait(service.getDataWithMaxCacheCount('test7'), 1000);
const nonCachedResponse = _timedStreamAsyncAwait(
service.getDataWithMaxCacheCount('test7'),
1000
);
expect(nonCachedResponse).toEqual({ payload: 'test7' });

@@ -410,3 +458,5 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(7);

*/
const cachedResponse2 = _timedStreamAsyncAwait(service.getDataWithMaxCacheCount('test2'));
const cachedResponse2 = _timedStreamAsyncAwait(
service.getDataWithMaxCacheCount('test2')
);
expect(cachedResponse2).toEqual(null);

@@ -426,3 +476,5 @@ /**

const parameters = ['test1', 'test2', 'test3', 'test4', 'test5'];
parameters.forEach((param) => service.getDataWithMaxCacheCountAndExpiration(param).subscribe());
parameters.forEach(param =>
service.getDataWithMaxCacheCountAndExpiration(param).subscribe()
);
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5);

@@ -432,3 +484,7 @@

const cachedResponse2 = _timedStreamAsyncAwait(
forkJoin(parameters.map((param) => service.getDataWithMaxCacheCountAndExpiration(param)))
forkJoin(
parameters.map(param =>
service.getDataWithMaxCacheCountAndExpiration(param)
)
)
);

@@ -450,3 +506,5 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(5);

const cachedResponse3 = _timedStreamAsyncAwait(service.getDataWithMaxCacheCountAndExpiration('test1'));
const cachedResponse3 = _timedStreamAsyncAwait(
service.getDataWithMaxCacheCountAndExpiration('test1')
);
expect(cachedResponse3).toEqual(null);

@@ -465,3 +523,5 @@ /**

const parameters = ['test1', 'test2', 'test3', 'test4', 'test5'];
parameters.forEach((param) => service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe());
parameters.forEach(param =>
service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe()
);
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5);

@@ -490,3 +550,7 @@

combineLatest(
parameters.map((param) => service.getDataWithMaxCacheCountAndSlidingExpiration(param).pipe(startWith(null)))
parameters.map(param =>
service
.getDataWithMaxCacheCountAndSlidingExpiration(param)
.pipe(startWith(null))
)
)

@@ -498,3 +562,9 @@ );

expect(mockServiceCallSpy).toHaveBeenCalledTimes(9);
expect(cachedResponse).toEqual([null, { payload: 'test2' }, null, null, null]);
expect(cachedResponse).toEqual([
null,
{ payload: 'test2' },
null,
null,
null
]);
jasmine.clock().uninstall();

@@ -504,7 +574,13 @@ });

it('return cached data up until new parameters are passed WITH a custom resolver function', () => {
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test1'), 1000);
const asyncFreshData = _timedStreamAsyncAwait(
service.getDataWithCustomCacheResolver('test1'),
1000
);
expect(asyncFreshData).toEqual({ payload: 'test1' });
expect(mockServiceCallSpy).toHaveBeenCalled();
const asyncFreshData2 = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test2'), 1000);
const asyncFreshData2 = _timedStreamAsyncAwait(
service.getDataWithCustomCacheResolver('test2'),
1000
);
expect(asyncFreshData2).toEqual({ payload: 'test2' });

@@ -514,3 +590,5 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(2);

const cachedResponse = _timedStreamAsyncAwait(
service.getDataWithCustomCacheResolver('test3', { straightToLastCache: true })
service.getDataWithCustomCacheResolver('test3', {
straightToLastCache: true
})
);

@@ -528,3 +606,6 @@ expect(cachedResponse).toEqual({ payload: 'test2' });

it('only cache data when a specific response is returned, otherwise it should bail to service call', () => {
const asyncData = _timedStreamAsyncAwait(service.getDataWithCustomCacheDecider('test1'), 1000);
const asyncData = _timedStreamAsyncAwait(
service.getDataWithCustomCacheDecider('test1'),
1000
);
expect(asyncData).toEqual({ payload: 'test1' });

@@ -536,3 +617,5 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(1);

*/
const cachedData = _timedStreamAsyncAwait(service.getDataWithCustomCacheDecider('test1'));
const cachedData = _timedStreamAsyncAwait(
service.getDataWithCustomCacheDecider('test1')
);
expect(cachedData).toEqual(null);

@@ -545,3 +628,6 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(2);

const asyncData2 = _timedStreamAsyncAwait(service.getDataWithCustomCacheDecider('test'), 1000);
const asyncData2 = _timedStreamAsyncAwait(
service.getDataWithCustomCacheDecider('test'),
1000
);
expect(asyncData2).toEqual({ payload: 'test' });

@@ -553,3 +639,5 @@ expect(mockServiceCallSpy).toHaveBeenCalledTimes(3);

*/
const cachedData2 = _timedStreamAsyncAwait(service.getDataWithCustomCacheDecider('test'));
const cachedData2 = _timedStreamAsyncAwait(
service.getDataWithCustomCacheDecider('test')
);
expect(cachedData2).toEqual({ payload: 'test' });

@@ -563,7 +651,12 @@ /**

it('cache data until the cacheBusterNotifier has emitted', () => {
const asyncFreshData = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test'), 1000);
const asyncFreshData = _timedStreamAsyncAwait(
service.getDataWithCacheBusting('test'),
1000
);
expect(asyncFreshData).toEqual({ payload: 'test' });
expect(mockServiceCallSpy).toHaveBeenCalledTimes(1);
const cachedResponse = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test'));
const cachedResponse = _timedStreamAsyncAwait(
service.getDataWithCacheBusting('test')
);
expect(cachedResponse).toEqual({ payload: 'test' });

@@ -579,5 +672,9 @@ /**

*/
expect(_timedStreamAsyncAwait(service.saveDataAndCacheBust(), 1000)).toEqual('SAVED');
expect(
_timedStreamAsyncAwait(service.saveDataAndCacheBust(), 1000)
).toEqual('SAVED');
const cachedResponse2 = _timedStreamAsyncAwait(service.getDataWithCacheBusting('test'));
const cachedResponse2 = _timedStreamAsyncAwait(
service.getDataWithCacheBusting('test')
);
expect(cachedResponse2).toEqual(null);

@@ -595,3 +692,5 @@ /**

*/
expect(_timedStreamAsyncAwait(service.getDataWithCacheBusting('test'))).toEqual({ payload: 'test' });
expect(
_timedStreamAsyncAwait(service.getDataWithCacheBusting('test'))
).toEqual({ payload: 'test' });
});

@@ -602,3 +701,3 @@ });

let response = null;
stream$.subscribe((data) => {
stream$.subscribe(data => {
response = data;

@@ -605,0 +704,0 @@ });

@@ -1,3 +0,2 @@

import { Observable } from 'rxjs/Observable';
import { of } from 'rxjs/observable/of';
import { Observable, of } from 'rxjs';
import { delay, finalize, shareReplay, tap } from 'rxjs/operators';

@@ -23,3 +22,3 @@

*/
cacheBusterObserver?: Observable<void>;
cacheBusterObserver?: Observable<any>;

@@ -26,0 +25,0 @@ /**

@@ -1,3 +0,2 @@

import { Observable } from 'rxjs/Observable';
import { Subject } from 'rxjs/Subject';
import { Observable, Subject } from 'rxjs';
export interface ICacheBusterConfig {

@@ -8,4 +7,4 @@ /**

*/
cacheBusterNotifier?: Subject<void>;
cacheBusterNotifier?: Subject<any>;
}
export declare function CacheBuster(_cacheBusterConfig?: ICacheBusterConfig): (_target: Object, _propertyKey: string, propertyDescriptor: TypedPropertyDescriptor<(...args: any[]) => Observable<any>>) => TypedPropertyDescriptor<(...args: any[]) => Observable<any>>;

@@ -1,2 +0,2 @@

import { Observable } from 'rxjs/Observable';
import { Observable } from 'rxjs';
export declare type ICacheRequestResolver = (oldParameters: Array<any>, newParameters: Array<any>) => boolean;

@@ -8,3 +8,3 @@ export declare type IShouldCacheDecider = (response: any) => boolean;

*/
cacheBusterObserver?: Observable<void>;
cacheBusterObserver?: Observable<any>;
/**

@@ -11,0 +11,0 @@ * @description request cache resolver which will get old and new paramaters passed to and based on those

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var of_1 = require("rxjs/observable/of");
var rxjs_1 = require("rxjs");
var operators_1 = require("rxjs/operators");

@@ -59,3 +59,3 @@ var DEFAULT_CACHE_RESOLVER = function (oldParams, newParams) {

if (_foundCachePair) {
var cached$ = of_1.of(_foundCachePair.response);
var cached$ = rxjs_1.of(_foundCachePair.response);
return cacheConfig_1.async ? cached$.pipe(operators_1.delay(0)) : cached$;

@@ -62,0 +62,0 @@ }

@@ -45,9 +45,7 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
var combineLatest_1 = require("rxjs/observable/combineLatest");
var forkJoin_1 = require("rxjs/observable/forkJoin");
var timer_1 = require("rxjs/observable/timer");
var rxjs_1 = require("rxjs");
var operators_1 = require("rxjs/operators");
var Subject_1 = require("rxjs/Subject");
var _1 = require("./");
var cacheBusterNotifier = new Subject_1.Subject();
var cache_buster_decorator_1 = require("./cache-buster.decorator");
var cacheable_decorator_1 = require("./cacheable.decorator");
var cacheBusterNotifier = new rxjs_1.Subject();
var Service = /** @class */ (function () {

@@ -57,6 +55,6 @@ function Service() {

Service.prototype.mockServiceCall = function (parameter) {
return timer_1.timer(1000).pipe(operators_1.mapTo({ payload: parameter }));
return rxjs_1.timer(1000).pipe(operators_1.mapTo({ payload: parameter }));
};
Service.prototype.mockSaveServiceCall = function () {
return timer_1.timer(1000).pipe(operators_1.mapTo('SAVED'));
return rxjs_1.timer(1000).pipe(operators_1.mapTo('SAVED'));
};

@@ -100,9 +98,9 @@ Service.prototype.getData = function (parameter) {

__decorate([
_1.Cacheable()
cacheable_decorator_1.Cacheable()
], Service.prototype, "getData", null);
__decorate([
_1.Cacheable()
cacheable_decorator_1.Cacheable()
], Service.prototype, "getDataAndReturnCachedStream", null);
__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
async: true

@@ -112,3 +110,3 @@ })

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
maxAge: 7500

@@ -118,3 +116,3 @@ })

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
maxAge: 7500,

@@ -125,3 +123,3 @@ slidingExpiration: true

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
maxCacheCount: 5

@@ -131,3 +129,3 @@ })

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
maxAge: 7500,

@@ -138,3 +136,3 @@ maxCacheCount: 5

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
maxAge: 7500,

@@ -146,3 +144,3 @@ maxCacheCount: 5,

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
cacheResolver: function (_oldParameters, newParameters) {

@@ -154,3 +152,3 @@ return newParameters.find(function (param) { return !!param.straightToLastCache; });

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
shouldCacheDecider: function (response) {

@@ -162,3 +160,3 @@ return response.payload === 'test';

__decorate([
_1.CacheBuster({
cache_buster_decorator_1.CacheBuster({
cacheBusterNotifier: cacheBusterNotifier

@@ -168,3 +166,3 @@ })

__decorate([
_1.Cacheable({
cacheable_decorator_1.Cacheable({
cacheBusterObserver: cacheBusterNotifier.asObservable()

@@ -369,3 +367,3 @@ })

*/
var cachedResponseAll = _timedStreamAsyncAwait(forkJoin_1.forkJoin(parameters.map(function (param) { return service.getDataWithMaxCacheCount(param); })));
var cachedResponseAll = _timedStreamAsyncAwait(rxjs_1.forkJoin(parameters.map(function (param) { return service.getDataWithMaxCacheCount(param); })));
expect(cachedResponseAll).toEqual([

@@ -391,3 +389,3 @@ { payload: 'test1' },

*/
var cachedResponseAll2 = _timedStreamAsyncAwait(forkJoin_1.forkJoin(newParameters.map(function (param) { return service.getDataWithMaxCacheCount(param); })), 1000);
var cachedResponseAll2 = _timedStreamAsyncAwait(rxjs_1.forkJoin(newParameters.map(function (param) { return service.getDataWithMaxCacheCount(param); })), 1000);
expect(cachedResponseAll2).toEqual([

@@ -424,6 +422,10 @@ { payload: 'test2' },

var parameters = ['test1', 'test2', 'test3', 'test4', 'test5'];
parameters.forEach(function (param) { return service.getDataWithMaxCacheCountAndExpiration(param).subscribe(); });
parameters.forEach(function (param) {
return service.getDataWithMaxCacheCountAndExpiration(param).subscribe();
});
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5);
jasmine.clock().tick(1000);
var cachedResponse2 = _timedStreamAsyncAwait(forkJoin_1.forkJoin(parameters.map(function (param) { return service.getDataWithMaxCacheCountAndExpiration(param); })));
var cachedResponse2 = _timedStreamAsyncAwait(rxjs_1.forkJoin(parameters.map(function (param) {
return service.getDataWithMaxCacheCountAndExpiration(param);
})));
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5);

@@ -454,3 +456,5 @@ expect(cachedResponse2).toEqual([

var parameters = ['test1', 'test2', 'test3', 'test4', 'test5'];
parameters.forEach(function (param) { return service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe(); });
parameters.forEach(function (param) {
return service.getDataWithMaxCacheCountAndSlidingExpiration(param).subscribe();
});
expect(mockServiceCallSpy).toHaveBeenCalledTimes(5);

@@ -474,3 +478,7 @@ /**

jasmine.clock().tick(1);
var cachedResponse = _timedStreamAsyncAwait(combineLatest_1.combineLatest(parameters.map(function (param) { return service.getDataWithMaxCacheCountAndSlidingExpiration(param).pipe(operators_1.startWith(null)); })));
var cachedResponse = _timedStreamAsyncAwait(rxjs_1.combineLatest(parameters.map(function (param) {
return service
.getDataWithMaxCacheCountAndSlidingExpiration(param)
.pipe(operators_1.startWith(null));
})));
/**

@@ -480,3 +488,9 @@ * no cache for 4 payloads, so 4 more calls to the service will be made

expect(mockServiceCallSpy).toHaveBeenCalledTimes(9);
expect(cachedResponse).toEqual([null, { payload: 'test2' }, null, null, null]);
expect(cachedResponse).toEqual([
null,
{ payload: 'test2' },
null,
null,
null
]);
jasmine.clock().uninstall();

@@ -491,3 +505,5 @@ });

expect(mockServiceCallSpy).toHaveBeenCalledTimes(2);
var cachedResponse = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3', { straightToLastCache: true }));
var cachedResponse = _timedStreamAsyncAwait(service.getDataWithCustomCacheResolver('test3', {
straightToLastCache: true
}));
expect(cachedResponse).toEqual({ payload: 'test2' });

@@ -494,0 +510,0 @@ /**

{
"name": "ngx-cacheable",
"version": "1.0.1",
"version": "1.0.2",
"description": "Rx Observable cache decorator",
"main": "dist/index.js",
"types" : "dist/index.d.ts",
"types": "dist/index.d.ts",
"scripts": {

@@ -12,4 +12,4 @@ "test": "karma start",

"repository": {
"type": "git",
"url": "git+https://github.com/angelnikolov/ngx-cacheable.git"
"type": "git",
"url": "git+https://github.com/angelnikolov/ngx-cacheable.git"
},

@@ -21,3 +21,3 @@ "author": "Angel Nikolov <darkysharky@gmail.com>",

"jasmine": "^3.1.0",
"rxjs": "^5.5.6"
"rxjs": "6.2.0"
},

@@ -24,0 +24,0 @@ "devDependencies": {

[![Build Status](https://travis-ci.org/angelnikolov/ngx-cacheable.svg?branch=master)](https://travis-ci.org/angelnikolov/ngx-cacheable)
# ngx-cacheable
Observable cache decorator you can use on class methods which return streams, to cache their return values.
## Installing
To install the package, just run
```
npm install ngx-cacheable
```
Import the decorator from ngx-cacheable like:
```
import { Cacheable } from 'ngx-cacheable';
```
and use it to decorate any class method, like:
```
@Cacheable()
getUsers() {
  return this.http
    .get(`${environment.api}/users`);
}
```
Now all subsequent calls to this method will be served from an in-memory cache, rather than making the actual HTTP call!
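For instance, here is a minimal sketch of that behavior (using a `timer`-based mock in place of the HTTP call, as the package's own tests do; the `UserService` name and payload are illustrative):
```
import { timer } from 'rxjs';
import { mapTo } from 'rxjs/operators';
import { Cacheable } from 'ngx-cacheable';

class UserService {
  @Cacheable()
  getUsers() {
    // stands in for this.http.get(`${environment.api}/users`), resolving after 1s
    return timer(1000).pipe(mapTo([{ name: 'Jane' }]));
  }
}

const service = new UserService();
service.getUsers().subscribe(users => {
  // the first response has now been cached, so this second call
  // is served from memory instead of running the method body again
  service.getUsers().subscribe(cached => console.log(cached));
});
```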
Another example would be:
```
@Cacheable()
getUser(id: string) {
  return this.http
    .get(`${environment.api}/users/${id}`);
}
```
If we call this method with `service.getUser('1')`, its return value will be cached and returned up until the method is called with a different parameter. Then the old cache will be busted and a new one will take its place.
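A minimal sketch of that parameter-based behavior, again mocking the HTTP call with a timer (names and payloads are illustrative):
```
import { timer } from 'rxjs';
import { mapTo } from 'rxjs/operators';
import { Cacheable } from 'ngx-cacheable';

class UserService {
  @Cacheable()
  getUser(id: string) {
    // stands in for this.http.get(`${environment.api}/users/${id}`)
    return timer(1000).pipe(mapTo({ id }));
  }
}

const service = new UserService();
service.getUser('1').subscribe(() => {
  service.getUser('1').subscribe(); // same parameter: served from the cache
  service.getUser('2').subscribe(); // different parameter: old cache is busted, the call goes through again
});
```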
For more information and other configurations, see the configuration options below.
## Configuration
```
export interface ICacheConfig {
  /**
   * pass an Observable upon whose emission all caches will be busted
   */
  cacheBusterObserver?: Observable<void>;
  /**
   * @description request cache resolver which will get the old and new parameters passed to it and,
   * based on those, will figure out if we need to bail out of cache or not
   */
  cacheResolver?: ICacheRequestResolver;
  /**
   * @description cache decider that will figure out if the response should be cached or not, based on the response itself
   */
  shouldCacheDecider?: IShouldCacheDecider;
  /**
   * maxAge of cache in milliseconds
   * @description if the time between method calls is larger than maxAge, we bail out of cache
   */
  maxAge?: number;
  /**
   * whether a sliding expiration strategy should be used on caches
   * this will reset the cache's created property and keep the cache alive for @param maxAge milliseconds more
   */
  slidingExpiration?: boolean;
  /**
   * max cache count for different parameters
   * @description maximum number of unique caches allowed (one per unique set of parameters)
   */
  maxCacheCount?: number;
  /**
   * @description should cache be resolved asynchronously? - helps with declarative forms and two-way databinding via ngModel
   * (an extra change detection pass will be made)
   */
  async?: boolean;
}
```
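As a rough illustration of how these options can be combined, here is a sketch modeled on the package's own test service shown in the diff above. The service and method names and the 7500ms value are only examples, and it assumes `CacheBuster` is exported from the package entry point alongside `Cacheable`:
```
import { Subject, timer } from 'rxjs';
import { mapTo } from 'rxjs/operators';
import { Cacheable, CacheBuster } from 'ngx-cacheable';

// emits whenever the profile cache should be thrown away
const profileCacheBuster$ = new Subject<void>();

class ProfileService {
  @Cacheable({
    maxAge: 7500,            // cached responses expire after 7.5 seconds...
    slidingExpiration: true, // ...but every cache hit resets that timer
    cacheBusterObserver: profileCacheBuster$.asObservable()
  })
  getProfile(id: string) {
    // stands in for an HTTP call
    return timer(1000).pipe(mapTo({ id }));
  }

  @CacheBuster({
    cacheBusterNotifier: profileCacheBuster$
  })
  saveProfile(profile: { id: string }) {
    return timer(1000).pipe(mapTo('SAVED'));
  }
}
```
The same Subject is shared by both decorators: once `saveProfile` completes, `@CacheBuster` pushes a value through the notifier, and the cache kept by `getProfile`, which listens on `cacheBusterObserver`, is emptied.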
## Running the tests
Just run `npm test`.
## Contributing
The project is open for contributors! Please file an issue or make a PR :)
## Authors
* **Angel Nikolov** - *Initial work* - https://github.com/angelnikolov
