cache-manager-redis-yet
Comparing version 5.0.0 to 5.0.1
@@ -17,2 +17,5 @@ import { RedisClientOptions, RedisClientType, RedisClusterOptions, RedisClusterType } from 'redis';
 }
+export type CustomOptions = {
+    keyPrefix?: string;
+};
 export declare class NoCacheableError implements Error {
@@ -24,7 +27,7 @@ message: string;
 export declare const avoidNoCacheable: <T>(p: Promise<T>) => Promise<T | undefined>;
-export declare function redisStore(options?: RedisClientOptions & Config): Promise<RedisStore<RedisClientType>>;
+export declare function redisStore(options?: RedisClientOptions & Config & CustomOptions): Promise<RedisStore<RedisClientType>>;
 /**
  * redisCache should be connected
  */
-export declare function redisInsStore(redisCache: RedisClientType, options?: Config): RedisStore<RedisClientType>;
+export declare function redisInsStore(redisCache: RedisClientType, options?: Config & CustomOptions): RedisStore<RedisClientType>;
 export declare function redisClusterStore(options: RedisClusterOptions & Config): Promise<RedisStore<RedisClusterType>>;
@@ -31,0 +34,0 @@ /**
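The declarations above show the only API change in 5.0.1: an optional `keyPrefix` (the new `CustomOptions` type) accepted by `redisStore` and `redisInsStore`. A minimal usage sketch, assuming a local Redis at `redis://localhost:6379`; the `myapp` prefix and key names are illustrative:

```ts
import { caching } from 'cache-manager';
import { redisStore } from 'cache-manager-redis-yet';

async function main() {
  const cache = await caching(redisStore, {
    url: 'redis://localhost:6379',
    ttl: 60_000, // Config ttl, applied as PX (milliseconds) when a value is set
    keyPrefix: 'myapp', // new in 5.0.1: stored keys become "myapp:<key>"
  });

  await cache.set('user:42', { name: 'Ada' });
  console.log(await cache.get('user:42')); // { name: 'Ada' }
}

main().catch(console.error);
```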
@@ -38,2 +38,3 @@ "use strict";
 const getVal = (value) => JSON.stringify(value) || '"undefined"';
+const getFullKey = (originalKey, keyPrefix) => `${keyPrefix ? `${keyPrefix}:` : ''}${originalKey}`;
 function builder(redisCache, name, reset, keys, options) {
@@ -44,3 +45,3 @@ const isCacheable = (options === null || options === void 0 ? void 0 : options.isCacheable) || ((value) => value !== undefined && value !== null);
 return __awaiter(this, void 0, void 0, function* () {
-const val = yield redisCache.get(key);
+const val = yield redisCache.get(getFullKey(key, options === null || options === void 0 ? void 0 : options.keyPrefix));
 if (val === undefined || val === null)
@@ -58,5 +59,5 @@ return undefined;
 if (t !== undefined && t !== 0)
-yield redisCache.set(key, getVal(value), { PX: t });
+yield redisCache.set(getFullKey(key, options === null || options === void 0 ? void 0 : options.keyPrefix), getVal(value), { PX: t });
 else
-yield redisCache.set(key, getVal(value));
+yield redisCache.set(getFullKey(key, options === null || options === void 0 ? void 0 : options.keyPrefix), getVal(value));
 });
@@ -72,3 +73,3 @@ },
 throw new NoCacheableError(`"${getVal(value)}" is not a cacheable value`);
-multi.set(key, getVal(value), { PX: t });
+multi.set(getFullKey(key, options === null || options === void 0 ? void 0 : options.keyPrefix), getVal(value), { PX: t });
 }
@@ -75,0 +76,0 @@ yield multi.exec();
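In the compiled output, every key passed to the `get`, `set` and `mset` call sites shown above is now routed through the new `getFullKey` helper (no other call sites appear in these hunks). A small sketch of the resulting key shapes, with the helper re-typed here purely for illustration:

```ts
// Same logic as the getFullKey added in this release (typed for illustration).
const getFullKey = (originalKey: string, keyPrefix?: string) =>
  `${keyPrefix ? `${keyPrefix}:` : ''}${originalKey}`;

console.log(getFullKey('foo', 'myapp')); // "myapp:foo"  <- what GET/SET now operate on
console.log(getFullKey('foo'));          // "foo"        <- unchanged when no keyPrefix is set
```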
 {
 "name": "cache-manager-redis-yet",
 "description": "Redis store for node-cache-manager updated",
-"version": "5.0.0",
+"version": "5.0.1",
 "license": "MIT",
@@ -33,3 +33,3 @@ "main": "dist/index.js",
 "@redis/bloom": "^1.2.0",
-"@redis/client": "^1.5.14",
+"@redis/client": "^1.5.16",
 "@redis/graph": "^1.1.1",
@@ -39,19 +39,15 @@ "@redis/json": "^1.0.6",
 "@redis/time-series": "^1.0.5",
-"cache-manager": "^5.4.0",
-"redis": "^4.6.13"
+"cache-manager": "^5.5.3",
+"redis": "^4.6.14"
 },
 "devDependencies": {
-"@types/node": "20.11.27",
-"@typescript-eslint/eslint-plugin": "7.2.0",
-"@typescript-eslint/parser": "7.2.0",
-"@vitest/coverage-v8": "1.3.1",
-"dotenv-cli": "7.4.1",
-"eslint": "8.57.0",
-"eslint-config-prettier": "9.1.0",
-"eslint-plugin-prettier": "5.1.3",
-"lint-staged": "15.2.2",
-"prettier": "3.2.5",
-"rimraf": "^5.0.5",
-"typescript": "5.4.2",
-"vitest": "1.3.1"
+"@types/node": "20.12.12",
+"@typescript-eslint/eslint-plugin": "7.10.0",
+"@typescript-eslint/parser": "7.10.0",
+"@vitest/coverage-v8": "1.6.0",
+"eslint": "9.3.0",
+"lint-staged": "15.2.4",
+"rimraf": "^5.0.7",
+"typescript": "5.4.5",
+"vitest": "1.6.0"
 },
@@ -66,3 +62,3 @@ "engines": {
 "scripts": {
-"build": "rimraf dist && tsc -p tsconfig.build.json",
+"build": "rimraf ./dist && tsc -p tsconfig.build.json",
 "clean": "rimraf dist node_modules yarn.lock package-lock.json pnpm-lock.yaml .eslintcache coverage",
@@ -69,0 +65,0 @@ "test": "vitest run --coverage",
@@ -36,2 +36,6 @@ import {
+export type CustomOptions = {
+keyPrefix?: string;
+}
 export class NoCacheableError implements Error {
@@ -52,2 +56,4 @@ name = 'NoCacheableError';
+const getFullKey = (originalKey: string, keyPrefix?: string) => `${keyPrefix? `${keyPrefix}:` : ''}${originalKey}`;
 function builder<T extends Clients>(
@@ -58,3 +64,3 @@ redisCache: T,
 keys: (pattern: string) => Promise<string[]>,
-options?: Config,
+options?: Config & CustomOptions,
 ) {
@@ -66,3 +72,3 @@ const isCacheable =
 async get<T>(key: string) {
-const val = await redisCache.get(key);
+const val = await redisCache.get(getFullKey(key, options?.keyPrefix));
 if (val === undefined || val === null) return undefined;
@@ -72,8 +78,11 @@ else return JSON.parse(val) as T;
 async set(key, value, ttl) {
 if (!isCacheable(value))
 throw new NoCacheableError(`"${value}" is not a cacheable value`);
-const t = ttl === undefined ? options?.ttl : ttl;
+const t = ttl === undefined ? options?.ttl : ttl;
 if (t !== undefined && t !== 0)
-await redisCache.set(key, getVal(value), { PX: t });
-else await redisCache.set(key, getVal(value));
+await redisCache.set(getFullKey(key, options?.keyPrefix), getVal(value), { PX: t });
+else await redisCache.set(getFullKey(key, options?.keyPrefix), getVal(value));
 },
@@ -89,3 +98,4 @@ async mset(args, ttl) {
 );
-multi.set(key, getVal(value), { PX: t });
+multi.set(getFullKey(key, options?.keyPrefix), getVal(value), { PX: t });
 }
@@ -129,4 +139,5 @@ await multi.exec();
 // TODO: past instance as option
-export async function redisStore(options?: RedisClientOptions & Config) {
+export async function redisStore(options?: RedisClientOptions & Config & CustomOptions) {
 const redisCache = createClient(options);
@@ -141,3 +152,3 @@ await redisCache.connect();
 */
-export function redisInsStore(redisCache: RedisClientType, options?: Config) {
+export function redisInsStore(redisCache: RedisClientType, options?: Config & CustomOptions) {
 const reset = async () => {
@@ -144,0 +155,0 @@ await redisCache.flushDb();
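The source changes mirror the compiled ones: `redisInsStore` also accepts the new `keyPrefix`. A minimal sketch for the instance-based store, assuming an already-connected client (per the "redisCache should be connected" note); the `tenant-a` prefix and URL are illustrative:

```ts
import { createClient, type RedisClientType } from 'redis';
import { caching } from 'cache-manager';
import { redisInsStore } from 'cache-manager-redis-yet';

async function main() {
  // redisInsStore expects the client to be connected before use.
  const client = createClient({ url: 'redis://localhost:6379' }) as RedisClientType;
  await client.connect();

  const cache = await caching((c) => redisInsStore(client, c), {
    ttl: 30_000,
    keyPrefix: 'tenant-a', // stored keys are written as "tenant-a:<key>"
  });

  await cache.set('session', 'abc');
  console.log(await cache.get('session')); // "abc"
}

main().catch(console.error);
```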
@@ -6,11 +6,11 @@ import { describe, expect, it, beforeEach } from 'vitest'; | ||
import { | ||
redisStore, | ||
RedisCache, | ||
redisInsStore, | ||
NoCacheableError, | ||
avoidNoCacheable, | ||
redisStore, | ||
RedisCache, | ||
redisInsStore, | ||
NoCacheableError, | ||
avoidNoCacheable, | ||
} from '../src'; | ||
const sleep = (timeout: number) => | ||
new Promise((resolve) => setTimeout(resolve, timeout)); | ||
new Promise((resolve) => setTimeout(resolve, timeout)); | ||
@@ -22,319 +22,302 @@ let redisCacheTtl: RedisCache; | ||
const config = { | ||
url: 'redis://localhost:6379', | ||
url: 'redis://localhost:6379', | ||
} as const; | ||
const configTtl = { | ||
...config, | ||
ttl: 500, | ||
...config, | ||
ttl: 500, | ||
} as const; | ||
beforeEach(async () => { | ||
redisCache = await caching(redisStore, config); | ||
redisCacheTtl = await caching(redisStore, configTtl); | ||
redisCache = await caching(redisStore, config); | ||
redisCacheTtl = await caching(redisStore, configTtl); | ||
await redisCache.reset(); | ||
const conf = { | ||
...config, | ||
isCacheable: (val: unknown) => { | ||
if (val === undefined) { | ||
// allow undefined | ||
return true; | ||
} else if (val === 'FooBarString') { | ||
// disallow FooBarString | ||
return false; | ||
} | ||
return redisCache.store.isCacheable(val); | ||
}, | ||
}; | ||
customRedisCache = await caching(redisStore, conf); | ||
await redisCache.reset(); | ||
const conf = { | ||
...config, | ||
isCacheable: (val: unknown) => { | ||
if (val === undefined) { | ||
// allow undefined | ||
return true; | ||
} else if (val === 'FooBarString') { | ||
// disallow FooBarString | ||
return false; | ||
} | ||
return redisCache.store.isCacheable(val); | ||
}, | ||
}; | ||
customRedisCache = await caching(redisStore, conf); | ||
await customRedisCache.reset(); | ||
await customRedisCache.reset(); | ||
}); | ||
describe('instance', () => { | ||
it('should be constructed', async () => { | ||
const instance: RedisClientType = await createClient(config); | ||
await instance.connect(); | ||
const cache = await caching( | ||
(c) => redisInsStore(instance, c), | ||
config, | ||
); | ||
await cache.set('fooll', 'bar'); | ||
await expect(cache.get('fooll')).resolves.toEqual('bar'); | ||
}); | ||
it('should be constructed', async () => { | ||
const instance: RedisClientType = await createClient(config); | ||
await instance.connect(); | ||
const cache = await caching( | ||
(c) => redisInsStore(instance, c), | ||
config, | ||
); | ||
await cache.set('fooll', 'bar'); | ||
await expect(cache.get('fooll')).resolves.toEqual('bar'); | ||
}); | ||
}); | ||
describe('set', () => { | ||
it('should store a value without ttl', async () => { | ||
await expect(redisCache.set('foo', 'bar')).resolves.toBeUndefined(); | ||
await expect(redisCache.get('foo')).resolves.toBe('bar'); | ||
}); | ||
it('should store a value without ttl', async () => { | ||
await expect(redisCache.set('foo', 'bar')).resolves.toBeUndefined(); | ||
await expect(redisCache.get('foo')).resolves.toBe('bar'); | ||
}); | ||
it('should store a value with a specific ttl', async () => { | ||
await expect(redisCache.set('foo', 'bar', 1)).resolves.toBeUndefined(); | ||
await sleep(2); | ||
await expect(redisCache.get('foo')).resolves.toBeUndefined(); | ||
}); | ||
it('should store a value with a specific ttl', async () => { | ||
await expect(redisCache.set('foo', 'bar', 1)).resolves.toBeUndefined(); | ||
await sleep(2); | ||
await expect(redisCache.get('foo')).resolves.toBeUndefined(); | ||
}); | ||
it('should store a value with 0 ttl', async () => { | ||
await expect(redisCacheTtl.set('foo', 'bar', 0)).resolves.toBeUndefined(); | ||
await sleep(configTtl.ttl + 1); | ||
await expect(redisCacheTtl.get('foo')).resolves.toEqual('bar'); | ||
}); | ||
it('should store a value with 0 ttl', async () => { | ||
await expect(redisCacheTtl.set('foo', 'bar', 0)).resolves.toBeUndefined(); | ||
await sleep(configTtl.ttl + 1); | ||
await expect(redisCacheTtl.get('foo')).resolves.toEqual('bar'); | ||
}); | ||
it('should not be able to store a null value (not cacheable)', () => | ||
expect(redisCache.set('foo2', null)).rejects.toBeDefined()); | ||
it('should not be able to store a null value (not cacheable)', () => | ||
expect(redisCache.set('foo2', null)).rejects.toBeDefined()); | ||
it('should not store an invalid value', () => | ||
expect(redisCache.set('foo1', undefined)).rejects.toStrictEqual( | ||
new NoCacheableError('"undefined" is not a cacheable value'), | ||
)); | ||
it('should not store an invalid value', () => | ||
expect(redisCache.set('foo1', undefined)).rejects.toStrictEqual( | ||
new NoCacheableError('"undefined" is not a cacheable value'), | ||
)); | ||
it('should store an undefined value if permitted by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable(undefined)).toBe(true); | ||
await customRedisCache.set('foo3', undefined); | ||
}); | ||
it('should store an undefined value if permitted by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable(undefined)).toBe(true); | ||
await customRedisCache.set('foo3', undefined); | ||
}); | ||
it('should not store a value disallowed by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable('FooBarString')).toBe(false); | ||
await expect( | ||
customRedisCache.set('foobar', 'FooBarString'), | ||
).rejects.toBeDefined(); | ||
}); | ||
it('should not store a value disallowed by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable('FooBarString')).toBe(false); | ||
await expect( | ||
customRedisCache.set('foobar', 'FooBarString'), | ||
).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.set('foo', 'bar')).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.set('foo', 'bar')).rejects.toBeDefined(); | ||
}); | ||
}); | ||
describe('mset', () => { | ||
it('should store a value with a specific ttl', async () => { | ||
await redisCache.store.mset( | ||
[ | ||
['foo', 'bar'], | ||
['foo2', 'bar2'], | ||
], | ||
1000, | ||
); | ||
await expect(redisCache.store.mget('foo', 'foo2')).resolves.toStrictEqual([ | ||
'bar', | ||
'bar2', | ||
]); | ||
}); | ||
it('should store a value with a specific ttl', async () => { | ||
await redisCache.store.mset( | ||
[ | ||
['foo', 'bar'], | ||
['foo2', 'bar2'], | ||
], | ||
1000, | ||
); | ||
await expect(redisCache.store.mget('foo', 'foo2')).resolves.toStrictEqual([ | ||
'bar', | ||
'bar2', | ||
]); | ||
}); | ||
it('should store a value with a specific ttl from global', async () => { | ||
await redisCacheTtl.store.mset([ | ||
['foo', 'bar'], | ||
['foo2', 'bar2'], | ||
]); | ||
await expect( | ||
redisCacheTtl.store.mget('foo', 'foo2'), | ||
).resolves.toStrictEqual(['bar', 'bar2']); | ||
it('should not be able to store a null value (not cacheable)', () => | ||
expect(redisCache.store.mset([['foo2', null]])).rejects.toBeDefined()); | ||
await sleep(configTtl.ttl); | ||
it('should store a value without ttl', async () => { | ||
await redisCache.store.mset([ | ||
['foo', 'baz'], | ||
['foo2', 'baz2'], | ||
]); | ||
await expect(redisCache.store.mget('foo', 'foo2')).resolves.toStrictEqual([ | ||
'baz', | ||
'baz2', | ||
]); | ||
}); | ||
await expect( | ||
redisCacheTtl.store.mget('foo', 'foo2'), | ||
).resolves.toStrictEqual([undefined, undefined]); | ||
}); | ||
it('should not store an invalid value', () => | ||
expect(redisCache.store.mset([['foo1', undefined]])).rejects.toBeDefined()); | ||
it('should store an undefined value if permitted by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable(undefined)).toBe(true); | ||
await customRedisCache.store.mset([ | ||
['foo3', undefined], | ||
['foo4', undefined], | ||
]); | ||
await expect( | ||
customRedisCache.store.mget('foo3', 'foo4'), | ||
).resolves.toStrictEqual(['undefined', 'undefined']); | ||
}); | ||
it('should not be able to store a null value (not cacheable)', () => | ||
expect(redisCache.store.mset([['foo2', null]])).rejects.toBeDefined()); | ||
it('should not store a value disallowed by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable('FooBarString')).toBe(false); | ||
await expect( | ||
customRedisCache.store.mset([['foobar', 'FooBarString']]), | ||
).rejects.toBeDefined(); | ||
}); | ||
it('should store a value without ttl', async () => { | ||
await redisCache.store.mset([ | ||
['foo', 'baz'], | ||
['foo2', 'baz2'], | ||
]); | ||
await expect(redisCache.store.mget('foo', 'foo2')).resolves.toStrictEqual([ | ||
'baz', | ||
'baz2', | ||
]); | ||
}); | ||
it('should not store an invalid value', () => | ||
expect(redisCache.store.mset([['foo1', undefined]])).rejects.toBeDefined()); | ||
it('should store an undefined value if permitted by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable(undefined)).toBe(true); | ||
await customRedisCache.store.mset([ | ||
['foo3', undefined], | ||
['foo4', undefined], | ||
]); | ||
await expect( | ||
customRedisCache.store.mget('foo3', 'foo4'), | ||
).resolves.toStrictEqual(['undefined', 'undefined']); | ||
}); | ||
it('should not store a value disallowed by isCacheable', async () => { | ||
expect(customRedisCache.store.isCacheable('FooBarString')).toBe(false); | ||
await expect( | ||
customRedisCache.store.mset([['foobar', 'FooBarString']]), | ||
).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.mset([['foo', 'bar']])).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.mset([['foo', 'bar']])).rejects.toBeDefined(); | ||
}); | ||
}); | ||
describe('mget', () => { | ||
it('should retrieve a value for a given key', async () => { | ||
const value = 'bar'; | ||
const value2 = 'bar2'; | ||
await redisCache.store.mset([ | ||
['foo', value], | ||
['foo2', value2], | ||
]); | ||
await expect(redisCache.store.mget('foo', 'foo2')).resolves.toStrictEqual([ | ||
value, | ||
value2, | ||
]); | ||
}); | ||
it('should return null when the key is invalid', () => | ||
expect( | ||
redisCache.store.mget('invalidKey', 'otherInvalidKey'), | ||
).resolves.toStrictEqual([undefined, undefined])); | ||
it('should retrieve a value for a given key', async () => { | ||
const value = 'bar'; | ||
const value2 = 'bar2'; | ||
await redisCache.store.mset([ | ||
['foo', value], | ||
['foo2', value2], | ||
]); | ||
await expect(redisCache.store.mget('foo', 'foo2')).resolves.toStrictEqual([ | ||
value, | ||
value2, | ||
]); | ||
}); | ||
it('should return null when the key is invalid', () => | ||
expect( | ||
redisCache.store.mget('invalidKey', 'otherInvalidKey'), | ||
).resolves.toStrictEqual([undefined, undefined])); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.mget('foo')).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.mget('foo')).rejects.toBeDefined(); | ||
}); | ||
}); | ||
describe('del', () => { | ||
it('should delete a value for a given key', async () => { | ||
await redisCache.set('foo', 'bar'); | ||
await expect(redisCache.del('foo')).resolves.toBeUndefined(); | ||
}); | ||
it('should delete a value for a given key', async () => { | ||
await redisCache.set('foo', 'bar'); | ||
await expect(redisCache.del('foo')).resolves.toBeUndefined(); | ||
}); | ||
it('should delete a unlimited number of keys', async () => { | ||
await redisCache.store.mset([ | ||
['foo', 'bar'], | ||
['foo2', 'bar2'], | ||
]); | ||
await expect(redisCache.store.mdel('foo', 'foo2')).resolves.toBeUndefined(); | ||
}); | ||
it('should delete a unlimited number of keys', async () => { | ||
await redisCache.store.mset([ | ||
['foo', 'bar'], | ||
['foo2', 'bar2'], | ||
]); | ||
await expect(redisCache.store.mdel('foo', 'foo2')).resolves.toBeUndefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.del('foo')).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.del('foo')).rejects.toBeDefined(); | ||
}); | ||
}); | ||
describe('reset', () => { | ||
it('should flush underlying db', () => redisCache.reset()); | ||
it('should flush underlying db', () => redisCache.reset()); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.reset()).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.reset()).rejects.toBeDefined(); | ||
}); | ||
}); | ||
describe('ttl', () => { | ||
it('should retrieve ttl for a given key', async () => { | ||
const ttl = 1000; | ||
await redisCache.set('foo', 'bar', ttl); | ||
await expect(redisCache.store.ttl('foo')).resolves.toBeGreaterThanOrEqual( | ||
ttl - 10, | ||
); | ||
it('should retrieve ttl for a given key', async () => { | ||
const ttl = 1000; | ||
await redisCache.set('foo', 'bar', ttl); | ||
await expect(redisCache.store.ttl('foo')).resolves.toBeGreaterThanOrEqual( | ||
ttl - 10, | ||
); | ||
await redisCache.set('foo', 'bar', 0); | ||
await expect(redisCache.store.ttl('foo')).resolves.toEqual(-1); | ||
}); | ||
await redisCache.set('foo', 'bar', 0); | ||
await expect(redisCache.store.ttl('foo')).resolves.toEqual(-1); | ||
}); | ||
it('should retrieve ttl for an invalid key', () => | ||
expect(redisCache.store.ttl('invalidKey')).resolves.toEqual(-2)); | ||
it('should retrieve ttl for an invalid key', () => | ||
expect(redisCache.store.ttl('invalidKey')).resolves.toEqual(-2)); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.ttl('foo')).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.ttl('foo')).rejects.toBeDefined(); | ||
}); | ||
}); | ||
describe('keys', () => { | ||
it('should return an array of keys for the given pattern', async () => { | ||
await redisCache.set('foo', 'bar'); | ||
await expect(redisCache.store.keys('f*')).resolves.toStrictEqual(['foo']); | ||
}); | ||
it('should return an array of keys for the given pattern', async () => { | ||
await redisCache.set('foo', 'bar'); | ||
await expect(redisCache.store.keys('f*')).resolves.toStrictEqual(['foo']); | ||
}); | ||
it('should return an array of all keys if called without a pattern', async () => { | ||
await redisCache.store.mset([ | ||
['foo', 'bar'], | ||
['foo2', 'bar2'], | ||
['foo3', 'bar3'], | ||
]); | ||
await expect( | ||
redisCache.store | ||
.keys('f*') | ||
.then((x) => x.sort((a, b) => a.localeCompare(b))), | ||
).resolves.toStrictEqual(['foo', 'foo2', 'foo3']); | ||
}); | ||
it('should return an array of all keys if called without a pattern', async () => { | ||
await redisCache.store.mset([ | ||
['foo', 'bar'], | ||
['foo2', 'bar2'], | ||
['foo3', 'bar3'], | ||
]); | ||
await expect( | ||
redisCache.store | ||
.keys('f*') | ||
.then((x) => x.sort((a, b) => a.localeCompare(b))), | ||
).resolves.toStrictEqual(['foo', 'foo2', 'foo3']); | ||
}); | ||
it('should return an array of keys without pattern', async () => { | ||
await redisCache.reset(); | ||
await redisCache.set('foo', 'bar'); | ||
await expect(redisCache.store.keys()).resolves.toStrictEqual(['foo']); | ||
}); | ||
it('should return an array of keys without pattern', async () => { | ||
await redisCache.reset(); | ||
await redisCache.set('foo', 'bar'); | ||
await expect(redisCache.store.keys()).resolves.toStrictEqual(['foo']); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.keys()).rejects.toBeDefined(); | ||
}); | ||
it('should return an error if there is an error acquiring a connection', async () => { | ||
await redisCache.store.client.disconnect(); | ||
await expect(redisCache.store.keys()).rejects.toBeDefined(); | ||
}); | ||
}); | ||
describe('isCacheable', () => { | ||
it('should return true when the value is not undefined', () => { | ||
expect(redisCache.store.isCacheable(0)).toBeTruthy(); | ||
expect(redisCache.store.isCacheable(100)).toBeTruthy(); | ||
expect(redisCache.store.isCacheable('')).toBeTruthy(); | ||
expect(redisCache.store.isCacheable('test')).toBeTruthy(); | ||
}); | ||
it('should return true when the value is not undefined', () => { | ||
expect(redisCache.store.isCacheable(0)).toBeTruthy(); | ||
expect(redisCache.store.isCacheable(100)).toBeTruthy(); | ||
expect(redisCache.store.isCacheable('')).toBeTruthy(); | ||
expect(redisCache.store.isCacheable('test')).toBeTruthy(); | ||
}); | ||
it('should return false when the value is undefined', () => { | ||
expect(redisCache.store.isCacheable(undefined)).toBeFalsy(); | ||
}); | ||
it('should return false when the value is undefined', () => { | ||
expect(redisCache.store.isCacheable(undefined)).toBeFalsy(); | ||
}); | ||
it('should return false when the value is null', () => { | ||
expect(redisCache.store.isCacheable(null)).toBeFalsy(); | ||
}); | ||
it('should return false when the value is null', () => { | ||
expect(redisCache.store.isCacheable(null)).toBeFalsy(); | ||
}); | ||
it('should avoid not cacheable error', async () => { | ||
expect(redisCache.store.isCacheable(null)).toBeFalsy(); | ||
await expect( | ||
avoidNoCacheable(redisCache.set('foo', null)), | ||
).resolves.toBeUndefined(); | ||
}); | ||
it('should avoid not cacheable error', async () => { | ||
expect(redisCache.store.isCacheable(null)).toBeFalsy(); | ||
await expect( | ||
avoidNoCacheable(redisCache.set('foo', null)), | ||
).resolves.toBeUndefined(); | ||
}); | ||
}); | ||
describe('redis error event', () => { | ||
it('should return an error when the redis server is unavailable', async () => { | ||
await new Promise<void>((resolve) => { | ||
redisCache.store.client.on('error', (err) => { | ||
expect(err).not.toEqual(null); | ||
resolve(); | ||
}); | ||
it('should return an error when the redis server is unavailable', async () => { | ||
await new Promise<void>((resolve) => { | ||
redisCache.store.client.on('error', (err) => { | ||
expect(err).not.toEqual(null); | ||
resolve(); | ||
}); | ||
redisCache.store.client.emit('error', 'Something unexpected'); | ||
}); | ||
}); | ||
redisCache.store.client.emit('error', 'Something unexpected'); | ||
}); | ||
}); | ||
}); | ||
describe('wrap function', () => { | ||
// Simulate retrieving a user from a database | ||
const getUser = (id: number) => Promise.resolve({ id }); | ||
// Simulate retrieving a user from a database | ||
const getUser = (id: number) => Promise.resolve({ id }); | ||
it('should work', async () => { | ||
const id = 123; | ||
it('should work', async () => { | ||
const id = 123; | ||
await redisCache.wrap('wrap-promise', () => getUser(id)); | ||
await redisCache.wrap('wrap-promise', () => getUser(id)); | ||
// Second call to wrap should retrieve from cache | ||
await expect( | ||
redisCache.wrap('wrap-promise', () => getUser(id + 1)), | ||
).resolves.toStrictEqual({ id }); | ||
}); | ||
// Second call to wrap should retrieve from cache | ||
await expect( | ||
redisCache.wrap('wrap-promise', () => getUser(id + 1)), | ||
).resolves.toStrictEqual({ id }); | ||
}); | ||
}); |
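The test-file portion of this comparison shows the old and new lines differing mainly in indentation, and no test for the new `keyPrefix` option is visible here. A hypothetical vitest case, reusing this file's `config`, `caching` and `redisStore` and assuming a local Redis at `localhost:6379`, might look like:

```ts
it('should namespace stored keys with the configured keyPrefix', async () => {
  const prefixed = await caching(redisStore, { ...config, keyPrefix: 'pre' });
  await prefixed.set('foo', 'bar');
  await expect(prefixed.get('foo')).resolves.toBe('bar');
  // Values are JSON.stringify'd by the store, so the raw key 'pre:foo' holds '"bar"'.
  await expect(prefixed.store.client.get('pre:foo')).resolves.toBe('"bar"');
});
```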
Updated @redis/client@^1.5.16
Updated cache-manager@^5.5.3
Updated redis@^4.6.14