lru-cache-for-clusters-as-promised
Comparing version 1.5.25 to 1.6.0
@@ -0,1 +1,11 @@ | ||
1.6.0 / 2021-03-20 | ||
================== | ||
* Refactor codebase to be more maintainable | ||
* Support for external `parse` and `stringify` functions, used for object caching, more efficient on large objects | ||
* Update tests, 100% code coverage | ||
* Update documentation | ||
* Update dependencies | ||
* npm audit fix | ||
1.5.25 / 2021-03-02 | ||
@@ -2,0 +12,0 @@ ================== |
// https://github.com/doublesharp/lru-cache-for-clusters-as-promised#lru-cache-for-clusters-as-promised | ||
declare module "lru-cache-for-clusters-as-promised" { | ||
// https://github.com/doublesharp/lru-cache-for-clusters-as-promised#example-usage | ||
@@ -6,0 +5,0 @@ class Cache <G1 = never, G2 = never, G3 = never, G4 = never> { |
/** | ||
* Provide a cluster-safe lru-cache with Promises | ||
* Provide a cluster-optimized lru-cache with Promises | ||
* | ||
@@ -8,380 +8,155 @@ * @module lru-cache-for-clusters-as-promised | ||
/* eslint strict: 0 */ | ||
'use strict'; | ||
const cluster = require('cluster'); | ||
const CronJob = require('cron').CronJob; | ||
const Debug = require('debug'); | ||
const uuid = require('uuid'); | ||
const LRUCache = require('lru-cache'); | ||
const master = require('./lib/master'); | ||
const worker = require('./lib/worker'); | ||
const utils = require('./lib/utils'); | ||
const debug = new Debug('lru-cache-for-clusters-as-promised'); | ||
const messages = new Debug('lru-cache-for-clusters-as-promised-messages'); | ||
// set up the master to handle messages for each worker | ||
master.processMessages(); | ||
// lru caches by namespace on the master | ||
const caches = {}; | ||
// set up each worker to handle messages from the master | ||
worker.processMessages(); | ||
// track callbacks on the worker by request id | ||
const callbacks = {}; | ||
// used to identify messages from our module | ||
const source = 'lru-cache-for-clusters-as-promised'; | ||
/** | ||
* Starts a cron job to prune stale objects from the cache | ||
* @param {LRUCache} cache The cache we want to prune | ||
* @param {string} cronTime The cron schedule | ||
* @param {string} namespace The namespace for shared caches | ||
* @return {CronJob} The cron job which has already been started | ||
* LRUCacheForClustersAsPromised roughly approximates the functionality of LRUCache, | ||
* but in a promisified way. When running as a cluster, workers send requests via IPC | ||
* to the master thread, which holds the actual cache and sends back a response that | ||
* resolves the Promise. For non-clustered environments, a promisified interface to | ||
* the cache is provided to match the interface for clustered environments. | ||
* | ||
* @param {Object} options The lru-cache options. Properties can be set, functions cannot. | ||
* @return {Object} Object with LRU methods | ||
*/ | ||
function startPruneCronJob(cache, cronTime, namespace) { | ||
debug('Creating cache prune job.', cache); | ||
const job = new CronJob({ | ||
cronTime, | ||
onTick: () => { | ||
debug(`Pruning cache ${namespace}`, cache); | ||
cache.prune(); | ||
}, | ||
start: true, | ||
runOnInit: true, | ||
}); | ||
job.start(); | ||
return job; | ||
} | ||
class LRUCacheForClustersAsPromised { | ||
constructor(options = {}) { | ||
// this is how the clustered cache differentiates | ||
this.namespace = options.namespace || 'default'; | ||
const funcs = { | ||
mapObjects: (pairs, objs, jsonFunction) => | ||
Promise.all( | ||
Object.keys(pairs).map((key) => Promise.resolve((objs[key] = JSON[jsonFunction](pairs[key])))) | ||
), | ||
mDel: (lru, params) => { | ||
if (params[0] && params[0] instanceof Array) { | ||
params[0].map((key) => lru.del(key)); | ||
} | ||
}, | ||
mGet: (lru, params) => { | ||
const mGetValues = {}; | ||
if (params[0] && params[0] instanceof Array) { | ||
params[0].map((key) => (mGetValues[key] = lru.get(key))); | ||
} | ||
return mGetValues; | ||
}, | ||
mSet: (lru, params) => { | ||
if (params[0] && params[0] instanceof Object) { | ||
Object.keys(params[0]).map((key) => lru.set(key, params[0][key], params[1])); | ||
} | ||
}, | ||
}; | ||
// this is how long the worker will wait for a response from the master in milliseconds | ||
this.timeout = options.timeout || 100; | ||
// only run on the master thread | ||
if (cluster.isMaster) { | ||
// for each worker created... | ||
cluster.on('fork', (worker) => { | ||
// wait for the worker to send a message | ||
worker.on('message', (request) => { | ||
if (request.source !== source) return; | ||
messages(`Master received message from worker ${worker.id}`, request); | ||
// how should timeouts be handled - default is resolve(undefined), otherwise reject(Error) | ||
this.failsafe = options.failsafe === 'reject' ? 'reject' : 'resolve'; | ||
/** | ||
* Sends the response back to the worker thread | ||
* @param {Object} data The response from the cache | ||
*/ | ||
function sendResponse(data) { | ||
const response = data; | ||
response.source = source; | ||
response.id = request.id; | ||
response.func = request.func; | ||
messages(`Master sending response to worker ${worker.id}`, response); | ||
worker.send(response); | ||
} | ||
this.parse = options.parse || JSON.parse; | ||
this.stringify = options.stringify || JSON.stringify; | ||
// try to load an existing lru-cache | ||
let lru = caches[request.namespace]; | ||
// if this is the master thread, we just promisify an lru-cache | ||
// if it is the worker we need to send messages to the master to resolve the values | ||
this.promisify = (cluster.isMaster ? master : worker).getPromisified( | ||
this, | ||
options | ||
); | ||
} | ||
const params = request.arguments; | ||
set(key, value, maxAge) { | ||
return this.promisify('set', key, value, maxAge); | ||
} | ||
switch (request.func) { | ||
// constructor request | ||
case '()': { | ||
let created = false; | ||
const options = params[0]; | ||
// create a new lru-cache, give it a namespace, and save it locally | ||
if (caches[request.namespace]) { | ||
lru = caches[request.namespace]; | ||
// update property values as needed | ||
['max', 'maxAge', 'stale'].forEach((prop) => { | ||
if (options[prop] && options[prop] !== lru[prop]) { | ||
lru[prop] = options[prop]; | ||
} | ||
}); | ||
} else { | ||
created = true; | ||
lru = caches[request.namespace] = new LRUCache(...params); | ||
// start a job to clean the cache | ||
if (params[0].prune) { | ||
lru.job = startPruneCronJob(lru, params[0].prune, request.namespace); | ||
} | ||
} | ||
sendResponse({ | ||
value: { | ||
namespace: request.namespace, | ||
isnew: created, | ||
max: lru.max, | ||
maxAge: lru.maxAge, | ||
stale: lru.stale, | ||
}, | ||
}); | ||
break; | ||
} | ||
case 'max': | ||
case 'maxAge': | ||
case 'stale': { | ||
lru = caches[request.namespace]; | ||
if (params[0]) { | ||
lru[request.func] = params[0]; | ||
} | ||
sendResponse({ | ||
value: lru[request.func], | ||
}); | ||
break; | ||
} | ||
case 'decr': | ||
case 'incr': { | ||
// get the current value | ||
let value = lru.get(params[0]); | ||
// maybe initialize and increment | ||
value = (typeof value === 'number' ? value : 0) + | ||
((params[1] || 1) * (request.func === 'decr' ? -1 : 1)); | ||
// set the new value | ||
lru.set(params[0], value); | ||
// send the new value | ||
sendResponse({ | ||
value, | ||
}); | ||
break; | ||
} | ||
case 'mGet': { | ||
const mGetValues = funcs.mGet(lru, params); | ||
sendResponse({ value: mGetValues }); | ||
break; | ||
} | ||
case 'mSet': { | ||
funcs.mSet(lru, params); | ||
sendResponse({ value: true }); | ||
break; | ||
} | ||
case 'mDel': { | ||
funcs.mDel(lru, params); | ||
sendResponse({ value: true }); | ||
break; | ||
} | ||
// return the property value | ||
case 'length': | ||
case 'itemCount': { | ||
sendResponse({ | ||
value: lru[request.func], | ||
}); | ||
break; | ||
} | ||
// return the function value | ||
default: { | ||
sendResponse({ | ||
value: lru[request.func](...params), | ||
}); | ||
break; | ||
} | ||
} | ||
get(key) { | ||
return this.promisify('get', key); | ||
} | ||
setObject(key, value, maxAge) { | ||
return this.promisify('set', key, this.stringify(value), maxAge); | ||
} | ||
getObject(key) { | ||
return this.promisify('get', key).then((value) => | ||
Promise.resolve( | ||
// eslint-disable-next-line no-undefined | ||
value ? this.parse(value) : undefined | ||
) | ||
); | ||
} | ||
del(key) { | ||
return this.promisify('del', key); | ||
} | ||
mGet(keys) { | ||
return this.promisify('mGet', keys); | ||
} | ||
mSet(pairs, maxAge) { | ||
return this.promisify('mSet', pairs, maxAge); | ||
} | ||
mGetObjects(keys) { | ||
return this.promisify('mGet', keys).then((pairs) => { | ||
const objs = {}; | ||
return utils | ||
.mapObjects(pairs, objs, this.parse) | ||
.then(() => Promise.resolve(objs)); | ||
}); | ||
}); | ||
} | ||
} | ||
// run on each worker thread | ||
if (cluster.isWorker) { | ||
process.on('message', (response) => { | ||
messages(`Worker ${cluster.worker.id} received message`, response); | ||
// look up the callback based on the response ID, delete it, then call it | ||
if (response.source !== source || !callbacks[response.id]) return; | ||
const callback = callbacks[response.id]; | ||
delete callbacks[response.id]; | ||
callback(response); | ||
}); | ||
} | ||
mSetObjects(pairs, maxAge) { | ||
const objs = {}; | ||
return utils | ||
.mapObjects(pairs, objs, this.stringify) | ||
.then(() => this.promisify('mSet', objs, maxAge)); | ||
} | ||
/** | ||
* LRUCacheForClustersAsPromised roughly approximates the functionality of LRUCache, | ||
* but in a promisified way. When running as a cluster, workers send requests via IPC | ||
* to the master thread, which holds the actual cache and sends back a response that | ||
* resolves the Promise. For non-clustered environments, a promisified interface to | ||
* the cache is provided to match the interface for clustered environments. | ||
* | ||
* @param {Object} opts The lru-cache options. Properties can be set, functions cannot. | ||
* @return {Object} Object with LRU methods | ||
*/ | ||
function LRUCacheForClustersAsPromised(opts) { | ||
// default to some empty options | ||
const options = opts || {}; | ||
mDel(keys) { | ||
return this.promisify('mDel', keys); | ||
} | ||
// keep a reference as 'this' is lost inside the Promise contexts | ||
const cache = this; | ||
peek(key) { | ||
return this.promisify('peek', key); | ||
} | ||
// this is how the clustered cache differentiates | ||
cache.namespace = options.namespace || 'default'; | ||
has(key) { | ||
return this.promisify('has', key); | ||
} | ||
// this is how long the worker will wait for a response from the master in milliseconds | ||
cache.timeout = options.timeout || 100; | ||
incr(key, amount) { | ||
return this.promisify('incr', key, amount); | ||
} | ||
// how should timeouts be handled - default is resolve(undefined), otherwise reject(Error) | ||
cache.failsafe = options.failsafe === 'reject' ? 'reject' : 'resolve'; | ||
decr(key, amount) { | ||
return this.promisify('decr', key, amount); | ||
} | ||
// if this is the master thread, we just promisify an lru-cache | ||
let lru = null; | ||
if (cluster.isMaster) { | ||
if (caches[cache.namespace]) { | ||
lru = caches[cache.namespace]; | ||
debug(`Loaded cache from shared namespace ${cache.namespace}`); | ||
} else { | ||
lru = new LRUCache(options); | ||
caches[cache.namespace] = lru; | ||
if (options.prune) { | ||
lru.job = startPruneCronJob(lru, options.prune, cache.namespace); | ||
} | ||
debug(`Created new LRU cache ${cache.namespace}`); | ||
} | ||
reset() { | ||
return this.promisify('reset'); | ||
} | ||
// return a promise that resolves to the result of the method on | ||
// the local lru-cache this is the master thread, or from the | ||
// lru-cache on the master thread if this is a worker | ||
const promiseTo = (...args) => { | ||
// first argument is the function to run | ||
const func = args[0]; | ||
// the rest of the args are the function arguments of N length | ||
const funcArgs = Array.prototype.slice.call(args, 1, args.length); | ||
if (cluster.isMaster) { | ||
// acting on the local lru-cache | ||
messages(cache.namespace, args); | ||
switch (func) { | ||
case 'max': | ||
case 'maxAge': | ||
case 'stale': { | ||
if (funcArgs[0]) { | ||
lru[func] = funcArgs[0]; | ||
} | ||
return Promise.resolve(lru[func]); | ||
} | ||
case 'decr': | ||
case 'incr': { | ||
// get the current value default to 0 | ||
let value = lru.get(funcArgs[0]); | ||
// maybe initialize and increment | ||
value = (typeof value === 'number' ? value : 0) + | ||
((funcArgs[1] || 1) * (func === 'decr' ? -1 : 1)); | ||
// set the new value | ||
lru.set(funcArgs[0], value); | ||
// resolve the new value | ||
return Promise.resolve(value); | ||
} | ||
case 'mGet': { | ||
const mGetValues = funcs.mGet(lru, funcArgs); | ||
return Promise.resolve(mGetValues); | ||
} | ||
case 'mSet': { | ||
funcs.mSet(lru, funcArgs); | ||
return Promise.resolve(true); | ||
} | ||
case 'mDel': { | ||
funcs.mDel(lru, funcArgs); | ||
return Promise.resolve(true); | ||
} | ||
case 'itemCount': | ||
case 'length': { | ||
// return the property value | ||
return Promise.resolve(lru[func]); | ||
} | ||
default: { | ||
// just call the function on the lru-cache | ||
return Promise.resolve(lru[func](...funcArgs)); | ||
} | ||
} | ||
} | ||
return new Promise((resolve, reject) => { | ||
// create the request to the master | ||
const request = { | ||
source, | ||
namespace: cache.namespace, | ||
id: uuid.v4(), | ||
func, | ||
arguments: funcArgs, | ||
}; | ||
// if we don't get a response in 100ms, return undefined | ||
let failsafeTimeout = setTimeout(() => { | ||
failsafeTimeout = undefined; | ||
if (cache.failsafe === 'reject') { | ||
return reject(new Error('Timed out in isFailed()')); | ||
} | ||
return resolve(undefined); | ||
}, func === '()' ? 5000 : cache.timeout); | ||
// set the callback for this id to resolve the promise | ||
callbacks[request.id] = (result) => { | ||
if (failsafeTimeout) { | ||
clearTimeout(failsafeTimeout); | ||
return resolve(result.value); | ||
} | ||
return false; | ||
}; | ||
// send the request to the master process | ||
process.send(request); | ||
}); | ||
}; | ||
keys() { | ||
return this.promisify('keys'); | ||
} | ||
if (cluster.isWorker) { | ||
// create a new LRU cache on the master | ||
promiseTo('()', options) | ||
.then((lruOptions) => debug('created lru cache on master', lruOptions)) | ||
.catch((err) => { | ||
/* istanbul ignore next */ | ||
debug('failed to create lru cache on master', err, options); | ||
}); | ||
values() { | ||
return this.promisify('values'); | ||
} | ||
// the lru-cache functions we are able to provide. Note that length() | ||
// and itemCount() are functions and not properties. All functions | ||
// return a Promise. | ||
return { | ||
set: (key, value, maxAge) => promiseTo('set', key, value, maxAge), | ||
get: (key) => promiseTo('get', key), | ||
setObject: (key, value, maxAge) => promiseTo('set', key, JSON.stringify(value), maxAge), | ||
getObject: (key) => promiseTo('get', key).then((value) => Promise.resolve(value ? JSON.parse(value) : undefined)), | ||
del: (key) => promiseTo('del', key), | ||
mGet: (keys) => promiseTo('mGet', keys), | ||
mSet: (pairs, maxAge) => promiseTo('mSet', pairs, maxAge), | ||
mGetObjects: (keys) => promiseTo('mGet', keys).then((pairs) => { | ||
const objs = {}; | ||
return funcs | ||
.mapObjects(pairs, objs, 'parse') | ||
.then(() => Promise.resolve(objs)); | ||
}), | ||
mSetObjects: (pairs, maxAge) => { | ||
const objs = {}; | ||
return funcs | ||
.mapObjects(pairs, objs, 'stringify') | ||
.then(() => promiseTo('mSet', objs, maxAge)); | ||
}, | ||
mDel: (keys) => promiseTo('mDel', keys), | ||
peek: (key) => promiseTo('peek', key), | ||
has: (key) => promiseTo('has', key), | ||
incr: (key, amount) => promiseTo('incr', key, amount), | ||
decr: (key, amount) => promiseTo('decr', key, amount), | ||
reset: () => promiseTo('reset'), | ||
keys: () => promiseTo('keys'), | ||
values: () => promiseTo('values'), | ||
dump: () => promiseTo('dump'), | ||
prune: () => promiseTo('prune'), | ||
length: () => promiseTo('length'), | ||
itemCount: () => promiseTo('itemCount'), | ||
stale: (stale) => promiseTo('stale', stale), | ||
max: (max) => promiseTo('max', max), | ||
maxAge: (maxAge) => promiseTo('maxAge', maxAge), | ||
}; | ||
dump() { | ||
return this.promisify('dump'); | ||
} | ||
prune() { | ||
return this.promisify('prune'); | ||
} | ||
length() { | ||
return this.promisify('length'); | ||
} | ||
itemCount() { | ||
return this.promisify('itemCount'); | ||
} | ||
stale(stale) { | ||
return this.promisify('stale', stale); | ||
} | ||
max(max) { | ||
return this.promisify('max', max); | ||
} | ||
maxAge(maxAge) { | ||
return this.promisify('maxAge', maxAge); | ||
} | ||
} | ||
@@ -388,0 +163,0 @@ |
{ | ||
"name": "lru-cache-for-clusters-as-promised", | ||
"version": "1.5.25", | ||
"version": "1.6.0", | ||
"types": "index.d.ts", | ||
@@ -43,8 +43,8 @@ "description": "LRU Cache that is safe for clusters", | ||
"depcheck": "1.4.0", | ||
"eslint": "7.21.0", | ||
"eslint-config-google": "0.14.0", | ||
"eslint-plugin-mocha": "8.0.0", | ||
"eslint": "7.22.0", | ||
"eslint-plugin-mocha": "8.1.0", | ||
"express": "4.17.1", | ||
"flatted": "^3.1.1", | ||
"istanbul-cobertura-badger": "1.3.1", | ||
"mocha": "8.3.0", | ||
"mocha": "8.3.2", | ||
"nyc": "15.1.0", | ||
@@ -51,0 +51,0 @@ "should": "13.2.3", |
@@ -43,3 +43,3 @@ # lru-cache-for-clusters-as-promised | ||
* `prune: false|crontime string`, defaults to `false` | ||
* Use a cron job on the master thread to call `prune()` on your cache at regular intervals specified in "crontime", for example "*/30 * * * * *" would prune the cache every 30 seconds. Also works in single threaded environments not using the `cluster` module. | ||
* Use a cron job on the master thread to call `prune()` on your cache at regular intervals specified in "crontime"; for example, "*/30 * * * * *" would prune the cache every 30 seconds (see [`node-cron` patterns](https://www.npmjs.com/package/cron#available-cron-patterns) for more info). Also works in single-threaded environments not using the `cluster` module. A short sketch follows below. | ||
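A minimal sketch of a cache configured with `prune` (the namespace, sizes, and schedule below are illustrative, not values from the package docs):

```
// hedged sketch: prune stale entries every 30 seconds via a cron job on the master
const LRUCache = require('lru-cache-for-clusters-as-promised');

const prunedCache = new LRUCache({
  namespace: 'pruned-cache',  // illustrative namespace
  max: 100,
  maxAge: 10 * 1000,          // entries become stale after 10 seconds
  prune: '*/30 * * * * *',    // crontime pattern, evaluated on the master thread
});
```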
@@ -52,12 +52,16 @@ > ! note that `length` and `dispose` are missing as it is not possible to pass `functions` via IPC messages. | ||
* Sets a value for a key. Specifying the `maxAge` will cause the value to expire per the `stale` value or when `prune`d. | ||
* `setObject(key, object, maxAge)` | ||
* Sets a cache value where the value is an object. Passes the values through `cache.stringify()`, which defaults to `JSON.stringify()`. Use a custom parser like [`flatted`](https://www.npmjs.com/package/flatted) for cases such as circular object references. | ||
* `mSet({ key1: 1, key2: 2, ...}, maxAge)` | ||
* Sets multiple key-value pairs in the cache at one time. | ||
* `mSetObjects({ key1: { obj: 1 }, key2: { obj: 2 }, ...}, maxAge)` | ||
* Sets multiple key-value pairs in the cache at one time, where the value is an object. | ||
* Sets multiple key-value pairs in the cache at one time, where the value is an object. Passes the values through `cache.stringify()`, see `cache.setObject()`. | ||
* `get(key)` | ||
* Returns a value for a key. | ||
* `getObject(key)` | ||
* Returns an object value for a key. Passes the values through `cache.parse()`, which defaults to `JSON.parse()`. Use a custom parser like [`flatted`](https://www.npmjs.com/package/flatted) for cases such as circular object references. | ||
* `mGet([key1, key2, ...])` | ||
* Returns values for multiple keys, results are in the form of `{ key1: '1', key2: '2' }`. | ||
* `mGetObjects([key1, key2, ...])` | ||
* Returns values as objects for multiple keys, results are in the form of `{ key1: '1', key2: '2' }`. | ||
* Returns values as objects for multiple keys, results are in the form of `{ key1: {...}, key2: {...} }`. Passes the values through `cache.parse()`, see `cache.getObject()` (a combined sketch follows this list). | ||
* `peek(key)` | ||
@@ -100,5 +104,3 @@ * Returns the value for a key without updating its last access time. | ||
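As a rough, hedged sketch of how several of the methods above combine (the keys, values, and namespace are illustrative only):

```
const LRUCache = require('lru-cache-for-clusters-as-promised');
const cache = new LRUCache({ namespace: 'sketch', max: 50 });

(async () => {
  // plain string values
  await cache.set('token', 'abc123');
  const token = await cache.get('token'); // 'abc123'

  // object values are passed through cache.stringify() / cache.parse()
  await cache.setObject('user', { name: 'Ada' });
  const user = await cache.getObject('user'); // { name: 'Ada' }

  // multi-key helpers
  await cache.mSet({ one: '1', two: '2' });
  const values = await cache.mGet(['one', 'two']); // { one: '1', two: '2' }
  await cache.mDel(['one', 'two']);
})();
```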
// require the module in your master thread that creates workers to initialize | ||
const LRUCache = require('lru-cache-for-clusters-as-promised'); | ||
LRUCache.init(); | ||
require('lru-cache-for-clusters-as-promised').init(); | ||
``` | ||
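For context, a hedged sketch of a single entry point run under `cluster`; the namespace and fork count are illustrative, and error handling is omitted:

```
const cluster = require('cluster');
const LRUCache = require('lru-cache-for-clusters-as-promised');

if (cluster.isMaster) {
  // the master holds the real lru-cache instances and answers worker requests
  LRUCache.init();
  cluster.fork();
} else {
  // workers use the same constructor; calls are proxied to the master over IPC
  const cache = new LRUCache({ namespace: 'shared', max: 10 });
  cache
    .set('hello', 'world')
    .then(() => cache.get('hello'))
    .then((value) => console.log(value)); // 'world'
}
```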
@@ -150,5 +152,32 @@ | ||
}); | ||
``` | ||
Use a custom object parser for the cache to handle cases such as circular object references that `JSON.parse()` and `JSON.stringify()` cannot, or to use custom revivers, etc. | ||
``` | ||
const flatted = require('flatted'); | ||
const LRUCache = require('lru-cache-for-clusters-as-promised'); | ||
const cache = new LRUCache({ | ||
namespace: 'circular-objects', | ||
max: 50, | ||
parse: flatted.parse, | ||
stringify: flatted.stringify, | ||
}); | ||
// create a circular reference | ||
const a = { b: null }; | ||
const b = { a }; | ||
b.a.b = b; | ||
// this will work | ||
await cache.setObject(1, a); | ||
// this returns a new object whose circular structure is preserved via flatted | ||
const c = await cache.getObject(1); | ||
if (c !== a && c.b.a === c) { | ||
console.log('circular structure preserved!'); | ||
} | ||
``` | ||
# process flow | ||
@@ -155,0 +184,0 @@ |
const request = require('supertest'); | ||
const config = require('./lib/config'); | ||
const config = require('./lib/test-config'); | ||
const TestUtils = require('./lib/test-utils'); | ||
@@ -17,9 +17,9 @@ | ||
request(`http://${config.server.host}:${config.server.port}`) | ||
.get('/reset') | ||
.end((err) => { | ||
if (err) { | ||
return done(err); | ||
} | ||
return done(); | ||
}); | ||
.get('/reset') | ||
.end((err) => { | ||
if (err) { | ||
return done(err); | ||
} | ||
return done(); | ||
}); | ||
}); | ||
@@ -32,10 +32,12 @@ | ||
request(`http://${config.server.host}:${config.server.port}`) | ||
.get(`/${method}`) | ||
.expect(200) | ||
.end((err, response) => { | ||
if (err) { | ||
return done(err); | ||
} | ||
return response.body === true ? done() : done(new Error(response.body)); | ||
}); | ||
.get(`/${method}`) | ||
.expect(200) | ||
.end((err, response) => { | ||
if (err) { | ||
return done(err); | ||
} | ||
return response.body === true | ||
? done() | ||
: done(new Error(response.body)); | ||
}); | ||
}); | ||
@@ -42,0 +44,0 @@ }); |
@@ -20,4 +20,6 @@ const cluster = require('cluster'); | ||
'cover', | ||
'--report', 'none', | ||
'--print', 'none', | ||
'--report', | ||
'none', | ||
'--print', | ||
'none', | ||
// output files will have the workers PID in the filename | ||
@@ -28,4 +30,4 @@ '--include-pid', | ||
] | ||
// append any additional command line arguments | ||
.concat(process.argv.slice(2)), | ||
// append any additional command line arguments | ||
.concat(process.argv.slice(2)), | ||
}); | ||
@@ -75,4 +77,3 @@ } else { | ||
}); | ||
cache.keys() | ||
.then(() => done2()); | ||
cache.keys().then(() => done2()); | ||
}, | ||
@@ -79,0 +80,0 @@ getCacheMax: () => { |
@@ -1,2 +0,2 @@ | ||
const config = require('./config'); | ||
const config = require('./test-config'); | ||
const express = require('express'); | ||
@@ -10,2 +10,3 @@ const http = require('http'); | ||
initCache.keys(); | ||
// this will be the SAME cache no matter which module calls it. | ||
@@ -12,0 +13,0 @@ const defaultCache = new LRUCache({ |
@@ -1,6 +0,6 @@ | ||
const config = require('./config'); | ||
const config = require('./test-config'); | ||
const cluster = require('cluster'); | ||
const { parse, stringify } = require('flatted'); | ||
const should = require('should'); | ||
const LRUCache = require('../../'); | ||
const LRUCacheForClustersAsPromised = require('../../'); | ||
const member = cluster.isWorker ? 'worker' : 'master'; | ||
@@ -14,2 +14,8 @@ | ||
function TestUtils(cache) { | ||
const object = { foo: 'bar' }; | ||
const pairs = { | ||
foo: 'bar', | ||
bizz: 'buzz', | ||
}; | ||
const keys = Object.keys(pairs); | ||
return { | ||
@@ -31,2 +37,5 @@ clusterTests: { | ||
null_objects: 'null objects should be ok', | ||
undefined_objects: 'undefined objects should be ok', | ||
circular_objects: 'circular objects should be ok', | ||
miss_undefined: 'missing objects should return undefined', | ||
pruneJob: 'prune cache using cron job', | ||
@@ -36,8 +45,9 @@ set: 'set(key, value)', | ||
del: 'del(key)', | ||
incr: 'incr(key)', | ||
incr2: 'incr(key, 2)', | ||
decr: 'decr(key)', | ||
decr2: 'decr(key, 2)', | ||
peek: 'peek(key)', | ||
has: 'has(key)', | ||
incr: 'incr(key) - increment value by 1', | ||
incr2: 'incr(key, 2) - increment value by 2', | ||
decr: 'decr(key) - decrement value by 1', | ||
decr2: 'decr(key, 2) - decrement value by 2', | ||
peek: | ||
'peek(key) - get a cache value but do not update access time for LRU', | ||
has: 'has(key) - check if a key exists', | ||
length: 'length()', | ||
@@ -51,3 +61,4 @@ itemCount: 'itemCount()', | ||
addFour: 'add four keys and have the first fall out', | ||
addFourAccessOne: 'add four keys and then access the first so the second falls out', | ||
addFourAccessOne: | ||
'add four keys and then access the first so the second falls out', | ||
getMax: 'max()', | ||
@@ -60,447 +71,497 @@ getMaxAge: 'maxAge()', | ||
}, | ||
mSet: (cb) => { | ||
const pairs = { | ||
foo: 'bar', | ||
bizz: 'buzz', | ||
}; | ||
cache.mSet(pairs) | ||
.then(() => cache.get('bizz')) | ||
.then((value) => { | ||
should(value).equal('buzz'); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
mSet: async (cb) => { | ||
try { | ||
await cache.mSet(pairs); | ||
const value = await cache.get(keys[0]); | ||
should(value).equal(pairs[keys[0]]); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
mSetNull: (cb) => { | ||
const pairs = null; | ||
cache.mSet(pairs) | ||
.then(() => cache.mSet('string')) | ||
.then(() => cache.mSet(['array'])) | ||
.then(() => { | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
mSetNull: async (cb) => { | ||
try { | ||
await cache.mSet(null); | ||
await cache.mSet('string'); | ||
await cache.mSet(['array']); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
mGet: (cb) => { | ||
const pairs = { | ||
foo: 'bar', | ||
bizz: 'buzz', | ||
}; | ||
cache.mSet(pairs) | ||
.then(() => cache.mGet(['bizz', 'foo'])) | ||
.then((values) => { | ||
// should(values).not.equal(undefined); | ||
should(values.bizz).equal('buzz'); | ||
should(values.foo).equal('bar'); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
mGet: async (cb) => { | ||
try { | ||
await cache.mSet(pairs); | ||
const values = await cache.mGet(keys); | ||
should(typeof values).not.equal('undefined'); | ||
should(values.bizz).equal(pairs.bizz); | ||
should(values.foo).equal(pairs.foo); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
mGetAndSetObjects: (cb) => { | ||
const pairs = { | ||
foo: { boo: 'bar' }, | ||
bizz: { bam: 'buzz' }, | ||
}; | ||
cache.mSetObjects(pairs) | ||
.then(() => cache.mGetObjects(['bizz', 'foo'])) | ||
.then((values) => { | ||
should(values.bizz).deepEqual({ bam: 'buzz' }); | ||
should(values.foo).deepEqual({ boo: 'bar' }); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
mGetAndSetObjects: async (cb) => { | ||
try { | ||
await cache.mSetObjects(pairs); | ||
const values = await cache.mGetObjects(keys); | ||
should(values.bizz).deepEqual(pairs.bizz); | ||
should(values.foo).deepEqual(pairs.foo); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
mGetNull: (cb) => { | ||
cache.mGet('string') | ||
.then((values) => { | ||
should(values).deepEqual({}); | ||
return cache.mGet(null); | ||
}) | ||
.then((values) => { | ||
should(values).deepEqual({}); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
mGetNull: async (cb) => { | ||
try { | ||
let values = await cache.mGet('string'); | ||
should(values).deepEqual({}); | ||
values = await cache.mGet(null); | ||
should(values).deepEqual({}); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
mDel: (cb) => { | ||
const pairs = { | ||
my: 'bar', | ||
get: 'buzz', | ||
}; | ||
cache.mSet(pairs) | ||
.then(() => cache.mDel(['my', 'get'])) | ||
.then(() => cache.get('get')) | ||
.then((value) => { | ||
should(value).equal(undefined); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
mDel: async (cb) => { | ||
try { | ||
await cache.mSet(pairs); | ||
await cache.mDel(keys); | ||
const value = await cache.get(keys[0]); | ||
should(typeof value).equal('undefined'); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
mDelNull: (cb) => { | ||
const pairs = { | ||
foo: 'whamo', | ||
bizz: 'blamo', | ||
}; | ||
cache.mSet(pairs) | ||
.then(() => cache.mDel(null)) | ||
.then(() => cache.get('bizz')) | ||
.then((value) => { | ||
should(value).equal('blamo'); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
mDelNull: async (cb) => { | ||
try { | ||
await cache.mSet(pairs); | ||
await cache.mDel(null); | ||
const value = await cache.get(keys[0]); | ||
should(value).equal(pairs[keys[0]]); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
objects: (cb) => { | ||
const myObj = { foo: 'bar' }; | ||
cache.setObject(1, myObj) | ||
.then(() => cache.getObject(1)) | ||
.then((obj) => { | ||
should(obj).not.equal(null); | ||
should(obj.foo).equal('bar'); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
objects: async (cb) => { | ||
try { | ||
await cache.setObject(1, object); | ||
const obj = await cache.getObject(1); | ||
should(obj).not.equal(null); | ||
should(obj.foo).equal(object.foo); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
null_objects: (cb) => { | ||
cache.setObject(1, undefined) | ||
.then(() => cache.getObject(1)) | ||
.then((obj) => { | ||
should(obj).equal(undefined); | ||
undefined_objects: async (cb) => { | ||
try { | ||
let object; | ||
await cache.setObject(1, object); | ||
const obj = await cache.getObject(1); | ||
should(typeof obj).equal('undefined'); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
null_objects: async (cb) => { | ||
try { | ||
let object = null; | ||
await cache.setObject(1, object); | ||
const obj = await cache.getObject(1); | ||
should(obj).equal(null); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
circular_objects: async (cb) => { | ||
try { | ||
// this cache uses the flatted parse and stringify | ||
const cacheCircular = new LRUCacheForClustersAsPromised({ | ||
namespace: 'circular-cache', | ||
max: 3, | ||
parse, | ||
stringify, | ||
}); | ||
// create a circular dependency | ||
const a = { b: null }; | ||
const b = { a }; | ||
b.a.b = b; | ||
// see if we can set and then extract the circular object | ||
await cacheCircular.setObject(1, a); | ||
const obj = await cacheCircular.getObject(1); | ||
should(obj).deepEqual(a); | ||
should(obj.b).deepEqual(b); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
miss_undefined: async (cb) => { | ||
try { | ||
const obj = await cache.getObject(1); | ||
should(typeof obj).equal('undefined'); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
hi: async (cb) => { | ||
try { | ||
let responded = false; | ||
const callback = (response) => { | ||
if (!responded) { | ||
responded = true; | ||
should(response).equal('hello'); | ||
cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
} | ||
}; | ||
process.on('message', (response) => callback && callback(response)); | ||
process.send('hi'); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
hi: (cb) => { | ||
let responded = false; | ||
const callback = (response) => { | ||
if (!responded) { | ||
responded = true; | ||
should(response).equal('hello'); | ||
cb(null, true); | ||
timeout: async (cb) => { | ||
try { | ||
const cacheBad = new LRUCacheForClustersAsPromised({ | ||
max: 1, | ||
stale: false, | ||
timeout: 1, | ||
namespace: `bad-cache-resolve-${member}`, | ||
}); | ||
let large = '1234567890'; | ||
for (let i = 0; i < 17; i += 1) { | ||
large += large; | ||
} | ||
}; | ||
process.on('message', (response) => callback && callback(response)); | ||
process.send('hi'); | ||
const result = await cacheBad.get(`bad-cache-key-${large}`); | ||
cb(null, result); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
timeout: (cb) => { | ||
const cacheBad = new LRUCache({ | ||
max: 1, | ||
stale: false, | ||
timeout: 1, | ||
namespace: `bad-cache-resolve-${member}`, | ||
}); | ||
let large = '1234567890'; | ||
for (let i = 0; i < 17; i += 1) { | ||
large += large; | ||
reject: async (cb) => { | ||
try { | ||
const cacheBad = new LRUCacheForClustersAsPromised({ | ||
max: 2, | ||
stale: false, | ||
timeout: 1, | ||
failsafe: 'reject', | ||
namespace: `bad-cache-reject-${member}`, | ||
}); | ||
let large = '1234567890'; | ||
for (let i = 0; i < 17; i += 1) { | ||
large += large; | ||
} | ||
await cacheBad.get(`bad-cache-key-${large}`); | ||
cb('fail'); | ||
} catch (err) { | ||
cb(null, true); | ||
} | ||
return cacheBad.get(`bad-cache-key-${large}`) | ||
.then((result) => cb(null, result)) | ||
.catch((err) => cb(err)); | ||
}, | ||
reject: (cb) => { | ||
const cacheBad = new LRUCache({ | ||
max: 2, | ||
stale: false, | ||
timeout: 1, | ||
failsafe: 'reject', | ||
namespace: `bad-cache-reject-${member}`, | ||
}); | ||
let large = '1234567890'; | ||
for (let i = 0; i < 17; i += 1) { | ||
large += large; | ||
pruneJob: async (cb) => { | ||
try { | ||
const prunedCache = new LRUCacheForClustersAsPromised({ | ||
max: 10, | ||
stale: true, | ||
maxAge: 100, | ||
namespace: `pruned-cache-${member}`, | ||
prune: '*/1 * * * * *', | ||
}); | ||
await prunedCache.set(config.args.one, config.args.one); | ||
await prunedCache.set(config.args.two, config.args.two, 2000); | ||
const itemCount = await prunedCache.itemCount(); | ||
// we should see 2 items in the cache | ||
should(itemCount).equal(2); | ||
// check again in 1100 ms | ||
setTimeout(async () => { | ||
// one of the items should have been removed based on the expiration | ||
const itemCount2 = await prunedCache.itemCount(); | ||
try { | ||
should(itemCount2).equal(1); | ||
return cb(null, true); | ||
} catch (err) { | ||
return cb(err); | ||
} | ||
}, 1100); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
return cacheBad.get(`bad-cache-key-${large}`) | ||
.then(() => cb('fail')) | ||
.catch(() => cb(null, true)); | ||
}, | ||
pruneJob: (cb) => { | ||
const prunedCache = new LRUCache({ | ||
max: 10, | ||
stale: true, | ||
maxAge: 100, | ||
namespace: `pruned-cache-${member}`, | ||
prune: '*/1 * * * * *', | ||
}); | ||
prunedCache.set(config.args.one, config.args.one) | ||
.then(() => prunedCache.set(config.args.two, config.args.two, 2000)) | ||
.then(() => prunedCache.itemCount()) | ||
.then((itemCount) => { | ||
// we should see 2 items in the cache | ||
should(itemCount).equal(2); | ||
// check again in 1100 ms | ||
setTimeout(() => { | ||
// one of the items should have been removed based on the expiration | ||
prunedCache.itemCount() | ||
.then((itemCount2) => { | ||
try { | ||
should(itemCount2).equal(1); | ||
return cb(null, true); | ||
} catch (err) { | ||
return cb(err); | ||
} | ||
}); | ||
}, 1100); | ||
}) | ||
.catch((err) => cb(err)); | ||
set: async (cb) => { | ||
try { | ||
const result = await cache.set(config.args.one, config.args.one); | ||
cb(null, result); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
set: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then((result) => cb(null, result)) | ||
.catch((err) => cb(err)); | ||
get: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.one); | ||
const result = await cache.get(config.args.one); | ||
should(result).equal(config.args.one); | ||
cb(null, result); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
get: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then(() => cache.get(config.args.one)) | ||
.then((result) => { | ||
should(result).equal(config.args.one); | ||
return cb(null, result); | ||
}) | ||
.catch((err) => cb(err)); | ||
del: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.one); | ||
await cache.del(config.args.one); | ||
const result = await cache.get(config.args.one); | ||
should(typeof result).equal('undefined'); | ||
cb(null, result); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
del: (cb) => { | ||
cache.del(config.args.one) | ||
.then(() => cache.get(config.args.one)) | ||
.then((result) => { | ||
should(result).equal(undefined); | ||
return cb(null, result); | ||
}) | ||
.catch((err) => cb(err)); | ||
incr: async (cb) => { | ||
try { | ||
const value = await cache.incr(config.args.one); | ||
should(value).eql(1); | ||
const value2 = await cache.incr(config.args.one); | ||
should(value2).eql(2); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
incr: (cb) => { | ||
cache.incr(config.args.one) | ||
.then((value) => { | ||
should(value).eql(1); | ||
return cache.incr(config.args.one); | ||
}) | ||
.then((value) => { | ||
should(value).eql(2); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
incr2: async (cb) => { | ||
try { | ||
const amount = 2; | ||
const value = await cache.incr(config.args.one, amount); | ||
should(value).eql(2); | ||
const value2 = await cache.incr(config.args.one, amount); | ||
should(value2).eql(4); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
incr2: (cb) => { | ||
const amount = 2; | ||
cache.incr(config.args.one, amount) | ||
.then((value) => { | ||
should(value).eql(2); | ||
return cache.incr(config.args.one, amount); | ||
}) | ||
.then((value) => { | ||
should(value).eql(4); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
decr: async (cb) => { | ||
try { | ||
const value = await cache.decr(config.args.one); | ||
should(value).eql(-1); | ||
const value2 = await cache.decr(config.args.one); | ||
should(value2).eql(-2); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
decr: (cb) => { | ||
cache.decr(config.args.one) | ||
.then((value) => { | ||
should(value).eql(-1); | ||
return cache.decr(config.args.one); | ||
}) | ||
.then((value) => { | ||
should(value).eql(-2); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
decr2: async (cb) => { | ||
try { | ||
const amount = 2; | ||
const value = await cache.decr(config.args.one, amount); | ||
should(value).eql(-2); | ||
const value2 = await cache.decr(config.args.one, amount); | ||
should(value2).eql(-4); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
decr2: (cb) => { | ||
const amount = 2; | ||
cache.decr(config.args.one, amount) | ||
.then((value) => { | ||
should(value).eql(-2); | ||
return cache.decr(config.args.one, amount); | ||
}) | ||
.then((value) => { | ||
should(value).eql(-4); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
peek: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.one); | ||
await cache.set(config.args.two, config.args.two); | ||
await cache.set(config.args.three, config.args.three); | ||
const result = await cache.peek(config.args.one); | ||
should(result).equal(config.args.one); | ||
await cache.set(config.args.four, config.args.four); | ||
const result2 = await cache.get(config.args.one); | ||
should(typeof result2).equal('undefined'); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
peek: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then(() => cache.set(config.args.two, config.args.two)) | ||
.then(() => cache.set(config.args.three, config.args.three)) | ||
.then(() => cache.peek(config.args.one)) | ||
.then((result) => { | ||
should(result).equal(config.args.one); | ||
return cache.set(config.args.four, config.args.four); | ||
}) | ||
.then(() => cache.get(config.args.one)) | ||
.then((result) => { | ||
should(undefined).equal(result); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
has: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.one); | ||
const has = await cache.has(config.args.one); | ||
should(has).equal(true); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
has: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then(() => cache.has(config.args.one)) | ||
.then((has) => { | ||
should(has).equal(true); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
length: async (cb) => { | ||
try { | ||
await cache.set(config.args.two, config.args.two); | ||
await cache.set(config.args.three, config.args.three); | ||
const length = await cache.length(); | ||
should(length).equal(2); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
length: (cb) => { | ||
cache.set(config.args.two, config.args.two) | ||
.then(() => cache.set(config.args.three, config.args.three)) | ||
.then(() => cache.length()) | ||
.then((length) => { | ||
should(length).equal(2); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
itemCount: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.one); | ||
const itemCount = await cache.itemCount(); | ||
should(itemCount).equal(1); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
itemCount: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then(() => cache.itemCount()) | ||
.then((itemCount) => { | ||
should(itemCount).equal(1); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
reset: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.one); | ||
const result = await cache.get(config.args.one); | ||
should(typeof result).equal('string'); | ||
await cache.reset(); | ||
const result2 = await cache.get(config.args.one); | ||
should(typeof result2).equal('undefined'); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
reset: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then(() => cache.reset()) | ||
.then(() => cache.get(config.args.one)) | ||
.then((result) => { | ||
should(result).equal(undefined); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
keys: async (cb) => { | ||
try { | ||
const result = await cache.set(config.args.one, config.args.one); | ||
should(result).equal(true); | ||
const keys = await cache.keys(); | ||
should(keys.length).equal(1); | ||
should(keys[0]).equal(config.args.one); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
keys: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then((result) => { | ||
should(result).equal(true); | ||
return cache.keys(); | ||
}) | ||
.then((keys) => { | ||
should(keys.length).equal(1); | ||
should(keys[0]).equal(config.args.one); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
values: async (cb) => { | ||
try { | ||
await cache.set(config.args.two, config.args.two); | ||
const values = await cache.values(); | ||
should(values).deepEqual([config.args.two]); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
values: (cb) => { | ||
cache.set(config.args.two, config.args.two) | ||
.then(() => cache.values()) | ||
.then((values) => { | ||
should(values).deepEqual([config.args.two]); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
prune: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.one); | ||
await cache.prune(); | ||
const itemCount = await cache.itemCount(); | ||
should(itemCount).equal(1); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
prune: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then(() => cache.prune()) | ||
.then(() => cache.itemCount()) | ||
.then((itemCount) => { | ||
should(itemCount).equal(1); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
dump: async (cb) => { | ||
try { | ||
await cache.set(config.args.one, config.args.two); | ||
const dump = await cache.dump(); | ||
should(dump[0].k).equal(config.args.one); | ||
should(dump[0].v).equal(config.args.two); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
dump: (cb) => { | ||
cache.set(config.args.one, config.args.two) | ||
.then(() => cache.dump()) | ||
.then((dump) => { | ||
should(dump[0].k).equal(config.args.one); | ||
should(dump[0].v).equal(config.args.two); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
getMax: async (cb) => { | ||
try { | ||
const max = await cache.max(); | ||
should(max).equal(3); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
getMax: (cb) => { | ||
cache.max() | ||
.then((max) => { | ||
should(max).equal(3); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
getMaxAge: async (cb) => { | ||
try { | ||
const maxAge = await cache.maxAge(); | ||
should(maxAge).equal(0); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
getMaxAge: (cb) => { | ||
cache.maxAge() | ||
.then((maxAge) => { | ||
should(maxAge).equal(0); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
getStale: async (cb) => { | ||
try { | ||
const stale = await cache.stale(); | ||
should(typeof stale).equal('undefined'); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
getStale: (cb) => { | ||
cache.stale() | ||
.then((stale) => { | ||
should(stale).equal(undefined); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
setMax: async (cb) => { | ||
try { | ||
const max = await cache.max(10000); | ||
should(max).equal(10000); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
setMax: (cb) => { | ||
cache.max(100) | ||
.then((max) => { | ||
should(max).equal(100); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
setMaxAge: async (cb) => { | ||
try { | ||
const maxAge = await cache.maxAge(10); | ||
should(maxAge).equal(10); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
setMaxAge: (cb) => { | ||
cache.maxAge(10) | ||
.then((maxAge) => { | ||
should(maxAge).equal(10); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
setStale: async (cb) => { | ||
try { | ||
const stale = await cache.stale(true); | ||
should(stale).equal(true); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
setStale: (cb) => { | ||
cache.stale(true) | ||
.then((stale) => { | ||
should(stale).equal(true); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
addFour: async (cb) => { | ||
try { | ||
const value = await cache.set(config.args.one, config.args.one); | ||
should(value).equal(true); | ||
await cache.set(config.args.two, config.args.two); | ||
await cache.set(config.args.three, config.args.three); | ||
await cache.set(config.args.four, config.args.four); | ||
const result = await cache.get(config.args.one); | ||
should(typeof result).equal('undefined'); | ||
const result2 = await cache.get(config.args.four); | ||
should(result2).equal(config.args.four); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
addFour: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then((value) => { | ||
should(value).equal(true); | ||
return cache.set(config.args.two, config.args.two); | ||
}) | ||
.then(() => cache.set(config.args.three, config.args.three)) | ||
.then(() => cache.set(config.args.four, config.args.four)) | ||
.then(() => cache.get(config.args.one)) | ||
.then((result) => { | ||
should(result).equal(undefined); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
addFourAccessOne: async (cb) => { | ||
try { | ||
const value = await cache.set(config.args.one, config.args.one); | ||
should(value).equal(true); | ||
const value2 = await cache.set(config.args.two, config.args.two); | ||
should(value2).equal(true); | ||
const value3 = await cache.set(config.args.three, config.args.three); | ||
should(value3).equal(true); | ||
const value4 = await cache.get(config.args.one); | ||
should(value4).equal(config.args.one); | ||
const value5 = await cache.set(config.args.four, config.args.four); | ||
should(value5).equal(true); | ||
const result = await cache.get(config.args.one); | ||
should(result).equal(config.args.one); | ||
cb(null, true); | ||
} catch (err) { | ||
cb(err); | ||
} | ||
}, | ||
addFourAccessOne: (cb) => { | ||
cache.set(config.args.one, config.args.one) | ||
.then((value) => { | ||
should(value).equal(true); | ||
return cache.set(config.args.two, config.args.two); | ||
}) | ||
.then((value) => { | ||
should(value).equal(true); | ||
return cache.set(config.args.three, config.args.three); | ||
}) | ||
.then((value) => { | ||
should(value).equal(true); | ||
return cache.get(config.args.one); | ||
}) | ||
.then((value) => { | ||
should(value).equal(config.args.one); | ||
return cache.set(config.args.four, config.args.four); | ||
}) | ||
.then((value) => { | ||
should(value).equal(true); | ||
return cache.get(config.args.one); | ||
}) | ||
.then((result) => { | ||
should(result).equal(config.args.one); | ||
return cb(null, true); | ||
}) | ||
.catch((err) => cb(err)); | ||
}, | ||
}; | ||
@@ -507,0 +568,0 @@ } |
@@ -14,3 +14,3 @@ const LRUCache = require('../'); | ||
afterEach((done) => { | ||
testUtils.reset(() => done()); | ||
testUtils.reset(done); | ||
}); | ||
@@ -17,0 +17,0 @@ |
Sorry, the diff of this file is not supported yet
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package