Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

lru-cache-for-clusters-as-promised

Package Overview
Dependencies
Maintainers
1
Versions
49
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

lru-cache-for-clusters-as-promised - npm Package Compare versions

Comparing version 1.7.0 to 1.7.1

.github/workflows/ci.yml

130

index.d.ts

@@ -0,4 +1,32 @@

import LRUCache from "lru-cache";
// https://github.com/doublesharp/lru-cache-for-clusters-as-promised#lru-cache-for-clusters-as-promised
declare module "lru-cache-for-clusters-as-promised" {
interface LRUCaches {
[key: string]: LRUCache
}
// https://github.com/doublesharp/lru-cache-for-clusters-as-promised#options
// Options accepted by the LRUCacheForClustersAsPromised constructor.
interface cacheConstructorParam {
  // The namespace for this cache on the master thread, as it is not aware of the worker instances.
  namespace?: string,
  // The amount of time in milliseconds that a worker will wait for a response from the master before rejecting the Promise.
  timeout?: number,
  // When a request times out the Promise resolves to undefined by default; with a value of "reject" it rejects with an Error instead.
  failsafe?: "resolve" | "reject",
  // The maximum number of items that can be stored in the cache.
  max?: number,
  // The maximum age in milliseconds for an item to be considered valid.
  maxAge?: number,
  // When true, expired items are returned before they are removed rather than undefined.
  stale?: boolean,
  // Cron schedule used on the master thread to call prune() at regular intervals, for example "*/30 * * * * *" prunes the cache every 30 seconds; false disables pruning. Also works in single-threaded environments not using the cluster module.
  prune?: false | string,
  // Custom stringify function (defaults to JSON.stringify). Note: `function` is not a valid TypeScript type, so a function type expression is used.
  stringify?: (value: any) => string,
  // Custom parse function (defaults to JSON.parse).
  parse?: (value: string) => any,
}
// https://github.com/doublesharp/lru-cache-for-clusters-as-promised#example-usage

@@ -8,108 +36,112 @@ class Cache <G1 = never, G2 = never, G3 = never, G4 = never> {

// Call from the master to ensure that the listeners are enabled
static init(): void
// Called from the master to fetch underlying LRUCaches keyed by namespace
static getLruCaches(): LRUCaches
// Load an instance asynchronously to ensure that the cache has been created on the master.
static async getInstance(): Promise<Cache>
// Get the underlying LRUCache on the master thread (throws exception on worker)
getCache(): LRUCache
// Execute arbitrary command (function) on the cache.
async execute(command: string, ...args: any[]): Promise<any>
// Sets a value for a key. Specifying the maxAge will cause the value to expire per the stale value or when pruned.
set(key: string, value: G1 | G2 | G3 | G4, maxAge?: number): Promise<void>
async set(key: string, value: G1 | G2 | G3 | G4, maxAge?: number): Promise<void>
// Sets a value for a key. Specifying the maxAge will cause the value to expire per the stale value or when pruned.
setObject(key: string, object: Object, maxAge?: number): Promise<void>
async setObject(key: string, object: Object, maxAge?: number): Promise<void>
// Sets multiple key-value pairs in the cache at one time.
mSet(keys: { [index: string]: string | number }, maxAge?: number): Promise<void>
async mSet(keys: { [index: string]: string | number }, maxAge?: number): Promise<void>
// Sets multiple key-value pairs in the cache at one time, where the value is an object.
mSetObjects(keys: { [index: string]: G1 | G2 | G3 | G4 }, maxAge?: number): Promise<void>
async mSetObjects(keys: { [index: string]: G1 | G2 | G3 | G4 }, maxAge?: number): Promise<void>
// Returns a value for a key.
get(key: string): Promise<G1 | G2 | G3 | G4 | string | number>
async get(key: string): Promise<G1 | G2 | G3 | G4 | string | number>
// Returns a value for a key.
getObject(key: string): Promise<Object>
async getObject(key: string): Promise<Object>
// Returns values for multiple keys, results are in the form of { key1: '1', key2: '2' }.
mGet(keys: Array<string>): Promise<{ [index: string]: string | number }>
async mGet(keys: Array<string>): Promise<{ [index: string]: string | number }>
// Returns values as objects for multiple keys, results are in the form of { key1: '1', key2: '2' }.
mGetObjects(keys: Array<string>): Promise<{ [index: string]: G1 | G2 | G3 | G4 | string | number}>
async mGetObjects(keys: Array<string>): Promise<{ [index: string]: G1 | G2 | G3 | G4 | string | number}>
// Returns the value for a key without updating its last access time.
peek(key: string): Promise< G1 | G2 | G3 | G4 | string | number>
async peek(key: string): Promise< G1 | G2 | G3 | G4 | string | number>
// Removes a value from the cache.
del(key: string): Promise<void>
async del(key: string): Promise<void>
// Removes multiple keys from the cache.
mDel(keys: Array<string>): Promise<void>
async mDel(keys: Array<string>): Promise<void>
// Returns true if the key exists in the cache.
has(key: string): Promise<boolean>
async has(key: string): Promise<boolean>
// Increments a numeric key value by the amount, which defaults to 1. More atomic in a clustered environment.
incr(key: string, amount?: number): Promise<void>
async incr(key: string, amount?: number): Promise<void>
// Decrements a numeric key value by the amount, which defaults to 1. More atomic in a clustered environment.
decr(key: string, amount?: number): Promise<void>
async decr(key: string, amount?: number): Promise<void>
// Removes all values from the cache.
reset(): Promise<void>
async reset(): Promise<void>
// Returns an array of all the cache keys.
keys(): Promise<Array<string>>
async keys(): Promise<Array<string>>
// Returns an array of all the cache values.
values(): Promise<Array< G1 | G2 | G3 | G4 | string | number>>
async values(): Promise<Array< G1 | G2 | G3 | G4 | string | number>>
// Returns a serialized array of the cache contents.
dump(): Promise<Array<string>>
async dump(): Promise<Array<string>>
// Manually removes items from the cache rather than on get.
prune(): Promise<void>
async prune(): Promise<void>
// Return the number of items in the cache.
length(): Promise<number>
async length(): Promise<number>
// Return the number of items in the cache - same as length().
itemCount(): Promise<number>
async itemCount(): Promise<number>
// Get or update the max value for the cache.
max(): Promise<number>
async max(): Promise<number>
// Get or update the maxAge value for the cache.
max(max: number): Promise<void>
async max(max: number): Promise<void>
// Get or update the maxAge value for the cache.
maxAge(): Promise<number>
async maxAge(): Promise<number>
// Get or update the maxAge value for the cache.
maxAge(maxAge: number): Promise<void>
async maxAge(maxAge: number): Promise<void>
/**
* Get or update the stale value for the cache.
* @deprecated please use allowStale()
*/
async stale(): Promise<boolean>
/**
* Get or update the stale value for the cache.
* @param stale
* @deprecated please use allowStale(stale)
*/
async stale(stale: boolean): Promise<void>
// Get or update the stale value for the cache.
stale(): Promise<boolean>
async allowStale(): Promise<boolean>
// Get or update the stale value for the cache.
stale(stale: boolean): Promise<void>
async allowStale(stale: boolean): Promise<void>
}
// https://github.com/doublesharp/lru-cache-for-clusters-as-promised#options
// Options accepted by the LRUCacheForClustersAsPromised constructor.
interface cacheConstructorParam {
  // The namespace for this cache on the master thread, as it is not aware of the worker instances.
  namespace?: string,
  // The amount of time in milliseconds that a worker will wait for a response from the master before rejecting the Promise.
  timeout?: number,
  // When a request times out the Promise resolves to undefined by default; with a value of "reject" it rejects with an Error instead.
  failsafe?: "resolve" | "reject",
  // The maximum number of items that can be stored in the cache.
  max?: number,
  // The maximum age in milliseconds for an item to be considered valid.
  maxAge?: number,
  // When true, expired items are returned before they are removed rather than undefined.
  stale?: boolean,
  // Cron schedule used on the master thread to call prune() at regular intervals, for example "*/30 * * * * *" prunes the cache every 30 seconds; false disables pruning. Also works in single-threaded environments not using the cluster module.
  prune?: false | string,
  // Custom stringify function (defaults to JSON.stringify). Note: `function` is not a valid TypeScript type, so a function type expression is used.
  stringify?: (value: any) => string,
  // Custom parse function (defaults to JSON.parse).
  parse?: (value: string) => any,
}
module Cache {

@@ -116,0 +148,0 @@ }

@@ -18,3 +18,2 @@ const Debug = require('debug');

response.id = request.id;
response.func = request.func;
messages(`Master sending response to worker ${worker.id}`, response);

@@ -24,49 +23,27 @@ worker.send(response);

const handleConstructLruCache = (
caches,
request,
worker,
{ startPruneCronJob }
) => {
let created = false;
let lru;
const params = request.arguments;
const options = params[0];
// create a new lru-cache, give it a namespace, and save it locally
if (caches[request.namespace]) {
lru = caches[request.namespace];
// update property values as needed
['max', 'maxAge', 'stale'].forEach((prop) => {
if (options[prop] && options[prop] !== lru[prop]) {
lru[prop] = options[prop];
}
});
const getLruCache = (caches, request, worker) => {
const { namespace, arguments: args } = request;
let lru = caches[namespace];
if (lru && lru instanceof LRUCache) {
lru.isnew = false;
utils.setCacheProperties(lru, args[0]);
} else {
created = true;
lru = caches[request.namespace] = new LRUCache(...params);
// start a job to clean the cache
if (params[0].prune) {
lru.job = startPruneCronJob(lru, params[0].prune, request.namespace);
}
lru = caches[namespace] = new LRUCache(...args);
lru.isnew = true;
messages(`Created new LRUCache for namespace '${namespace}'`);
}
return handleConstructLruCacheResponse(request, worker, created, lru);
// use cronjob on master to prune cache, false to disable if running
utils.handlePruneCronJob(lru, args[0].prune, request.namespace);
const value = {
namespace: request.namespace,
isnew: lru.isnew,
max: lru.max,
maxAge: lru.maxAge,
stale: lru.allowStale,
};
messages(`${lru.isnew ? 'Created' : 'Fetched'} LRUCache`, args[0], value);
return sendResponseToWorker({ value }, request, worker);
};
// Reply to a cache-construction request with the resulting cache's
// configuration (namespace, whether it was newly created, and its
// max / maxAge / stale settings).
const handleConstructLruCacheResponse = (request, worker, created, lru) => {
  const payload = {
    value: {
      namespace: request.namespace,
      isnew: created,
      max: lru.max,
      maxAge: lru.maxAge,
      stale: lru.stale,
    },
  };
  return sendResponseToWorker(payload, request, worker);
};
const handleGetCacheConfigValue = (caches, request, worker) => {
const getOrSetConfigValue = (caches, request, worker) => {
const lru = caches[request.namespace];

@@ -86,3 +63,3 @@ const params = request.arguments;

const handleIncrementOrDecrement = (caches, request, worker) => {
const incrementOrDecrement = (caches, request, worker) => {
const lru = caches[request.namespace];

@@ -108,11 +85,8 @@ const params = request.arguments;

const handleGetMultipleValues = (caches, request, worker) =>
const getMultipleValues = (caches, request, worker) =>
handleMultipleValues(caches, request, worker, 'mGet');
const handleSetMultipleValues = (caches, request, worker) =>
const setMultipleValues = (caches, request, worker) =>
handleMultipleValues(caches, request, worker, 'mSet');
const handleDeleteMultipleValues = (caches, request, worker) =>
const deleteMultipleValues = (caches, request, worker) =>
handleMultipleValues(caches, request, worker, 'mDel');
const handleMultipleValues = (caches, request, worker, func) => {

@@ -123,24 +97,39 @@ const value = utils[func](caches[request.namespace], request.arguments);

const handleDefaultLruFunction = (caches, request, worker) => {
return sendResponseToWorker(
{
value: caches[request.namespace][request.func](...request.arguments),
},
request,
worker
);
// Invoke an arbitrary LRUCache method on behalf of a worker, relaying either
// the return value or the error message back over IPC.
const defaultLruFunction = (caches, request, worker) => {
  try {
    const lru = caches[request.namespace];
    const method = lru[request.func];
    if (typeof method !== 'function') {
      throw new Error(`LRUCache.${request.func}() is not a valid function`);
    }
    // Call with the cache as the receiver, exactly as lru[func](...args) would.
    const value = method.apply(lru, request.arguments);
    return sendResponseToWorker({ value }, request, worker);
  } catch (err) {
    return sendResponseToWorker({ error: err.message }, request, worker);
  }
};
const messageHandlerFunctions = {
'()': handleConstructLruCache,
mGet: handleGetMultipleValues,
mSet: handleSetMultipleValues,
mDel: handleDeleteMultipleValues,
decr: handleIncrementOrDecrement,
incr: handleIncrementOrDecrement,
max: handleGetCacheConfigValue,
maxAge: handleGetCacheConfigValue,
stale: handleGetCacheConfigValue,
itemCount: handleGetCacheConfigValue,
length: handleGetCacheConfigValue,
// Dispatch table mapping worker IPC function names to master-side handlers.
const messageHandlers = {
// '()' is the pseudo-function name sent when a worker constructs a cache.
'()': getLruCache,
mGet: getMultipleValues,
mSet: setMultipleValues,
mDel: deleteMultipleValues,
decr: incrementOrDecrement,
incr: incrementOrDecrement,
// Config properties share a single get-or-set handler.
max: getOrSetConfigValue,
maxAge: getOrSetConfigValue,
stale: getOrSetConfigValue,
allowStale: getOrSetConfigValue,
itemCount: getOrSetConfigValue,
length: getOrSetConfigValue,
// Any other name falls through to the generic LRUCache passthrough.
default: defaultLruFunction,
};

@@ -153,6 +142,4 @@

*/
// Look up the handler registered for an IPC function name, defaulting to the
// generic LRUCache passthrough when none is registered.
const getMessageHandler = (func) =>
  messageHandlerFunctions[func] || handleDefaultLruFunction;
const getMessageHandler = (func) =>
messageHandlers[func] || messageHandlers.default;

@@ -159,0 +146,0 @@ module.exports = {

const cluster = require('cluster');
const CronJob = require('cron').CronJob;
const Debug = require('debug');

@@ -17,24 +16,2 @@ const LRUCache = require('lru-cache');

/**
* Starts a cron job to prune stale objects from the cache
* @param {LRUCache} cache The cache we want to prune
* @param {string} cronTime The cron schedule
* @param {string} namespace The namespace for shared caches
* @return {CronJob} The cron job which has already been started
*/
// Starts a cron job that prunes stale objects from `cache` on the `cronTime`
// schedule; `namespace` is used only in log messages. Returns the started
// CronJob so callers can stop it later.
function startPruneCronJob(cache, cronTime, namespace) {
debug('Creating cache prune job.', cache);
const job = new CronJob({
cronTime,
onTick: () => {
debug(`Pruning cache ${namespace}`, cache);
cache.prune();
},
start: true,
runOnInit: true,
});
// NOTE(review): `start: true` should already start the job, making this call
// look redundant — confirm against the `cron` package documentation.
job.start();
return job;
}
// this code will only run on the master to set up handles for messages from the workers

@@ -47,10 +24,7 @@ if (cluster.isMaster) {

if (request.source !== config.source) return;
messages(`Master recieved message from worker ${worker.id}`, request);
messages(`Master received message from worker ${worker.id}`, request);
return masterMessages.getMessageHandler(request.func)(
caches,
request,
worker,
{
startPruneCronJob,
}
worker
);

@@ -61,34 +35,29 @@ });

function getLruCache(caches, cache, options, startPruneCronJob) {
if (caches[cache.namespace]) {
debug(`Loaded cache from shared namespace ${cache.namespace}`);
return caches[cache.namespace];
function getLruCache(caches, namespace, options) {
let lru = caches[namespace];
if (!lru || lru instanceof LRUCache === false) {
lru = caches[namespace] = new LRUCache(options);
debug(`Created new LRUCache for namespace '${namespace}'`);
}
const lru = new LRUCache(options);
caches[cache.namespace] = lru;
if (options.prune && startPruneCronJob) {
lru.job = startPruneCronJob(lru, options.prune, cache.namespace);
}
debug(`Created new LRU cache ${cache.namespace}`);
utils.handlePruneCronJob(lru, options.prune, namespace);
return lru;
}
const getCacheConfigValue = ({
const getOrSetConfigValue = async ({
caches,
namespace,
options,
func,
funcArgs,
func: property,
funcArgs: value,
}) => {
const lru = getLruCache(caches, namespace, options);
return new Promise((resolve) => {
if (funcArgs[0]) {
lru[func] = funcArgs[0];
}
return resolve(lru[func]);
});
if (value[0]) {
lru[property] = value[0];
}
return lru[property];
};
const incrementOrDecrement = ({
const incrementOrDecrement = async ({
caches,

@@ -99,34 +68,28 @@ namespace,

funcArgs,
startPruneCronJob,
}) => {
const lru = getLruCache(caches, namespace, options, startPruneCronJob);
return new Promise((resolve) => {
// get the current value default to 0
let value = lru.get(funcArgs[0]);
// maybe initialize and increment
value =
(typeof value === 'number' ? value : 0) +
(funcArgs[1] || 1) * (func === 'decr' ? -1 : 1);
// set the new value
lru.set(funcArgs[0], value);
// resolve the new value
return resolve(value);
});
const lru = getLruCache(caches, namespace, options);
// get the current value default to 0
let value = lru.get(funcArgs[0]);
// maybe initialize and increment
value =
(typeof value === 'number' ? value : 0) +
(funcArgs[1] || 1) * (func === 'decr' ? -1 : 1);
// set the new value
lru.set(funcArgs[0], value);
return value;
};
const getMultipleValues = (options) => handleMultipleValues('mGet', options);
const setMultipleValues = (options) => handleMultipleValues('mSet', options);
const deleteMultipleValues = (options) => handleMultipleValues('mDel', options);
const getMultipleValues = async (options) =>
handleMultipleValues('mGet', options);
const setMultipleValues = async (options) =>
handleMultipleValues('mSet', options);
const deleteMultipleValues = async (options) =>
handleMultipleValues('mDel', options);
const handleMultipleValues = (
func,
{ namespace, options, funcArgs, startPruneCronJob }
) => {
const lru = getLruCache(caches, namespace, options, startPruneCronJob);
return new Promise((resolve) => {
return resolve(utils[func](lru, funcArgs));
});
// Applies one of the multi-key helpers ('mGet' | 'mSet' | 'mDel') to the
// namespaced cache, creating the cache on demand via getLruCache.
// NOTE(review): `caches` is not a parameter here — presumably a module-level
// registry in this file; confirm it is in scope where this is defined.
const handleMultipleValues = async (func, { namespace, options, funcArgs }) => {
const lru = getLruCache(caches, namespace, options);
return utils[func](lru, funcArgs);
};
const defaultLruFunction = ({
const defaultLruFunction = async ({
caches,

@@ -137,8 +100,9 @@ namespace,

funcArgs,
startPruneCronJob,
}) => {
const lru = getLruCache(caches, namespace, options, startPruneCronJob);
return new Promise((resolve) => {
return resolve(lru[func](...funcArgs));
});
const lru = getLruCache(caches, namespace, options);
if (typeof lru[func] !== 'function') {
throw new Error(`LRUCache.${func}() is not a valid function`);
}
// just call the function on the lru-cache
return lru[func](...funcArgs);
};

@@ -152,7 +116,7 @@

incr: incrementOrDecrement,
max: getCacheConfigValue,
maxAge: getCacheConfigValue,
stale: getCacheConfigValue,
itemCount: getCacheConfigValue,
length: getCacheConfigValue,
max: getOrSetConfigValue,
maxAge: getOrSetConfigValue,
allowStale: getOrSetConfigValue,
itemCount: getOrSetConfigValue,
length: getOrSetConfigValue,
};

@@ -169,4 +133,9 @@

module.exports = {
getPromisified: (namespace, options) => {
return (...args) => {
caches,
getPromisified: ({ namespace }, options) => {
// create the new LRU cache
const cache = getLruCache(caches, namespace, options);
utils.setCacheProperties(cache, options);
// return function to promisify function calls
return async (...args) => {
// acting on the local lru-cache

@@ -178,2 +147,3 @@ messages(namespace, args);

const funcArgs = Array.prototype.slice.call(args, 1, args.length);
// this returns an async function to handle the function call
return getPromiseHandler(func)({

@@ -185,3 +155,2 @@ caches,

funcArgs,
startPruneCronJob,
});

@@ -188,0 +157,0 @@ };

@@ -0,1 +1,27 @@

const CronJob = require('cron').CronJob;
const Debug = require('debug');
const config = require('../config');
const debug = new Debug(`${config.source}-utils`);
// Stop the prune job attached to `cache` and drop the reference so a new job
// can be installed; `cronTime` only selects the log wording (truthy means a
// replacement job is coming, falsy means pruning is being disabled).
const destroyCacheCron = (cache, cronTime, namespace) => {
  debug(`${cronTime ? 'Updating' : 'Stopping'} cache prune job.`, namespace);
  const { job } = cache;
  job.stop();
  delete cache.job;
};
// Create and start a cron job that prunes `cache` on the `cronTime`
// schedule, attaching it to the cache as `cache.job`.
const createCacheCron = (cache, cronTime, namespace) => {
  debug('Creating cache prune job.', namespace, cronTime);
  cache.job = new CronJob({
    cronTime,
    start: true,
    onTick: () => {
      debug(`Pruning cache ${namespace}`, namespace, cronTime);
      cache.prune();
    },
  });
};
module.exports = {

@@ -27,2 +53,26 @@ mapObjects: (pairs, objs, jsonFunction) =>

},
setCacheProperties: (cache, options) => {
if (typeof options.max !== 'undefined') cache.max = options.max;
if (typeof options.maxAge !== 'undefined') cache.maxAge = options.maxAge;
if (typeof options.stale !== 'undefined') cache.allowStale = options.stale;
},
/**
* Starts/stops a cron job to prune stale objects from the cache
* @param {LRUCache} cache The cache we want to prune
* @param {string} cronTime The cron schedule
* @param {string} namespace The namespace for shared caches
* @return {CronJob} The cron job which has already been started
*/
handlePruneCronJob: (cache, cronTime, namespace) => {
if (typeof cronTime !== 'undefined' && cache.cronTime !== cronTime) {
if (cache.job) {
destroyCacheCron(cache, cronTime, namespace);
}
if (cronTime !== false) {
createCacheCron(cache, cronTime, namespace);
}
cache.cronTime = cronTime;
}
return cache.job;
},
};

@@ -24,3 +24,3 @@ const cluster = require('cluster');

const requestToMaster = (cache, func, funcArgs) =>
const requestToMaster = async (cache, func, funcArgs) =>
new Promise((resolve, reject) => {

@@ -36,11 +36,8 @@ // create the request to the master

// if we don't get a response in 100ms, return undefined
let failsafeTimeout = setTimeout(
() => {
failsafeTimeout = null;
return cache.failsafe === 'reject'
? reject(new Error('Timed out in isFailed()'))
: resolve();
},
func === '()' ? 5000 : cache.timeout
);
let failsafeTimeout = setTimeout(() => {
failsafeTimeout = null;
return cache.failsafe === 'reject'
? reject(new Error('Timed out in isFailed()'))
: resolve();
}, cache.timeout);
// set the callback for this id to resolve the promise

@@ -50,5 +47,6 @@ callbacks[request.id] = (result) => {

clearTimeout(failsafeTimeout);
return resolve(result.value);
return typeof result.error !== 'undefined'
? reject(new Error(result.error))
: resolve(result.value);
}
return false;
};

@@ -60,6 +58,4 @@ // send the request to the master process

const getPromisified = (cache, options) => {
// return a promise that resolves to the result of the method on
// the local lru-cache this is the master thread, or from the
// lru-cache on the master thread if this is a worker
const promisified = (...args) => {
// return a promise calls the lru-cache on the master thread via IPC messages
const promisified = async (...args) => {
// first argument is the function to run

@@ -73,6 +69,5 @@ const func = args[0];

// create a new LRU cache on the master
promisified('()', options)
.then((lruOptions) => debug('created lru cache on master', lruOptions))
.catch(
if (!options.noInit) {
// create a new LRU cache on the master
promisified('()', options).catch(
/* istanbul ignore next */ (err) => {

@@ -82,2 +77,3 @@ debug('failed to create lru cache on master', err, options);

);
}

@@ -84,0 +80,0 @@ return promisified;

@@ -47,108 +47,135 @@ /**

set(key, value, maxAge) {
static async getInstance(options) {
const lru = new LRUCacheForClustersAsPromised({ ...options, noInit: true });
if (cluster.isWorker) {
await lru.promisify('()', options);
}
return lru;
}
static getAllCaches() {
if (cluster.isWorker) {
throw new Error(
'LRUCacheForClustersAsPromised.getAllCaches() should only be called from the master thread.'
);
}
return master.caches;
}
getCache() {
const caches = LRUCacheForClustersAsPromised.getAllCaches();
return caches[this.namespace];
}
async execute(command, ...args) {
return this.promisify(command, ...args);
}
async set(key, value, maxAge) {
return this.promisify('set', key, value, maxAge);
}
get(key) {
async get(key) {
return this.promisify('get', key);
}
setObject(key, value, maxAge) {
async setObject(key, value, maxAge) {
return this.promisify('set', key, this.stringify(value), maxAge);
}
getObject(key) {
return this.promisify('get', key).then((value) =>
Promise.resolve(
// eslint-disable-next-line no-undefined
value ? this.parse(value) : undefined
)
);
async getObject(key) {
const value = await this.promisify('get', key);
// eslint-disable-next-line no-undefined
return value ? this.parse(value) : undefined;
}
del(key) {
async del(key) {
return this.promisify('del', key);
}
mGet(keys) {
async mGet(keys) {
return this.promisify('mGet', keys);
}
mSet(pairs, maxAge) {
async mSet(pairs, maxAge) {
return this.promisify('mSet', pairs, maxAge);
}
mGetObjects(keys) {
return this.promisify('mGet', keys).then((pairs) => {
const objs = {};
return utils
.mapObjects(pairs, objs, this.parse)
.then(() => Promise.resolve(objs));
});
async mGetObjects(keys) {
const pairs = await this.promisify('mGet', keys);
const objs = {};
await utils.mapObjects(pairs, objs, this.parse);
return objs;
}
mSetObjects(pairs, maxAge) {
async mSetObjects(pairs, maxAge) {
const objs = {};
return utils
.mapObjects(pairs, objs, this.stringify)
.then(() => this.promisify('mSet', objs, maxAge));
await utils.mapObjects(pairs, objs, this.stringify);
return this.promisify('mSet', objs, maxAge);
}
mDel(keys) {
async mDel(keys) {
return this.promisify('mDel', keys);
}
peek(key) {
async peek(key) {
return this.promisify('peek', key);
}
has(key) {
async has(key) {
return this.promisify('has', key);
}
incr(key, amount) {
async incr(key, amount) {
return this.promisify('incr', key, amount);
}
decr(key, amount) {
async decr(key, amount) {
return this.promisify('decr', key, amount);
}
reset() {
async reset() {
return this.promisify('reset');
}
keys() {
async keys() {
return this.promisify('keys');
}
values() {
async values() {
return this.promisify('values');
}
dump() {
async dump() {
return this.promisify('dump');
}
prune() {
async prune() {
return this.promisify('prune');
}
length() {
async length() {
return this.promisify('length');
}
itemCount() {
async itemCount() {
return this.promisify('itemCount');
}
stale(stale) {
return this.promisify('stale', stale);
/**
* @deprecated use allowStale(stale)
*/
async stale(stale) {
return this.allowStale(stale);
}
max(max) {
async allowStale(stale) {
return this.promisify('allowStale', stale);
}
async max(max) {
return this.promisify('max', max);
}
maxAge(maxAge) {
async maxAge(maxAge) {
return this.promisify('maxAge', maxAge);

@@ -155,0 +182,0 @@ }

{
"name": "lru-cache-for-clusters-as-promised",
"version": "1.7.0",
"version": "1.7.1",
"types": "index.d.ts",

@@ -42,4 +42,5 @@ "description": "LRU Cache that is safe for clusters",

"devDependencies": {
"async": "^3.2.0",
"depcheck": "1.4.0",
"eslint": "7.22.0",
"eslint": "7.23.0",
"eslint-plugin-mocha": "8.1.0",

@@ -46,0 +47,0 @@ "express": "4.17.1",

# lru-cache-for-clusters-as-promised
[![lru-cache-for-clusters-as-promised](https://img.shields.io/npm/v/lru-cache-for-clusters-as-promised.svg)](https://www.npmjs.com/package/lru-cache-for-clusters-as-promised)
![Build Status](https://jenkins.doublesharp.com/badges/build/lru-cache-for-clusters-as-promised.svg)
![Code Coverage](https://jenkins.doublesharp.com/badges/coverage/lru-cache-for-clusters-as-promised.svg)
![Code Coverage Badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/doublesharp/bc53be57c56fa0c0fc80a29164cc22fc/raw/lru-cache-for-clusters-as-promised__heads_master.json)
[![Code Climate](https://codeclimate.com/github/doublesharp/lru-cache-for-clusters-as-promised/badges/gpa.svg)](https://codeclimate.com/github/doublesharp/lru-cache-for-clusters-as-promised)

@@ -43,3 +42,7 @@ ![Dependency Status](https://david-dm.org/doublesharp/lru-cache-for-clusters-as-promised.svg)

* `prune: false|crontime string`, defaults to `false`
* Use a cron job on the master thread to call `prune()` on your cache at regular intervals specified in "crontime", for example "*/30 * * * * *" would prune the cache every 30 seconds (See [`node-cron` patterns](https://www.npmjs.com/package/cron#available-cron-patterns) for more info). Also works in single threaded environments not using the `cluster` module.
* Use a cron job on the master thread to call `prune()` on your cache at regular intervals specified in "crontime", for example "*/30 * * * * *" would prune the cache every 30 seconds (See [`node-cron` patterns](https://www.npmjs.com/package/cron#available-cron-patterns) for more info). Also works in single threaded environments not using the `cluster` module. Passing `false` to an existing namespace will disable any jobs that are scheduled.
* `parse: function`, defaults to `JSON.parse`
* Pass in a custom parser function to use for deserializing data sent to/from the cache. This is set on the `LRUCacheForClustersAsPromised` instance and in theory could be different per worker.
* `stringify: function`, defaults to `JSON.stringify`
  * Pass in a custom stringifier function to use for serializing data sent to/from the cache.

@@ -50,50 +53,65 @@ > ! note that `length` and `dispose` are missing as it is not possible to pass `functions` via IPC messages.

* `set(key, value, maxAge)`
## static functions
* `init(): void`
* Should be called when `cluster.isMaster === true` to initialize the caches.
* `getInstance(options): Promise<LRUCacheForClustersAsPromised>`
* Asynchronously returns an `LRUCacheForClustersAsPromised` instance once the underlying `LRUCache` is guaranteed to exist. Uses the same `options` you would pass to the constructor. When constructed synchronously other methods will ensure the underlying cache is created, but this method can be useful from the worker when you plan to interact with the caches directly. Note that this will slow down the construction time on the worker by a few milliseconds while the cache creation is confirmed.
* `getAllCaches(): { key : LRUCache }`
* Synchronously returns a dictionary of the underlying `LRUCache` caches keyed by namespace. Accessible only when `cluster.isMaster === true`, otherwise throws an exception.
## instance functions
* `getCache(): LRUCache`
* Gets the underlying `LRUCache`. Accessible only when `cluster.isMaster === true`, otherwise throws an exception.
* `set(key, value, maxAge): Promise<void>`
* Sets a value for a key. Specifying the `maxAge` will cause the value to expire per the `stale` value or when `prune`d.
* `setObject(key, object, maxAge)`
* `setObject(key, object, maxAge): Promise<void>`
  * Sets a cache value where the value is an object. Passes the values through `cache.stringify()`, which defaults to `JSON.stringify()`. Use a custom serializer like [`flatted`](https://www.npmjs.com/package/flatted) to handle cases like circular object references.
* `mSet({ key1: 1, key2: 2, ...}, maxAge)`
* `mSet({ key1: 1, key2: 2, ...}, maxAge): Promise<void>`
* Sets multiple key-value pairs in the cache at one time.
* `mSetObjects({ key1: { obj: 1 }, key2: { obj: 2 }, ...}, maxAge)`
* `mSetObjects({ key1: { obj: 1 }, key2: { obj: 2 }, ...}, maxAge): Promise<void>`
* Sets multiple key-value pairs in the cache at one time, where the value is an object. Passes the values through `cache.stringify()`, see `cache.setObject()`;
* `get(key)`
* `get(key): Promise<string | number | null | undefined>`
* Returns a value for a key.
* `getObject(key)`
* `getObject(key): Promise<Object | null | undefined>`
  * Returns an object value for a key. Passes the values through `cache.parse()`, which defaults to `JSON.parse()`. Use a custom parser like [`flatted`](https://www.npmjs.com/package/flatted) to handle cases like circular object references.
* `mGet([key1, key2, ...])`
* `mGet([key1, key2, ...]): Promise<{key:string | number | null | undefined}?>`
* Returns values for multiple keys, results are in the form of `{ key1: '1', key2: '2' }`.
* `mGetObjects([key1, key2, ...])`
* `mGetObjects([key1, key2, ...]): Promise<{key:Object | null | undefined}?>`
* Returns values as objects for multiple keys, results are in the form of `{ key1: '1', key2: '2' }`. Passes the values through `cache.parse()`, see `cache.getObject()`.
* `peek(key)`
* `peek(key): Promise<string | number | null | undefined>`
* Returns the value for a key without updating its last access time.
* `del(key)`
* `del(key): Promise<void>`
* Removes a value from the cache.
* `mDel([key1, key2...])`
* `mDel([key1, key2...]): Promise<void>`
  * Removes multiple keys from the cache.
* `has(key)`
* `has(key): Promise<boolean>`
* Returns true if the key exists in the cache.
* `incr(key, [amount])`
* `incr(key, [amount]): Promise<number>`
* Increments a numeric key value by the `amount`, which defaults to `1`. More atomic in a clustered environment.
* `decr(key, [amount])`
* `decr(key, [amount]): Promise<number>`
* Decrements a numeric key value by the `amount`, which defaults to `1`. More atomic in a clustered environment.
* `reset()`
* `reset(): Promise<void>`
* Removes all values from the cache.
* `keys()`
* `keys(): Promise<Array<string>>`
* Returns an array of all the cache keys.
* `values()`
* `values(): Promise<Array<string | number>>`
* Returns an array of all the cache values.
* `dump()`
* Returns a serialized array of the cache contents.
* `prune()`
* `prune(): Promise<void>`
* Manually removes items from the cache rather than on get.
* `length()`
* `length(): Promise<number>`
* Return the number of items in the cache.
* `itemCount()`
* `itemCount(): Promise<number>`
* Return the number of items in the cache - same as `length()`.
* `max([max])`
* `max([max]): Promise<number | void>`
* Get or update the `max` value for the cache.
* `maxAge([maxAge])`
* `maxAge([maxAge]): Promise<number | void>`
* Get or update the `maxAge` value for the cache.
* `stale([true|false])`
* Get or update the `stale` value for the cache.
* `allowStale([true|false]): Promise<boolean | void>`
* Get or update the `allowStale` value for the cache (set via `stale` in options). The `stale()` method is deprecated.
* `execute(command, [arg1, arg2, ...]): Promise<any>`
* Execute arbitrary command (`LRUCache` function) on the cache, returns whatever value was returned.

@@ -119,2 +137,8 @@ # example usage

// async cache
(async function() {
const options = { /* ...options */ };
const cache = await LRUCache.getInstance(options);
}());
const user = { name: 'user name' };

@@ -121,0 +145,0 @@ const key = 'userKey';

@@ -33,3 +33,4 @@ const request = require('supertest');

Object.keys(testUtils[test]).forEach((method) => {
it(`should ${testUtils[test][method]}`, (done) => {
it(`should ${testUtils[test][method]}`, function (done) {
this.timeout(config.timeout);
// run the request

@@ -45,3 +46,3 @@ request(`http://${config.server.host}:${config.server.port}`)

? done()
: done(new Error(response.body));
: done(new Error(response.body.error));
});

@@ -48,0 +49,0 @@ });

@@ -75,7 +75,8 @@ const cluster = require('cluster');

return {
accessSharedFromMaster: (done2) => {
accessSharedFromMaster: async (done2) => {
const cache = new LRUCache({
namespace: 'test-cache',
});
cache.keys().then(() => done2());
await cache.keys();
return done2();
},

@@ -82,0 +83,0 @@ getCacheMax: () => {

const config = require('./test-config');
const express = require('express');
const http = require('http');
const LRUCache = require('../../');
const LRUCache = require('../../lru-cache-for-clusters-as-promised');
const TestUtils = require('./test-utils');

@@ -13,9 +13,2 @@

// this will be the SAME cache no matter which module calls it.
const defaultCache = new LRUCache({
max: 1,
maxAge: 100000,
});
defaultCache.keys();
const cache = new LRUCache({

@@ -36,3 +29,3 @@ namespace: 'test-cache',

if (err) {
return res.send(err.message);
return res.send({ error: `${err.stack}` });
}

@@ -39,0 +32,0 @@ return res.send(true);

module.exports = {
timeout: 5000,
args: {

@@ -3,0 +4,0 @@ one: 'one',

@@ -5,5 +5,9 @@ const config = require('./test-config');

const should = require('should');
const LRUCacheForClustersAsPromised = require('../../');
const LRUCacheForClustersAsPromised = require('../../lru-cache-for-clusters-as-promised');
const LRUCache = require('lru-cache');
const member = cluster.isWorker ? 'worker' : 'master';
// create a default
new LRUCacheForClustersAsPromised();
/**

@@ -15,6 +19,12 @@ * Test class definitions for clusterd and non-clustered environments

function TestUtils(cache) {
const object = { foo: 'bar' };
const object = {
foo:
'bar barbarbar barbarbar barbarbar barbarbar barbarbar barbarbar barbarbar barbarbar barbar',
};
const pairs = {
foo: 'bar',
bizz: 'buzz',
obj: {
hi: 'im an object',
},
};

@@ -29,2 +39,7 @@ const keys = Object.keys(pairs);

tests: {
executeSetGet: 'try to call set via the execute() option',
executeFail: 'execute fail',
getLruCachesOnMaster:
'getLruCaches to return the underlying LRUCaches from master, throw error on worker',
getCache: 'get underlying LRUCache for promisified version',
mSet: 'mSet values',

@@ -43,2 +58,3 @@ mSetNull: 'mSet null pairs',

pruneJob: 'prune cache using cron job',
pruneJob2: 'prune cache using cron job, longer than test',
set: 'set(key, value)',

@@ -67,6 +83,83 @@ get: 'get(key)',

getStale: 'stale()',
getAllowStale: 'allowStale()',
setMax: 'max(10)',
setMaxAge: 'maxAge(10)',
setStale: 'stale(true)',
setAllowStale: 'allowStale(true)',
properties: 'update cache properties',
getInstance:
'get an instance asynchronously, ensures cache has been created on the server',
},
executeSetGet: async (cb) => {
try {
await cache.execute('set', 1, 'execute');
const value = await cache.execute('get', 1);
should(value).equal('execute');
cb(null, true);
} catch (err) {
cb(err);
}
},
executeFail: async (cb) => {
try {
try {
await cache.execute('borked', 1, 'execute');
} catch (err) {
should(err.message).equal(
'LRUCache.borked() is not a valid function'
);
}
cb(null, true);
} catch (err) {
cb(err);
}
},
getLruCachesOnMaster: async (cb) => {
try {
try {
const yo = 'yo yo yo';
// get the default cache and set the value using a promise
const defCache = new LRUCacheForClustersAsPromised();
await defCache.set(1, yo);
// get all the caches and check the default namespace
const caches = LRUCacheForClustersAsPromised.getAllCaches();
should(typeof caches.default).not.equal('undefined');
should(caches.default instanceof LRUCache).equal(true);
should(caches.default.allowStale).equal(false);
should(caches.default.maxAge).equal(0);
// get the value we set synchronously
const value = caches.default.get(1);
should(value).equal(yo);
} catch (err) {
if (!cluster.isWorker) {
throw err;
}
should(err.message).containEql('LRUCacheForClustersAsPromised');
}
cb(null, true);
} catch (err) {
cb(err);
}
},
getCache: async (cb) => {
try {
try {
const foo = 'foo foo foo';
const defCache = new LRUCacheForClustersAsPromised();
await defCache.set(1, foo);
const cache = defCache.getCache();
should(cache.get(1)).equal(foo);
} catch (err) {
if (!cluster.isWorker) {
throw err;
}
}
cb(null, true);
} catch (err) {
cb(err);
}
},
mSet: async (cb) => {

@@ -271,2 +364,3 @@ try {

try {
const namespace = `pruned-cache-${member}-${Math.random()}`;
const prunedCache = new LRUCacheForClustersAsPromised({

@@ -276,23 +370,77 @@ max: 10,

maxAge: 100,
namespace: `pruned-cache-${member}`,
namespace,
prune: '*/1 * * * * *',
});
await prunedCache.set(config.args.one, config.args.one);
await prunedCache.set(config.args.two, config.args.two, 2000);
// maybe delay the start to sync with cron
const now = new Date();
const delay =
now.getMilliseconds() < 800 ? 0 : 1000 - now.getMilliseconds() + 10;
setTimeout(async () => {
await prunedCache.set(config.args.one, config.args.one, 200);
await prunedCache.set(config.args.two, config.args.two, 1200);
const itemCount = await prunedCache.itemCount();
// we should see 2 items in the cache
should(itemCount).equal(2);
// check again in 1100 ms
setTimeout(async () => {
// one of the items should have been removed based on the expiration
const itemCount2 = await prunedCache.itemCount();
try {
should(itemCount2).equal(1);
new LRUCacheForClustersAsPromised({
namespace,
prune: false,
});
return cb(null, true);
} catch (err) {
return cb(err);
}
}, 1100);
}, delay);
} catch (err) {
cb(err);
}
},
pruneJob2: async (cb) => {
try {
const namespace = `pruned-cache-${member}-2-${Math.random()}`;
// create it with 1 sec pruning
new LRUCacheForClustersAsPromised({
namespace,
prune: '*/1 * * * * *',
});
// update it to run every 10 secs
const prunedCache = new LRUCacheForClustersAsPromised({
namespace,
prune: '*/5 * * * * *',
});
const itemCount = await prunedCache.itemCount();
// we should see 2 items in the cache
should(itemCount).equal(2);
// check again in 1100 ms
// maybe delay the start to sync with cron
const now = new Date();
const delay =
now.getSeconds() % 5 < 4 ? 0 : 1000 - now.getMilliseconds() + 10;
setTimeout(async () => {
// one of the items should have been removed based on the expiration
const itemCount2 = await prunedCache.itemCount();
try {
should(itemCount2).equal(1);
return cb(null, true);
} catch (err) {
return cb(err);
}
}, 1100);
await prunedCache.set(config.args.one, config.args.one, 200);
await prunedCache.set(config.args.two, config.args.two, 1200);
const itemCount = await prunedCache.itemCount();
// we should see 2 items in the cache
should(itemCount).equal(2);
// check again in 1100 ms
setTimeout(async () => {
// both items should be there after they are expired
const itemCount2 = await prunedCache.itemCount();
try {
should(itemCount2).equal(2);
// disable prune job
await LRUCacheForClustersAsPromised.getInstance({
namespace,
prune: false,
});
return cb(null, true);
} catch (err) {
return cb(err);
}
}, 1000);
}, delay);
} catch (err) {

@@ -491,4 +639,5 @@ cb(err);

try {
await cache.maxAge(20);
const maxAge = await cache.maxAge();
should(maxAge).equal(0);
should(maxAge).equal(20);
cb(null, true);

@@ -502,3 +651,3 @@ } catch (err) {

const stale = await cache.stale();
should(typeof stale).equal('undefined');
should(stale).equal(false);
cb(null, true);

@@ -509,2 +658,11 @@ } catch (err) {

},
getAllowStale: async (cb) => {
try {
const stale = await cache.allowStale();
should(stale).equal(false);
cb(null, true);
} catch (err) {
cb(err);
}
},
setMax: async (cb) => {

@@ -537,2 +695,79 @@ try {

},
setAllowStale: async (cb) => {
try {
const stale = await cache.allowStale(true);
should(stale).equal(true);
cb(null, true);
} catch (err) {
cb(err);
}
},
properties: async (cb) => {
try {
const propsCache = new LRUCacheForClustersAsPromised({
namespace: 'props-cache',
max: 1,
maxAge: 100000,
stale: true,
});
should(await propsCache.allowStale()).equal(true);
should(await propsCache.max()).equal(1);
should(await propsCache.maxAge()).equal(100000);
const propsCache2 = new LRUCacheForClustersAsPromised({
namespace: 'props-cache',
max: 10101,
stale: false,
});
should(await propsCache2.allowStale()).equal(false);
should(await propsCache2.max()).equal(10101);
should(await propsCache2.maxAge()).equal(100000);
const propsCache3 = new LRUCacheForClustersAsPromised({
namespace: 'props-cache',
maxAge: 1000,
});
should(await propsCache3.allowStale()).equal(false);
should(await propsCache3.max()).equal(10101);
should(await propsCache3.maxAge()).equal(1000);
cb(null, true);
} catch (err) {
cb(err);
}
},
getInstance: async (cb) => {
try {
const propsCache = await LRUCacheForClustersAsPromised.getInstance({
namespace: 'props-cache',
max: 1,
maxAge: 100000,
stale: true,
});
should(await propsCache.allowStale()).equal(true);
should(await propsCache.max()).equal(1);
should(await propsCache.maxAge()).equal(100000);
const propsCache2 = await LRUCacheForClustersAsPromised.getInstance({
namespace: 'props-cache',
max: 10101,
stale: false,
});
should(await propsCache2.allowStale()).equal(false);
should(await propsCache2.max()).equal(10101);
should(await propsCache2.maxAge()).equal(100000);
const propsCache3 = await LRUCacheForClustersAsPromised.getInstance({
namespace: 'props-cache',
maxAge: 1000,
});
should(await propsCache3.allowStale()).equal(false);
should(await propsCache3.max()).equal(10101);
should(await propsCache3.maxAge()).equal(1000);
cb(null, true);
} catch (err) {
cb(err);
}
},
addFour: async (cb) => {

@@ -539,0 +774,0 @@ try {

@@ -1,6 +0,8 @@

const LRUCache = require('../');
const async = require('async');
const LRUCacheForClustersAsPromised = require('../lru-cache-for-clusters-as-promised');
const TestUtils = require('./lib/test-utils');
const config = require('./lib/test-config');
describe('LRU Cache as Promised', () => {
const cache = new LRUCache({
describe('LRU Cache as Promised', async () => {
const cache = new LRUCacheForClustersAsPromised({
namespace: 'lru-cache-as-promised',

@@ -13,2 +15,6 @@ max: 3,

before(function () {
this.timeout(5000);
});
afterEach((done) => {

@@ -18,11 +24,12 @@ testUtils.reset(done);

['tests'].forEach((test) => {
Object.keys(testUtils[test]).forEach((method) => {
it(`should ${testUtils[test][method]}`, (done) => {
await async.eachOf(Object.keys(testUtils.tests), async (method) => {
return new Promise((resolve, reject) => {
it(`should ${testUtils.tests[method]}`, function () {
this.timeout(config.timeout);
// run the request
testUtils[method]((err) => {
if (err) {
return done(err);
return reject(err);
}
return done();
return resolve();
});

@@ -29,0 +36,0 @@ });

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc