@acuris/leprechaun-cache
Comparing version 0.0.7 to 0.0.9
@@ -6,2 +6,3 @@ import { Cacheable, LeprechaunCacheOptions } from './types';
    private lockTtlMs;
    private waitTimeMs;
    private returnStale;
@@ -15,3 +16,3 @@ private spinWaitCount;
    private onBackgroundError;
    constructor({ keyPrefix, softTtlMs, hardTtlMs, lockTtlMs, waitForUnlockMs, cacheStore, spinMs, returnStale, onMiss, onBackgroundError }: LeprechaunCacheOptions<T>);
    constructor({ keyPrefix, softTtlMs, hardTtlMs, lockTtlMs, waitTimeMs, waitForUnlockMs, cacheStore, spinMs, returnStale, onMiss, onBackgroundError }: LeprechaunCacheOptions<T>);
    clear(key: string): Promise<boolean>;
@@ -21,2 +22,3 @@ get(key: string): Promise<T>;
    private doGet;
    private race;
    private spinLock;
@@ -23,0 +25,0 @@ private getLock;
@@ -12,3 +12,3 @@ "use strict";
class LeprechaunCache {
    constructor({ keyPrefix = '', softTtlMs, hardTtlMs, lockTtlMs, waitForUnlockMs, cacheStore, spinMs, returnStale, onMiss, onBackgroundError = defaultBackgroundErrorHandler }) {
    constructor({ keyPrefix = '', softTtlMs, hardTtlMs, lockTtlMs, waitTimeMs = 0, waitForUnlockMs, cacheStore, spinMs, returnStale, onMiss, onBackgroundError = defaultBackgroundErrorHandler }) {
        this.inProgress = new Map();
@@ -18,2 +18,3 @@ this.hardTtlMs = hardTtlMs;
        this.lockTtlMs = lockTtlMs;
        this.waitTimeMs = waitTimeMs;
        this.spinWaitCount = Math.ceil(waitForUnlockMs / spinMs);
@@ -54,13 +55,24 @@ this.spinMs = spinMs;
        }
        if (result.expiresAt < Date.now()) {
            const update = this.updateCache(key, ttl, !this.returnStale);
            if (this.returnStale) {
                update.catch(this.onBackgroundError);
            }
            else {
                return update;
            }
        if (result.expiresAt > Date.now()) {
            return result.data;
        }
        return result.data;
        const update = this.updateCache(key, ttl, !this.returnStale);
        if (!this.returnStale) {
            return update;
        }
        return this.race(update, result.data);
    }
    async race(update, staleData) {
        update.catch(e => {
            this.onBackgroundError(e);
            return staleData;
        });
        if (this.waitTimeMs <= 0) {
            return staleData;
        }
        const returnStaleAfterWaitTime = new Promise(resolve => {
            setTimeout(resolve, this.waitTimeMs, staleData);
        });
        return Promise.race([update, returnStaleAfterWaitTime]);
    }
    async spinLock(key) {
@@ -67,0 +79,0 @@ const lock = {
@@ -19,2 +19,3 @@ export declare type Cacheable = string | number | boolean | object;
    lockTtlMs: number;
    waitTimeMs?: number;
    waitForUnlockMs: number;
@@ -21,0 +22,0 @@ cacheStore: CacheStore<T>;
{
  "name": "@acuris/leprechaun-cache",
  "version": "0.0.7",
  "version": "0.0.9",
  "private": false,
@@ -5,0 +5,0 @@ "description": "Caching library that supports double checked caching and stale returns to avoid stampede and slow responses",
@@ -7,3 +7,3 @@ # leprechaun-cache
If `returnStale` is true, then all requests for the same key will return a stale version of the cache while it is being regenerated (including the process that is performing the regeneration)
If `returnStale` is true, then it will call the `onMiss` handler in order to update the cache. If that takes longer than `waitTimeMs`, it will return the stale data
@@ -35,2 +35,3 @@ If `returnStale` is false (or there is nothing already in the cache), then all requests for that key will wait until the update is complete, and then return the updated version from the cache
  returnStale: true
  waitTimeMs: 500
  onBackgroundError: e => { console.error(e); }
@@ -48,13 +49,14 @@ })
| Option | type | Description |
| ----------------- | ----------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
| keyPrefix | string? | Optional prefix that will be added to all keys in the underlying store |
| softTtlMs | number (ms) | Soft TTL (in ms) for storing the items in the cache |
| cacheStore | CacheStore | the underlying KV store to use. Must implement CacheStore interface. A node_redis implementation is included. |
| onMiss | function | callback function that will be called when a value is either not in the cache, or the soft TTL has expired. |
| hardTtlMs | number (ms) | the TTL (in ms) to pass to the cacheStore set method - values should hard-expire after this and should no longer be retrievable from the store |
| lockTtlMs | number (ms) | the TTL (in ms) to pass to the cacheStore lock method. While the onMiss function is called, a lock will be acquired. This defines how long the lock should last. This should be longer than the longest time you expect your onMiss handler to take |
| waitForUnlockMs | number (ms) | if the onMiss function is locked, how long should the client wait for it to unlock before giving up. This is relevant when returnStale is false, or when there is no stale data in the cache |
| spinMs | number (ms) | How many milliseconds to wait before re-attempting to acquire the lock |
| returnStale | boolean | if this is true, when the value is expired (by the soft-ttl, set per-key), the library will return the stale result from the cache while updating the cache in the background. The next attempt to get, after this update has resolved, will then return the new version |
| onBackgroundError | function? | Called if there is any error while performing background tasks (calling the onMiss if returnStale true, or while setting the cache / unlocking after returning the data) |
| Option | type | Description |
| ----------------- | ----------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| keyPrefix | string? | Optional prefix that will be added to all keys in the underlying store |
| softTtlMs | number (ms) | Soft TTL (in ms) for storing the items in the cache |
| cacheStore | CacheStore | the underlying KV store to use. Must implement CacheStore interface. A node_redis implementation is included. |
| onMiss | function | callback function that will be called when a value is either not in the cache, or the soft TTL has expired. |
| hardTtlMs | number (ms) | the TTL (in ms) to pass to the cacheStore set method - values should hard-expire after this and should no longer be retrievable from the store |
| lockTtlMs | number (ms) | the TTL (in ms) to pass to the cacheStore lock method. While the onMiss function is called, a lock will be acquired. This defines how long the lock should last. This should be longer than the longest time you expect your onMiss handler to take |
| waitForUnlockMs | number (ms) | if the onMiss function is locked, how long should the client wait for it to unlock before giving up. This is relevant when returnStale is false, or when there is no stale data in the cache |
| spinMs | number (ms) | How many milliseconds to wait before re-attempting to acquire the lock |
| returnStale | boolean | if this is true, when the value is expired (by the soft-ttl, set per-key), the library will return the stale result (after waitTimeMs) from the cache while updating the cache in the background |
| waitTimeMs | number (ms) | Optional (default=0) The amount of time to wait for the onMiss handler to resolve before returning the stale data. If 0, it will always return the stale data immediately when it is expired |
| onBackgroundError | function? | Called if there is any error while performing background tasks (calling the onMiss if returnStale true, or while setting the cache / unlocking after returning the data) |
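To see the new option in context, here is a minimal usage sketch of `waitTimeMs` based on the options documented above. The `myCacheStore` placeholder, the `fetchUserFromDb` loader and the TTL values are illustrative assumptions, not part of the package API.

```typescript
import { LeprechaunCache } from '@acuris/leprechaun-cache'

// Hypothetical stand-ins for this sketch only: any object implementing the
// CacheStore interface (the package includes a node_redis-backed one) and an
// async loader that produces the value to cache.
declare const myCacheStore: any
declare function fetchUserFromDb(id: string): Promise<{ name: string }>

const userCache = new LeprechaunCache<{ name: string }>({
  keyPrefix: 'user',
  softTtlMs: 60 * 1000, // value is considered stale after one minute
  hardTtlMs: 24 * 60 * 60 * 1000, // the store may drop the value entirely after one day
  lockTtlMs: 10 * 1000,
  waitForUnlockMs: 3 * 1000,
  spinMs: 50,
  returnStale: true,
  waitTimeMs: 500, // new in 0.0.9: give onMiss up to 500ms before falling back to stale data
  cacheStore: myCacheStore,
  onMiss: fetchUserFromDb,
  onBackgroundError: e => console.error(e)
})

async function example(): Promise<void> {
  // For a soft-expired key this resolves with fresh data if onMiss finishes
  // within 500ms, otherwise with the stale cached value while the refresh
  // keeps running in the background.
  const user = await userCache.get('some-user-id')
  console.log(user.name)
}
```

With `waitTimeMs` left at its default of 0, the stale value is returned immediately, which matches the 0.0.7 behaviour when `returnStale` is true.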
@@ -22,2 +22,3 @@ import { CacheStore, Cacheable, OnCacheMiss, LeprechaunCacheOptions, CacheItem } from './types'
  private lockTtlMs: number
  private waitTimeMs: number
  private returnStale: boolean
@@ -37,2 +38,3 @@ private spinWaitCount: number
    lockTtlMs,
    waitTimeMs = 0,
    waitForUnlockMs,
@@ -48,2 +50,3 @@ cacheStore,
    this.lockTtlMs = lockTtlMs
    this.waitTimeMs = waitTimeMs
    this.spinWaitCount = Math.ceil(waitForUnlockMs / spinMs)
@@ -87,13 +90,33 @@ this.spinMs = spinMs
    }
    if (result.expiresAt < Date.now()) {
      const update = this.updateCache(key, ttl, !this.returnStale)
      if (this.returnStale) {
        update.catch(this.onBackgroundError)
      } else {
        return update
      }
    if (result.expiresAt > Date.now()) {
      return result.data
    }
    return result.data
    const update = this.updateCache(key, ttl, !this.returnStale)
    if (!this.returnStale) {
      return update
    }
    return this.race(update, result.data)
  }
  private async race(update: Promise<T>, staleData: T): Promise<T> {
    update.catch(e => {
      this.onBackgroundError(e)
      return staleData
    })
    if (this.waitTimeMs <= 0) {
      return staleData
    }
    const returnStaleAfterWaitTime: Promise<T> = new Promise(resolve => {
      setTimeout(resolve, this.waitTimeMs, staleData)
    })
    return Promise.race([update, returnStaleAfterWaitTime])
  }
  private async spinLock(key: string): Promise<LockResult> {
@@ -100,0 +123,0 @@ const lock: LockResult = {
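The `race` helper added above is what implements that fallback: refresh failures are routed to `onBackgroundError`, a `waitTimeMs` of 0 short-circuits to the stale value, and otherwise the pending refresh races a timer that resolves with the stale value (note that `setTimeout(resolve, this.waitTimeMs, staleData)` passes the stale data as the timer callback's argument). Below is a standalone sketch of the same timer-race idea, with an illustrative `freshOrStale` name and simplified error handling rather than the library's internals.

```typescript
// Resolve with the fresh value if it settles within waitMs, otherwise fall
// back to the stale one; a rejected refresh is reported and also falls back
// to stale, so the caller never sees the error directly.
async function freshOrStale<T>(fresh: Promise<T>, stale: T, waitMs: number): Promise<T> {
  const guarded = fresh.catch(err => {
    console.error(err) // report in the background, then keep serving stale data
    return stale
  })
  if (waitMs <= 0) {
    return stale
  }
  const timer = new Promise<T>(resolve => setTimeout(resolve, waitMs, stale))
  return Promise.race([guarded, timer])
}
```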
@@ -23,2 +23,3 @@ export type Cacheable = string | number | boolean | object
  lockTtlMs: number
  waitTimeMs?: number
  waitForUnlockMs: number
@@ -25,0 +26,0 @@ cacheStore: CacheStore<T>