lru-cache
Advanced tools
Comparing version 7.17.1 to 7.17.2
154
index.d.ts
@@ -274,16 +274,2 @@ // Project: https://github.com/isaacs/node-lru-cache | ||
/** | ||
* Make an asynchronous cached fetch using the {@link fetchMethod} function. | ||
* | ||
* If multiple fetches for the same key are issued, then they will all be | ||
* coalesced into a single call to fetchMethod. | ||
* | ||
* Note that this means that handling options such as | ||
* {@link allowStaleOnFetchAbort}, {@link signal}, and | ||
* {@link allowStaleOnFetchRejection} will be determined by the FIRST fetch() | ||
* call for a given key. | ||
* | ||
 * This is a known (fixable) shortcoming which will be addressed when | ||
* someone complains about it, as the fix would involve added complexity and | ||
* may not be worth the costs for this edge case. | ||
* | ||
* since: 7.6.0 | ||
@@ -653,3 +639,2 @@ */ | ||
noUpdateTTL?: boolean | ||
status?: Status<V> | ||
} | ||
@@ -663,3 +648,2 @@ | ||
updateAgeOnHas?: boolean | ||
status: Status<V> | ||
} | ||
@@ -675,3 +659,2 @@ | ||
noDeleteOnStaleGet?: boolean | ||
status?: Status<V> | ||
} | ||
@@ -708,141 +691,5 @@ | ||
allowStaleOnFetchAbort?: boolean | ||
status?: Status<V> | ||
} | ||
/** | ||
* Status object that may be passed to {@link fetch}, {@link get}, | ||
* {@link set}, and {@link has}. | ||
*/ | ||
interface Status<V> { | ||
/** | ||
* The status of a set() operation. | ||
* | ||
* - add: the item was not found in the cache, and was added | ||
* - update: the item was in the cache, with the same value provided | ||
* - replace: the item was in the cache, and replaced | ||
* - miss: the item was not added to the cache for some reason | ||
*/ | ||
set?: 'add' | 'update' | 'replace' | 'miss' | ||
/** | ||
* the ttl stored for the item, or undefined if ttls are not used. | ||
*/ | ||
ttl?: LRUMilliseconds | ||
/** | ||
* the start time for the item, or undefined if ttls are not used. | ||
*/ | ||
start?: LRUMilliseconds | ||
/** | ||
* The timestamp used for TTL calculation | ||
*/ | ||
now?: LRUMilliseconds | ||
/** | ||
* the remaining ttl for the item, or undefined if ttls are not used. | ||
*/ | ||
remainingTTL?: LRUMilliseconds | ||
/** | ||
* The calculated size for the item, if sizes are used. | ||
*/ | ||
size?: LRUSize | ||
/** | ||
* A flag indicating that the item was not stored, due to exceeding the | ||
* {@link maxEntrySize} | ||
*/ | ||
maxEntrySizeExceeded?: true | ||
/** | ||
* The old value, specified in the case of `set:'update'` or | ||
* `set:'replace'` | ||
*/ | ||
oldValue?: V | ||
/** | ||
* The results of a {@link has} operation | ||
* | ||
* - hit: the item was found in the cache | ||
* - stale: the item was found in the cache, but is stale | ||
* - miss: the item was not found in the cache | ||
*/ | ||
has?: 'hit' | 'stale' | 'miss' | ||
/** | ||
* The status of a {@link fetch} operation. | ||
* Note that this can change as the underlying fetch() moves through | ||
* various states. | ||
* | ||
* - inflight: there is another fetch() for this key which is in process | ||
* - get: there is no fetchMethod, so {@link get} was called. | ||
* - miss: the item is not in cache, and will be fetched. | ||
* - hit: the item is in the cache, and was resolved immediately. | ||
* - stale: the item is in the cache, but stale. | ||
* - refresh: the item is in the cache, and not stale, but | ||
* {@link forceRefresh} was specified. | ||
*/ | ||
fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' | ||
/** | ||
* The {@link fetchMethod} was called | ||
*/ | ||
fetchDispatched?: true | ||
/** | ||
* The cached value was updated after a successful call to fetchMethod | ||
*/ | ||
fetchUpdated?: true | ||
/** | ||
* The reason for a fetch() rejection. Either the error raised by the | ||
* {@link fetchMethod}, or the reason for an AbortSignal. | ||
*/ | ||
fetchError?: Error | ||
/** | ||
* The fetch received an abort signal | ||
*/ | ||
fetchAborted?: true | ||
/** | ||
* The abort signal received was ignored, and the fetch was allowed to | ||
* continue. | ||
*/ | ||
fetchAbortIgnored?: true | ||
/** | ||
* The fetchMethod promise resolved successfully | ||
*/ | ||
fetchResolved?: true | ||
/** | ||
* The results of the fetchMethod promise were stored in the cache | ||
*/ | ||
fetchUpdated?: true | ||
/** | ||
* The fetchMethod promise was rejected | ||
*/ | ||
fetchRejected?: true | ||
/** | ||
* The status of a {@link get} operation. | ||
* | ||
* - fetching: The item is currently being fetched. If a previous value is | ||
* present and allowed, that will be returned. | ||
* - stale: The item is in the cache, and is stale. | ||
* - hit: the item is in the cache | ||
* - miss: the item is not in the cache | ||
*/ | ||
get?: 'stale' | 'hit' | 'miss' | ||
/** | ||
* A fetch or get operation returned a stale value. | ||
*/ | ||
returnedStale?: true | ||
} | ||
/** | ||
* options which override the options set in the LRUCache constructor | ||
@@ -859,3 +706,2 @@ * when calling {@link fetch}. | ||
signal?: AbortSignal | ||
status?: Status<V> | ||
} | ||
@@ -862,0 +708,0 @@ |
123
index.js
@@ -339,11 +339,2 @@ const perf = | ||
this.statusTTL = (status, index) => { | ||
if (status) { | ||
status.ttl = this.ttls[index] | ||
status.start = this.starts[index] | ||
status.now = cachedNow || getNow() | ||
status.remainingTTL = status.now + status.ttl - status.start | ||
} | ||
} | ||
// debounce calls to perf.now() to 1s so we're not hitting | ||
@@ -390,3 +381,2 @@ // that costly call repeatedly. | ||
updateItemAge(_index) {} | ||
statusTTL(_status, _index) {} | ||
setItemTTL(_index, _ttl, _start) {} | ||
@@ -431,3 +421,3 @@ isStale(_index) { | ||
} | ||
this.addItemSize = (index, size, status) => { | ||
this.addItemSize = (index, size) => { | ||
this.sizes[index] = size | ||
@@ -441,6 +431,2 @@ if (this.maxSize) { | ||
this.calculatedSize += this.sizes[index] | ||
if (status) { | ||
status.entrySize = size | ||
status.totalCalculatedSize = this.calculatedSize | ||
} | ||
} | ||
@@ -634,3 +620,2 @@ } | ||
noUpdateTTL = this.noUpdateTTL, | ||
status, | ||
} = {} | ||
@@ -642,6 +627,2 @@ ) { | ||
if (this.maxEntrySize && size > this.maxEntrySize) { | ||
if (status) { | ||
status.set = 'miss' | ||
status.maxEntrySizeExceeded = true | ||
} | ||
// have to delete, in case a background fetch is there already. | ||
@@ -663,6 +644,3 @@ // in non-async cases, this is a no-op | ||
this.size++ | ||
this.addItemSize(index, size, status) | ||
if (status) { | ||
status.set = 'add' | ||
} | ||
this.addItemSize(index, size) | ||
noUpdateTTL = false | ||
@@ -686,13 +664,3 @@ } else { | ||
this.valList[index] = v | ||
this.addItemSize(index, size, status) | ||
if (status) { | ||
status.set = 'replace' | ||
const oldValue = | ||
oldVal && this.isBackgroundFetch(oldVal) | ||
? oldVal.__staleWhileFetching | ||
: oldVal | ||
if (oldValue !== undefined) status.oldValue = oldValue | ||
} | ||
} else if (status) { | ||
status.set = 'update' | ||
this.addItemSize(index, size) | ||
} | ||
@@ -706,3 +674,2 @@ } | ||
} | ||
this.statusTTL(status, index) | ||
if (this.disposeAfter) { | ||
@@ -763,3 +730,3 @@ while (this.disposed.length) { | ||
has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { | ||
has(k, { updateAgeOnHas = this.updateAgeOnHas } = {}) { | ||
const index = this.keyMap.get(k) | ||
@@ -771,11 +738,4 @@ if (index !== undefined) { | ||
} | ||
if (status) status.has = 'hit' | ||
this.statusTTL(status, index) | ||
return true | ||
} else if (status) { | ||
status.has = 'stale' | ||
this.statusTTL(status, index) | ||
} | ||
} else if (status) { | ||
status.has = 'miss' | ||
} | ||
@@ -814,13 +774,4 @@ return false | ||
const ignoreAbort = options.ignoreFetchAbort && v !== undefined | ||
if (options.status) { | ||
if (aborted && !updateCache) { | ||
options.status.fetchAborted = true | ||
options.status.fetchError = ac.signal.reason | ||
if (ignoreAbort) options.status.fetchAbortIgnored = true | ||
} else { | ||
options.status.fetchResolved = true | ||
} | ||
} | ||
if (aborted && !ignoreAbort && !updateCache) { | ||
return fetchFail(ac.signal.reason) | ||
return eb(ac.signal.reason) | ||
} | ||
@@ -836,3 +787,2 @@ // either we didn't abort, and are still here, or we did, and ignored | ||
} else { | ||
if (options.status) options.status.fetchUpdated = true | ||
this.set(k, v, fetchOpts.options) | ||
@@ -844,9 +794,2 @@ } | ||
const eb = er => { | ||
if (options.status) { | ||
options.status.fetchRejected = true | ||
options.status.fetchError = er | ||
} | ||
return fetchFail(er) | ||
} | ||
const fetchFail = er => { | ||
const { aborted } = ac.signal | ||
@@ -873,5 +816,2 @@ const allowStaleAborted = | ||
if (allowStale) { | ||
if (options.status && p.__staleWhileFetching !== undefined) { | ||
options.status.returnedStale = true | ||
} | ||
return p.__staleWhileFetching | ||
@@ -900,3 +840,2 @@ } else if (p.__returned === p) { | ||
} | ||
if (options.status) options.status.fetchDispatched = true | ||
const p = new Promise(pcall).then(cb, eb) | ||
@@ -907,4 +846,3 @@ p.__abortController = ac | ||
if (index === undefined) { | ||
// internal, don't expose status. | ||
this.set(k, p, { ...fetchOpts.options, status: undefined }) | ||
this.set(k, p, fetchOpts.options) | ||
index = this.keyMap.get(k) | ||
@@ -952,3 +890,2 @@ } else { | ||
forceRefresh = false, | ||
status, | ||
signal, | ||
@@ -958,3 +895,2 @@ } = {} | ||
if (!this.fetchMethod) { | ||
if (status) status.fetch = 'get' | ||
return this.get(k, { | ||
@@ -964,3 +900,2 @@ allowStale, | ||
noDeleteOnStaleGet, | ||
status, | ||
}) | ||
@@ -982,3 +917,2 @@ } | ||
ignoreFetchAbort, | ||
status, | ||
signal, | ||
@@ -989,3 +923,2 @@ } | ||
if (index === undefined) { | ||
if (status) status.fetch = 'miss' | ||
const p = this.backgroundFetch(k, index, options, fetchContext) | ||
@@ -997,9 +930,5 @@ return (p.__returned = p) | ||
if (this.isBackgroundFetch(v)) { | ||
const stale = | ||
allowStale && v.__staleWhileFetching !== undefined | ||
if (status) { | ||
status.fetch = 'inflight' | ||
if (stale) status.returnedStale = true | ||
} | ||
return stale ? v.__staleWhileFetching : (v.__returned = v) | ||
return allowStale && v.__staleWhileFetching !== undefined | ||
? v.__staleWhileFetching | ||
: (v.__returned = v) | ||
} | ||
@@ -1009,5 +938,3 @@ | ||
// unless we are already in the process of refreshing the cache. | ||
const isStale = this.isStale(index) | ||
if (!forceRefresh && !isStale) { | ||
if (status) status.fetch = 'hit' | ||
if (!forceRefresh && !this.isStale(index)) { | ||
this.moveToTail(index) | ||
@@ -1017,3 +944,2 @@ if (updateAgeOnGet) { | ||
} | ||
this.statusTTL(status, index) | ||
return v | ||
@@ -1025,9 +951,5 @@ } | ||
const p = this.backgroundFetch(k, index, options, fetchContext) | ||
const hasStale = p.__staleWhileFetching !== undefined | ||
const staleVal = hasStale && allowStale | ||
if (status) { | ||
status.fetch = hasStale && isStale ? 'stale' : 'refresh' | ||
if (staleVal && isStale) status.returnedStale = true | ||
} | ||
return staleVal ? p.__staleWhileFetching : (p.__returned = p) | ||
return allowStale && p.__staleWhileFetching !== undefined | ||
? p.__staleWhileFetching | ||
: (p.__returned = p) | ||
} | ||
@@ -1042,3 +964,2 @@ } | ||
noDeleteOnStaleGet = this.noDeleteOnStaleGet, | ||
status, | ||
} = {} | ||
@@ -1050,5 +971,3 @@ ) { | ||
const fetching = this.isBackgroundFetch(value) | ||
this.statusTTL(status, index) | ||
if (this.isStale(index)) { | ||
if (status) status.get = 'stale' | ||
// delete only if not an in-flight background fetch | ||
@@ -1059,20 +978,12 @@ if (!fetching) { | ||
} | ||
if (status) status.returnedStale = allowStale | ||
return allowStale ? value : undefined | ||
} else { | ||
if (status) { | ||
status.returnedStale = | ||
allowStale && value.__staleWhileFetching !== undefined | ||
} | ||
return allowStale ? value.__staleWhileFetching : undefined | ||
} | ||
} else { | ||
if (status) status.get = 'hit' | ||
// if we're currently fetching it, we don't actually have it yet | ||
// it's not stale, which means this isn't a staleWhileRefetching. | ||
// If it's not stale, and fetching, AND has a __staleWhileFetching | ||
// value, then that means the user fetched with {forceRefresh:true}, | ||
// so it's safe to return that value. | ||
// it's not stale, which means this isn't a staleWhileRefetching, | ||
// so we just return undefined | ||
if (fetching) { | ||
return value.__staleWhileFetching | ||
return undefined | ||
} | ||
@@ -1085,4 +996,2 @@ this.moveToTail(index) | ||
} | ||
} else if (status) { | ||
status.get = 'miss' | ||
} | ||
@@ -1089,0 +998,0 @@ } |
{ | ||
"name": "lru-cache", | ||
"description": "A cache object that deletes the least-recently-used items.", | ||
"version": "7.17.1", | ||
"version": "7.17.2", | ||
"author": "Isaac Z. Schlueter <i@izs.me>", | ||
@@ -16,3 +16,3 @@ "keywords": [ | ||
"presnap": "npm run prepare", | ||
"prepare": "node ./scripts/transpile-to-esm.js", | ||
"prepare": "node ./scripts/transpile-to-esm.mjs", | ||
"size": "size-limit", | ||
@@ -19,0 +19,0 @@ "test": "tap", |
186
README.md
@@ -529,3 +529,3 @@ # lru-cache | ||
### `set(key, value, [{ size, sizeCalculation, ttl, noDisposeOnSet, start, status }])` | ||
### `set(key, value, [{ size, sizeCalculation, ttl, noDisposeOnSet, start }])` | ||
@@ -557,7 +557,4 @@ Add a value to the cache. | ||
For the usage of the `status` option, see **Status Tracking** | ||
below. | ||
### `get(key, { updateAgeOnGet, allowStale } = {}) => value` | ||
### `get(key, { updateAgeOnGet, allowStale, status } = {}) => value` | ||
Return a value from the cache. | ||
@@ -572,5 +569,2 @@ | ||
For the usage of the `status` option, see **Status Tracking** | ||
below. | ||
### `async fetch(key, options = {}) => Promise` | ||
@@ -580,16 +574,11 @@ | ||
- `updateAgeOnGet` | ||
- `allowStale` | ||
- `size` | ||
- `sizeCalculation` | ||
- `ttl` | ||
- `noDisposeOnSet` | ||
- `forceRefresh` | ||
- `status` - See **Status Tracking** below. | ||
- `signal` - AbortSignal can be used to cancel the `fetch()`. | ||
Note that the `signal` option provided to the `fetchMethod` is | ||
a different object, because it must also respond to internal | ||
cache state changes, but aborting this signal will abort the | ||
one passed to `fetchMethod` as well. | ||
- `fetchContext` - sets the `context` option passed to the | ||
* `updateAgeOnGet` | ||
* `allowStale` | ||
* `size` | ||
* `sizeCalculation` | ||
* `ttl` | ||
* `noDisposeOnSet` | ||
* `forceRefresh` | ||
* `signal` - AbortSignal can be used to cancel the `fetch()` | ||
* `fetchContext` - sets the `context` option passed to the | ||
underlying `fetchMethod`. | ||
@@ -643,3 +632,3 @@ | ||
### `has(key, { updateAgeOnHas, status } = {}) => Boolean` | ||
### `has(key, { updateAgeOnHas } = {}) => Boolean` | ||
@@ -651,9 +640,4 @@ Check if a key is in the cache, without updating the recency of | ||
Will return `false` if the item is stale, even though it is | ||
technically in the cache. The difference can be determined (if | ||
it matters) by using a `status` argument, and inspecting the | ||
`has` field. | ||
technically in the cache. | ||
For the usage of the `status` option, see **Status Tracking** | ||
below. | ||
### `delete(key)` | ||
@@ -826,146 +810,2 @@ | ||
## Status Tracking | ||
Occasionally, it may be useful to track the internal behavior of | ||
the cache, particularly for logging, debugging, or for behavior | ||
within the `fetchMethod`. To do this, you can pass a `status` | ||
object to the `get()`, `set()`, `has()`, and `fetch()` methods. | ||
The `status` option should be a plain JavaScript object. | ||
The following fields will be set appropriately: | ||
```ts | ||
interface Status<V> { | ||
/** | ||
* The status of a set() operation. | ||
* | ||
* - add: the item was not found in the cache, and was added | ||
* - update: the item was in the cache, with the same value provided | ||
* - replace: the item was in the cache, and replaced | ||
* - miss: the item was not added to the cache for some reason | ||
*/ | ||
set?: 'add' | 'update' | 'replace' | 'miss' | ||
/** | ||
* the ttl stored for the item, or undefined if ttls are not used. | ||
*/ | ||
ttl?: LRUMilliseconds | ||
/** | ||
* the start time for the item, or undefined if ttls are not used. | ||
*/ | ||
start?: LRUMilliseconds | ||
/** | ||
* The timestamp used for TTL calculation | ||
*/ | ||
now?: LRUMilliseconds | ||
/** | ||
* the remaining ttl for the item, or undefined if ttls are not used. | ||
*/ | ||
remainingTTL?: LRUMilliseconds | ||
/** | ||
* The calculated size for the item, if sizes are used. | ||
*/ | ||
size?: LRUSize | ||
/** | ||
* A flag indicating that the item was not stored, due to exceeding the | ||
* {@link maxEntrySize} | ||
*/ | ||
maxEntrySizeExceeded?: true | ||
/** | ||
* The old value, specified in the case of `set:'update'` or | ||
* `set:'replace'` | ||
*/ | ||
oldValue?: V | ||
/** | ||
* The results of a {@link has} operation | ||
* | ||
* - hit: the item was found in the cache | ||
* - stale: the item was found in the cache, but is stale | ||
* - miss: the item was not found in the cache | ||
*/ | ||
has?: 'hit' | 'stale' | 'miss' | ||
/** | ||
* The status of a {@link fetch} operation. | ||
* Note that this can change as the underlying fetch() moves through | ||
* various states. | ||
* | ||
* - inflight: there is another fetch() for this key which is in process | ||
* - get: there is no fetchMethod, so {@link get} was called. | ||
* - miss: the item is not in cache, and will be fetched. | ||
* - hit: the item is in the cache, and was resolved immediately. | ||
* - stale: the item is in the cache, but stale. | ||
* - refresh: the item is in the cache, and not stale, but | ||
* {@link forceRefresh} was specified. | ||
*/ | ||
fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' | ||
/** | ||
* The {@link fetchMethod} was called | ||
*/ | ||
fetchDispatched?: true | ||
/** | ||
* The cached value was updated after a successful call to fetchMethod | ||
*/ | ||
fetchUpdated?: true | ||
/** | ||
* The reason for a fetch() rejection. Either the error raised by the | ||
* {@link fetchMethod}, or the reason for an AbortSignal. | ||
*/ | ||
fetchError?: Error | ||
/** | ||
* The fetch received an abort signal | ||
*/ | ||
fetchAborted?: true | ||
/** | ||
* The abort signal received was ignored, and the fetch was allowed to | ||
* continue. | ||
*/ | ||
fetchAbortIgnored?: true | ||
/** | ||
* The fetchMethod promise resolved successfully | ||
*/ | ||
fetchResolved?: true | ||
/** | ||
* The results of the fetchMethod promise were stored in the cache | ||
*/ | ||
fetchUpdated?: true | ||
/** | ||
* The fetchMethod promise was rejected | ||
*/ | ||
fetchRejected?: true | ||
/** | ||
* The status of a {@link get} operation. | ||
* | ||
* - fetching: The item is currently being fetched. If a previous value is | ||
* present and allowed, that will be returned. | ||
* - stale: The item is in the cache, and is stale. | ||
* - hit: the item is in the cache | ||
* - miss: the item is not in the cache | ||
*/ | ||
get?: 'stale' | 'hit' | 'miss' | ||
/** | ||
* A fetch or get operation returned a stale value. | ||
*/ | ||
returnedStale?: true | ||
} | ||
``` | ||
## Storage Bounds Safety | ||
@@ -972,0 +812,0 @@ |
Sorry, the diff of this file is not supported yet
134
2
2
2
9
117151
2704
958