lru-cache
Comparing version 7.14.0 to 7.14.1
index.js
@@ -382,2 +382,7 @@ const perf =
     this.requireSize = (k, v, size, sizeCalculation) => {
+      // provisionally accept background fetches.
+      // actual value size will be checked when they return.
+      if (this.isBackgroundFetch(v)) {
+        return 0
+      }
       if (!isPosInt(size)) {
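The added early return covers caches that use a `fetchMethod`: `fetch()` stores a promise placeholder before the resolved value (and hence its size) exists, so the size requirement is provisionally waived and re-checked on resolution. A minimal sketch of that scenario; the option names are real lru-cache v7 API, but the values and the fake lookup are illustrative:

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  maxSize: 100,
  // sizeCalculation can only run on resolved values; while a fetch is
  // in flight the entry holds a promise whose size is unknowable.
  sizeCalculation: (value) => value.length,
  fetchMethod: async (key) => 'value-for-' + key, // stand-in for a slow lookup
})

// fetch() inserts a background-fetch placeholder immediately; with this
// change requireSize treats it as size 0, and the real size check
// happens once the promise resolves.
cache.fetch('a').then((v) => console.log(v)) // 'value-for-a'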
@@ -404,5 +409,7 @@ if (sizeCalculation) {
       this.sizes[index] = size
-      const maxSize = this.maxSize - this.sizes[index]
-      while (this.calculatedSize > maxSize) {
-        this.evict(true)
+      if (this.maxSize) {
+        const maxSize = this.maxSize - this.sizes[index]
+        while (this.calculatedSize > maxSize) {
+          this.evict(true)
+        }
       }
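The practical effect of the new `if (this.maxSize)` guard: a cache that tracks sizes only for a per-entry cap (`maxEntrySize`) leaves `this.maxSize` at 0, so the old loop compared `calculatedSize` against a negative bound and could evict entries that should have stayed. A sketch of that configuration, with illustrative values:

const LRUCache = require('lru-cache')

// Bounded by entry count and per-entry size, with no total size
// budget, so this.maxSize stays 0 internally.
const cache = new LRUCache({
  max: 10,
  maxEntrySize: 1000,
  sizeCalculation: (value) => value.length,
})

cache.set('a', 'hello')
// Before the guard, the eviction loop ran against `0 - size`, a bound
// that calculatedSize always exceeds; with it, nothing is evicted here.
console.log(cache.get('a')) // 'hello'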
@@ -591,2 +598,5 @@ this.calculatedSize += this.sizes[index]
     if (this.maxEntrySize && size > this.maxEntrySize) {
+      // have to delete, in case a background fetch is there already.
+      // in non-async cases, this is a no-op
+      this.delete(k)
       return this
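The new `this.delete(k)` matters for async use: `fetch()` may already have stored a placeholder under the key, and merely refusing to store the oversized resolved value would leave that placeholder behind to be served later. A sketch, with a fetchMethod invented for illustration:

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 10,
  maxEntrySize: 8,
  sizeCalculation: (value) => value.length,
  fetchMethod: async () => 'a value that is far too large',
})

// The resolved value exceeds maxEntrySize, so set() rejects it and now
// also deletes the key, clearing the background-fetch placeholder.
// For plain synchronous set() calls the delete finds nothing: a no-op.
cache.fetch('k').then(() => {
  console.log(cache.has('k')) // false: oversized value was not cached
})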
package.json
 {
   "name": "lru-cache",
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "7.14.0",
+  "version": "7.14.1",
   "author": "Isaac Z. Schlueter <i@izs.me>",
README.md
@@ -99,6 +99,6 @@ # lru-cache
-The maximum number (or size) of items that remain in the cache
-(assuming no TTL pruning or explicit deletions). Note that fewer
-items may be stored if size calculation is used, and `maxSize` is
-exceeded. This must be a positive finite intger.
+The maximum number of items that remain in the cache (assuming no
+TTL pruning or explicit deletions). Note that fewer items may be
+stored if size calculation is used, and `maxSize` is exceeded.
+This must be a positive finite intger.
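For context, a minimal construction matching the reworded text above: `max` caps the entry count, while `maxSize` plus a size calculation can hold the cache below that count once sizes add up (the numbers are illustrative):

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 500,      // at most 500 entries...
  maxSize: 5000, // ...but possibly fewer, once their sizes add up
  sizeCalculation: (value, key) => value.length,
})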
@@ -284,5 +284,3 @@ At least one of `max`, `maxSize`, or `TTL` is required. This
 Also, as this cache is optimized for LRU/MRU operations, some of
-the staleness/TTL checks will reduce performance, as they will
-incur overhead by deleting from Map objects rather than simply
-throwing old Map objects away.
+the staleness/TTL checks will reduce performance.
@@ -479,4 +477,3 @@ This is not primarily a TTL cache, and does not make strong TTL | ||
### `async fetch(key, { updateAgeOnGet, allowStale, size, | ||
sizeCalculation, ttl, noDisposeOnSet, forceRefresh } = {}) => Promise` | ||
### `async fetch(key, { updateAgeOnGet, allowStale, size, sizeCalculation, ttl, noDisposeOnSet, forceRefresh } = {}) => Promise` | ||
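A sketch of calling the now one-line signature above, with a made-up fetchMethod, URL, and key; `allowStale` and the abort `signal` passed to the fetchMethod are part of the documented v7 API:

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 100,
  fetchMethod: async (key, staleValue, { signal }) => {
    // hypothetical upstream lookup, cancellable via the provided signal
    const res = await fetch('https://example.com/api/' + key, { signal })
    return res.json()
  },
})

cache.fetch('user:1', { allowStale: true }).then((v) => console.log(v))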
@@ -742,3 +739,3 @@ If the value is in the cache and not stale, then the returned
   }
-  cache.timers.set(k, setTimeout(() => cache.del(k), ttl))
+  cache.timers.set(k, setTimeout(() => cache.delete(k), ttl))
   cache.data.set(k, v)
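The fixed line comes from a README example that hand-rolls a timer-based TTL cache out of plain Maps (not lru-cache itself); the object's own removal method is named `delete`, so the old `cache.del(k)` called a method that does not exist there. A hedged reconstruction of that example's shape, with everything but the corrected line assumed from context:

// sketch of the README's hand-rolled timer cache; details assumed
const cache = {
  data: new Map(),
  timers: new Map(),
  set: (k, v, ttl) => {
    if (cache.timers.has(k)) {
      clearTimeout(cache.timers.get(k))
    }
    // the corrected call: the object's delete(), not the old del()
    cache.timers.set(k, setTimeout(() => cache.delete(k), ttl))
    cache.data.set(k, v)
  },
  delete: (k) => {
    if (cache.timers.has(k)) {
      clearTimeout(cache.timers.get(k))
    }
    cache.timers.delete(k)
    return cache.data.delete(k)
  },
}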