Socket
Socket
Sign inDemoInstall

lru-cache

Package Overview
Dependencies
0
Maintainers
1
Versions
134
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 7.5.1 to 7.6.0

177

index.js

@@ -27,8 +27,11 @@ const perf = typeof performance === 'object' && performance &&

}
const shouldWarn = (code) => typeof process === 'object' &&
const shouldWarn = code => typeof process === 'object' &&
process &&
!(process.noDeprecation || warned.has(code))
!warned.has(code)
const warn = (code, what, instead, fn) => {
warned.add(code)
process.emitWarning(`The ${what} is deprecated. Please use ${instead} instead.`, 'DeprecationWarning', code, fn)
const msg = `The ${what} is deprecated. Please use ${instead} instead.`
process.emitWarning(msg, 'DeprecationWarning', code, fn)
}

@@ -62,3 +65,3 @@

constructor (max) {
const UintArray = getUintArray(max)
const UintArray = max ? getUintArray(max) : Array
this.heap = new UintArray(max)

@@ -78,3 +81,3 @@ this.length = 0

const {
max,
max = 0,
ttl,

@@ -89,4 +92,5 @@ ttlResolution = 1,

noUpdateTTL,
maxSize,
maxSize = 0,
sizeCalculation,
fetchMethod,
} = options

@@ -102,7 +106,7 @@

if (!isPosInt(max)) {
throw new TypeError('max option must be an integer')
if (max !== 0 && !isPosInt(max)) {
throw new TypeError('max option must be a nonnegative integer')
}
const UintArray = getUintArray(max)
const UintArray = max ? getUintArray(max) : Array
if (!UintArray) {

@@ -113,3 +117,3 @@ throw new Error('invalid max value: ' + max)

this.max = max
this.maxSize = maxSize || 0
this.maxSize = maxSize
this.sizeCalculation = sizeCalculation || length

@@ -124,2 +128,9 @@ if (this.sizeCalculation) {

}
this.fetchMethod = fetchMethod || null
if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
throw new TypeError('fetchMethod must be a function if specified')
}
this.keyMap = new Map()

@@ -149,3 +160,3 @@ this.keyList = new Array(max).fill(null)

if (this.maxSize) {
if (this.maxSize !== 0) {
if (!isPosInt(this.maxSize)) {

@@ -170,2 +181,16 @@ throw new TypeError('maxSize must be a positive integer if specified')

// do not allow completely unbounded caches
if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
throw new TypeError('At least one of max, maxSize, or ttl is required')
}
if (!this.ttlAutopurge && !this.max && !this.maxSize) {
const code = 'LRU_CACHE_UNBOUNDED'
if (shouldWarn(code)) {
warned.add(code)
const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
'result in unbounded memory consumption.'
process.emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
}
}
if (stale) {

@@ -182,5 +207,10 @@ deprecatedOption('stale', 'allowStale')

getRemainingTTL (key) {
return this.has(key) ? Infinity : 0
}
initializeTTLTracking () {
this.ttls = new ZeroArray(this.max)
this.starts = new ZeroArray(this.max)
this.setItemTTL = (index, ttl) => {

@@ -201,5 +231,7 @@ this.starts[index] = ttl !== 0 ? perf.now() : 0

}
this.updateItemAge = (index) => {
this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
}
// debounce calls to perf.now() to 1s so we're not hitting

@@ -220,2 +252,12 @@ // that costly call repeatedly.

}
this.getRemainingTTL = (key) => {
const index = this.keyMap.get(key)
if (index === undefined) {
return 0
}
return this.ttls[index] === 0 || this.starts[index] === 0 ? Infinity
: ((this.starts[index] + this.ttls[index]) - (cachedNow || getNow()))
}
this.isStale = (index) => {

@@ -234,5 +276,13 @@ return this.ttls[index] !== 0 && this.starts[index] !== 0 &&

this.removeItemSize = index => this.calculatedSize -= this.sizes[index]
this.addItemSize = (index, v, k, size, sizeCalculation) => {
const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0)
this.sizes[index] = isPosInt(s) ? s : 0
this.requireSize = (k, v, size, sizeCalculation) => {
if (sizeCalculation && !size) {
size = sizeCalculation(v, k)
}
if (!isPosInt(size)) {
throw new TypeError('size must be positive integer')
}
return size
}
this.addItemSize = (index, v, k, size) => {
this.sizes[index] = size
const maxSize = this.maxSize - this.sizes[index]

@@ -255,11 +305,15 @@ while (this.calculatedSize > maxSize) {

removeItemSize (index) {}
addItemSize (index, v, k, size, sizeCalculation) {}
addItemSize (index, v, k, size) {}
requireSize (k, v, size, sizeCalculation) {
if (size || sizeCalculation) {
throw new TypeError('cannot set size without setting maxSize on cache')
}
}
*indexes ({ allowStale = this.allowStale } = {}) {
if (this.size) {
for (let i = this.tail, j; true; ) {
for (let i = this.tail; true; ) {
if (!this.isValidIndex(i)) {
break
}
j = i === this.head
if (allowStale || !this.isStale(i)) {

@@ -279,3 +333,3 @@ yield i

if (this.size) {
for (let i = this.head, j; true; ) {
for (let i = this.head; true; ) {
if (!this.isValidIndex(i)) {

@@ -287,3 +341,2 @@ break

}
// either the tail now, or WAS the tail, and deleted
if (i === this.tail) {

@@ -408,2 +461,3 @@ break

} = {}) {
size = this.requireSize(k, v, size, sizeCalculation)
let index = this.size === 0 ? undefined : this.keyMap.get(k)

@@ -420,3 +474,3 @@ if (index === undefined) {

this.size ++
this.addItemSize(index, v, k, size, sizeCalculation)
this.addItemSize(index, v, k, size)
noUpdateTTL = false

@@ -435,3 +489,3 @@ } else {

this.valList[index] = v
this.addItemSize(index, v, k, size, sizeCalculation)
this.addItemSize(index, v, k, size)
}

@@ -503,2 +557,63 @@ this.moveToTail(index)

// Start (or join) an asynchronous fetchMethod() load for key `k`.
// `index` is the key's slot in valList, or undefined when the key is not
// yet cached. Returns the Promise that resolves to the fetched value; the
// Promise itself is stored in valList so that concurrent fetch() calls for
// the same key share a single in-flight load.
backgroundFetch (k, index) {
// the currently cached (possibly stale) value for this key, if any
const v = index === undefined ? undefined : this.valList[index]
// a fetch is already in flight for this key: return it, do not re-fetch
if (this.isBackgroundFetch(v)) {
return v
}
const p = Promise.resolve(this.fetchMethod(k, v)).then(v => {
// only write the result back if this slot still belongs to the same
// key AND this exact fetch — the entry may have been evicted, deleted,
// or superseded by a newer fetch while we were loading
if (this.keyMap.get(k) === index && p === this.valList[index]) {
this.set(k, v)
}
return v
})
// marker checked by isBackgroundFetch(); it also carries the stale value
// so get()/fetch() can serve it while the refresh is still in flight
p.__staleWhileFetching = v
if (index === undefined) {
// not in the cache yet: insert the Promise itself as a placeholder
this.set(k, p)
index = this.keyMap.get(k)
} else {
// refreshing a stale entry: replace the value in place
this.valList[index] = p
}
return p
}
isBackgroundFetch (p) {
return p && typeof p === 'object' && typeof p.then === 'function' &&
Object.prototype.hasOwnProperty.call(p, '__staleWhileFetching')
}
// Resolve the value for `k`, loading it via fetchMethod when missing or
// stale. With no fetchMethod configured, behaves like
// Promise.resolve(this.get(k)). When `allowStale` is set and a refresh is
// already in flight, the previous (stale) value is returned immediately.
async fetch (k, {
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
} = {}) {
// no fetchMethod: fetch() degrades to a plain (async-wrapped) get()
if (!this.fetchMethod) {
return this.get(k, {allowStale, updateAgeOnGet})
}
let index = this.keyMap.get(k)
if (index === undefined) {
// cache miss: start a background fetch and return its Promise
return this.backgroundFetch(k, index)
} else {
// in cache, maybe already fetching
const v = this.valList[index]
if (this.isBackgroundFetch(v)) {
// join the in-flight fetch; serve the stale value instead when
// allowed and one was captured at fetch start
return allowStale && v.__staleWhileFetching !== undefined
? v.__staleWhileFetching : v
}
if (!this.isStale(index)) {
// fresh hit: bump recency (and optionally TTL age), return as-is
this.moveToTail(index)
if (updateAgeOnGet) {
this.updateItemAge(index)
}
return v
}
// ok, it is stale, and not already fetching
// refresh the cache.
const p = this.backgroundFetch(k, index)
return allowStale && p.__staleWhileFetching !== undefined
? p.__staleWhileFetching : p
}
}
get (k, {

@@ -510,7 +625,19 @@ allowStale = this.allowStale,

if (index !== undefined) {
const value = this.valList[index]
const fetching = this.isBackgroundFetch(value)
if (this.isStale(index)) {
const value = allowStale ? this.valList[index] : undefined
this.delete(k)
return value
// delete only if not an in-flight background fetch
if (!fetching) {
this.delete(k)
return allowStale ? value : undefined
} else {
return allowStale ? value.__staleWhileFetching : undefined
}
} else {
// if we're currently fetching it, we don't actually have it yet
// it's not stale, which means this isn't a staleWhileRefetching,
// so we just return undefined
if (fetching) {
return undefined
}
this.moveToTail(index)

@@ -520,3 +647,3 @@ if (updateAgeOnGet) {

}
return this.valList[index]
return value
}

@@ -523,0 +650,0 @@ }

3

package.json
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
"version": "7.5.1",
"version": "7.6.0",
"author": "Isaac Z. Schlueter <i@izs.me>",

@@ -25,2 +25,3 @@ "keywords": [

"benchmark": "^2.1.4",
"clock-mock": "^1.0.3",
"size-limit": "^7.0.8",

@@ -27,0 +28,0 @@ "tap": "^15.1.6"

@@ -36,3 +36,3 @@ # lru-cache

max: 500, // <-- mandatory, you must give a maximum capacity
max: 500, // <-- Technically optional, but see "Storage Bounds Safety" below

@@ -116,4 +116,8 @@ // if you wish to track item size, you must provide a maxSize

This option is required, and must be a positive integer.
At least one of `max`, `maxSize`, or `TTL` is required. This must be a
positive integer if set.
**It is strongly recommended to set a `max` to prevent unbounded growth
of the cache.** See "Storage Bounds Safety" below.
* `maxSize` - Set to a positive integer to track the sizes of items added

@@ -126,2 +130,9 @@ to the cache, and automatically evict items in order to stay below this

At least one of `max`, `maxSize`, or `TTL` is required. This must be a
positive integer if set.
Even if size tracking is enabled, **it is strongly recommended to set a
`max` to prevent unbounded growth of the cache.** See "Storage Bounds
Safety" below.
* `sizeCalculation` - Function used to calculate the size of stored

@@ -138,2 +149,9 @@ items. If you're storing strings or buffers, then you probably want to

* `fetchMethod` Function that is used to make background asynchronous
fetches. Called with `fetchMethod(key, staleValue)`. May return a
Promise.
If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent
to `Promise.resolve(cache.get(key))`.
* `dispose` Function that is called on items when they are dropped

@@ -200,2 +218,13 @@ from the cache, as `this.dispose(value, key, reason)`.

At least one of `max`, `maxSize`, or `TTL` is required. This must be a
positive integer if set.
Even if ttl tracking is enabled, **it is strongly recommended to set a
`max` to prevent unbounded growth of the cache.** See "Storage Bounds
Safety" below.
If ttl tracking is enabled, and `max` and `maxSize` are not set, and
`ttlAutopurge` is not set, then a warning will be emitted cautioning
about the potential for unbounded memory consumption.
Deprecated alias: `maxAge`

@@ -316,2 +345,21 @@

* `async fetch(key, { updateAgeOnGet, allowStale } = {}) => Promise`
If the value is in the cache and not stale, then the returned Promise
resolves to the value.
If not in the cache, or beyond its TTL staleness, then
`fetchMethod(key, staleValue)` is called, and the value returned will
be added to the cache once resolved.
If called with `allowStale`, and an asynchronous fetch is currently in
progress to reload a stale value, then the former stale value will be
returned.
Multiple fetches for the same `key` will only call `fetchMethod` a
single time, and all will be resolved when the value is resolved.
If `fetchMethod` is not specified, then this is an alias for
`Promise.resolve(cache.get(key))`.
* `peek(key, { allowStale } = {}) => value`

@@ -404,2 +452,8 @@

* `getRemainingTTL(key)`
Returns the number of ms remaining in the item's TTL. Returns `0` if the
item is not in the cache, and `Infinity` if the item is in the cache
without a defined TTL.
* `forEach(fn, [thisp])`

@@ -477,2 +531,66 @@

## Storage Bounds Safety
This implementation aims to be as flexible as possible, within the limits
of safe memory consumption and optimal performance.
At initial object creation, storage is allocated for `max` items. If `max`
is set to zero, then some performance is lost, and item count is unbounded.
Either `maxSize` or `ttl` _must_ be set if `max` is not specified.
If `maxSize` is set, then this creates a safe limit on the maximum storage
consumed, but without the performance benefits of pre-allocation. When
`maxSize` is set, every item _must_ provide a size, either via the
`sizeCalculation` method provided to the constructor, or via a `size` or
`sizeCalculation` option provided to `cache.set()`. The size of every item
_must_ be a positive integer.
If neither `max` nor `maxSize` are set, then `ttl` tracking must be
enabled. Note that, even when tracking item `ttl`, items are _not_
preemptively deleted when they become stale, unless `ttlAutopurge` is
enabled. Instead, they are only purged the next time the key is requested.
Thus, if `ttlAutopurge`, `max`, and `maxSize` are all not set, then the
cache will potentially grow unbounded.
In this case, a warning is printed to standard error. Future versions may
require the use of `ttlAutopurge` if `max` and `maxSize` are not specified.
If you truly wish to use a cache that is bound _only_ by TTL expiration,
consider using a `Map` object, and calling `setTimeout` to delete entries
when they expire. It will perform much better than an LRU cache.
Here is an implementation you may use, under the same [license](./LICENSE)
as this package:
```js
// A storage-unbounded TTL cache that is not an LRU cache.
// Each entry is evicted by its own setTimeout, so memory is bounded only
// by the number of live (unexpired) keys at any moment.
const cache = {
  data: new Map(),
  timers: new Map(),
  // Store `v` under `k`, automatically deleting it after `ttl` ms.
  // Re-setting a key cancels and replaces its previous eviction timer.
  set: (k, v, ttl) => {
    if (cache.timers.has(k)) {
      clearTimeout(cache.timers.get(k))
    }
    // bug fix: the expiry callback must call cache.delete(k) — the object
    // defines `delete`, not `del`, so the original threw on every expiry
    cache.timers.set(k, setTimeout(() => cache.delete(k), ttl))
    cache.data.set(k, v)
  },
  get: k => cache.data.get(k),
  has: k => cache.data.has(k),
  // Remove `k` and cancel its pending eviction timer.
  // Returns true if the key was present.
  delete: k => {
    if (cache.timers.has(k)) {
      clearTimeout(cache.timers.get(k))
    }
    cache.timers.delete(k)
    return cache.data.delete(k)
  },
  // Drop all entries and cancel every outstanding timer so no stale
  // callbacks fire (and no timers keep the event loop alive).
  clear: () => {
    cache.data.clear()
    for (const v of cache.timers.values()) {
      clearTimeout(v)
    }
    cache.timers.clear()
  }
}
```
## Performance

@@ -479,0 +597,0 @@

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc