
lru-cache - npm Package Compare versions

Comparing version 6.0.0 to 7.0.0


index.js

@@ -1,250 +0,382 @@

'use strict'
const perf = typeof performance === 'object' && performance &&
typeof performance.now === 'function' ? performance : Date
// A linked list to keep track of recently-used-ness
const Yallist = require('yallist')
const MAX = Symbol('max')
const LENGTH = Symbol('length')
const LENGTH_CALCULATOR = Symbol('lengthCalculator')
const ALLOW_STALE = Symbol('allowStale')
const MAX_AGE = Symbol('maxAge')
const DISPOSE = Symbol('dispose')
const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
const LRU_LIST = Symbol('lruList')
const CACHE = Symbol('cache')
const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
const warned = new Set()
const deprecatedOption = (opt, msg) => {
const code = `LRU_CACHE_OPTION_${opt}`
if (shouldWarn(code)) {
warn(code, `The ${opt} option is deprecated. ${msg}`, LRUCache)
}
}
const deprecatedMethod = (method, msg) => {
const code = `LRU_CACHE_METHOD_${method}`
if (shouldWarn(code)) {
const fn = LRUCache.prototype[method]
warn(code, `The ${method} method is deprecated. ${msg}`)
}
}
const deprecatedProperty = (field, msg) => {
const code = `LRU_CACHE_PROPERTY_${field}`
if (shouldWarn(code)) {
const { prototype } = LRUCache
const { get } = Object.getOwnPropertyDescriptor(prototype, field)
warn(code, `The ${field} property is deprecated. ${msg}`, get)
}
}
const shouldWarn = (code) => !(process.noDeprecation || warned.has(code))
const warn = (code, msg, fn) => {
warned.add(code)
process.emitWarning(msg, 'DeprecationWarning', code, fn)
}
const naiveLength = () => 1
const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)
// lruList is a yallist where the head is the youngest
// item, and the tail is the oldest. the list contains the Hit
// objects as the entries.
// Each Hit object has a reference to its Yallist.Node. This
// never changes.
//
// cache is a Map (or PseudoMap) that matches the keys to
// the Yallist.Node object.
class LRUCache {
constructor (options) {
if (typeof options === 'number')
options = { max: options }
/* istanbul ignore next - This is a little bit ridiculous, tbh.
* The maximum array length is 2^32-1 or thereabouts on most JS impls.
* And well before that point, you're caching the entire world, I mean,
* that's ~32GB of just integers for the next/prev links, plus whatever
* else to hold that many keys and values. Just filling the memory with
* zeroes at init time is brutal when you get that big.
* But why not be complete?
* Maybe in the future, these limits will have expanded. */
const getUintArray = max => !isPosInt(max) ? null
: max <= Math.pow(2, 8) ? Uint8Array
: max <= Math.pow(2, 16) ? Uint16Array
: max <= Math.pow(2, 32) ? Uint32Array
: max <= Number.MAX_SAFE_INTEGER ? ZeroArray
: null
if (!options)
options = {}
if (options.max && (typeof options.max !== 'number' || options.max < 0))
throw new TypeError('max must be a non-negative number')
// Kind of weird to have a default max of Infinity, but oh well.
const max = this[MAX] = options.max || Infinity
const lc = options.length || naiveLength
this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
this[ALLOW_STALE] = options.stale || false
if (options.maxAge && typeof options.maxAge !== 'number')
throw new TypeError('maxAge must be a number')
this[MAX_AGE] = options.maxAge || 0
this[DISPOSE] = options.dispose
this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
this.reset()
class ZeroArray extends Array {
constructor (size) {
super(size)
this.fill(0)
}
}
// resize the cache when the max changes.
set max (mL) {
if (typeof mL !== 'number' || mL < 0)
throw new TypeError('max must be a non-negative number')
this[MAX] = mL || Infinity
trim(this)
class Stack {
constructor (max) {
const UintArray = getUintArray(max)
this.heap = new UintArray(max)
this.length = 0
}
get max () {
return this[MAX]
push (n) {
this.heap[this.length++] = n
}
set allowStale (allowStale) {
this[ALLOW_STALE] = !!allowStale
pop () {
return this.heap[--this.length]
}
get allowStale () {
return this[ALLOW_STALE]
}
}
set maxAge (mA) {
if (typeof mA !== 'number')
throw new TypeError('maxAge must be a non-negative number')
class LRUCache {
constructor (options = {}) {
const {
max,
ttl,
updateAgeOnGet,
allowStale,
dispose,
noDisposeOnSet,
maxSize,
sizeCalculation,
} = options
this[MAX_AGE] = mA
trim(this)
}
get maxAge () {
return this[MAX_AGE]
}
// deprecated options, don't trigger a warning for getting them if
// the thing being passed in is another LRUCache we're copying.
const {
length,
maxAge,
stale,
} = options instanceof LRUCache ? {} : options
// resize the cache when the lengthCalculator changes.
set lengthCalculator (lC) {
if (typeof lC !== 'function')
lC = naiveLength
if (!isPosInt(max)) {
throw new TypeError('max option must be an integer')
}
if (lC !== this[LENGTH_CALCULATOR]) {
this[LENGTH_CALCULATOR] = lC
this[LENGTH] = 0
this[LRU_LIST].forEach(hit => {
hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
this[LENGTH] += hit.length
})
const UintArray = getUintArray(max)
if (!UintArray) {
throw new Error('invalid max value: ' + max)
}
trim(this)
}
get lengthCalculator () { return this[LENGTH_CALCULATOR] }
get length () { return this[LENGTH] }
get itemCount () { return this[LRU_LIST].length }
this.max = max
this.maxSize = maxSize || 0
this.sizeCalculation = sizeCalculation || length
if (this.sizeCalculation) {
if (!this.maxSize) {
throw new TypeError('cannot set sizeCalculation without setting maxSize')
}
if (typeof this.sizeCalculation !== 'function') {
throw new TypeError('sizeCalculating set to non-function')
}
}
this.keyMap = new Map()
this.keyList = new Array(max).fill(null)
this.valList = new Array(max).fill(null)
this.next = new UintArray(max)
this.prev = new UintArray(max)
this.head = 0
this.tail = 0
this.free = new Stack(max)
this.size = 0
rforEach (fn, thisp) {
thisp = thisp || this
for (let walker = this[LRU_LIST].tail; walker !== null;) {
const prev = walker.prev
forEachStep(this, fn, walker, thisp)
walker = prev
if (typeof dispose === 'function') {
this.dispose = dispose
}
this.noDisposeOnSet = !!noDisposeOnSet
if (this.maxSize) {
if (!isPosInt(this.maxSize)) {
throw new TypeError('maxSize must be a positive integer if specified')
}
this.initializeSizeTracking()
}
this.allowStale = !!allowStale || !!stale
this.updateAgeOnGet = !!updateAgeOnGet
this.ttl = ttl || maxAge || 0
if (this.ttl) {
if (!isPosInt(this.ttl)) {
throw new TypeError('ttl must be a positive integer if specified')
}
this.initializeTTLTracking()
}
if (stale) {
deprecatedOption('stale', 'please use options.allowStale instead')
}
if (maxAge) {
deprecatedOption('maxAge', 'please use options.ttl instead')
}
if (length) {
deprecatedOption('length', 'please use options.sizeCalculation instead')
}
}
forEach (fn, thisp) {
thisp = thisp || this
for (let walker = this[LRU_LIST].head; walker !== null;) {
const next = walker.next
forEachStep(this, fn, walker, thisp)
walker = next
initializeTTLTracking () {
this.ttls = new ZeroArray(this.max)
this.starts = new ZeroArray(this.max)
this.setItemTTL = (index, ttl) => {
this.starts[index] = ttl !== 0 ? perf.now() : 0
this.ttls[index] = ttl
}
this.updateItemAge = (index) => {
this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
}
this.isStale = (index) => {
return this.ttls[index] !== 0 && this.starts[index] !== 0 &&
(perf.now() - this.starts[index] > this.ttls[index])
}
}
updateItemAge (index) {}
setItemTTL (index, ttl) {}
isStale (index) { return false }
keys () {
return this[LRU_LIST].toArray().map(k => k.key)
initializeSizeTracking () {
this.calculatedSize = 0
this.sizes = new ZeroArray(this.max)
this.removeItemSize = index => this.calculatedSize -= this.sizes[index]
this.addItemSize = (index, v, k, size, sizeCalculation) => {
const s = size || (sizeCalculation ? sizeCalculation(v, k) : 0)
this.sizes[index] = isPosInt(s) ? s : 0
const maxSize = this.maxSize - this.sizes[index]
while (this.calculatedSize > maxSize) {
this.evict()
}
this.calculatedSize += this.sizes[index]
}
this.delete = k => {
if (this.size !== 0) {
const index = this.keyMap.get(k)
if (index !== undefined) {
this.calculatedSize -= this.sizes[index]
}
}
return LRUCache.prototype.delete.call(this, k)
}
}
removeItemSize (index) {}
addItemSize (index, v, k, size, sizeCalculation) {}
values () {
return this[LRU_LIST].toArray().map(k => k.value)
*indexes () {
if (this.size) {
for (let i = this.tail; true; i = this.prev[i]) {
if (!this.isStale(i)) {
yield i
}
if (i === this.head) {
break
}
}
}
}
reset () {
if (this[DISPOSE] &&
this[LRU_LIST] &&
this[LRU_LIST].length) {
this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
*entries () {
for (const i of this.indexes()) {
yield [this.keyList[i], this.valList[i]]
}
}
this[CACHE] = new Map() // hash of items by key
this[LRU_LIST] = new Yallist() // list of items in order of use recency
this[LENGTH] = 0 // length of items in the list
*keys () {
for (const i of this.indexes()) {
yield this.keyList[i]
}
}
dump () {
return this[LRU_LIST].map(hit =>
isStale(this, hit) ? false : {
k: hit.key,
v: hit.value,
e: hit.now + (hit.maxAge || 0)
}).toArray().filter(h => h)
*values () {
for (const i of this.indexes()) {
yield this.valList[i]
}
}
dumpLru () {
return this[LRU_LIST]
[Symbol.iterator] () {
return this.entries()
}
set (key, value, maxAge) {
maxAge = maxAge || this[MAX_AGE]
find (fn, getOptions = {}) {
for (const i of this.indexes()) {
if (fn(this.valList[i], this.keyList[i], this)) {
return this.get(this.keyList[i], getOptions)
}
}
}
if (maxAge && typeof maxAge !== 'number')
throw new TypeError('maxAge must be a number')
forEach (fn, thisp = this) {
for (const i of this.indexes()) {
fn.call(thisp, this.valList[i], this.keyList[i], this)
}
}
const now = maxAge ? Date.now() : 0
const len = this[LENGTH_CALCULATOR](value, key)
if (this[CACHE].has(key)) {
if (len > this[MAX]) {
del(this, this[CACHE].get(key))
return false
purgeStale () {
let deleted = false
if (this.size) {
for (let i = this.head; true; i = this.next[i]) {
const b = i === this.tail
if (this.isStale(i)) {
this.delete(this.keyList[i])
deleted = true
}
if (b) {
break
}
}
}
return deleted
}
const node = this[CACHE].get(key)
const item = node.value
// dispose of the old one before overwriting
// split out into 2 ifs for better coverage tracking
if (this[DISPOSE]) {
if (!this[NO_DISPOSE_ON_SET])
this[DISPOSE](key, item.value)
dump () {
const arr = []
for (const i of this.indexes()) {
const key = this.keyList[i]
const value = this.valList[i]
const entry = { value }
if (this.ttls) {
entry.ttl = this.ttls[i]
}
if (this.sizes) {
entry.size = this.sizes[i]
}
arr.unshift([key, entry])
}
return arr
}
item.now = now
item.maxAge = maxAge
item.value = value
this[LENGTH] += len - item.length
item.length = len
this.get(key)
trim(this)
return true
load (arr) {
this.clear()
for (const [key, entry] of arr) {
this.set(key, entry.value, entry)
}
}
const hit = new Entry(key, value, len, now, maxAge)
dispose (v, k) {}
// oversized objects fall out of cache automatically.
if (hit.length > this[MAX]) {
if (this[DISPOSE])
this[DISPOSE](key, value)
return false
set (k, v, {
ttl = this.ttl,
noDisposeOnSet = this.noDisposeOnSet,
size = 0,
sizeCalculation = this.sizeCalculation,
} = {}) {
let index = this.size === 0 ? undefined : this.keyMap.get(k)
if (index === undefined) {
// addition
index = this.newIndex()
this.keyList[index] = k
this.valList[index] = v
this.keyMap.set(k, index)
this.next[this.tail] = index
this.prev[index] = this.tail
this.tail = index
this.size ++
this.addItemSize(index, v, k, size, sizeCalculation)
} else {
// update
const oldVal = this.valList[index]
if (v !== oldVal) {
if (!noDisposeOnSet) {
this.dispose(oldVal, k)
}
this.removeItemSize(index)
this.valList[index] = v
this.addItemSize(index, v, k, size, sizeCalculation)
}
this.moveToTail(index)
}
this[LENGTH] += hit.length
this[LRU_LIST].unshift(hit)
this[CACHE].set(key, this[LRU_LIST].head)
trim(this)
return true
if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
this.initializeTTLTracking()
}
this.setItemTTL(index, ttl)
}
has (key) {
if (!this[CACHE].has(key)) return false
const hit = this[CACHE].get(key).value
return !isStale(this, hit)
newIndex () {
if (this.size === 0) {
return this.tail
}
if (this.size === this.max) {
return this.evict()
}
if (this.free.length !== 0) {
return this.free.pop()
}
// initial fill, just keep writing down the list
return this.tail + 1
}
get (key) {
return get(this, key, true)
evict () {
const head = this.head
const k = this.keyList[head]
this.dispose(this.valList[head], k)
this.removeItemSize(head)
this.head = this.next[head]
this.keyMap.delete(k)
this.size --
return head
}
peek (key) {
return get(this, key, false)
has (k) {
return this.keyMap.has(k) && !this.isStale(this.keyMap.get(k))
}
pop () {
const node = this[LRU_LIST].tail
if (!node)
return null
del(this, node)
return node.value
// like get(), but without any LRU updating or TTL expiration
peek (k, { allowStale = this.allowStale } = {}) {
const index = this.keyMap.get(k)
if (index !== undefined && (allowStale || !this.isStale(index))) {
return this.valList[index]
}
}
del (key) {
del(this, this[CACHE].get(key))
}
load (arr) {
// reset the cache
this.reset()
const now = Date.now()
// A previous serialized cache has the most recent items first
for (let l = arr.length - 1; l >= 0; l--) {
const hit = arr[l]
const expiresAt = hit.e || 0
if (expiresAt === 0)
// the item was created without expiration in a non aged cache
this.set(hit.k, hit.v)
else {
const maxAge = expiresAt - now
// dont add already expired items
if (maxAge > 0) {
this.set(hit.k, hit.v, maxAge)
get (k, {
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
} = {}) {
const index = this.keyMap.get(k)
if (index !== undefined) {
if (this.isStale(index)) {
const value = allowStale ? this.valList[index] : undefined
this.delete(k)
return value
} else {
this.moveToTail(index)
if (updateAgeOnGet) {
this.updateItemAge(index)
}
return this.valList[index]
}

@@ -254,82 +386,82 @@ }

prune () {
this[CACHE].forEach((value, key) => get(this, key, false))
connect (p, n) {
this.prev[n] = p
this.next[p] = n
}
}
const get = (self, key, doUse) => {
const node = self[CACHE].get(key)
if (node) {
const hit = node.value
if (isStale(self, hit)) {
del(self, node)
if (!self[ALLOW_STALE])
return undefined
} else {
if (doUse) {
if (self[UPDATE_AGE_ON_GET])
node.value.now = Date.now()
self[LRU_LIST].unshiftNode(node)
moveToTail (index) {
// if tail already, nothing to do
// if head, move head to next[index]
// else
// move next[prev[index]] to next[index] (head has no prev)
// move prev[next[index]] to prev[index]
// prev[index] = tail
// next[tail] = index
// tail = index
if (index !== this.tail) {
if (index === this.head) {
this.head = this.next[index]
} else {
this.connect(this.prev[index], this.next[index])
}
this.connect(this.tail, index)
this.tail = index
}
return hit.value
}
}
const isStale = (self, hit) => {
if (!hit || (!hit.maxAge && !self[MAX_AGE]))
return false
const diff = Date.now() - hit.now
return hit.maxAge ? diff > hit.maxAge
: self[MAX_AGE] && (diff > self[MAX_AGE])
}
const trim = self => {
if (self[LENGTH] > self[MAX]) {
for (let walker = self[LRU_LIST].tail;
self[LENGTH] > self[MAX] && walker !== null;) {
// We know that we're about to delete this one, and also
// what the next least recently used key will be, so just
// go ahead and set it now.
const prev = walker.prev
del(self, walker)
walker = prev
delete (k) {
if (this.size !== 0) {
const index = this.keyMap.get(k)
if (index !== undefined) {
this.dispose(this.valList[index], k)
this.removeItemSize(index)
if (this.size === 1) {
this.clear()
} else {
this.keyMap.delete(k)
this.keyList[index] = null
this.valList[index] = null
if (index === this.tail) {
this.tail = this.prev[index]
} else if (index === this.head) {
this.head = this.next[index]
} else {
this.next[this.prev[index]] = this.next[index]
this.prev[this.next[index]] = this.prev[index]
}
this.size --
}
this.free.push(index)
}
}
}
}
const del = (self, node) => {
if (node) {
const hit = node.value
if (self[DISPOSE])
self[DISPOSE](hit.key, hit.value)
self[LENGTH] -= hit.length
self[CACHE].delete(hit.key)
self[LRU_LIST].removeNode(node)
clear () {
this.keyMap.clear()
this.valList.fill(null)
this.keyList.fill(null)
if (this.ttls) {
this.ttls.fill(0)
this.starts.fill(0)
}
if (this.sizes) {
this.sizes.fill(0)
}
this.head = 0
this.tail = 0
this.free.length = 0
this.calculatedSize = 0
this.size = 0
}
}
class Entry {
constructor (key, value, length, now, maxAge) {
this.key = key
this.value = value
this.length = length
this.now = now
this.maxAge = maxAge || 0
reset () {
deprecatedMethod('reset', 'Please use cache.clear() instead.')
return this.clear()
}
}
const forEachStep = (self, fn, node, thisp) => {
let hit = node.value
if (isStale(self, hit)) {
del(self, node)
if (!self[ALLOW_STALE])
hit = undefined
get length () {
deprecatedProperty('length', 'Please use cache.size instead.')
return this.size
}
if (hit)
fn.call(thisp, hit.value, hit.key, self)
}
module.exports = LRUCache
package.json

{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
"version": "6.0.0",
"version": "7.0.0",
"author": "Isaac Z. Schlueter <i@izs.me>",

@@ -22,8 +22,5 @@ "keywords": [

"benchmark": "^2.1.4",
"tap": "^14.10.7"
"tap": "^15.1.6"
},
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"files": [

@@ -33,4 +30,7 @@ "index.js"

"engines": {
"node": ">=10"
"node": ">=12"
},
"tap": {
"coverage-map": "map.js"
}
}

README.md

@@ -1,24 +0,91 @@

# lru cache
# lru-cache
A cache object that deletes the least-recently-used items.
[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache)
Specify a max number of the most recently used items that you want to keep,
and this cache will keep that many of the most recently accessed items.
## Installation:
This is not primarily a TTL cache, and does not make strong TTL guarantees.
There is no preemptive pruning of expired items, but you _may_ set a TTL
on the cache or on a single `set`. If you do so, it will treat expired
items as missing, and delete them when fetched.
```javascript
As of version 7, this is one of the most performant LRU implementations
available in JavaScript, and supports a wide diversity of use cases.
However, note that using some of the features will necessarily impact
performance, by causing the cache to have to do more work. See the
"Performance" section below.
## Installation
```js
npm install lru-cache --save
```
## Usage:
## Usage
```javascript
var LRU = require("lru-cache")
, options = { max: 500
, length: function (n, key) { return n * 2 + key.length }
, dispose: function (key, n) { n.close() }
, maxAge: 1000 * 60 * 60 }
, cache = new LRU(options)
, otherCache = new LRU(50) // sets just the max size
```js
const LRU = require('lru-cache')
// only 'max' is required, the others are optional, but MAY be
// required if certain other fields are set.
const options = {
// the number of most recently used items to keep.
// note that we may store fewer items than this if maxSize is hit.
max: 500,
// if you wish to track item size, you must provide a maxSize
// note that we still will only keep up to max *actual items*,
// so size tracking may cause fewer than max items to be stored.
// At the extreme, a single item of maxSize size will cause everything
// else in the cache to be dropped when it is added. Use with caution!
// Note also that size tracking can negatively impact performance,
// though for most cases, only minimally.
maxSize: 5000,
// function to calculate size of items. useful if storing strings or
// buffers or other items where memory size depends on the object itself.
// also note that oversized items do NOT immediately get dropped from
// the cache, though they will cause faster turnover in the storage.
sizeCalculation: (value, key) => {
// return a positive integer which is the size of the item,
// if a positive integer is not returned, will use 0 as the size.
return 1
},
// function to call when the item is removed from the cache
// Note that using this can negatively impact performance.
dispose: (value, key) => {
freeFromMemoryOrWhatever(value)
},
// max time to live for items before they are considered stale
// note that stale items are NOT preemptively removed, and MAY
// live in the cache, contributing to its LRU max, long after they
// have expired.
// Also, as this cache is optimized for LRU/MRU operations, some of
// the staleness/TTL checks will reduce performance, as they will incur
// overhead by deleting items.
// Must be a positive integer in ms, defaults to 0, which means "no TTL"
ttl: 1000 * 60 * 5,
// return stale items from cache.get() before disposing of them
// boolean, default false
allowStale: false,
// update the age of items on cache.get(), renewing their TTL
// boolean, default false
updateAgeOnGet: false,
// update the age of items on cache.has(), renewing their TTL
// boolean, default false
updateAgeOnHas: false,
// update the "recently-used"-ness of items on cache.has()
// boolean, default false
updateRecencyOnHas: false,
}
const cache = new LRU(options)
cache.set("key", "value")

@@ -39,3 +106,3 @@ cache.get("key") // "value"

cache.reset() // empty the cache
cache.clear() // empty the cache
```

@@ -45,124 +112,314 @@

If you try to put an oversized thing in it, then it'll fall out right
away.
## Options
* `max` The maximum size of the cache, checked by applying the length
function to all values in the cache. Not setting this is kind of
silly, since that's the whole purpose of this lib, but it defaults
to `Infinity`. Setting it to a non-number or negative number will
throw a `TypeError`. Setting it to 0 makes it be `Infinity`.
* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
as they age, but if you try to get an item that is too old, it'll
drop it and return undefined instead of giving it to you.
Setting this to a negative value will make everything seem old!
Setting it to a non-number will throw a `TypeError`.
* `length` Function that is used to calculate the length of stored
items. If you're storing strings or buffers, then you probably want
to do something like `function(n, key){return n.length}`. The default is
`function(){return 1}`, which is fine if you want to store `max`
like-sized things. The item is passed as the first argument, and
the key is passed as the second argument.
* `max` - The maximum number (or size) of items that remain in the cache
(assuming no TTL pruning or explicit deletions). Note that fewer items
may be stored if size calculation is used, and `maxSize` is exceeded.
This must be a positive finite integer.
* `maxSize` - Set to a positive integer to track the sizes of items added
to the cache, and automatically evict items in order to stay below this
size. Note that this may result in fewer than `max` items being stored.
* `sizeCalculation` - Function used to calculate the size of stored
items. If you're storing strings or buffers, then you probably want to
do something like `n => n.length`. The item is passed as the first
argument, and the key is passed as the second argument.
This may be overridden by passing an options object to `cache.set()`.
Requires `maxSize` to be set.
Deprecated alias: `length`
* `dispose` Function that is called on items when they are dropped
from the cache. This can be handy if you want to close file
descriptors or do other cleanup tasks when items are no longer
accessible. Called with `key, value`. It's called *before*
actually removing the item from the internal cache, so if you want
to immediately put it back in, you'll have to do that in a
`nextTick` or `setTimeout` callback or it won't do anything.
* `stale` By default, if you set a `maxAge`, it'll only actually pull
stale items out of the cache when you `get(key)`. (That is, it's
not pre-emptively doing a `setTimeout` or anything.) If you set
`stale:true`, it'll return the stale value before deleting it. If
you don't set this, then it'll return `undefined` when you try to
get a stale entry, as if it had already been deleted.
* `noDisposeOnSet` By default, if you set a `dispose()` method, then
it'll be called whenever a `set()` operation overwrites an existing
key. If you set this option, `dispose()` will only be called when a
key falls out of the cache, not when it is overwritten.
* `updateAgeOnGet` When using time-expiring entries with `maxAge`,
setting this to `true` will make each item's effective time update
to the current time whenever it is retrieved from cache, causing it
to not expire. (It can still fall out of cache based on recency of
use, of course.)
stored in the cache.
It is called *after* the item has been fully removed from the cache, so
if you want to put it right back in, that is safe to do.
Unlike several other options, this may _not_ be overridden by passing
an option to `set()`, for performance reasons. If disposal functions
may vary between cache entries, then the entire list must be scanned
on every cache swap, even if no disposal function is in use.
Optional, must be a function.
* `noDisposeOnSet` Set to `true` to suppress calling the `dispose()`
function if the entry key is still accessible within the cache.
This may be overridden by passing an options object to `cache.set()`.
Boolean, default `false`. Only relevant if `dispose` option is set.
* `ttl` - max time to live for items before they are considered stale.
Note that stale items are NOT preemptively removed, and MAY live in the
cache, contributing to its LRU max, long after they have expired.
Also, as this cache is optimized for LRU/MRU operations, some of
the staleness/TTL checks will reduce performance, as they will incur
overhead by deleting from Map objects rather than simply throwing old
Map objects away.
This is not primarily a TTL cache, and does not make strong TTL
guarantees. There is no pre-emptive pruning of expired items, but you
_may_ set a TTL on the cache, and it will treat expired items as missing
when they are fetched, and delete them.
Optional, but must be a positive integer in ms if specified.
This may be overridden by passing an options object to `cache.set()`.
Deprecated alias: `maxAge`
* `allowStale` - By default, if you set `ttl`, it'll only delete stale
items from the cache when you `get(key)`. That is, it's not
pre-emptively pruning items.
If you set `allowStale:true`, it'll return the stale value as well as
deleting it. If you don't set this, then it'll return `undefined` when
you try to get a stale entry.
Note that when a stale entry is fetched, _even if it is returned due to
`allowStale` being set_, it is removed from the cache immediately. You
can immediately put it back in the cache if you wish, thus resetting the
TTL.
This may be overridden by passing an options object to `cache.get()`.
The `cache.has()` method will always return `false` for stale items.
Boolean, default false, only relevant if `ttl` is set.
Deprecated alias: `stale`
* `updateAgeOnGet` - When using time-expiring entries with `ttl`, setting
this to `true` will make each item's age reset to 0 whenever it is
retrieved from cache with `get()`, causing it to not expire. (It can
still fall out of cache based on recency of use, of course.)
This may be overridden by passing an options object to `cache.get()`.
Boolean, default false, only relevant if `ttl` is set.
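The size, TTL, and staleness options described above interact with one another. Below is a minimal sketch, assuming the v7 option names documented here; the numeric values are arbitrary and chosen only for illustration.
```js
const LRU = require('lru-cache')

const cache = new LRU({
  max: 100,               // hard cap on the number of entries
  maxSize: 1000,          // required whenever sizeCalculation is used
  sizeCalculation: (value, key) => value.length, // e.g. caching strings
  ttl: 1000,              // entries become stale after 1 second
  allowStale: true,       // get() returns a stale entry once, then deletes it
  updateAgeOnGet: false,  // reads do not reset the TTL clock
})

cache.set('a', 'xxxxx')   // counts 5 against maxSize
cache.get('a')            // 'xxxxx'

setTimeout(() => {
  cache.has('a')          // false: has() never reports stale entries
  cache.get('a')          // 'xxxxx' once, because allowStale is true, then deleted
  cache.get('a')          // undefined
}, 1500)
```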
## API
* `set(key, value, maxAge)`
* `get(key) => value`
* `new LRUCache(options)`
Both of these will update the "recently used"-ness of the key.
They do what you think. `maxAge` is optional and overrides the
cache `maxAge` option if provided.
Create a new LRUCache. All options are documented above, and are on
the cache as public members.
If the key is not found, `get()` will return `undefined`.
* `cache.max`, `cache.ttl`, `cache.allowStale`, etc.
The key and val can be any value.
All option names are exposed as public members on the cache object.
* `peek(key)`
These are intended for read access only. Changing them during program
operation can cause undefined behavior.
Returns the key value (or `undefined` if not found) without
updating the "recently used"-ness of the key.
* `cache.size`
(If you find yourself using this a lot, you *might* be using the
wrong sort of data structure, but there are some use cases where
it's handy.)
The total number of items held in the cache at the current moment.
* `del(key)`
* `cache.calculatedSize`
Deletes a key out of the cache.
The total size of items in cache when using size tracking.
* `reset()`
* `set(key, value, [{ size, sizeCalculation, ttl, noDisposeOnSet }])`
Clear the cache entirely, throwing away all values.
Add a value to the cache.
Optional options object may contain `ttl` and `sizeCalculation` as
described above, which default to the settings on the cache object.
Options object may also include `size`, which will prevent calling the
`sizeCalculation` function and just use the specified number if it is a
positive integer, and `noDisposeOnSet` which will prevent calling a
`dispose` function in the case of overwrites.
Will update the recency of the entry.
* `get(key, { updateAgeOnGet, allowStale } = {}) => value`
Return a value from the cache.
Will update the recency of the cache entry found.
If the key is not found, `get()` will return `undefined`. This can be
confusing when setting values specifically to `undefined`, as in
`cache.set(key, undefined)`. Use `cache.has()` to determine whether a
key is present in the cache at all.
* `peek(key, { allowStale } = {}) => value`
Like `get()` but doesn't update recency or delete stale items.
Returns `undefined` if the item is stale, unless `allowStale` is set
either on the cache or in the options object.
* `has(key)`
Check if a key is in the cache, without updating the recent-ness
or deleting it for being stale.
Check if a key is in the cache, without updating the recency or age.
* `forEach(function(value,key,cache), [thisp])`
Will return `false` if the item is stale, even though it is technically
in the cache.
Just like `Array.prototype.forEach`. Iterates over all the keys
in the cache, in order of recent-ness. (Ie, more recently used
items are iterated over first.)
* `delete(key)`
* `rforEach(function(value,key,cache), [thisp])`
Deletes a key out of the cache.
The same as `cache.forEach(...)` but items are iterated over in
reverse order. (ie, less recently used items are iterated over
first.)
* `clear()`
Clear the cache entirely, throwing away all values.
Deprecated alias: `reset()`
* `keys()`
Return an array of the keys in the cache.
Return a generator yielding the keys in the cache.
* `values()`
Return an array of the values in the cache.
Return a generator yielding the values in the cache.
* `length`
* `entries()`
Return total length of objects in cache taking into account
`length` options function.
Return a generator yielding `[key, value]` pairs.
* `itemCount`
* `find(fn, [getOptions])`
Return total quantity of objects currently in cache. Note that
`stale` (see options) items are returned as part of this item
count.
Find a value for which the supplied `fn` method returns a truthy value,
similar to `Array.find()`.
`fn` is called as `fn(value, key, cache)`.
The optional `getOptions` are applied to the resulting `get()` of the
item found.
* `dump()`
Return an array of the cache entries ready for serialization and usage
with `destinationCache.load(arr)`.
Return an array of `[key, entry]` objects which can be passed to
`cache.load()`
* `load(cacheEntriesArray)`
Note: this returns an actual array, not a generator, so it can be more
easily passed around.
Loads another cache entries array, obtained with `sourceCache.dump()`,
into the cache. The destination cache is reset before loading new entries.
* `load(entries)`
* `prune()`
Reset the cache and load in the items in `entries` in the order listed.
Note that the shape of the resulting cache may be different if the same
options are not used in both caches.
Manually iterates over the entire cache proactively pruning old entries
* `purgeStale()`
Delete any stale entries. Returns `true` if anything was removed,
`false` otherwise.
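A brief sketch of the per-call options and bulk operations described above, again assuming the v7 API as documented; the keys, values, and sizes are placeholders.
```js
const LRU = require('lru-cache')
const cache = new LRU({ max: 3, maxSize: 50, sizeCalculation: () => 1, ttl: 60000 })

// per-call options override the cache-level defaults
cache.set('a', 'A', { size: 10 })        // use an explicit size, skip sizeCalculation
cache.set('b', 'B', { ttl: 1000 })       // shorter TTL for this entry only

cache.get('b', { updateAgeOnGet: true }) // read it and reset its TTL clock
cache.peek('a')                          // read without updating recency

// find() scans entries from most to least recently used
const hit = cache.find((value, key) => key === 'a')

// dump() produces [key, entry] pairs that load() accepts
const saved = cache.dump()
const copy = new LRU({ max: 3, maxSize: 50, sizeCalculation: () => 1 })
copy.load(saved)

cache.purgeStale()  // true if any stale entries were removed
cache.delete('a')
cache.clear()
```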
### Internal Methods and Properties
Do not use or rely on these. They will change or be removed without
notice. They will cause undefined behavior if used inappropriately.
This documentation is here so that it is especially clear that this is not
"undocumented" because someone forgot; it _is_ documented, and the
documentation is telling you not to do it.
Do not report bugs that stem from using these properties. They will be
ignored.
* `setKeyIndex()` Assign an index to a given key.
* `getKeyIndex()` Get the index for a given key.
* `deleteKeyIndex()` Remove the index for a given key.
* `getDisposeData()` Get the data to pass to a `dispose()` call.
* `callDispose()` Actually call the `dispose()` function.
* `onSet()` Called to assign data when `set()` is called.
* `evict()` Delete the least recently used item.
* `onDelete()` Perform actions required for deleting an entry.
* `isStale()` Check if an item is stale, by index.
* `list` The internal linked list of indexes defining recency.
## Performance
As of January 2022, version 7 of this library is one of the most performant
LRU cache implementations in JavaScript.
Note that benchmarks can be extremely difficult to get right. In
particular, the performance of set/get/delete operations on objects will
vary _wildly_ depending on the type of key used. V8 is highly optimized
for objects with keys that are short strings, especially integer numeric
strings. Thus any benchmark which tests _solely_ using numbers as keys
will tend to find that an object-based approach performs the best.
Note that coercing _anything_ to strings to use as object keys is unsafe,
unless you can be 100% certain that no other type of value will be used.
For example:
```js
const myCache = {}
const set = (k, v) => myCache[k] = v
const get = (k) => myCache[k]
set({}, 'please hang onto this for me')
set('[object Object]', 'oopsie')
```
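For contrast, a `Map` (which this library uses internally for its key index, via `keyMap`) keeps those two keys distinct:
```js
const safeMap = new Map()
safeMap.set({}, 'please hang onto this for me')
safeMap.set('[object Object]', 'oopsie')
safeMap.size  // 2: the object key and the string key do not collide
```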
Also beware of "Just So" stories regarding performance. Garbage collection
of large (especially: deep) object graphs can be incredibly costly, with
several "tipping points" where it increases exponentially. As a result,
putting that off until later can make it much worse, and less predictable.
If a library performs well, but only in a scenario where the object graph is
kept shallow, then that won't help you if you are using large objects as
keys.
In general, when attempting to use a library to improve performance (such
as a cache like this one), it's best to choose an option that will perform
well in the sorts of scenarios where you'll actually use it.
This library is optimized for repeated gets and minimizing eviction time,
since that is the expected need of an LRU. Set operations are somewhat
slower on average than a few other options, in part because of that
optimization. It is assumed that you'll be caching some costly operation,
ideally as rarely as possible, so optimizing set over get would be unwise.
If performance matters to you:
1. If it's at all possible to use small integer values as keys, and you can
guarantee that no other types of values will be used as keys, then do that,
and use `mode: 'object'`.
2. Failing that, if at all possible, use short non-numeric strings (ie,
less than 256 characters) as your keys, with `mode: 'object'`.
3. If you know that the types of your keys will be long strings, strings
that look like floats, `null`, objects, or some mix of any random thing,
then use `mode: 'map'` to skip the detection logic.
4. Do not use a `dispose` function, size tracking, or ttl behavior, unless
absolutely needed. These features are convenient, and necessary in some
use cases, and every attempt has been made to make the performance
impact minimal, but it isn't nothing.
## Breaking Changes in Version 7
This library changed to a different algorithm and internal data structure
in version 7, yielding significantly better performance, albeit with
some subtle changes as a result.
If you were relying on the internals of LRUCache in version 6 or before, it
probably will not work in version 7 and above.
### Specific API Changes
For the most part, the feature set has been maintained as much as possible.
However, some other cleanup and refactoring changes were made in v7 as
well.
* The `set()`, `get()`, and `has()` functions take options objects
instead of positional booleans/integers for optional parameters.
* `size` can be set explicitly on `set()`.
* `updateAgeOnHas` and `updateRecencyOnHas` added.
* `cache.length` was renamed to the more fitting `cache.size`.
* Option name deprecations:
* `stale` -> `allowStale`
* `length` -> `sizeCalculation`
* `maxAge` -> `ttl`
* The objects used by `cache.load()` and `cache.dump()` are incompatible
with previous versions.
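To make the renames above concrete, here is a hedged before/after sketch; the option values are arbitrary, and the v7 block supplies `maxSize` because `sizeCalculation` now requires it.
```js
const LRU = require('lru-cache')

// v6 style
const old = new LRU({ max: 500, maxAge: 60000, stale: true, length: n => n.length })
old.set('key', 'value', 30000)   // positional per-entry maxAge
old.del('key')
old.reset()

// v7 style
const cache = new LRU({
  max: 500,
  ttl: 60000,                     // was maxAge
  allowStale: true,               // was stale
  maxSize: 5000,                  // now required alongside sizeCalculation
  sizeCalculation: n => n.length, // was length
})
cache.set('key', 'value', { ttl: 30000 })  // options object replaces the positional arg
cache.delete('key')                        // del() is now delete()
cache.clear()                              // reset() is deprecated in favor of clear()
```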