Latest Threat Research: SANDWORM_MODE: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a DemoInstallSign in
Socket

lru-cache

Package Overview
Dependencies
Maintainers
1
Versions
151
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

lru-cache - npm Package Compare versions

Comparing version
7.9.1
to
7.10.0
+561
index.d.ts
// Type definitions for lru-cache 7.10.0
// Project: https://github.com/isaacs/node-lru-cache
// Based initially on @types/lru-cache
// https://github.com/DefinitelyTyped/DefinitelyTyped
// used under the terms of the MIT License, shown below.
//
// DefinitelyTyped license:
// ------
// MIT License
//
// Copyright (c) Microsoft Corporation.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
// ------
//
// Changes by Isaac Z. Schlueter released under the terms found in the
// LICENSE file within this project.
/// <reference lib="DOM" />
//tslint:disable:member-access
declare class LRUCache<K, V> implements Iterable<[K, V]> {
  constructor(options: LRUCache.Options<K, V>)

  /**
   * Number of items in the cache.
   * Alias for `cache.size`
   *
   * @deprecated since 7.0 use `cache.size` instead
   */
  public readonly length: number

  public readonly max: number
  public readonly maxSize: number
  public readonly sizeCalculation:
    | LRUCache.SizeCalculator<K, V>
    | undefined
  public readonly dispose: LRUCache.Disposer<K, V>
  /**
   * @since 7.4.0
   */
  public readonly disposeAfter: LRUCache.Disposer<K, V> | null
  public readonly noDisposeOnSet: boolean
  public readonly ttl: number
  public readonly ttlResolution: number
  public readonly ttlAutopurge: boolean
  public readonly allowStale: boolean
  public readonly updateAgeOnGet: boolean
  /**
   * @since 7.6.0
   */
  public readonly fetchMethod: LRUCache.Fetcher<K, V> | null

  /**
   * The total number of items held in the cache at the current moment.
   */
  public readonly size: number

  /**
   * The total size of items in cache when using size tracking.
   */
  public readonly calculatedSize: number

  /**
   * Add a value to the cache.
   */
  public set(
    key: K,
    value: V,
    options?: LRUCache.SetOptions<K, V>
  ): this

  /**
   * Return a value from the cache.
   * Will update the recency of the cache entry found.
   * If the key is not found, `get()` will return `undefined`.
   * This can be confusing when setting values specifically to `undefined`,
   * as in `cache.set(key, undefined)`. Use `cache.has()` to determine
   * whether a key is present in the cache at all.
   */
  // tslint:disable-next-line:no-unnecessary-generics
  public get<T = V>(
    key: K,
    options?: LRUCache.GetOptions
  ): T | undefined

  /**
   * Like `get()` but doesn't update recency or delete stale items.
   * Returns `undefined` if the item is stale, unless `allowStale` is set
   * either on the cache or in the options object.
   */
  // tslint:disable-next-line:no-unnecessary-generics
  public peek<T = V>(
    key: K,
    options?: LRUCache.PeekOptions
  ): T | undefined

  /**
   * Check if a key is in the cache, without updating the recency of use.
   * Will return false if the item is stale, even though it is technically
   * in the cache.
   * Will not update item age unless `updateAgeOnHas` is set in the options
   * or constructor.
   */
  public has(key: K, options?: LRUCache.HasOptions): boolean

  /**
   * Deletes a key out of the cache.
   * Returns true if the key was deleted, false otherwise.
   */
  public delete(key: K): boolean

  /**
   * Clear the cache entirely, throwing away all values.
   */
  public clear(): void

  /**
   * Delete any stale entries. Returns true if anything was removed, false
   * otherwise.
   */
  public purgeStale(): boolean

  /**
   * Find a value for which the supplied fn method returns a truthy value,
   * similar to Array.find(). fn is called as fn(value, key, cache).
   * Returns `undefined` when no entry satisfies the predicate.
   */
  // tslint:disable-next-line:no-unnecessary-generics
  public find<T = V>(
    callbackFn: (
      value: V,
      key: K,
      cache: this
    ) => boolean | undefined | void,
    options?: LRUCache.GetOptions
  ): T | undefined

  /**
   * Call the supplied function on each item in the cache, in order from
   * most recently used to least recently used. fn is called as
   * fn(value, key, cache). Does not update age or recency of use.
   */
  public forEach<T = this>(
    callbackFn: (this: T, value: V, key: K, cache: this) => void,
    thisArg?: T
  ): void

  /**
   * The same as `cache.forEach(...)` but items are iterated over in reverse
   * order. (ie, less recently used items are iterated over first.)
   */
  public rforEach<T = this>(
    callbackFn: (this: T, value: V, key: K, cache: this) => void,
    thisArg?: T
  ): void

  /**
   * Return a generator yielding the keys in the cache,
   * in order from most recently used to least recently used.
   */
  public keys(): Generator<K>

  /**
   * Inverse order version of `cache.keys()`
   * Return a generator yielding the keys in the cache,
   * in order from least recently used to most recently used.
   */
  public rkeys(): Generator<K>

  /**
   * Return a generator yielding the values in the cache,
   * in order from most recently used to least recently used.
   */
  public values(): Generator<V>

  /**
   * Inverse order version of `cache.values()`
   * Return a generator yielding the values in the cache,
   * in order from least recently used to most recently used.
   */
  public rvalues(): Generator<V>

  /**
   * Return a generator yielding `[key, value]` pairs,
   * in order from most recently used to least recently used.
   */
  public entries(): Generator<[K, V]>

  /**
   * Inverse order version of `cache.entries()`
   * Return a generator yielding `[key, value]` pairs,
   * in order from least recently used to most recently used.
   */
  public rentries(): Generator<[K, V]>

  /**
   * Iterating over the cache itself yields the same results as
   * `cache.entries()`
   */
  public [Symbol.iterator](): Iterator<[K, V]>

  /**
   * Return an array of [key, entry] objects which can be passed to
   * cache.load()
   */
  public dump(): Array<[K, LRUCache.Entry<V>]>

  /**
   * Reset the cache and load in the items in entries in the order listed.
   * Note that the shape of the resulting cache may be different if the
   * same options are not used in both caches.
   */
  public load(
    cacheEntries: ReadonlyArray<[K, LRUCache.Entry<V>]>
  ): void

  /**
   * Evict the least recently used item, returning its value or `undefined`
   * if cache is empty.
   */
  public pop(): V | undefined

  /**
   * Deletes a key out of the cache.
   *
   * @deprecated since 7.0 use delete() instead
   */
  public del(key: K): boolean

  /**
   * Clear the cache entirely, throwing away all values.
   *
   * @deprecated since 7.0 use clear() instead
   */
  public reset(): void

  /**
   * Manually iterates over the entire cache proactively pruning old entries.
   *
   * @deprecated since 7.0 use purgeStale() instead
   */
  public prune(): boolean

  /**
   * Make an asynchronous cached fetch using the `fetchMethod` function.
   *
   * @since 7.6.0
   */
  // tslint:disable-next-line:no-unnecessary-generics
  public fetch<ExpectedValue = V>(
    key: K,
    options?: LRUCache.FetchOptions<K, V>
  ): Promise<ExpectedValue | undefined>

  /**
   * Return the remaining time before an item expires (Infinity if the item
   * is present without a TTL, 0 if absent or expired).
   *
   * @since 7.6.0
   */
  public getRemainingTTL(key: K): number
}
declare namespace LRUCache {
  type LRUMilliseconds = number

  /** Why an entry was handed to a disposer. */
  type DisposeReason = 'evict' | 'set' | 'delete'

  type SizeCalculator<K, V> = (value: V, key: K) => number

  type Disposer<K, V> = (
    value: V,
    key: K,
    reason: DisposeReason
  ) => void

  // NOTE(review): `staleValue` is typed `V`, but when the key is absent the
  // implementation passes `undefined` — widening here would break existing
  // strictly-typed implementors, so the declared type is kept as-is.
  type Fetcher<K, V> = (
    key: K,
    staleValue: V,
    options: FetcherOptions<K, V>
  ) => Promise<V | void | undefined> | V | void | undefined

  interface DeprecatedOptions<K, V> {
    /**
     * alias for ttl
     *
     * @deprecated since 7.0 use options.ttl instead
     */
    maxAge?: number
    /**
     * alias for sizeCalculation
     *
     * @deprecated since 7.0 use options.sizeCalculation instead
     */
    length?: SizeCalculator<K, V>
    /**
     * alias for allowStale
     *
     * @deprecated since 7.0 use options.allowStale instead
     */
    stale?: boolean
  }

  interface LimitedByCount {
    /**
     * The number of most recently used items to keep.
     * Note that we may store fewer items than this if maxSize is hit.
     */
    max: number
  }

  interface LimitedBySize<K, V> {
    /**
     * If you wish to track item size, you must provide a maxSize
     * note that we still will only keep up to max *actual items*,
     * if max is set, so size tracking may cause fewer than max items
     * to be stored. At the extreme, a single item of maxSize size
     * will cause everything else in the cache to be dropped when it
     * is added. Use with caution!
     * Note also that size tracking can negatively impact performance,
     * though for most cases, only minimally.
     */
    maxSize: number
    /**
     * Function to calculate size of items. Useful if storing strings or
     * buffers or other items where memory size depends on the object itself.
     * Also note that oversized items do NOT immediately get dropped from
     * the cache, though they will cause faster turnover in the storage.
     */
    sizeCalculation?: SizeCalculator<K, V>
  }

  interface LimitedByTTL {
    /**
     * Max time in milliseconds for items to live in cache before they are
     * considered stale. Note that stale items are NOT preemptively removed
     * by default, and MAY live in the cache, contributing to its LRU max,
     * long after they have expired.
     *
     * Also, as this cache is optimized for LRU/MRU operations, some of
     * the staleness/TTL checks will reduce performance, as they will incur
     * overhead by deleting items.
     *
     * Must be an integer number of ms, defaults to 0, which means "no TTL"
     */
    ttl: number
    /**
     * Boolean flag to tell the cache to not update the TTL when
     * setting a new value for an existing key (ie, when updating a value
     * rather than inserting a new value). Note that the TTL value is
     * _always_ set (if provided) when adding a new entry into the cache.
     *
     * @default false
     * @since 7.4.0
     */
    noUpdateTTL?: boolean
    /**
     * Minimum amount of time in ms in which to check for staleness.
     * Defaults to 1, which means that the current time is checked
     * at most once per millisecond.
     *
     * Set to 0 to check the current time every time staleness is tested.
     * (This reduces performance, and is theoretically unnecessary.)
     *
     * Setting this to a higher value will improve performance somewhat
     * while using ttl tracking, albeit at the expense of keeping stale
     * items around a bit longer than intended.
     *
     * @default 1
     * @since 7.1.0
     */
    ttlResolution?: number
    /**
     * Preemptively remove stale items from the cache.
     * Note that this may significantly degrade performance,
     * especially if the cache is storing a large number of items.
     * It is almost always best to just leave the stale items in
     * the cache, and let them fall out as new items are added.
     *
     * Note that this means that allowStale is a bit pointless,
     * as stale items will be deleted almost as soon as they expire.
     *
     * Use with caution!
     *
     * @default false
     * @since 7.1.0
     */
    ttlAutopurge?: boolean
    /**
     * Return stale items from cache.get() before disposing of them.
     * Return stale values from cache.fetch() while performing a call
     * to the `fetchMethod` in the background.
     *
     * @default false
     */
    allowStale?: boolean
    /**
     * Update the age of items on cache.get(), renewing their TTL
     *
     * @default false
     */
    updateAgeOnGet?: boolean
    /**
     * Update the age of items on cache.has(), renewing their TTL
     *
     * @default false
     */
    updateAgeOnHas?: boolean
  }

  /** At least one bound (count, size, or TTL) must be provided. */
  type SafetyBounds<K, V> =
    | LimitedByCount
    | LimitedBySize<K, V>
    | LimitedByTTL

  // options shared by all three of the limiting scenarios
  interface SharedOptions<K, V> {
    /**
     * Function that is called on items when they are dropped from the cache.
     * This can be handy if you want to close file descriptors or do other
     * cleanup tasks when items are no longer accessible. Called with `key,
     * value`. It's called before actually removing the item from the
     * internal cache, so it is *NOT* safe to re-add them.
     * Use `disposeAfter` if you wish to dispose items after they have been
     * full removed, when it is safe to add them back to the cache.
     */
    dispose?: Disposer<K, V>
    /**
     * The same as dispose, but called *after* the entry is completely
     * removed and the cache is once again in a clean state. It is safe to
     * add an item right back into the cache at this point.
     * However, note that it is *very* easy to inadvertently create infinite
     * recursion this way.
     *
     * @since 7.3.0
     */
    disposeAfter?: Disposer<K, V>
    /**
     * Set to true to suppress calling the dispose() function if the entry
     * key is still accessible within the cache.
     * This may be overridden by passing an options object to cache.set().
     *
     * @default false
     */
    noDisposeOnSet?: boolean
    /**
     * `fetchMethod` Function that is used to make background asynchronous
     * fetches. Called with `fetchMethod(key, staleValue)`. May return a
     * Promise.
     *
     * If `fetchMethod` is not provided, then `cache.fetch(key)` is
     * equivalent to `Promise.resolve(cache.get(key))`.
     *
     * @since 7.6.0
     */
    fetchMethod?: LRUCache.Fetcher<K, V>
    /**
     * Set to true to suppress the deletion of stale data when a
     * `fetchMethod` throws an error or returns a rejected promise
     *
     * @default false
     * @since 7.10.0
     */
    noDeleteOnFetchRejection?: boolean
  }

  type Options<K, V> = SharedOptions<K, V> &
    DeprecatedOptions<K, V> &
    SafetyBounds<K, V>

  /**
   * options which override the options set in the LRUCache constructor
   * when making `cache.set()` calls.
   */
  interface SetOptions<K, V> {
    /**
     * A value for the size of the entry, prevents calls to
     * `sizeCalculation` function.
     */
    size?: number
    sizeCalculation?: SizeCalculator<K, V>
    ttl?: number
    noDisposeOnSet?: boolean
    noUpdateTTL?: boolean
  }

  /**
   * options which override the options set in the LRUCache constructor
   * when making `cache.has()` calls.
   */
  interface HasOptions {
    updateAgeOnHas?: boolean
  }

  /**
   * options which override the options set in the LRUCache constructor
   * when making `cache.get()` calls.
   */
  interface GetOptions {
    allowStale?: boolean
    updateAgeOnGet?: boolean
  }

  /**
   * options which override the options set in the LRUCache constructor
   * when making `cache.peek()` calls.
   */
  interface PeekOptions {
    allowStale?: boolean
  }

  /**
   * options which override the options set in the LRUCache constructor
   * when making `cache.fetch()` calls.
   * This is the union of GetOptions and SetOptions, plus the
   * `noDeleteOnFetchRejection` boolean.
   */
  interface FetchOptions<K, V> {
    allowStale?: boolean
    updateAgeOnGet?: boolean
    size?: number
    sizeCalculation?: SizeCalculator<K, V>
    ttl?: number
    noDisposeOnSet?: boolean
    noUpdateTTL?: boolean
    noDeleteOnFetchRejection?: boolean
  }

  /** The third argument passed to a `fetchMethod` implementation. */
  interface FetcherOptions<K, V> {
    signal: AbortSignal
    options: FetchOptions<K, V>
  }

  /** Serialized entry shape produced by `dump()` and consumed by `load()`. */
  interface Entry<V> {
    value: V
    ttl?: number
    size?: number
  }
}
export = LRUCache
+208
-128

@@ -1,3 +0,7 @@

const perf = typeof performance === 'object' && performance &&
typeof performance.now === 'function' ? performance : Date
const perf =
typeof performance === 'object' &&
performance &&
typeof performance.now === 'function'
? performance
: Date

@@ -10,16 +14,21 @@ const hasAbortController = typeof AbortController === 'function'

// our purposes, and if used properly, behaves the same.
const AC = hasAbortController ? AbortController : Object.assign(
class AbortController {
constructor () { this.signal = new AC.AbortSignal }
abort () {
this.signal.dispatchEvent('abort')
const AC = hasAbortController
? AbortController
: class AbortController {
constructor() {
this.signal = new AS()
}
abort() {
this.signal.dispatchEvent('abort')
}
}
},
{
AbortSignal: class AbortSignal {
constructor () {
const AS = hasAbortController
? AbortSignal
: class AbortSignal {
constructor() {
this.aborted = false
this._listeners = []
}
dispatchEvent (type) {
dispatchEvent(type) {
if (type === 'abort') {

@@ -32,4 +41,4 @@ this.aborted = true

}
onabort () {}
addEventListener (ev, fn) {
onabort() {}
addEventListener(ev, fn) {
if (ev === 'abort') {

@@ -39,3 +48,3 @@ this._listeners.push(fn)

}
removeEventListener (ev, fn) {
removeEventListener(ev, fn) {
if (ev === 'abort') {

@@ -46,4 +55,2 @@ this._listeners = this._listeners.filter(f => f !== fn)

}
}
)

@@ -76,6 +83,6 @@ const warned = new Set()

typeof process === 'object' &&
process &&
typeof process.emitWarning === 'function'
? process.emitWarning(...a)
: console.error(...a)
process &&
typeof process.emitWarning === 'function'
? process.emitWarning(...a)
: console.error(...a)
}

@@ -101,11 +108,17 @@

* Maybe in the future, these limits will have expanded. */
const getUintArray = max => !isPosInt(max) ? null
: max <= Math.pow(2, 8) ? Uint8Array
: max <= Math.pow(2, 16) ? Uint16Array
: max <= Math.pow(2, 32) ? Uint32Array
: max <= Number.MAX_SAFE_INTEGER ? ZeroArray
: null
const getUintArray = max =>
!isPosInt(max)
? null
: max <= Math.pow(2, 8)
? Uint8Array
: max <= Math.pow(2, 16)
? Uint16Array
: max <= Math.pow(2, 32)
? Uint32Array
: max <= Number.MAX_SAFE_INTEGER
? ZeroArray
: null
class ZeroArray extends Array {
constructor (size) {
constructor(size) {
super(size)

@@ -117,3 +130,3 @@ this.fill(0)

class Stack {
constructor (max) {
constructor(max) {
if (max === 0) {

@@ -126,6 +139,6 @@ return []

}
push (n) {
push(n) {
this.heap[this.length++] = n
}
pop () {
pop() {
return this.heap[--this.length]

@@ -136,3 +149,3 @@ }

class LRUCache {
constructor (options = {}) {
constructor(options = {}) {
const {

@@ -153,2 +166,3 @@ max = 0,

fetchMethod,
noDeleteOnFetchRejection,
} = options

@@ -158,7 +172,4 @@

// the thing being passed in is another LRUCache we're copying.
const {
length,
maxAge,
stale,
} = options instanceof LRUCache ? {} : options
const { length, maxAge, stale } =
options instanceof LRUCache ? {} : options

@@ -179,3 +190,5 @@ if (max !== 0 && !isPosInt(max)) {

if (!this.maxSize) {
throw new TypeError('cannot set sizeCalculation without setting maxSize')
throw new TypeError(
'cannot set sizeCalculation without setting maxSize'
)
}

@@ -189,3 +202,5 @@ if (typeof this.sizeCalculation !== 'function') {

if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
throw new TypeError('fetchMethod must be a function if specified')
throw new TypeError(
'fetchMethod must be a function if specified'
)
}

@@ -216,6 +231,9 @@

this.noUpdateTTL = !!noUpdateTTL
this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection
if (this.maxSize !== 0) {
if (!isPosInt(this.maxSize)) {
throw new TypeError('maxSize must be a positive integer if specified')
throw new TypeError(
'maxSize must be a positive integer if specified'
)
}

@@ -228,4 +246,6 @@ this.initializeSizeTracking()

this.updateAgeOnHas = !!updateAgeOnHas
this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0
? ttlResolution : 1
this.ttlResolution =
isPosInt(ttlResolution) || ttlResolution === 0
? ttlResolution
: 1
this.ttlAutopurge = !!ttlAutopurge

@@ -235,3 +255,5 @@ this.ttl = ttl || maxAge || 0

if (!isPosInt(this.ttl)) {
throw new TypeError('ttl must be a positive integer if specified')
throw new TypeError(
'ttl must be a positive integer if specified'
)
}

@@ -243,3 +265,5 @@ this.initializeTTLTracking()

if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
throw new TypeError('At least one of max, maxSize, or ttl is required')
throw new TypeError(
'At least one of max, maxSize, or ttl is required'
)
}

@@ -250,3 +274,4 @@ if (!this.ttlAutopurge && !this.max && !this.maxSize) {

warned.add(code)
const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
const msg =
'TTL caching without ttlAutopurge, max, or maxSize can ' +
'result in unbounded memory consumption.'

@@ -268,7 +293,7 @@ emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)

getRemainingTTL (key) {
getRemainingTTL(key) {
return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
}
initializeTTLTracking () {
initializeTTLTracking() {
this.ttls = new ZeroArray(this.max)

@@ -293,3 +318,3 @@ this.starts = new ZeroArray(this.max)

this.updateItemAge = (index) => {
this.updateItemAge = index => {
this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0

@@ -305,3 +330,6 @@ }

cachedNow = n
const t = setTimeout(() => cachedNow = 0, this.ttlResolution)
const t = setTimeout(
() => (cachedNow = 0),
this.ttlResolution
)
/* istanbul ignore else - not available on all platforms */

@@ -315,3 +343,3 @@ if (t.unref) {

this.getRemainingTTL = (key) => {
this.getRemainingTTL = key => {
const index = this.keyMap.get(key)

@@ -321,19 +349,29 @@ if (index === undefined) {

}
return this.ttls[index] === 0 || this.starts[index] === 0 ? Infinity
: ((this.starts[index] + this.ttls[index]) - (cachedNow || getNow()))
return this.ttls[index] === 0 || this.starts[index] === 0
? Infinity
: this.starts[index] +
this.ttls[index] -
(cachedNow || getNow())
}
this.isStale = (index) => {
return this.ttls[index] !== 0 && this.starts[index] !== 0 &&
((cachedNow || getNow()) - this.starts[index] > this.ttls[index])
this.isStale = index => {
return (
this.ttls[index] !== 0 &&
this.starts[index] !== 0 &&
(cachedNow || getNow()) - this.starts[index] >
this.ttls[index]
)
}
}
updateItemAge (index) {}
setItemTTL (index, ttl) {}
isStale (index) { return false }
updateItemAge(index) {}
setItemTTL(index, ttl) {}
isStale(index) {
return false
}
initializeSizeTracking () {
initializeSizeTracking() {
this.calculatedSize = 0
this.sizes = new ZeroArray(this.max)
this.removeItemSize = index => this.calculatedSize -= this.sizes[index]
this.removeItemSize = index =>
(this.calculatedSize -= this.sizes[index])
this.requireSize = (k, v, size, sizeCalculation) => {

@@ -347,6 +385,10 @@ if (!isPosInt(size)) {

if (!isPosInt(size)) {
throw new TypeError('sizeCalculation return invalid (expect positive integer)')
throw new TypeError(
'sizeCalculation return invalid (expect positive integer)'
)
}
} else {
throw new TypeError('invalid size value (must be positive integer)')
throw new TypeError(
'invalid size value (must be positive integer)'
)
}

@@ -365,11 +407,13 @@ }

}
removeItemSize (index) {}
addItemSize (index, v, k, size) {}
requireSize (k, v, size, sizeCalculation) {
removeItemSize(index) {}
addItemSize(index, v, k, size) {}
requireSize(k, v, size, sizeCalculation) {
if (size || sizeCalculation) {
throw new TypeError('cannot set size without setting maxSize on cache')
throw new TypeError(
'cannot set size without setting maxSize on cache'
)
}
}
*indexes ({ allowStale = this.allowStale } = {}) {
*indexes({ allowStale = this.allowStale } = {}) {
if (this.size) {

@@ -392,3 +436,3 @@ for (let i = this.tail; true; ) {

*rindexes ({ allowStale = this.allowStale } = {}) {
*rindexes({ allowStale = this.allowStale } = {}) {
if (this.size) {

@@ -411,7 +455,7 @@ for (let i = this.head; true; ) {

isValidIndex (index) {
isValidIndex(index) {
return this.keyMap.get(this.keyList[index]) === index
}
*entries () {
*entries() {
for (const i of this.indexes()) {

@@ -421,3 +465,3 @@ yield [this.keyList[i], this.valList[i]]

}
*rentries () {
*rentries() {
for (const i of this.rindexes()) {

@@ -428,3 +472,3 @@ yield [this.keyList[i], this.valList[i]]

*keys () {
*keys() {
for (const i of this.indexes()) {

@@ -434,3 +478,3 @@ yield this.keyList[i]

}
*rkeys () {
*rkeys() {
for (const i of this.rindexes()) {

@@ -441,3 +485,3 @@ yield this.keyList[i]

*values () {
*values() {
for (const i of this.indexes()) {

@@ -447,3 +491,3 @@ yield this.valList[i]

}
*rvalues () {
*rvalues() {
for (const i of this.rindexes()) {

@@ -454,7 +498,7 @@ yield this.valList[i]

[Symbol.iterator] () {
[Symbol.iterator]() {
return this.entries()
}
find (fn, getOptions = {}) {
find(fn, getOptions = {}) {
for (const i of this.indexes()) {

@@ -467,3 +511,3 @@ if (fn(this.valList[i], this.keyList[i], this)) {

forEach (fn, thisp = this) {
forEach(fn, thisp = this) {
for (const i of this.indexes()) {

@@ -474,3 +518,3 @@ fn.call(thisp, this.valList[i], this.keyList[i], this)

rforEach (fn, thisp = this) {
rforEach(fn, thisp = this) {
for (const i of this.rindexes()) {

@@ -481,3 +525,3 @@ fn.call(thisp, this.valList[i], this.keyList[i], this)

get prune () {
get prune() {
deprecatedMethod('prune', 'purgeStale')

@@ -487,3 +531,3 @@ return this.purgeStale

purgeStale () {
purgeStale() {
let deleted = false

@@ -499,3 +543,3 @@ for (const i of this.rindexes({ allowStale: true })) {

dump () {
dump() {
const arr = []

@@ -517,3 +561,3 @@ for (const i of this.indexes()) {

load (arr) {
load(arr) {
this.clear()

@@ -525,11 +569,15 @@ for (const [key, entry] of arr) {

dispose (v, k, reason) {}
dispose(v, k, reason) {}
set (k, v, {
ttl = this.ttl,
noDisposeOnSet = this.noDisposeOnSet,
size = 0,
sizeCalculation = this.sizeCalculation,
noUpdateTTL = this.noUpdateTTL,
} = {}) {
set(
k,
v,
{
ttl = this.ttl,
noDisposeOnSet = this.noDisposeOnSet,
size = 0,
sizeCalculation = this.sizeCalculation,
noUpdateTTL = this.noUpdateTTL,
} = {}
) {
size = this.requireSize(k, v, size, sizeCalculation)

@@ -546,3 +594,3 @@ let index = this.size === 0 ? undefined : this.keyMap.get(k)

this.tail = index
this.size ++
this.size++
this.addItemSize(index, v, k, size)

@@ -584,3 +632,3 @@ noUpdateTTL = false

newIndex () {
newIndex() {
if (this.size === 0) {

@@ -599,3 +647,3 @@ return this.tail

pop () {
pop() {
if (this.size) {

@@ -608,3 +656,3 @@ const val = this.valList[this.head]

evict (free) {
evict(free) {
const head = this.head

@@ -630,7 +678,7 @@ const k = this.keyList[head]

this.keyMap.delete(k)
this.size --
this.size--
return head
}
has (k, { updateAgeOnHas = this.updateAgeOnHas } = {}) {
has(k, { updateAgeOnHas = this.updateAgeOnHas } = {}) {
const index = this.keyMap.get(k)

@@ -649,3 +697,3 @@ if (index !== undefined) {

// like get(), but without any LRU updating or TTL expiration
peek (k, { allowStale = this.allowStale } = {}) {
peek(k, { allowStale = this.allowStale } = {}) {
const index = this.keyMap.get(k)

@@ -657,3 +705,3 @@ if (index !== undefined && (allowStale || !this.isStale(index))) {

backgroundFetch (k, index, options) {
backgroundFetch(k, index, options) {
const v = index === undefined ? undefined : this.valList[index]

@@ -676,3 +724,12 @@ if (this.isBackgroundFetch(v)) {

if (this.valList[index] === p) {
this.delete(k)
const del =
!options.noDeleteOnFetchRejection ||
p.__staleWhileFetching === undefined
if (del) {
this.delete(k)
} else {
// still replace the *promise* with the stale value,
// since we are done with the promise at this point.
this.valList[index] = p.__staleWhileFetching
}
}

@@ -697,21 +754,35 @@ if (p.__returned === p) {

isBackgroundFetch (p) {
return p && typeof p === 'object' && typeof p.then === 'function' &&
Object.prototype.hasOwnProperty.call(p, '__staleWhileFetching') &&
isBackgroundFetch(p) {
return (
p &&
typeof p === 'object' &&
typeof p.then === 'function' &&
Object.prototype.hasOwnProperty.call(
p,
'__staleWhileFetching'
) &&
Object.prototype.hasOwnProperty.call(p, '__returned') &&
(p.__returned === p || p.__returned === null)
)
}
// this takes the union of get() and set() opts, because it does both
async fetch (k, {
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
ttl = this.ttl,
noDisposeOnSet = this.noDisposeOnSet,
size = 0,
sizeCalculation = this.sizeCalculation,
noUpdateTTL = this.noUpdateTTL,
} = {}) {
async fetch(
k,
{
// get options
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
// set options
ttl = this.ttl,
noDisposeOnSet = this.noDisposeOnSet,
size = 0,
sizeCalculation = this.sizeCalculation,
noUpdateTTL = this.noUpdateTTL,
// fetch exclusive options
noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,
} = {}
) {
if (!this.fetchMethod) {
return this.get(k, {allowStale, updateAgeOnGet})
return this.get(k, { allowStale, updateAgeOnGet })
}

@@ -727,2 +798,3 @@

noUpdateTTL,
noDeleteOnFetchRejection,
}

@@ -739,3 +811,4 @@

return allowStale && v.__staleWhileFetching !== undefined
? v.__staleWhileFetching : (v.__returned = v)
? v.__staleWhileFetching
: (v.__returned = v)
}

@@ -755,10 +828,14 @@

return allowStale && p.__staleWhileFetching !== undefined
? p.__staleWhileFetching : (p.__returned = p)
? p.__staleWhileFetching
: (p.__returned = p)
}
}
get (k, {
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
} = {}) {
get(
k,
{
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
} = {}
) {
const index = this.keyMap.get(k)

@@ -792,3 +869,3 @@ if (index !== undefined) {

connect (p, n) {
connect(p, n) {
this.prev[n] = p

@@ -798,3 +875,3 @@ this.next[p] = n

moveToTail (index) {
moveToTail(index) {
// if tail already, nothing to do

@@ -819,3 +896,3 @@ // if head, move head to next[index]

get del () {
get del() {
deprecatedMethod('del', 'delete')

@@ -825,3 +902,3 @@ return this.delete

delete (k) {
delete(k) {
let deleted = false

@@ -856,3 +933,3 @@ if (this.size !== 0) {

}
this.size --
this.size--
this.free.push(index)

@@ -870,3 +947,3 @@ }

clear () {
clear() {
for (const index of this.rindexes({ allowStale: true })) {

@@ -908,3 +985,3 @@ const v = this.valList[index]

get reset () {
get reset() {
deprecatedMethod('reset', 'clear')

@@ -914,3 +991,3 @@ return this.clear

get length () {
get length() {
deprecatedProperty('length', 'size')

@@ -920,7 +997,10 @@ return this.size

static get AbortController () {
static get AbortController() {
return AC
}
static get AbortSignal() {
return AS
}
}
module.exports = LRUCache
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
"version": "7.9.1",
"version": "7.10.0",
"author": "Isaac Z. Schlueter <i@izs.me>",

@@ -13,8 +13,9 @@ "keywords": [

"build": "",
"size": "size-limit",
"test": "tap",
"snap": "tap",
"size": "size-limit",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags"
"prepublishOnly": "git push origin --follow-tags",
"format": "prettier --write ."
},

@@ -25,10 +26,19 @@ "main": "index.js",

"@size-limit/preset-small-lib": "^7.0.8",
"@types/node": "^17.0.31",
"@types/tap": "^15.0.6",
"benchmark": "^2.1.4",
"c8": "^7.11.2",
"clock-mock": "^1.0.4",
"eslint-config-prettier": "^8.5.0",
"prettier": "^2.6.2",
"size-limit": "^7.0.8",
"tap": "^15.1.6"
"tap": "^16.0.1",
"ts-node": "^10.7.0",
"tslib": "^2.4.0",
"typescript": "^4.6.4"
},
"license": "ISC",
"files": [
"index.js"
"index.js",
"index.d.ts"
],

@@ -38,7 +48,20 @@ "engines": {

},
"prettier": {
"semi": false,
"printWidth": 70,
"tabWidth": 2,
"useTabs": false,
"singleQuote": true,
"jsxSingleQuote": false,
"bracketSameLine": true,
"arrowParens": "avoid",
"endOfLine": "lf"
},
"tap": {
"coverage-map": "map.js",
"node-arg": [
"--expose-gc"
]
"--expose-gc",
"--require",
"ts-node/register"
],
"ts": false
},

@@ -45,0 +68,0 @@ "size-limit": [

@@ -167,2 +167,19 @@ # lru-cache

### `noDeleteOnFetchRejection`
If a `fetchMethod` throws an error or returns a rejected promise,
then by default, any existing stale value will be removed from
the cache.
If `noDeleteOnFetchRejection` is set to `true`, then this
behavior is suppressed, and the stale value remains in the cache
in the case of a rejected `fetchMethod`.
This is important in cases where a `fetchMethod` is _only_ called
as a background update while the stale value is returned, when
`allowStale` is used.
This may be set in calls to `fetch()`, or defaulted on the
constructor.
### `dispose`

@@ -169,0 +186,0 @@