async-cache-dedupe - npm Package Compare versions

Comparing version 0.5.0 to 1.0.0

index.js

@@ -1,148 +1,29 @@

'use strict'

- const kValues = require('./symbol')
- const stringify = require('safe-stable-stringify')
- const LRUCache = require('mnemonist/lru-cache')
-
- const kCacheSize = Symbol('kCacheSize')
- const kTTL = Symbol('kTTL')
- const kOnHit = Symbol('kOnHit')
-
- class Cache {
-   constructor (opts) {
-     opts = opts || {}
-     this[kValues] = {}
-     this[kCacheSize] = opts.cacheSize || 1024
-     this[kTTL] = opts.ttl || 0
-     this[kOnHit] = opts.onHit || noop
-   }
-
-   define (key, opts, func) {
-     if (typeof opts === 'function') {
-       func = opts
-       opts = {}
-     }
-
-     if (key && this[key]) {
-       throw new Error(`${key} is already defined in the cache or it is a forbidden name`)
-     }
-
-     opts = opts || {}
-
-     if (typeof func !== 'function') {
-       throw new TypeError(`Missing the function parameter for '${key}'`)
-     }
-
-     const serialize = opts.serialize
-     if (serialize && typeof serialize !== 'function') {
-       throw new TypeError('serialize must be a function')
-     }
-
-     const cacheSize = opts.cacheSize || this[kCacheSize]
-     const ttl = opts.ttl || this[kTTL]
-     const onHit = opts.onHit || this[kOnHit]
-
-     const wrapper = new Wrapper(func, key, serialize, cacheSize, ttl, onHit)
-
-     this[kValues][key] = wrapper
-     this[key] = wrapper.add.bind(wrapper)
-   }
-
-   clear (key, value) {
-     if (key) {
-       this[kValues][key].clear(value)
-       return
-     }
-
-     for (const wrapper of Object.values(this[kValues])) {
-       wrapper.clear()
-     }
-   }
- }
-
- let _currentSecond
-
- function currentSecond () {
-   if (_currentSecond !== undefined) {
-     return _currentSecond
-   }
-   _currentSecond = Math.floor(Date.now() / 1000)
-   setTimeout(_clearSecond, 1000).unref()
-   return _currentSecond
- }
-
- function _clearSecond () {
-   _currentSecond = undefined
- }
-
- class Wrapper {
-   constructor (func, key, serialize, cacheSize, ttl, onHit) {
-     this.ids = new LRUCache(cacheSize)
-     this.error = null
-     this.started = false
-     this.func = func
-     this.key = key
-     this.serialize = serialize
-     this.ttl = ttl
-     this.onHit = onHit
-   }
-
-   buildPromise (query, args, key) {
-     query.promise = this.func(args, key)
-     // we fork the promise chain on purpose
-     const p = query.promise.catch(() => this.ids.set(key, undefined))
-     if (this.ttl > 0) {
-       query.cachedOn = currentSecond()
-     } else {
-       // clear the cache if there is no TTL
-       p.then(() => this.ids.set(key, undefined))
-     }
-   }
-
-   getKey (args) {
-     const id = this.serialize ? this.serialize(args) : args
-     return typeof id === 'string' ? id : stringify(id)
-   }
-
-   add (args) {
-     const key = this.getKey(args)
-     const onHit = this.onHit
-     let query = this.ids.get(key)
-     if (!query) {
-       query = new Query()
-       this.buildPromise(query, args, key)
-       this.ids.set(key, query)
-     } else if (this.ttl > 0) {
-       onHit()
-       if (currentSecond() - query.cachedOn > this.ttl) {
-         // restart
-         this.buildPromise(query, args, key)
-       }
-     } else {
-       onHit()
-     }
-     return query.promise
-   }
-
-   clear (value) {
-     if (value) {
-       const key = this.getKey(value)
-       this.ids.set(key, undefined)
-       return
-     }
-     this.ids.clear()
-   }
- }
-
- class Query {
-   constructor () {
-     this.promise = null
-     this.cachedOn = null
-   }
- }
-
- function noop () {}
-
- module.exports.Cache = Cache

+ const { Cache } = require('./src/cache')
+ const createStorage = require('./src/storage')
+
+ /**
+  * @param {!Object} options
+  * @param {!Object} [options.storage] - the storage to use; default is `{ type: 'memory' }`
+  * @param {?number} [options.ttl=0] - in seconds; default is 0 seconds, so it only does dedupe without cache
+  * @param {?function} options.onDedupe
+  * @param {?function} options.onHit
+  * @param {?function} options.onMiss
+  */
+ function createCache (options) {
+   if (!options) {
+     options = { storage: { type: 'memory' } }
+   } else if (!options.storage) {
+     options.storage = { type: 'memory' }
+   }
+
+   const storage = createStorage(options.storage.type, options.storage.options)
+
+   return new Cache({
+     ...options,
+     storage
+   })
+ }
+
+ module.exports = {
+   Cache,
+   createCache,
+   createStorage
+ }
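The dedupe mechanism in the removed 0.5.0 code stores the in-flight promise itself in the LRU map, so concurrent callers share a single execution. A standalone sketch of that idea, not the package's API; `slowDouble` is illustrative:

```js
// standalone illustration of promise-based dedupe, mirroring Wrapper.add above
const inflight = new Map()

function dedupe (key, fn) {
  let promise = inflight.get(key)
  if (!promise) {
    promise = fn()
    inflight.set(key, promise)
    // with no TTL, drop the entry as soon as the promise settles
    promise.then(() => inflight.delete(key), () => inflight.delete(key))
  }
  return promise
}

const slowDouble = async (n) => n * 2 // stand-in for a slow lookup
const a = dedupe('21', () => slowDouble(21))
const b = dedupe('21', () => slowDouble(21))
console.log(a === b) // true: both calls share the same in-flight promise
```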
package.json

{
  "name": "async-cache-dedupe",
- "version": "0.5.0",
+ "version": "1.0.0",
  "description": "An async deduping cache",
  "main": "index.js",
  "scripts": {
-   "test": "standard | snazzy && tap test/*test.js"
+   "test": "standard **.js | snazzy && tap test/*test.js",
+   "lint:fix": "standard --fix **.js"
  },

@@ -20,16 +21,28 @@ "repository": {

  "author": "Matteo Collina <hello@matteocollina.com>",
+ "contributors": [
+   {
+     "name": "Simone Sanfratello",
+     "url": "https://github.com/simone-sanfratello",
+     "email": "simone.sanfra@gmail.com"
+   }
+ ],
  "license": "MIT",
  "devDependencies": {
-   "@fastify/pre-commit": "^2.0.0",
+   "@fastify/pre-commit": "^2.0.2",
+   "ioredis": "^4.28.0",
+   "proxyquire": "^2.1.3",
    "snazzy": "^9.0.0",
-   "standard": "^16.0.0",
-   "tap": "^15.0.9"
+   "standard": "^16.0.4",
+   "tap": "^15.0.10"
  },
  "dependencies": {
+   "abstract-logging": "^2.0.1",
-   "mnemonist": "^0.38.3",
+   "mnemonist": "^0.39.0",
-   "safe-stable-stringify": "^2.0.0"
+   "safe-stable-stringify": "^2.2.0"
  },
  "standard": {
-   "ignore": ["example.mjs"]
+   "ignore": [
+     "example.mjs"
+   ]
  }
}

README.md

@@ -15,6 +15,7 @@ # async-cache-dedupe

```js
- import { Cache } from 'async-cache-dedupe'
+ import { createCache } from 'async-cache-dedupe'

- const cache = new Cache({
-   ttl: 5 // seconds
+ const cache = createCache({
+   ttl: 5, // seconds
+   storage: { type: 'memory' },
  })
```
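For context, a minimal end-to-end sketch of the new factory in use; the `fetchSomething` name and its body are illustrative, not part of the package:

```js
import { createCache } from 'async-cache-dedupe'

const cache = createCache({ ttl: 5, storage: { type: 'memory' } })

// define a cached function: concurrent calls with the same argument are deduped
cache.define('fetchSomething', async (k) => {
  return { k } // stand-in for a slow lookup
})

const [a, b] = await Promise.all([
  cache.fetchSomething(42), // miss: executes the function
  cache.fetchSomething(42)  // dedupe: reuses the in-flight promise
])
```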

@@ -48,3 +49,3 @@

- ### `new Cache(opts)`
+ ### `createCache(opts)`

@@ -55,5 +56,33 @@ Creates a new cache.

- * `tll`: the maximum time a cache entry can live, default `0`; if `0`, an element is removed from the cache as soon as as the promise resolves.
- * `cacheSize`: the maximum amount of entries to fit in the cache for each defined method, default `1024`.
+ * `ttl`: the maximum time a cache entry can live, default `0`; if `0`, an element is removed from the cache as soon as the promise resolves.
+ * `onDedupe`: a function that is called every time a call to a defined function is deduped.
+ * `onHit`: a function that is called every time there is a hit in the cache.
+ * `onMiss`: a function that is called every time the result is not in the cache.
+ * `storage`: the storage options; default is `{ type: "memory" }`.
+   Storage options are:
+   * `type`: `memory` (default) or `redis`
+   * `options`: options specific to the storage type
+     * for the `memory` type
+       * `size`: maximum number of items to store in the cache _per resolver_. Default is `1024`.
+       * `invalidation`: enable invalidation, see [invalidation](#invalidation). Default is disabled.
+       * `log`: a `pino`-compatible logger instance; default is disabled.
+
+       Example
+
+       ```js
+       createCache({ storage: { type: 'memory', options: { size: 2048 } } })
+       ```
+
+     * for the `redis` type
+       * `client`: a redis client instance, mandatory; it should be an `ioredis` client or compatible.
+       * `invalidation`: enable invalidation, see [invalidation](#invalidation). Default is disabled.
+       * `invalidation.referencesTTL`: references TTL in seconds, that is, how long references are kept alive; it should be set to the maximum `ttl` of all the cached entries.
+       * `log`: a `pino`-compatible logger instance; default is disabled.
+
+       Example
+
+       ```js
+       createCache({ storage: { type: 'redis', options: { client: new Redis(), invalidation: { referencesTTL: 60 } } } })
+       ```
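The `onHit`, `onMiss`, and `onDedupe` options are plain callbacks; a minimal sketch wiring them to counters (the counter names are illustrative):

```js
import { createCache } from 'async-cache-dedupe'

let hits = 0
let misses = 0
let dedupes = 0

const cache = createCache({
  ttl: 5,
  storage: { type: 'memory' },
  onHit: () => { hits++ },      // result served from the cache
  onMiss: () => { misses++ },   // original function invoked
  onDedupe: () => { dedupes++ } // call attached to an in-flight request
})
```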
### `cache.define(name[, opts], original(arg, cacheKey))`

@@ -63,13 +92,27 @@

The `define` method adds a `cache[name]` function that will call the `original` function if the result is not present
in the cache. The cache key for `arg` is computed using [`safe-stable-stringify`](https://www.npmjs.com/package/safe-stable-stringify)
and it is passed as the `cacheKey` argument to the original function.

Options:

- * `tll`: the maximum time a cache entry can live, default as defined in the cache.
- * `cacheSize`: the maximum amount of entries to fit in the cache for each defined method, default as defined in the cache.
+ * `ttl`: the maximum time a cache entry can live, default as defined in the cache; if zero, caching is disabled and calls are only deduped.
  * `serialize`: a function to convert the given argument into a serializable object (or string).
+ * `onDedupe`: a function that is called every time a call to the defined function is deduped.
+ * `onHit`: a function that is called every time there is a hit in the cache.
+ * `onMiss`: a function that is called every time the result is not in the cache.
+ * `storage`: the storage to use, same as above; it's possible to specify a different storage for each defined function, for fine-tuning.
+ * `references`: a sync or async function to generate references; it receives `(args, key, result)` from the defined function call and must return an array of strings or a falsy value; see [invalidation](#invalidation) for how to use them.

Example

```js
const cache = createCache({ ttl: 60 })

cache.define('fetchUser', {
  references: (args, key, result) => result ? [`user~${result.id}`] : null
},
(id) => database.find({ table: 'users', where: { id } }))

await cache.fetchUser(1)
```
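Since the cache key is derived from the whole argument, `serialize` is the way to exclude fields that should not affect the key. A sketch under that assumption; `fetchPosts`, `requestId`, and `database` are illustrative:

```js
cache.define('fetchPosts', {
  // key by userId only, so the requestId field does not fragment the cache
  serialize: (args) => args.userId
}, async ({ userId, requestId }) => {
  return database.find({ table: 'posts', where: { userId } })
})

await cache.fetchPosts({ userId: 1, requestId: 'a' }) // miss: calls the function
await cache.fetchPosts({ userId: 1, requestId: 'b' }) // hit: same key, userId 1
```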
### `cache.clear([name], [arg])`
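A short sketch of the three forms (`fetchUser` is the function defined in the earlier example); `clear` returns a promise when a storage is configured:

```js
await cache.clear('fetchUser', 1) // drop the entry for fetchUser(1)
await cache.clear('fetchUser')    // drop all fetchUser entries
await cache.clear()               // drop every entry of every defined function
```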

@@ -80,4 +123,116 @@

## Invalidation

Along with `time to live` invalidation of the cache entries, we can use invalidation by keys.

The concept behind invalidation by keys is that entries have an auxiliary key set that explicitly links requests along with their own result. These auxiliary keys are called `references` here.

A scenario: let's say we have an entry _user_ `{id: 1, name: "Alice"}`. It may change often or rarely, so the `ttl` system is not accurate:

* it can be updated before the `ttl` expires, in which case the old value is served until the `ttl` expires.
* it may not be updated at all within the `ttl`, in which case we don't need to reload the value, because it hasn't changed.

To solve this common problem, we can use `references`.

We can say that the result of the defined function `getUser(id: 1)` has the reference `user~1`, and the result of the defined function `findUsers`, containing `{id: 1, name: "Alice"}, {id: 2, name: "Bob"}`, has the references `[user~1, user~2]`.

So we can find results in the cache by their `references`, independently of the request that generated them, and we can invalidate by `references`.

So, when a write event involving `user {id: 1}` happens (usually an update), we can remove all the cache entries that reference `user~1`: the results of `getUser(id: 1)` and `findUsers` will be reloaded at the next request with fresh data, but the result of `getUser(id: 2)` will not.

Explicit invalidation is `disabled` by default; you have to enable it in the `storage` settings.

See [mercurius-cache-example](https://github.com/mercurius/mercurius-cache-example) for a complete example.
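To make the scenario above concrete, here is a sketch. It assumes entries are cached with the `references` option shown above, and it performs the invalidation through the storage's `invalidate(references)` method, which is the storage interface this version's test suite stubs, going through a separate storage handle on the same redis client in the same way the gc example below does; `database` and the reference naming are illustrative:

```js
import Redis from 'ioredis'
import { createCache, createStorage } from 'async-cache-dedupe'

const client = new Redis()

const cache = createCache({
  ttl: 60,
  storage: { type: 'redis', options: { client, invalidation: { referencesTTL: 60 } } }
})

cache.define('getUser', {
  references: (args, key, result) => result ? [`user~${result.id}`] : null
}, (id) => database.find({ table: 'users', where: { id } }))

await cache.getUser(1) // cached, referenced by 'user~1'

// after a write to user 1, drop every entry that references it
const storage = createStorage('redis', { client, invalidation: true })
await storage.invalidate(['user~1'])
```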
### Redis

Using the `redis` storage is the best choice for a shared and/or large cache.

All the `references` entries in redis have a `referencesTTL`, so they are all cleaned up at some point.
`referencesTTL` should be set to the maximum of all the `ttl`s, so that references stay available for every cache entry while still expiring, which avoids leaking data.
Still, we should keep `references` up to date to be more efficient on writes and invalidation, by using the `garbage collector` function, which prunes expired references: expired references do not compromise cache integrity, but they do slow down I/O operations.

The `memory` storage does not have a `gc`.
### Redis garbage collector

As mentioned, while the garbage collector is optional, it is highly recommended, to keep references up to date and to improve the performance of setting and invalidating cache entries.

### `storage.gc([mode], [options])`

* `mode`: `lazy` (default) or `strict`.
  In `lazy` mode, only a random chunk of the `references` is checked and possibly freed; running `lazy` jobs tends to eventually clear all the expired `references`.
  In `strict` mode, all the `references` are checked and freed; after it runs, `references` and entries are perfectly clean.
  `lazy` mode is the lightweight, heuristic way to ensure cached entries and `references` are cleared without putting too much pressure on `redis`; `strict` mode, at the opposite end, stresses `redis` more to get a perfect result.
  The best strategy is to combine both, running frequent `lazy` jobs along with occasional `strict` ones, depending on the size of the cache.

Options:

* `chunk`: the chunk size of references analyzed per loop, default `64`.
* `lazy.chunk`: the chunk size of references analyzed per loop in `lazy` mode, default `64`; if both `chunk` and `lazy.chunk` are set, the larger one is used.
* `lazy.cursor`: the cursor offset, default zero; set it to `report.cursor` to continue scanning from the previous operation.
Returns a `report` of the `gc` job, as follows:

```json
"report": {
  "references": {
    "scanned": ["r:user:8", "r:group:11", "r:group:16"],
    "removed": ["r:user:8", "r:group:16"]
  },
  "keys": {
    "scanned": ["users~1"],
    "removed": ["users~1"]
  },
  "loops": 4,
  "cursor": 0,
  "error": null
}
```
Example

```js
import { createCache, createStorage } from 'async-cache-dedupe'

const cache = createCache({
  ttl: 5,
  storage: { type: 'redis', options: { client: redisClient, invalidation: true } },
})
// ... cache.define('fetchSomething'

const storage = createStorage('redis', { client: redisClient, invalidation: true })

let cursor
setInterval(async () => {
  const report = await storage.gc('lazy', { lazy: { cursor } })
  if (report.error) {
    console.error('error on redis gc', report.error)
    return
  }
  console.log('gc report (lazy)', report)
  cursor = report.cursor
}, 60e3).unref()

setInterval(async () => {
  const report = await storage.gc('strict', { chunk: 128 })
  if (report.error) {
    console.error('error on redis gc', report.error)
    return
  }
  console.log('gc report (strict)', report)
}, 10 * 60e3).unref()
```
---
## Maintainers
* [__Matteo Collina__](https://github.com/mcollina), <https://twitter.com/matteocollina>, <https://www.npmjs.com/~matteo.collina>
* [__Simone Sanfratello__](https://github.com/simone-sanfratello), <https://twitter.com/simonesanfradev>, <https://www.npmjs.com/~simone.sanfra>
---
## Breaking Changes
* version `0.5.0` -> `0.6.0`
  * `options.cacheSize` is dropped in favor of `storage` (a before/after sketch follows this list)
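A before/after sketch of this change (the size value is illustrative):

```js
// 0.5.0: per-cache LRU size
const oldCache = new Cache({ cacheSize: 2048, ttl: 5 })

// 1.0.0: the size moves into the storage options (per defined function)
const newCache = createCache({
  ttl: 5,
  storage: { type: 'memory', options: { size: 2048 } }
})
```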
## License
MIT
test/test.js

'use strict'

- const { test } = require('tap')
- const { Cache } = require('..')
+ const { test, before, teardown } = require('tap')
+ const Redis = require('ioredis')
  const stringify = require('safe-stable-stringify')
- const kValues = require('../symbol')
+ const { kValues, kStorage } = require('../src/symbol')
+ const createStorage = require('../src/storage')
+ const { Cache, createCache } = require('../')
const dummyStorage = {
  async get(key) { },
  async set(key, value, ttl, references) { },
  async remove(key) { },
  async invalidate(references) { },
  async clear() { },
  async refresh() { }
}

let redisClient
before(async (t) => {
  redisClient = new Redis()
})
teardown(async (t) => {
  await redisClient.quit()
})
test('create a Cache that dedupes', async (t) => {
// plan verifies that fetchSomething is called only once
t.plan(6)
- let hits = 0
+ let dedupes = 0
  const cache = new Cache({
-   onHit () {
-     hits++
+   storage: dummyStorage,
+   onDedupe() {
+     dedupes++
    }

@@ -22,6 +42,6 @@ })

- cache.define('fetchSomething', async (query, cacheKey) => {
-   t.equal(query, expected.shift())
-   t.equal(stringify(query), cacheKey)
-   return { k: query }
+ cache.define('fetchSomething', async (value, key) => {
+   t.equal(value, expected.shift())
+   t.equal(stringify(value), key)
+   return { k: value }
  })

@@ -40,10 +60,9 @@

])
- t.equal(hits, 1)
+ t.equal(dedupes, 1)
})
test('create a Cache that dedupes full signature', async (t) => {
// plan verifies that fetchSomething is called only once
t.plan(3)
- const cache = new Cache()
+ const cache = new Cache({ storage: dummyStorage })

@@ -70,4 +89,54 @@ const expected = [42, 24]

test('create a cache with the factory function, default options', async t => {
  const cache = createCache()
  t.ok(cache[kStorage])
  cache.define('plusOne', async (value, key) => value + 1)
  t.equal(await cache.plusOne(42), 43)
  t.equal(await cache.plusOne(24), 25)
  t.equal(await cache.plusOne(42), 43)
})

test('create a cache with the factory function, with default storage', async t => {
  let hits = 0
  const cache = createCache({
    ttl: 1,
    onHit() { hits++ }
  })
  t.ok(cache[kStorage].get)
  t.ok(cache[kStorage].set)
  cache.define('plusOne', async (value, key) => value + 1)
  t.equal(await cache.plusOne(42), 43)
  t.equal(await cache.plusOne(24), 25)
  t.equal(await cache.plusOne(42), 43)
  t.equal(hits, 1)
})

test('create a cache with the factory function, with storage', async t => {
  let hits = 0
  const cache = createCache({
    ttl: 1,
    storage: { type: 'memory', options: { size: 9 } },
    onHit() { hits++ }
  })
  t.equal(cache[kStorage].size, 9)
  cache.define('plusOne', async (value, key) => value + 1)
  t.equal(await cache.plusOne(42), 43)
  t.equal(await cache.plusOne(24), 25)
  t.equal(await cache.plusOne(42), 43)
  t.equal(hits, 1)
})
test('missing function', async (t) => {
- const cache = new Cache()
+ const cache = new Cache({ storage: createStorage() })
t.throws(function () {

@@ -85,6 +154,5 @@ cache.define('something', null)

test('works with custom serialize', async (t) => {
// plan verifies that fetchSomething is called only once
t.plan(2)
- const cache = new Cache()
+ const cache = new Cache({ storage: createStorage() })

@@ -94,3 +162,3 @@ cache.define(

{
-   serialize (args) { return args.k }
+   serialize(args) { return args.k }
},

@@ -112,19 +180,75 @@ async (queries) => {

- t.same(Object.keys(cache[kValues].fetchSomething.ids.items), ['24', '42'])
+ t.same([...cache[kValues].fetchSomething.dedupes.keys()], ['42', '24'])
})
- test('missing serialize', async (t) => {
-   const cache = new Cache()
-   t.throws(function () {
-     cache.define('something', {
-       serialize: 42
-     }, async () => {})
-   })
- })

test('constructor - options', async (t) => {
  test('missing storage', async (t) => {
    t.throws(function () {
      // eslint-disable-next-line no-new
      new Cache()
    })
  })

  test('invalid ttl', async (t) => {
    t.throws(function () {
      // eslint-disable-next-line no-new
      new Cache({ storage: createStorage(), ttl: -1 })
    })
  })

  test('invalid onDedupe', async (t) => {
    t.throws(function () {
      // eslint-disable-next-line no-new
      new Cache({ storage: createStorage(), onDedupe: -1 })
    })
  })

  test('invalid onHit', async (t) => {
    t.throws(function () {
      // eslint-disable-next-line no-new
      new Cache({ storage: createStorage(), onHit: -1 })
    })
  })

  test('invalid onMiss', async (t) => {
    t.throws(function () {
      // eslint-disable-next-line no-new
      new Cache({ storage: createStorage(), onMiss: -1 })
    })
  })
})

test('define - options', async (t) => {
  test('wrong serialize', async (t) => {
    const cache = new Cache({ storage: createStorage() })
    t.throws(function () {
      cache.define('something', {
        serialize: 42
      }, async () => { })
    })
  })

  test('wrong references', async (t) => {
    const cache = new Cache({ storage: createStorage() })
    t.throws(function () {
      cache.define('something', {
        references: 42
      }, async () => { })
    })
  })

  test('custom storage', async (t) => {
    const cache = new Cache({ storage: createStorage() })
    cache.define('foo', {
      storage: { type: 'memory', options: { size: 9 } }
    }, () => true)
    t.ok(cache.foo())
  })
})
test('safe stable serialize', async (t) => {
// plan verifies that fetchSomething is called only once
t.plan(5)
- const cache = new Cache()
+ const cache = new Cache({ storage: createStorage() })

@@ -157,6 +281,5 @@ const expected = [

test('strings', async (t) => {
// plan verifies that fetchSomething is called only once
t.plan(3)
- const cache = new Cache()
+ const cache = new Cache({ storage: createStorage() })

@@ -183,51 +306,155 @@ const expected = ['42', '24']

- test('cacheSize on define', async (t) => {
-   // plan verifies that fetchSomething is called only once
-   t.plan(4)
-   const cache = new Cache()
-   const expected = [42, 24, 42]
-   cache.define('fetchSomething', { cacheSize: 1 }, async (query) => {
-     t.equal(query, expected.shift())
-     return { k: query }
-   })
-   const p1 = cache.fetchSomething(42)
-   const p2 = cache.fetchSomething(24)
-   const p3 = cache.fetchSomething(42)
-   const res = await Promise.all([p1, p2, p3])
-   t.same(res, [
-     { k: 42 },
-     { k: 24 },
-     { k: 42 }
-   ])
- })

test('do not cache failures', async (t) => {
  t.plan(4)
  const cache = new Cache({ storage: createStorage() })
  let called = false
  cache.define('fetchSomething', async (query) => {
    t.pass('called')
    if (!called) {
      called = true
      throw new Error('kaboom')
    }
    return { k: query }
  })
  await t.rejects(cache.fetchSomething(42))
  t.same(await cache.fetchSomething(42), { k: 42 })
})

test('do not cache failures async', async (t) => {
  t.plan(5)
  const storage = createStorage()
  storage.remove = async () => {
    t.pass('async remove called')
    throw new Error('kaboom')
  }
  const cache = new Cache({ storage })
  let called = false
  cache.define('fetchSomething', async (query) => {
    t.pass('called')
    if (!called) {
      called = true
      throw new Error('kaboom')
    }
    return { k: query }
  })
  await t.rejects(cache.fetchSomething(42))
  t.same(await cache.fetchSomething(42), { k: 42 })
})

test('clear the full cache', async (t) => {
  t.plan(7)
  const cache = new Cache({ ttl: 1, storage: createStorage() })
  cache.define('fetchA', async (query) => {
    t.pass('a called')
    return { k: query }
  })
  cache.define('fetchB', async (query) => {
    t.pass('b called')
    return { j: query }
  })
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchB(24)
  ]), [
    { k: 42 },
    { j: 24 }
  ])
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchB(24)
  ]), [
    { k: 42 },
    { j: 24 }
  ])
  await cache.clear()
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchB(24)
  ]), [
    { k: 42 },
    { j: 24 }
  ])
})

- test('cacheSize on constructor', async (t) => {
-   // plan verifies that fetchSomething is called only once
-   t.plan(4)
-   const cache = new Cache({ cacheSize: 1 })
-   const expected = [42, 24, 42]
-   cache.define('fetchSomething', async (query) => {
-     t.equal(query, expected.shift())
-     return { k: query }
-   })
-   const p1 = cache.fetchSomething(42)
-   const p2 = cache.fetchSomething(24)
-   const p3 = cache.fetchSomething(42)
-   const res = await Promise.all([p1, p2, p3])
-   t.same(res, [
-     { k: 42 },
-     { k: 24 },
-     { k: 42 }
-   ])
- })

test('clears only one method', async (t) => {
  t.plan(6)
  const cache = new Cache({ ttl: 1, storage: createStorage() })
  cache.define('fetchA', async (query) => {
    t.pass('a called')
    return { k: query }
  })
  cache.define('fetchB', async (query) => {
    t.pass('b called')
    return { j: query }
  })
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchB(24)
  ]), [
    { k: 42 },
    { j: 24 }
  ])
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchB(24)
  ]), [
    { k: 42 },
    { j: 24 }
  ])
  await cache.clear('fetchA')
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchB(24)
  ]), [
    { k: 42 },
    { j: 24 }
  ])
})

test('clears only one method with one value', async (t) => {
  t.plan(5)
  const cache = new Cache({ ttl: 10, storage: createStorage() })
  cache.define('fetchA', async (query) => {
    t.pass('a called')
    return { k: query }
  })
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchA(24)
  ]), [
    { k: 42 },
    { k: 24 }
  ])
  await cache.clear('fetchA', 42)
  t.same(await Promise.all([
    cache.fetchA(42),
    cache.fetchA(24)
  ]), [
    { k: 42 },
    { k: 24 }
  ])
})

@@ -237,3 +464,3 @@ })

test('throws for methods in the property chain', async function (t) {
- const cache = new Cache()
+ const cache = new Cache({ storage: createStorage() })

@@ -249,3 +476,3 @@ const keys = [

t.throws(() => {
-   cache.define(key, () => {})
+   cache.define(key, () => { })
})

@@ -255,2 +482,40 @@ }

test('should cache with references', async function (t) {
  t.plan(1)
  const cache = new Cache({ ttl: 60, storage: createStorage() })
  cache.define('run', {
    references: (args, key, result) => {
      t.pass('references called')
      return ['some-reference']
    }
  }, () => 'something')
  await cache.run(1)
})

test('should cache with async references', async function (t) {
  t.plan(1)
  const cache = new Cache({ ttl: 60, storage: createStorage() })
  cache.define('run', {
    references: async (args, key, result) => {
      t.pass('references called')
      return ['some-reference']
    }
  }, () => 'something')
  await cache.run(1)
})

test('should cache with async storage (redis)', async function (t) {
  const cache = new Cache({ ttl: 60, storage: createStorage('redis', { client: redisClient }) })
  cache.define('run', () => 'something')
  await cache.run(1)
  t.equal(await cache.run(2), 'something')
})

test('automatically expires with no TTL', async (t) => {

@@ -260,6 +525,7 @@ // plan verifies that fetchSomething is called only once

- let hits = 0
+ let dedupes = 0
  const cache = new Cache({
-   onHit () {
-     hits++
+   storage: createStorage(),
+   onDedupe() {
+     dedupes++
    }

@@ -287,6 +553,6 @@ })

])
- t.equal(hits, 1)
+ t.equal(dedupes, 1)
  t.same(await cache.fetchSomething(42), { k: 42 })
- t.equal(hits, 1)
+ t.equal(dedupes, 1)
})
test/ttl.test.js

'use strict'

const t = require('tap')
- const { Cache } = require('..')
  const { promisify } = require('util')
- const { AsyncLocalStorage } = require('async_hooks')
+ const { Cache } = require('../src/cache')
+ const createStorage = require('../src/storage')

@@ -18,3 +18,4 @@ const sleep = promisify(setTimeout)

const cache = new Cache({
-   ttl: 1 // seconds
+   storage: createStorage(),
+   ttl: 1
})

@@ -39,3 +40,4 @@

const cache = new Cache({
-   ttl: 2 // seconds
+   storage: createStorage(),
+   ttl: 2
})

@@ -59,35 +61,6 @@

- test('AsyncLocalStoreage', (t) => {
-   t.plan(5)
-   const als = new AsyncLocalStorage()
-   const cache = new Cache({ ttl: 42000 })
-   cache.define('fetchSomething', async (query) => {
-     t.equal(query, 42)
-     return { k: query }
-   })
-
-   als.run({ foo: 'bar' }, function () {
-     setImmediate(function () {
-       cache.fetchSomething(42).then((res) => {
-         t.same(res, { k: 42 })
-         t.same(als.getStore(), { foo: 'bar' })
-       })
-     })
-   })
-
-   als.run({ bar: 'foo' }, function () {
-     setImmediate(function () {
-       cache.fetchSomething(42).then((res) => {
-         t.same(res, { k: 42 })
-         t.same(als.getStore(), { bar: 'foo' })
-       })
-     })
-   })
- })

test('do not cache failures', async (t) => {
  t.plan(4)
- const cache = new Cache({ ttl: 42000 })
+ const cache = new Cache({ ttl: 42, storage: createStorage() })

@@ -111,3 +84,3 @@ let called = false

- const cache = new Cache({ ttl: 42000 })
+ const cache = new Cache({ ttl: 42, storage: createStorage() })

@@ -154,3 +127,3 @@ cache.define('fetchA', async (query) => {

- const cache = new Cache({ ttl: 42000 })
+ const cache = new Cache({ ttl: 42, storage: createStorage() })

@@ -197,3 +170,3 @@ cache.define('fetchA', async (query) => {

- const cache = new Cache({ ttl: 42000 })
+ const cache = new Cache({ ttl: 42, storage: createStorage() })

