Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in | Demo | Install
Socket

cache-manager

Package Overview
Dependencies
Maintainers
1
Versions
110
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

cache-manager - npm Package Compare versions

Comparing version 0.0.5 to 0.1.0

examples/redis_example/example.js

37

examples/example.js

@@ -1,12 +0,15 @@

var cache_manager = require('cache-manager');
var redis_cache = cache_manager.caching({store: 'redis', db: 1, ttl: 100/*seconds*/});
var cache_manager = require('../');
var memory_cache = cache_manager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});
var memory_cache2 = cache_manager.caching({store: 'memory', max: 100, ttl: 100/*seconds*/});
redis_cache.set('foo', 'bar', function(err) {
//
// Basic usage
//
memory_cache2.set('foo', 'bar', function (err) {
if (err) { throw err; }
redis_cache.get('foo', function(err, result) {
memory_cache2.get('foo', function (err, result) {
console.log(result);
// >> 'bar'
redis_cache.del('foo', function(err) {});
memory_cache2.del('foo', function (err) { console.log(err); });
});

@@ -23,5 +26,8 @@ });

var user_id = 123;
var key = 'user_' + user_id;
var key = 'user_' + user_id;
redis_cache.wrap(key, function (cb) {
//
// wrap() example
//
memory_cache2.wrap(key, function (cb) {
get_user(user_id, cb);

@@ -31,4 +37,4 @@ }, function (err, user) {

// Second time fetches user from redis_cache
redis_cache.wrap(key, function (cb) {
// Second time fetches user from memory_cache2
memory_cache2.wrap(key, function (cb) {
get_user(user_id, cb);

@@ -46,5 +52,5 @@ }, function (err, user) {

var multi_cache = cache_manager.multi_caching([memory_cache, redis_cache]);
user_id2 = 456;
key2 = 'user_' + user_id;
var multi_cache = cache_manager.multi_caching([memory_cache, memory_cache2]);
var user_id2 = 456;
var key2 = 'user_' + user_id;

@@ -66,7 +72,7 @@ multi_cache.wrap(key2, function (cb) {

// Sets in all caches.
multi_cache.set('foo2', 'bar2', function(err) {
multi_cache.set('foo2', 'bar2', function (err) {
if (err) { throw err; }
// Fetches from highest priority cache that has the key.
multi_cache.get('foo2', function(err, result) {
multi_cache.get('foo2', function (err, result) {
console.log(result);

@@ -76,3 +82,4 @@ // >> 'bar2'

// Delete from all caches
multi_cache.del('foo2', function(err) {
multi_cache.del('foo2', function (err) {
console.log(err);
process.exit();

@@ -79,0 +86,0 @@ });

@@ -0,1 +1,5 @@

- 0.1.0 2013-10-13
Removing built-in Redis store to emphasize that you should plug in your own
cache store.
- 0.0.5 2013-10-13

@@ -2,0 +6,0 @@ Removing hiredis requirement.

{
"name": "cache-manager",
"version": "0.0.5",
"version": "0.1.0",
"description": "Cache module for Node.js",

@@ -15,3 +15,2 @@ "main": "index.js",

"cache",
"redis",
"lru-cache",

@@ -25,4 +24,3 @@ "memory cache",

"async": ">=0.1.22",
"lru-cache": ">=2.3.0",
"redis": ">=0.6.7"
"lru-cache": ">=2.3.0"
},

@@ -29,0 +27,0 @@ "devDependencies": {

@@ -25,4 +25,8 @@ node-cache-manager

First, node-cache-manager features the standard functions you'd expect in most caches:
First, it includes a `wrap` function that lets you wrap any function in cache.
(Note, this was inspired by [node-caching](https://github.com/mape/node-caching).)
Second, node-cache-manager features a built-in memory cache (using [node-lru-cache](https://github.com/isaacs/node-lru-cache)),
with the standard functions you'd expect in most caches:
set(key, val, cb)

@@ -32,9 +36,6 @@ get(key, cb)

Second, it includes a `wrap` function that lets you wrap any function in cache.
(Note, this was inspired by [node-caching](https://github.com/mape/node-caching).)
Third, node-cache-manager lets you set up a tiered cache strategy. This may be of
limited use in most cases, but imagine a scenario where you expect tons of
traffic, and don't want to hit Redis for every request. You decide to store
the most commonly-requested data in an in-memory cache (like [node-lru-cache](https://github.com/isaacs/node-lru-cache)),
traffic, and don't want to hit your primary cache (like Redis) for every request.
You decide to store the most commonly-requested data in an in-memory cache,
perhaps with a very short timeout and/or a small data size limit. But you

@@ -48,2 +49,5 @@ still want to store the data in Redis for backup, and for the requests that

See examples below and in the examples directory. See ``examples/redis_example`` for an example of how to implement a
Redis cache store with connection pooling.
### Single Store

@@ -53,3 +57,2 @@

var cache_manager = require('cache-manager');
var redis_cache = cache_manager.caching({store: 'redis', db: 1, ttl: 100/*seconds*/});
var memory_cache = cache_manager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});

@@ -59,9 +62,9 @@

redis_cache.set('foo', 'bar', function(err) {
memory_cache.set('foo', 'bar', function(err) {
if (err) { throw err; }
redis_cache.get('foo', function(err, result) {
memory_cache.get('foo', function(err, result) {
console.log(result);
// >> 'bar'
redis_cache.del('foo', function(err) {});
memory_cache.del('foo', function(err) {});
});

@@ -80,3 +83,3 @@ });

redis_cache.wrap(key, function (cb) {
memory_cache.wrap(key, function (cb) {
get_user(user_id, cb);

@@ -86,4 +89,4 @@ }, function (err, user) {

// Second time fetches user from redis_cache
redis_cache.wrap(key, function (cb) {
// Second time fetches user from memory_cache
memory_cache.wrap(key, function (cb) {
get_user(user_id, cb);

@@ -105,3 +108,3 @@ }, function (err, user) {

You can use your own custom store by creating one with the same API as the
built-in redis and memory stores. To use your own store, you can either pass
built-in memory stores (such as a redis or memcached store). To use your own store, you can either pass
in an instance of it, or pass in the path to the module.

@@ -121,3 +124,3 @@

```javascript
var multi_cache = cache_manager.multi_caching([memory_cache, redis_cache]);
var multi_cache = cache_manager.multi_caching([memory_cache, some_other_cache]);
user_id2 = 456;

@@ -147,3 +150,3 @@ key2 = 'user_' + user_id;

// If the data expires in the memory cache, the next fetch would pull it from
// the Redis cache, and set the data in memory again.
// the 'some_other_cache', and set the data in memory again.
multi_cache.wrap(key2, function (cb) {

@@ -150,0 +153,0 @@ get_user(user_id2, cb);

var assert = require('assert');
var sinon = require('sinon');
var redis = require('redis');
var support = require('./support');

@@ -23,3 +22,3 @@ var check_err = support.check_err;

describe("get() and set()", function () {
['redis', 'memory'].forEach(function (store) {
['memory'].forEach(function (store) {
context("using " + store + " store", function () {

@@ -56,3 +55,3 @@ beforeEach(function () {

describe("del()", function () {
['redis', 'memory'].forEach(function (store) {
['memory'].forEach(function (store) {
context("using " + store + " store", function () {

@@ -103,119 +102,2 @@ beforeEach(function (done) {

describe("wrap()", function () {
context("using redis store", function () {
var redis_client;
before(function () {
redis_client = redis.createClient();
sinon.stub(redis, 'createClient').returns(redis_client);
});
beforeEach(function () {
cache = caching({store: 'redis'});
key = support.random.string(20);
name = support.random.string();
});
after(function () {
redis.createClient.restore();
});
it("calls back with the result of the wrapped function", function (done) {
cache.wrap(key, function (cb) {
methods.get_widget(name, cb);
}, function (err, widget) {
check_err(err);
assert.deepEqual(widget, {name: name});
done();
});
});
it("caches the result of the function in redis", function (done) {
cache.wrap(key, function (cb) {
methods.get_widget(name, cb);
}, function (err, widget) {
check_err(err);
assert.ok(widget);
redis_client.get(key, function (err, result) {
check_err(err);
assert.deepEqual(JSON.parse(result), {name: name});
done();
});
});
});
context("when wrapped function calls back with an error", function () {
it("calls back with that error and doesn't cache result", function (done) {
var fake_error = new Error(support.random.string());
sinon.stub(methods, 'get_widget', function (name, cb) {
cb(fake_error, {name: name});
});
cache.wrap(key, function (cb) {
methods.get_widget(name, cb);
}, function (err, widget) {
methods.get_widget.restore();
assert.equal(err, fake_error);
assert.ok(!widget);
redis_client.get(key, function (err, result) {
check_err(err);
assert.ok(!result);
done();
});
});
});
});
it("retrieves data from redis when available", function (done) {
cache.wrap(key, function (cb) {
methods.get_widget(name, cb);
}, function (err, widget) {
check_err(err);
assert.ok(widget);
redis_client.get(key, function (err, result) {
check_err(err);
assert.ok(result);
sinon.spy(redis_client, 'get');
cache.wrap(key, function (cb) {
methods.get_widget(name, cb);
}, function (err, widget) {
check_err(err);
assert.deepEqual(widget, {name: name});
assert.ok(redis_client.get.calledWith(key));
redis_client.get.restore();
done();
});
});
});
});
context("when using ttl", function () {
beforeEach(function () {
ttl = 50;
cache = caching({store: 'redis', ttl: ttl});
});
it("expires cached result after ttl seconds", function (done) {
cache.wrap(key, function (cb) {
methods.get_widget(name, cb);
}, function (err, widget) {
check_err(err);
assert.ok(widget);
redis_client.ttl(key, function (err, result) {
check_err(err);
support.assert_within(result, ttl, 2);
done();
});
});
});
});
});
describe("using memory (lru-cache) store", function () {

@@ -345,2 +227,21 @@ var memory_store_stub;

});
context("when wrapped function calls back with an error", function () {
it("calls back with that error", function (done) {
var fake_error = new Error(support.random.string());
sinon.stub(methods, 'get_widget', function (name, cb) {
cb(fake_error, {name: name});
});
cache.wrap(key, function (cb) {
methods.get_widget(name, cb);
}, function (err, widget) {
methods.get_widget.restore();
assert.equal(err, fake_error);
assert.ok(!widget);
done();
});
});
});
});

@@ -347,0 +248,0 @@ });

var assert = require('assert');
var sinon = require('sinon');
var redis = require('redis');
var support = require('./support');

@@ -17,9 +16,8 @@ var check_err = support.check_err;

describe("multi_caching", function () {
var redis_cache;
var memory_cache;
var memory_cache2;
var memory_cache3;
var multi_cache;
var key;
var memory_ttl;
var redis_ttl;
var name;

@@ -29,7 +27,6 @@

memory_ttl = 0.1;
redis_ttl = 1;
memory_cache = caching({store: 'memory', ttl: memory_ttl});
memory_cache2 = caching({store: 'memory', ttl: memory_ttl});
redis_cache = caching({store: 'redis', ttl: redis_ttl});
memory_cache3 = caching({store: 'memory', ttl: memory_ttl});

@@ -44,3 +41,3 @@ key = support.random.string(20);

beforeEach(function () {
multi_cache = multi_caching([memory_cache, redis_cache, memory_cache2]);
multi_cache = multi_caching([memory_cache, memory_cache2, memory_cache3]);
key = support.random.string(20);

@@ -57,7 +54,7 @@ value = support.random.string();

redis_cache.get(key, function (err, result) {
memory_cache2.get(key, function (err, result) {
check_err(err);
assert.equal(result, value);
memory_cache2.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -80,7 +77,7 @@ assert.equal(result, value);

redis_cache.get(key, function (err, result) {
memory_cache2.get(key, function (err, result) {
check_err(err);
assert.equal(result, value);
memory_cache2.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -99,3 +96,3 @@ assert.equal(result, value);

it("gets data from first cache that has it", function (done) {
redis_cache.set(key, value, function (err) {
memory_cache3.set(key, value, function (err) {
check_err(err);

@@ -123,7 +120,7 @@

redis_cache.get(key, function (err, result) {
memory_cache2.get(key, function (err, result) {
check_err(err);
assert.ok(!result);
memory_cache2.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -149,7 +146,7 @@ assert.ok(!result);

redis_cache.get(key, function (err, result) {
memory_cache2.get(key, function (err, result) {
check_err(err);
assert.ok(!result);
memory_cache2.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -168,16 +165,5 @@ assert.ok(!result);

describe("wrap()", function () {
var redis_client;
beforeEach(function () {
redis_client = redis.createClient();
sinon.stub(redis, 'createClient').returns(redis_client);
});
afterEach(function () {
redis.createClient.restore();
});
describe("using a single cache store", function () {
beforeEach(function () {
multi_cache = multi_caching([redis_cache]);
multi_cache = multi_caching([memory_cache3]);
});

@@ -196,3 +182,3 @@

context("when wrapped function calls back with an error", function () {
it("calls back with that error and doesn't cache result", function (done) {
it("calls back with that error", function (done) {
var fake_error = new Error(support.random.string());

@@ -209,8 +195,3 @@ sinon.stub(methods, 'get_widget', function (name, cb) {

assert.ok(!widget);
redis_client.get(key, function (err, result) {
check_err(err);
assert.ok(!result);
done();
});
done();
});

@@ -223,3 +204,3 @@ });

beforeEach(function () {
multi_cache = multi_caching([memory_cache, redis_cache]);
multi_cache = multi_caching([memory_cache, memory_cache3]);
});

@@ -248,3 +229,3 @@

redis_cache.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -269,3 +250,3 @@ assert.deepEqual(result, {name: name});

redis_cache.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -282,3 +263,3 @@ assert.equal(result, null);

it("returns value from second store, sets it in first store", function (done) {
redis_cache.set(key, {name: name}, function (err) {
memory_cache3.set(key, {name: name}, function (err) {
check_err(err);

@@ -305,3 +286,3 @@

beforeEach(function () {
multi_cache = multi_caching([memory_cache, redis_cache, memory_cache2]);
multi_cache = multi_caching([memory_cache, memory_cache3, memory_cache2]);
});

@@ -330,7 +311,7 @@

redis_cache.get(key, function (err, result) {
memory_cache2.get(key, function (err, result) {
check_err(err);
assert.deepEqual(result, {name: name});
memory_cache2.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -356,7 +337,7 @@ assert.deepEqual(result, {name: name});

redis_cache.get(key, function (err, result) {
memory_cache2.get(key, function (err, result) {
check_err(err);
assert.equal(result, null);
memory_cache2.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -374,3 +355,3 @@ assert.equal(result, null);

it("returns value from second store, sets it in first store, does not set third store", function (done) {
redis_cache.set(key, {name: name}, function (err) {
memory_cache3.set(key, {name: name}, function (err) {
check_err(err);

@@ -410,3 +391,3 @@

redis_cache.get(key, function (err, result) {
memory_cache3.get(key, function (err, result) {
check_err(err);

@@ -413,0 +394,0 @@ assert.deepEqual(result, {name: name});

@@ -10,4 +10,4 @@ #!/usr/bin/env node

var argv = optimist
.usage("Usage: $0 -t [types] --reporter [reporter] --timeout [timeout]")
.default({types: 'unit,functional', reporter: 'spec', timeout: 6000})
.usage("Usage: $0 -t [types] --reporter [reporter] --timeout [timeout]")['default'](
{types: 'unit,functional', reporter: 'spec', timeout: 6000})
.describe('types', 'The types of tests to run, separated by commas. E.g., unit,functional,acceptance')

@@ -14,0 +14,0 @@ .describe('reporter', 'The mocha test reporter to use.')

Sorry, the diff of this file is not supported yet

Socket | SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc