fash
Advanced tools
Comparing version 2.1.1 to 2.2.0
@@ -9,2 +9,3 @@ /** | ||
var common = require('../common'); | ||
var dtrace = require('../dtrace'); | ||
var fash = require('../index'); | ||
@@ -19,2 +20,3 @@ var crypto = require('crypto'); | ||
/** | ||
@@ -114,2 +116,3 @@ * level db keys. | ||
dtrace['new-start'].fire(function() { return([]); }); | ||
/** | ||
@@ -160,2 +163,3 @@ * 1) create 'VNODE_COUNT' key which keeps track of the # of vnodes. | ||
function openDb(_, _cb) { | ||
_cb = once(_cb); | ||
levelup(options.location, self.leveldbCfg_, function(err, db) { | ||
@@ -170,3 +174,3 @@ if (err) { | ||
self.db_ = db; | ||
_.batch = db.batch(); | ||
_.db = db; | ||
return _cb(); | ||
@@ -177,4 +181,9 @@ }); | ||
function putVnodeCount(_, _cb) { | ||
_.batch = _.batch.put(LKEY_VNODE_COUNT, self.vnodeCount_); | ||
return _cb(); | ||
_cb = once(_cb); | ||
_.db.put(LKEY_VNODE_COUNT, self.vnodeCount_, function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
}, | ||
@@ -187,2 +196,3 @@ /** | ||
function allocateVnodes(_, _cb) { | ||
_cb = once(_cb); | ||
_.pnodeToVnodeMap = {}; | ||
@@ -208,3 +218,11 @@ for (var vnode = 0; vnode < self.vnodeCount_; vnode++) { | ||
*/ | ||
_.batch = _.batch.put(sprintf(LKEY_VNODE_V, vnode), pnode); | ||
_.db.put(sprintf(LKEY_VNODE_V, vnode), | ||
pnode, | ||
function(err) | ||
{ | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
/** | ||
@@ -215,3 +233,8 @@ * we put the vnode in the path, to avoid having to put all | ||
var pnodePath = sprintf(LKEY_PNODE_P_V, pnode, vnode); | ||
_.batch = _.batch.put(pnodePath, LVAL_NULL); | ||
_.db.put(pnodePath, LVAL_NULL, function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
// cache the pnopdeToVnode mapping for step 4 | ||
@@ -232,2 +255,3 @@ if (!_.pnodeToVnodeMap[pnode]) { | ||
function writePnodeKeys(_, _cb) { | ||
_cb = once(_cb); | ||
var pnodeMap = {}; | ||
@@ -245,11 +269,27 @@ for (var i = 0; i < self.pnodes_.length; i++) { | ||
}, 'writing vnode list for pnode'); | ||
_.batch = _.batch.put(sprintf(LKEY_PNODE_P, pnode), | ||
_.pnodeToVnodeMap[pnode]); | ||
_.db.put(sprintf(LKEY_PNODE_P, pnode), | ||
_.pnodeToVnodeMap[pnode], | ||
function(err) | ||
{ | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
} | ||
_.bach = _.batch.put(LKEY_PNODE, Object.keys(pnodeMap)); | ||
return _cb(); | ||
_.db.put(LKEY_PNODE, Object.keys(pnodeMap), function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
}, | ||
function writeVnodeDataArray(_, _cb) { | ||
_.batch = _.batch.put(LKEY_VNODE_DATA, []); | ||
return _cb(); | ||
_cb = once(_cb); | ||
_.db.put(LKEY_VNODE_DATA, [], function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
}, | ||
@@ -262,3 +302,3 @@ function writeMetadata(_, _cb) { | ||
self.algorithm_.VNODE_HASH_INTERVAL.toString(16); | ||
_.batch = _.batch.put(LKEY_ALGORITHM, algorithm); | ||
_.batch = _.db.batch().put(LKEY_ALGORITHM, algorithm); | ||
// step 6 | ||
@@ -283,7 +323,11 @@ _.batch = _.batch.put(LKEY_VERSION, fash.VERSION); | ||
if (err) { | ||
return callback(new verror.VError(err, 'unable to create ring')); | ||
} else { | ||
log.info('successfully instantiated new ring'); | ||
return callback(null, self); | ||
err = new verror.VError(err, 'unable to create ring'); | ||
} | ||
log.info({ | ||
err: err | ||
}, 'finished instantiated new ring'); | ||
dtrace['new-done'].fire(function() { | ||
return ([err ? err.message : null, 'createNewRing']); | ||
}); | ||
return callback(err, self); | ||
}); | ||
@@ -315,4 +359,5 @@ } | ||
} | ||
self.db_ = db; | ||
_.batch = db.batch(); | ||
_.db = db; | ||
return _cb(); | ||
@@ -324,13 +369,25 @@ }); | ||
log.info('ConsistentHash.new.deserialize: put vnodeCount'); | ||
_.batch = _.batch.put(LKEY_VNODE_COUNT, topology.vnodes); | ||
return _cb(); | ||
_.db.put(LKEY_VNODE_COUNT, topology.vnodes, function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
}, | ||
// step 2 | ||
function allocateVnodes(_, _cb) { | ||
_cb = once(_cb); | ||
log.info('ConsistentHash.new.deserialize: allocate vnodes'); | ||
var vnodeData = []; | ||
var pvMap = topology.pnodeToVnodeMap; | ||
// /PNODE | ||
var pnodes = Object.keys(pvMap); | ||
_.batch = _.batch.put(LKEY_PNODE, pnodes); | ||
_.db.put(LKEY_PNODE, pnodes, function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
return _cb(err); | ||
} | ||
}); | ||
// /VNODE/V, /PNODE/P, /P/P/V | ||
@@ -341,14 +398,29 @@ var pcount = pnodes.length; | ||
var vcount = vnodes.length; | ||
// write /P/P and /V/V. and /P/P/V | ||
vnodes.forEach(function(vnode, index) { | ||
// json serializes vnode into a string, we need to | ||
// parse it back into an integer before we store it | ||
vnodes[index] = parseInt(vnode, 10); | ||
// write /V/V | ||
_.batch = _.batch.put(sprintf(LKEY_VNODE_V, vnode), | ||
pnode); | ||
_.db.put(sprintf(LKEY_VNODE_V, vnode), pnode, | ||
function(err) | ||
{ | ||
if (err) { | ||
err = new verror.VError(err); | ||
return _cb(err); | ||
} | ||
}); | ||
// write /P/P/V | ||
_.batch = _.batch.put( | ||
_.db.put( | ||
sprintf(LKEY_PNODE_P_V, pnode, vnode), | ||
pvMap[pnode][vnode] | ||
pvMap[pnode][vnode], | ||
function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
return _cb(err); | ||
} | ||
} | ||
); | ||
@@ -361,2 +433,3 @@ | ||
} | ||
// write /P/P once all the vnodes have been parsed back | ||
@@ -366,9 +439,22 @@ // into ints. | ||
if (vcount === 0) { | ||
_.batch = _.batch.put(sprintf(LKEY_PNODE_P, pnode), | ||
vnodes); | ||
_.db.put( | ||
sprintf(LKEY_PNODE_P, pnode), | ||
vnodes, | ||
function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
return _cb(err); | ||
} | ||
} | ||
); | ||
} | ||
// write the VNDOE_DATA array. | ||
if (vcount === 0 && --pcount === 0) { | ||
_.batch = _.batch.put(LKEY_VNODE_DATA, vnodeData); | ||
_cb(); | ||
_.db.put(LKEY_VNODE_DATA, vnodeData, function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
} | ||
@@ -384,21 +470,27 @@ }); | ||
var algorithm = JSON.parse(JSON.stringify(self.algorithm_)); | ||
var batch = self.db_.batch(); | ||
algorithm.VNODE_HASH_INTERVAL = | ||
self.algorithm_.VNODE_HASH_INTERVAL.toString(16); | ||
_.batch = _.batch.put(LKEY_ALGORITHM, algorithm); | ||
_.batch = _.batch.put(LKEY_VERSION, fash.VERSION); | ||
_.batch = _.batch.put(LKEY_COMPLETE, 1); | ||
return _cb(); | ||
}, | ||
function commit(_, _cb) { | ||
_.batch.write(_cb); | ||
batch.put(LKEY_ALGORITHM, algorithm). | ||
put(LKEY_VERSION, fash.VERSION). | ||
put(LKEY_COMPLETE, 1); | ||
batch.write(function(err) { | ||
if (err) { | ||
err = new verror.VError(err); | ||
} | ||
return _cb(err); | ||
}); | ||
} | ||
], arg: {}}, function(err) { | ||
if (err) { | ||
return callback(new verror.VError(err, 'unable to ' + | ||
'deserialize ring')); | ||
} else { | ||
log.info('successfully deserialized ring'); | ||
return callback(null, self); | ||
err = new verror.VError(err, 'unable to deserialize ring'); | ||
} | ||
log.info({ | ||
err: err | ||
}, 'finished deserializing ring'); | ||
dtrace['new-done'].fire(function() { | ||
return ([err ? err.message : null, 'deserialize']); | ||
}); | ||
return callback(err, self); | ||
}); | ||
@@ -481,8 +573,11 @@ } | ||
if (err) { | ||
return callback(new verror.VError(err, 'unable to ' + | ||
'load ring from db')); | ||
} else { | ||
log.info('successfully loaded ring from db'); | ||
return callback(null, self); | ||
err = new verror.VError(err, 'unable to load ring from db'); | ||
} | ||
dtrace['new-done'].fire(function() { | ||
return ([err ? err.message : null, 'loadFromDb']); | ||
}); | ||
log.info({ | ||
err: err | ||
}, 'finished loading ring from db'); | ||
return callback(err, self); | ||
}); | ||
@@ -512,2 +607,5 @@ } | ||
ConsistentHash.prototype.getNode = function getNode(key, callback) { | ||
dtrace['getnode-start'].fire(function() { | ||
return ([key]); | ||
}); | ||
var self = this; | ||
@@ -521,2 +619,6 @@ var value = crypto.createHash(this.algorithm_.NAME).update(key).digest('hex'); | ||
if (err) { | ||
err = new verror.VError(err); | ||
dtrace['getnode-done'].fire(function() { | ||
return([err.message, key, value, null, vnode]); | ||
}); | ||
return callback(err); | ||
@@ -528,5 +630,10 @@ } | ||
if (_err) { | ||
return callback(_err); | ||
_err = new verror.VError(_err); | ||
} | ||
return callback(null, {pnode: pnode, vnode: vnode, data: data}); | ||
dtrace['getnode-done'].fire(function() { | ||
return([_err ? _err.message : null, | ||
key, value, pnode, vnode, data]); | ||
}); | ||
return callback(_err, {pnode: pnode, vnode: vnode, data: data}); | ||
}); | ||
@@ -549,2 +656,5 @@ | ||
ConsistentHash.prototype.addData = function addData(vnode, data, cb) { | ||
dtrace['adddata-start'].fire(function() { | ||
return([vnode, data]); | ||
}); | ||
var self = this; | ||
@@ -626,2 +736,5 @@ var log = self.log; | ||
], arg:{}}, function(err) { | ||
dtrace['adddata-done'].fire(function() { | ||
return([err ? err.message : null]); | ||
}); | ||
return cb(err); | ||
@@ -663,2 +776,5 @@ }); | ||
ConsistentHash.prototype.remapVnode = function remapVnode(newPnode, vnode, cb) { | ||
dtrace['remapvnode-start'].fire(function() { | ||
return ([newPnode, vnode]); | ||
}); | ||
var self = this; | ||
@@ -675,2 +791,3 @@ var log = self.log; | ||
var oldPnode; | ||
/** | ||
@@ -722,2 +839,3 @@ * assert the vnodes, ensuring that: | ||
_.oldPnode = pnode; | ||
oldPnode = pnode; | ||
// check that the vnode doesn't already belong to the newPnode. | ||
@@ -812,2 +930,5 @@ if (pnode === newPnode) { | ||
log.info({err: err}, 'ConsistentHash.remapVnode: exiting'); | ||
dtrace['remapvnode-done'].fire(function() { | ||
return ([err ? err.message : null, newPnode, oldPnode, vnode]); | ||
}); | ||
return cb(err); | ||
@@ -876,2 +997,5 @@ }); | ||
ConsistentHash.prototype.removePnode = function removePnode(pnode, cb) { | ||
dtrace['removepnode-start'].fire(function() { | ||
return ([pnode]); | ||
}); | ||
var self = this; | ||
@@ -935,2 +1059,5 @@ var log = self.log; | ||
], arg: {}}, function(err) { | ||
dtrace['removepnode-done'].fire(function() { | ||
return ([err ? err.message : null, pnode]); | ||
}); | ||
return cb(err); | ||
@@ -951,2 +1078,5 @@ }); | ||
ConsistentHash.prototype.serialize = function serialize(callback) { | ||
dtrace['serialize-start'].fire(function() { | ||
return ([]); | ||
}); | ||
var self = this; | ||
@@ -1021,10 +1151,16 @@ var log = self.log; | ||
if (err) { | ||
return callback(new verror.VError(err, 'unable to serialize ring')); | ||
} else { | ||
log.trace({ | ||
serializedRing: serializedHash | ||
}, 'ConsistentHash.serialize: fully serialized ring'); | ||
log.info('ConsistentHash.serialize: exiting'); | ||
return callback(null, JSON.stringify(serializedHash)); | ||
err = new verror.VError(err, 'unable to serialize ring'); | ||
} | ||
log.trace({ | ||
serializedRing: serializedHash | ||
}, 'ConsistentHash.serialize: fully serialized ring'); | ||
if (!err) { | ||
var stringifiedHash = JSON.stringify(serializedHash); | ||
} | ||
log.info({err: err}, 'ConsistentHash.serialize: exiting'); | ||
dtrace['serialize-done'].fire(function() { | ||
return ([err ? err.message : null]); | ||
}); | ||
return callback(err, stringifiedHash); | ||
}); | ||
@@ -1031,0 +1167,0 @@ }; |
@@ -22,3 +22,3 @@ { | ||
], | ||
"version": "2.1.1", | ||
"version": "2.2.0", | ||
"repository": { | ||
@@ -41,4 +41,5 @@ "type": "git", | ||
"dashdash": "1.2.1", | ||
"levelup": "0.12.0", | ||
"leveldown": "0.6.2", | ||
"dtrace-provider": "0.2.8", | ||
"levelup": "git://github.com/yunong/node-levelup.git#c8d0bcb", | ||
"leveldown": "0.8.0", | ||
"once": "1.1.1", | ||
@@ -45,0 +46,0 @@ "redis": "0.8.4", |
Git dependency
Supply chain risk: Contains a dependency which resolves to a remote git URL. Dependencies fetched from git URLs are not immutable and can be used to inject untrusted code or reduce the likelihood of a reproducible install.
Found 1 instance in 1 package
4434172
191
4608
12
1
+ Added dtrace-provider@0.2.8
+ Added dtrace-provider@0.2.8 (transitive)
+ Added leveldown@0.8.0 (transitive)
+ Added nan@0.3.2 (transitive)
- Removed base64-js@0.0.2 (transitive)
- Removed bops@0.0.7 (transitive)
- Removed concat-stream@0.1.1 (transitive)
- Removed errno@0.0.5 (transitive)
- Removed foreach@2.0.6 (transitive)
- Removed indexof@0.0.1 (transitive)
- Removed is@0.2.7 (transitive)
- Removed is-object@0.1.2 (transitive)
- Removed leveldown@0.6.2 (transitive)
- Removed levelup@0.12.0 (transitive)
- Removed object-keys@0.2.0 (transitive)
- Removed prr@0.0.0 (transitive)
- Removed semver@1.1.4 (transitive)
- Removed simple-bufferstream@0.0.4 (transitive)
- Removed to-utf8@0.0.1 (transitive)
- Removed xtend@2.0.6 (transitive)
Updated leveldown@0.8.0
Updated levelup@git://github.com/yunong/node-levelup.git#c8d0bcb