Socket
Socket
Sign inDemoInstall

tendermint

Package Overview
Dependencies
Maintainers
1
Versions
45
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

tendermint - npm Package Compare versions

Comparing version 3.1.11 to 3.2.0

lib/common.js

3

index.js
// Export the LightNode constructor as the module itself, and also attach
// named exports so callers can use either style:
//   const LightNode = require('tendermint')
//   const { RpcClient } = require('tendermint')
module.exports = require('./lib/lightNode.js')
module.exports.LightNode = require('./lib/lightNode.js')
module.exports.RpcClient = require('./lib/rpc.js')
// list of RPC method names supported by the client
module.exports.RpcClient.METHODS = require('./lib/methods.js')
// merge in the verification helpers (verify, verifyCommit, etc.)
Object.assign(module.exports, require('./lib/verify.js'))

@@ -8,2 +8,3 @@ 'use strict';

var _require = require('./types.js'),
VarInt = _require.VarInt,
VarString = _require.VarString,

@@ -15,48 +16,32 @@ VarBuffer = _require.VarBuffer,

TreeHashInput = _require.TreeHashInput,
ValidatorHashInput = _require.ValidatorHashInput,
Int64BE = _require.Int64BE;
ValidatorHashInput = _require.ValidatorHashInput;
var blockHashFields = [['ChainID', 'chain_id', VarString], ['Height', 'height', Int64BE], ['Time', 'time', Time], ['NumTxs', 'num_txs', Int64BE], ['LastBlockID', 'last_block_id', BlockID], ['TotalTxs', 'total_txs', Int64BE], ['LastCommit', 'last_commit_hash', VarHexBuffer], ['Data', 'data_hash', VarHexBuffer], ['Validators', 'validators_hash', VarHexBuffer], ['Consensus', 'consensus_hash', VarHexBuffer], ['App', 'app_hash', VarHexBuffer], ['Results', 'last_results_hash', VarHexBuffer], ['Evidence', 'evidence_hash', VarHexBuffer]];
// variadic SHA-256 over one or more Buffer/string chunks
var sha256 = hashFunc('sha256');
// "tmhash": SHA-256 truncated to its first 20 bytes
var tmhash = function tmhash() {
return sha256.apply(undefined, arguments).slice(0, 20);
};
var blockHashFields = [['ChainID', 'chain_id', VarString], ['Height', 'height', VarInt], ['Time', 'time', Time], ['NumTxs', 'num_txs', VarInt], ['TotalTxs', 'total_txs', VarInt], ['LastBlockID', 'last_block_id', BlockID], ['LastCommit', 'last_commit_hash', VarHexBuffer], ['Data', 'data_hash', VarHexBuffer], ['Validators', 'validators_hash', VarHexBuffer], ['NextValidators', 'next_validators_hash', VarHexBuffer], ['App', 'app_hash', VarHexBuffer], ['Consensus', 'consensus_hash', VarHexBuffer], ['Results', 'last_results_hash', VarHexBuffer], ['Evidence', 'evidence_hash', VarHexBuffer], ['Proposer', 'proposer_address', VarHexBuffer]];
// sort fields by hash of name
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
blockHashFields.sort(function (_ref, _ref2) {
var _ref4 = _slicedToArray(_ref, 1),
keyA = _ref4[0];
try {
for (var _iterator = blockHashFields[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var field = _step.value;
var _ref3 = _slicedToArray(_ref2, 1),
keyB = _ref3[0];
field.push(ripemd160(field[0]));
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator.return) {
_iterator.return();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
blockHashFields.sort(function (a, b) {
return a[3].compare(b[3]);
var bufA = Buffer.from(keyA);
var bufB = Buffer.from(keyB);
return bufA.compare(bufB);
});
function getBlockHash(header) {
var hashes = blockHashFields.map(function (_ref) {
var _ref2 = _slicedToArray(_ref, 4),
key = _ref2[0],
jsonKey = _ref2[1],
type = _ref2[2],
keyHash = _ref2[3];
var hashes = blockHashFields.map(function (_ref5) {
var _ref6 = _slicedToArray(_ref5, 3),
key = _ref6[0],
jsonKey = _ref6[1],
type = _ref6[2];
var hash = kvHash(keyHash, type, header[jsonKey], key);
hash.key = key;
return hash;
return kvHash(type, header[jsonKey], key);
});

@@ -73,13 +58,18 @@ return treeHash(hashes).toString('hex').toUpperCase();

var bytes = ValidatorHashInput.encode(validator);
return ripemd160(bytes);
return tmhash(bytes);
}
function kvHash(keyHash, type, value, key) {
function kvHash(type, value, key) {
var encodedValue = '';
if (value || typeof value === 'number') {
encodedValue = type.encode(value);
// some types have an "empty" value,
// if we got that then use an empty buffer instead
if (type.empty != null && encodedValue === type.empty) {
encodedValue = Buffer.alloc(0);
}
}
var valueHash = ripemd160(encodedValue);
var bytes = Buffer.concat([VarBuffer.encode(keyHash), VarBuffer.encode(valueHash)]);
return ripemd160(bytes);
var valueHash = tmhash(encodedValue);
return tmhash(VarString.encode(key), VarBuffer.encode(valueHash));
}

@@ -95,7 +85,39 @@

var hashInput = TreeHashInput.encode({ left: left, right: right });
return ripemd160(hashInput);
return tmhash(hashInput);
}
function ripemd160(data) {
return createHash('ripemd160').update(data).digest();
// Builds a variadic hashing function for the given algorithm.
// The returned function feeds each argument (Buffer or string) into the
// digest in order and returns the resulting digest as a Buffer.
function hashFunc(algorithm) {
  return function () {
    var hash = createHash(algorithm);
    // feed every chunk passed by the caller into the hash, in order
    for (var i = 0; i < arguments.length; i++) {
      hash.update(arguments[i]);
    }
    return hash.digest();
  };
}

@@ -107,3 +129,4 @@

getValidatorSetHash: getValidatorSetHash,
ripemd160: ripemd160
sha256: sha256,
tmhash: tmhash
};

@@ -21,2 +21,5 @@ 'use strict';

var _require2 = require('./common.js'),
safeParseInt = _require2.safeParseInt;
var HOUR = 60 * 60 * 1000;

@@ -54,5 +57,6 @@ var FOUR_HOURS = 4 * HOUR;

if (typeof state.header.height !== 'number') {
if (state.header.height == null) {
throw Error('Expected state header to have a height');
}
state.header.height = safeParseInt(state.header.height);

@@ -71,8 +75,5 @@ // we should be able to trust this state since it was either

_this.rpc = RpcClient(peer);
_this.rpc.on('error', function (err) {
return _this.emit('error', err);
});
_this.on('error', function () {
return _this.rpc.close();
});
// TODO: ensure we're using websocket
_this.emitError = _this.emitError.bind(_this);
_this.rpc.on('error', _this.emitError);

@@ -96,3 +97,3 @@ _this.handleError(_this.initialSync)().then(function () {

return func.call.apply(func, [_this2].concat(args)).catch(function (err) {
return _this2.emit('error', err);
return _this2.emitError(err);
});

@@ -102,6 +103,12 @@ };

}, {
key: 'emitError',
value: function emitError(err) {
this.rpc.close();
this.emit('error', err);
}
}, {
key: 'state',
value: function state() {
// TODO: deep clone
return this._state;
return Object.assign({}, this._state);
}

@@ -123,4 +130,6 @@ }, {

var status = await this.rpc.status();
var tip = status.sync_info.latest_block_height;
await this.syncTo(tip);
var tip = safeParseInt(status.sync_info.latest_block_height);
if (tip > this.height()) {
await this.syncTo(tip);
}
this.handleError(this.subscribe)();

@@ -138,8 +147,8 @@ }

var _ref = await this.rpc.commit({ height: nextHeight }),
SignedHeader = _ref.SignedHeader;
_ref$signed_header = _ref.signed_header,
header = _ref$signed_header.header,
commit = _ref$signed_header.commit;
var header = SignedHeader.header,
commit = SignedHeader.commit;
header.height = safeParseInt(header.height);
try {

@@ -159,3 +168,3 @@ // test if this commit is signed by 2/3+ of our old set

} catch (err) {
// real error, not just insufficient voting power
// throw real errors
if (!err.insufficientVotingPower) {

@@ -165,3 +174,3 @@ throw err;

// insufficient verifiable voting power,
// insufficient verifiable voting power error,
// couldn't verify this header

@@ -171,3 +180,4 @@

if (nextHeight === height + 1) {
throw Error('Validator set changed too much to verify transition');
// should not happen unless peer sends us fake transition
throw Error('Could not verify transition');
}

@@ -190,9 +200,19 @@

var syncing = false;
var targetHeight = this.height();
await this.rpc.subscribe({ query: query }, this.handleError(async function (_ref2) {
var header = _ref2.header;
// don't start another recursive sync if we are in the middle of syncing
header.height = safeParseInt(header.height);
targetHeight = header.height;
// don't start another sync loop if we are in the middle of syncing
if (syncing) return;
syncing = true;
await _this3.syncTo(header.height);
// sync one block at a time to target
while (_this3.height() < targetHeight) {
await _this3.syncTo(_this3.height() + 1);
}
// unlock
syncing = false;

@@ -204,10 +224,7 @@ }));

value: async function update(header, commit) {
header.height = safeParseInt(header.height);
var height = header.height;
// make sure we aren't syncing from longer than the unbonding period
if (!height) {
throw Error('Expected header to have height');
}
// make sure we aren't syncing from longer than the unbonding period
var prevTime = new Date(this._state.header.time).getTime();

@@ -226,3 +243,4 @@ if (Date.now() - prevTime > this.maxAge) {

var res = await this.rpc.commit({ height: height });
commit = res.SignedHeader.commit;
commit = res.signed_header.commit;
commit.header.height = safeParseInt(commit.header.height);
}

@@ -244,2 +262,7 @@

}
}, {
key: 'close',
value: function close() {
this.rpc.close();
}
}]);

@@ -246,0 +269,0 @@

'use strict';
module.exports = ['subscribe', 'unsubscribe', 'status', 'net_info', 'dial_seeds', 'blockchain', 'genesis', 'block', 'validators', 'dump_consensus_state', 'broadcast_tx_commit', 'broadcast_tx_sync', 'broadcast_tx_async', 'unconfirmed_txs', 'num_unconfirmed_txs', 'commit', 'abci_query', 'abci_info', 'abci_proof', 'unsafe_flush_mempool', 'unsafe_set_config', 'unsafe_start_cpu_profiler', 'unsafe_stop_cpu_profiler', 'unsafe_write_heap_profile'];
// JSON-RPC method names exposed by Tendermint nodes; each name appears once
// ('blockchain' was previously listed twice).
module.exports = ['subscribe', 'unsubscribe', 'unsubscribe_all', 'status', 'net_info', 'dial_peers', 'dial_seeds', 'blockchain', 'genesis', 'health', 'block', 'block_results', 'validators', 'consensus_state', 'dump_consensus_state', 'broadcast_tx_commit', 'broadcast_tx_sync', 'broadcast_tx_async', 'unconfirmed_txs', 'num_unconfirmed_txs', 'commit', 'tx', 'tx_search', 'abci_query', 'abci_info', 'unsafe_flush_mempool', 'unsafe_start_cpu_profiler', 'unsafe_stop_cpu_profiler', 'unsafe_write_heap_profile'];
'use strict';
var struct = require('varstruct');
var _require = require('./hash.js'),
ripemd160 = _require.ripemd160;
tmhash = _require.tmhash;
var _require2 = require('./types.js'),
VarHexBuffer = _require2.VarHexBuffer;
// layout of the bytes hashed to derive an address: a 1-byte key type
// followed by the variable-length public key
var AddressBytes = struct([{ name: 'type', type: struct.Byte }, { name: 'key', type: VarHexBuffer }]);
// pubkey type name -> type byte used in the AddressBytes encoding above
var types = {
'ed25519': 1,
'secp256k1': 2
};
function getAddress(pubkey) {
var type = types[pubkey.type];
if (type == null) {
throw Error('Invalid pubkey type');
}
var bytes = AddressBytes.encode({
type: type,
key: pubkey.data
});
return ripemd160(bytes);
var bytes = Buffer.from(pubkey.value, 'base64');
return tmhash(bytes).toString('hex').toUpperCase();
}
module.exports = getAddress;
module.exports = { getAddress: getAddress };

@@ -22,7 +22,20 @@ 'use strict';

function convertArgs(args) {
function convertHttpArgs(args) {
args = args || {};
for (var k in args) {
var v = args[k];
if (Buffer.isBuffer(v)) {
if (typeof v === 'number') {
args[k] = '"' + v + '"';
}
}
return args;
}
function convertWsArgs(args) {
args = args || {};
for (var k in args) {
var v = args[k];
if (typeof v === 'number') {
args[k] = String(v);
} else if (Buffer.isBuffer(v)) {
args[k] = '0x' + v.toString('hex');

@@ -44,3 +57,3 @@ } else if (v instanceof Uint8Array) {

function Client() {
var uriString = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'localhost:46657';
var uriString = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'localhost:26657';

@@ -94,3 +107,4 @@ _classCallCheck(this, Client);

this.ws.on('close', function () {
return _this2.emit('error', Error('websocket disconnected'));
if (_this2.closed) return;
_this2.emit('error', Error('websocket disconnected'));
});

@@ -108,3 +122,3 @@ this.ws.on('data', function (data) {

url: this.uri + method,
params: args
params: convertHttpArgs(args)
}).then(function (_ref) {

@@ -114,3 +128,5 @@ var data = _ref.data;

if (data.error) {
throw Error(JSON.stringify(data.error));
var err = Error(data.error.message);
Object.assign(err, data.error);
throw err;
}

@@ -130,3 +146,3 @@ return data.result;

var id = Math.random().toString(36);
var params = convertArgs(args);
var params = convertWsArgs(args);

@@ -163,2 +179,3 @@ if (method === 'subscribe') {

value: function close() {
this.closed = true;
if (!this.ws) return;

@@ -165,0 +182,0 @@ this.ws.destroy();

'use strict';
var struct = require('varstruct');
var Int64BE = struct.Int64BE;
var Int64LE = struct.Int64LE;
var VarInt = require('./varint.js');
var _require = require('./varint.js'),
VarInt = _require.VarInt,
UVarInt = _require.UVarInt;
var VarString = struct.VarString(VarInt);
var VarBuffer = struct.VarBuffer(VarInt);
var VarString = struct.VarString(UVarInt);
var VarBuffer = struct.VarBuffer(UVarInt);

@@ -23,3 +25,3 @@ var VarHexBuffer = {

var length = value.length / 2;
return length + VarInt.encodingLength(length);
return length + UVarInt.encodingLength(length);
}

@@ -42,13 +44,10 @@ };

var buffer = Buffer.alloc(15);
// TODO: use js-amino
var buffer = Buffer.alloc(14);
buffer[0] = 1 << 3 | 1; // field 1, typ3 1
buffer.writeUInt32BE(seconds, 5);
buffer.writeUInt32LE(seconds, 1);
buffer[9] = 2 << 3 | 5; // field 2, typ3 5
buffer.writeUInt32BE(nanos, 10);
buffer.writeUInt32LE(nanos, 10);
buffer[14] = 4; // terminator
return buffer;

@@ -59,9 +58,10 @@ }

var BlockID = {
empty: Buffer.from('1200', 'hex'),
encode: function encode(value) {
// empty block id
if (!value.hash) {
return Buffer.from('1308000404', 'hex');
return BlockID.empty;
}
var buffer = Buffer.alloc(49);
var buffer = Buffer.alloc(48);

@@ -76,10 +76,9 @@ // TODO: actually do amino encoding stuff

// block parts
buffer[22] = 0x13;
buffer[23] = 0x08;
buffer[24] = 0x02;
buffer[25] = 0x12;
buffer[26] = 0x14;
Buffer.from(value.parts.hash, 'hex').copy(buffer, 27);
buffer[47] = 0x04;
buffer[48] = 0x04;
buffer[22] = 0x12;
buffer[23] = 0x18;
buffer[24] = 0x08;
buffer[25] = 0x02;
buffer[26] = 0x12;
buffer[27] = 0x14;
Buffer.from(value.parts.hash, 'hex').copy(buffer, 28);

@@ -92,3 +91,3 @@ return buffer;

var pubkeyAminoPrefix = Buffer.from('1624DE6220', 'hex');
var pubkeyAminoPrefix = Buffer.from('1624DE6420', 'hex');
var PubKey = {

@@ -129,3 +128,4 @@ decode: function decode(buffer) {

encode: function encode(validator) {
var buffer = Buffer.alloc(70);
var length = ValidatorHashInput.encodingLength(validator);
var buffer = Buffer.alloc(length);

@@ -139,23 +139,21 @@ // address field

// pubkey field
buffer[22] = 0x17;
PubKey.encode(validator.pub_key, buffer, 23);
buffer[22] = 0x12;
buffer[23] = 0x25;
PubKey.encode(validator.pub_key, buffer, 24);
// voting power field
buffer[60] = 0x19;
Int64BE.encode(validator.voting_power, buffer, 61);
buffer[61] = 0x18;
VarInt.encode(validator.voting_power, buffer, 62);
// terminator
buffer[69] = 0x04;
ValidatorHashInput.encode.bytes = 70;
ValidatorHashInput.encode.bytes = length;
return buffer;
},
encodingLength: function encodingLength(validator) {
return 70;
return 62 + VarInt.encodingLength(validator.voting_power);
}
};
struct([{ name: 'address', type: VarHexBuffer }, { name: 'pub_key', type: PubKey }, { name: 'voting_power', type: Int64BE }]);
module.exports = {
VarInt: VarInt,
UVarInt: UVarInt,
VarString: VarString,

@@ -169,3 +167,3 @@ VarBuffer: VarBuffer,

PubKey: PubKey,
Int64BE: Int64BE
Int64LE: Int64LE
};
'use strict';
function decode(buffer) {
var start = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
var end = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : buffer.length;
var _require = require('./common.js'),
safeParseInt = _require.safeParseInt;
throw Error('not implemented');
}
function VarInt(signed) {
function decode(buffer) {
var start = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
var end = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : buffer.length;
function encode(n) {
var buffer = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : Buffer.alloc(encodingLength(n));
var offset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
throw Error('not implemented');
}
n *= 2;
var i = 0;
while (n >= 0x80) {
buffer[offset + i] = n & 0xff | 0x80;
n >>= 7;
i++;
function encode(n) {
var buffer = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : Buffer.alloc(encodingLength(n));
var offset = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
n = safeParseInt(n);
// amino signed varint is multiplied by 2 ¯\_(ツ)_/¯
if (signed) n *= 2;
var i = 0;
while (n >= 0x80) {
buffer[offset + i] = n & 0xff | 0x80;
n >>= 7;
i++;
}
buffer[offset + i] = n & 0xff;
encode.bytes = i + 1;
return buffer;
}
buffer[offset + i] = n;
encode.bytes = i + 1;
return buffer;
}
function encodingLength(n) {
if (n < 0 || n > Number.MAX_SAFE_INTEGER) {
throw Error('varint value is out of bounds');
function encodingLength(n) {
if (signed) n *= 2;
if (n < 0 || n > Number.MAX_SAFE_INTEGER) {
throw Error('varint value is out of bounds');
}
var bits = Math.log2(n + 1);
return Math.ceil(bits / 7) || 1;
}
var bits = Math.log2(n + 1);
return Math.ceil(bits / 7) || 1;
return { encode: encode, decode: decode, encodingLength: encodingLength };
}
module.exports = { encode: encode, decode: decode, encodingLength: encodingLength };
module.exports = VarInt(true);
module.exports.UVarInt = VarInt(false);
module.exports.VarInt = module.exports;
'use strict';
var stringify = require('json-stable-stringify');
var ed25519 = require('supercop.js');
// TODO: try to load native ed25519 implementation, fall back to supercop.js

@@ -9,13 +11,12 @@ var _require = require('./hash.js'),

var _require2 = require('./types.js'),
PubKey = _require2.PubKey;
var _require2 = require('./pubkey.js'),
getAddress = _require2.getAddress;
var _require3 = require('./hash.js'),
ripemd160 = _require3.ripemd160;
var _require3 = require('./common.js'),
safeParseInt = _require3.safeParseInt;
var ed25519 = require('supercop.js');
// TODO: try to load native ed25519 implementation, fall back to supercop.js
// gets the serialized representation of a vote, which is used
// in the commit signatures
function getVoteSignBytes(chainId, vote) {

@@ -28,6 +29,3 @@ var height = vote.height,

// ensure timestamp only has millisecond precision
timestamp = new Date(timestamp).toISOString();
return Buffer.from(stringify({

@@ -37,6 +35,6 @@ '@chain_id': chainId,

block_id: blockId,
height: height,
round: round,
height: String(height),
round: String(round),
timestamp: timestamp,
type: type
type: safeParseInt(type)
}));

@@ -82,2 +80,5 @@ }

precommit.height = safeParseInt(precommit.height);
precommit.round = safeParseInt(precommit.round);
// all fields of block ID must match commit

@@ -193,3 +194,3 @@ if (precommit.block_id.hash !== commit.block_id.hash) {

var signature = Buffer.from(precommit.signature.value, 'base64');
var signature = Buffer.from(precommit.signature, 'base64');
var signBytes = getVoteSignBytes(header.chain_id, precommit);

@@ -257,2 +258,3 @@ var pubKey = Buffer.from(_validator.pub_key.value, 'base64');

validator.voting_power = safeParseInt(validator.voting_power);
verifyPositiveInt(validator.voting_power);

@@ -287,29 +289,40 @@ if (validator.voting_power === 0) {

function verify(oldState, newState) {
if (newState.header.chain_id !== oldState.header.chain_id) {
var oldHeader = oldState.header;
var oldValidators = oldState.validators;
var newHeader = newState.header;
var newValidators = newState.validators;
if (newHeader.chain_id !== oldHeader.chain_id) {
throw Error('Chain IDs do not match');
}
if (newState.header.height <= oldState.header.height) {
if (newHeader.height <= oldHeader.height) {
throw Error('New state height must be higher than old state height');
}
var validatorSetChanged = newState.header.validators_hash !== oldState.header.validators_hash;
var validatorSetChanged = newHeader.validators_hash !== oldHeader.validators_hash;
if (validatorSetChanged && newValidators == null) {
throw Error('Must specify new validator set');
}
// make sure new header has a valid commit
var validators = validatorSetChanged ? newState.validators : oldState.validators;
verifyCommit(newState.header, newState.commit, validators);
var validators = validatorSetChanged ? newValidators : oldValidators;
verifyCommit(newHeader, newState.commit, validators);
if (validatorSetChanged) {
// make sure new validator set is valid
// make sure new validator set has correct hash
verifyValidatorSet(newState.validators, newState.header.validators_hash);
verifyValidatorSet(newValidators, newHeader.validators_hash);
// make sure new commit is signed by 2/3+ of old validator set
verifyCommitSigs(newState.header, newState.commit, oldState.validators);
// if previous state's `next_validators_hash` matches the new validator
// set hash, then we already know it is valid
if (oldHeader.next_validators_hash !== newHeader.validators_hash) {
// otherwise, make sure new commit is signed by 2/3+ of old validator set.
// sometimes we will take this path to skip ahead, we don't need any
// headers between `oldState` and `newState` if this check passes
verifyCommitSigs(newHeader, newState.commit, oldValidators);
}
}
}
function getAddress(pubkey) {
var bytes = PubKey.encode(pubkey);
return ripemd160(bytes).toString('hex').toUpperCase();
}
module.exports = verify;

@@ -320,3 +333,4 @@ Object.assign(module.exports, {

verifyValidatorSet: verifyValidatorSet,
verify: verify
verify: verify,
getVoteSignBytes: getVoteSignBytes
});
{
"name": "tendermint",
"version": "3.1.11",
"version": "3.2.0",
"description": "A light client which talks to your Tendermint node over RPC",

@@ -32,4 +32,8 @@ "main": "index.js",

"depcheck": "^0.6.9",
"get-port": "^3.2.0",
"nyc": "^11.8.0",
"standard": "^11.0.1"
"standard": "^11.0.1",
"tempy": "^0.2.1",
"tendermint-node": "^3.3.0",
"ws": "^5.2.1"
},

@@ -36,0 +40,0 @@ "babel": {

@@ -20,8 +20,5 @@ # tendermint

// `state` contains information about an older part of the chain which is known
// to be valid. This cannot be older than the unbonding period, otherwise we
// cannot safely sync using proof-of-stake. This should either be hardcoded by
// the app developer as a trusted starting point, manually accepted as
// trustworthy by the user, or loaded from the last time the user ran the
// light client.
// `state` contains a part of the chain we know to be valid. If it's
// too old, we cannot safely verify the chain and need to get a newer
// state out-of-band.
let state = {

@@ -43,5 +40,6 @@ // a header, in the same format as returned by RPC

let opts = {
// the maximum number of blocks we can sync into the future
// from our previous state, e.g. the unbonding period
maxAge: 1728000 // defaults to 30 days of 1 second blocks
// the maximum age of a state to be safely accepted,
// e.g. the unbonding period
// (in seconds)
maxAge: 1728000 // defaults to 30 days
}

@@ -48,0 +46,0 @@

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc