gridplus-sdk
Comparing version 0.7.20 to 0.7.21
{ | ||
"name": "gridplus-sdk", | ||
"version": "0.7.20", | ||
"version": "0.7.21", | ||
"description": "SDK to interact with GridPlus Lattice1 device", | ||
@@ -26,6 +26,6 @@ "scripts": { | ||
"aes-js": "^3.1.1", | ||
"bech32": "^2.0.0", | ||
"bignumber.js": "^9.0.1", | ||
"bitwise": "^2.0.4", | ||
"borc": "^2.1.2", | ||
"bs58": "^4.0.1", | ||
"bs58check": "^2.1.2", | ||
@@ -32,0 +32,0 @@ "buffer": "^5.6.0", |
// Util for Bitcoin-specific functionality | ||
const bs58 = require('bs58'); | ||
const bs58check = require('bs58check') | ||
const bech32 = require('bech32').bech32; | ||
const bs58check = require('bs58check'); | ||
const Buffer = require('buffer/').Buffer; | ||
@@ -12,3 +12,3 @@ const constants = require('./constants') | ||
const OP = { | ||
'0': 0x00, | ||
ZERO: 0x00, | ||
HASH160: 0xa9, | ||
@@ -26,2 +26,4 @@ DUP: 0x76, | ||
'SEGWIT_TESTNET': 0xC4, | ||
'SEGWIT_NATIVE_V0': 0xD0, | ||
'SEGWIT_NATIVE_V0_TESTNET': 0xF0, | ||
} | ||
@@ -31,6 +33,8 @@ exports.addressVersion = addressVersion; | ||
// Bitcoin script types -- defined by the Lattice protocol spec | ||
// NOTE: Only certain script types are supported for the spender, but all are supported for the recipient | ||
const scriptTypes = { | ||
P2PKH: 0x01, | ||
P2PKH: 0x01, // Supported spender type | ||
P2SH: 0x02, | ||
P2SH_P2WPKH: 0x03, | ||
P2SH_P2WPKH: 0x03, // Supported spender type | ||
P2WPKH_V0: 0x04, | ||
} | ||
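The practical change for callers is that the spender input type can now be named directly via `spenderScriptType` instead of the legacy `isSegwit` boolean, which could only distinguish wrapped-segwit from legacy inputs. A minimal sketch of the two request styles (field values are placeholders; only the field names shown in this diff come from the source):

// Legacy callers: boolean flag (true => P2SH_P2WPKH inputs, false => P2PKH)
const legacyData = { ...sharedTxFields, isSegwit: true };
// New callers: name the spender script type directly. Per the NOTE above, only the
// types marked "Supported spender type" are currently accepted for spending.
const newData = { ...sharedTxFields, spenderScriptType: 'P2SH_P2WPKH' };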
@@ -60,3 +64,6 @@ exports.scriptTypes = scriptTypes | ||
try { | ||
const { prevOuts, recipient, value, changePath=DEFAULT_CHANGE, fee, isSegwit, changeVersion='SEGWIT' } = data; | ||
const { | ||
prevOuts, recipient, value, changePath=DEFAULT_CHANGE, | ||
fee, isSegwit=null, changeVersion='SEGWIT', spenderScriptType=null | ||
} = data; | ||
if (changePath.length !== 5) throw new Error('Please provide a full change path.') | ||
@@ -79,7 +86,6 @@ // Serialize the request | ||
payload.writeUInt32LE(fee, off); off += 4; | ||
const recipientVersionByte = bs58.decode(recipient)[0]; | ||
const recipientPubkeyhash = bs58check.decode(recipient).slice(1); | ||
const dec = decodeAddress(recipient); | ||
// Parameterize the recipient output | ||
payload.writeUInt8(recipientVersionByte, off); off++; | ||
recipientPubkeyhash.copy(payload, off); off += recipientPubkeyhash.length; | ||
payload.writeUInt8(dec.versionByte, off); off++; | ||
dec.pkh.copy(payload, off); off += dec.pkh.length; | ||
writeUInt64LE(value, payload, off); off += 8; | ||
@@ -90,5 +96,14 @@ | ||
let inputSum = 0; | ||
const scriptType = isSegwit === true ? | ||
scriptTypes.P2SH_P2WPKH : // Only support p2sh(p2wpkh) for segwit spends for now | ||
scriptTypes.P2PKH; // No support for multisig p2sh in v1 (p2sh == segwit here) | ||
let spenderScriptTypeToUse; | ||
if (spenderScriptType !== null && scriptTypes[spenderScriptType]) { | ||
// For newer versions we use the input scriptType | ||
spenderScriptTypeToUse = scriptTypes[spenderScriptType]; | ||
} else if (isSegwit !== null) { | ||
// For legacy callers we use the boolean `isSegwit` to denote if we are spending | ||
// *wrapped* segwit inputs | ||
spenderScriptTypeToUse = isSegwit === true ? scriptTypes.P2SH_P2WPKH : scriptTypes.P2PKH; | ||
} else { | ||
throw new Error('Unsupported spender script type or none provided.') | ||
} | ||
prevOuts.forEach((input) => { | ||
@@ -105,3 +120,3 @@ if (!input.signerPath || input.signerPath.length !== 5) { | ||
inputSum += input.value; | ||
payload.writeUInt8(scriptType, off); off++; | ||
payload.writeUInt8(spenderScriptTypeToUse, off); off++; | ||
if (!Buffer.isBuffer(input.txHash)) input.txHash = Buffer.from(input.txHash, 'hex'); | ||
@@ -113,2 +128,3 @@ input.txHash.copy(payload, off); off += input.txHash.length; | ||
payload, | ||
spenderScriptType: spenderScriptTypeToUse, | ||
schema: constants.signingSchema.BTC_TRANSFER, | ||
@@ -135,3 +151,3 @@ origData: data, // We will need the original data for serializing the tx | ||
exports.serializeTx = function(data) { | ||
const { inputs, outputs, isSegwitSpend, lockTime=0, crypto } = data; | ||
const { inputs, outputs, spenderScriptType, lockTime=0, crypto } = data; | ||
let payload = Buffer.alloc(4); | ||
@@ -142,3 +158,3 @@ let off = 0; | ||
payload.writeUInt32LE(version, off); off += 4; | ||
if (isSegwitSpend === true) { | ||
if (spenderScriptType === scriptTypes.P2SH_P2WPKH) { | ||
payload = concat(payload, Buffer.from('00', 'hex')); // marker = 0x00 | ||
@@ -154,3 +170,3 @@ payload = concat(payload, Buffer.from('01', 'hex')); // flag = 0x01 | ||
payload = concat(payload, index); off += index.length; | ||
if (isSegwitSpend === true) { | ||
if (spenderScriptType === scriptTypes.P2SH_P2WPKH) { | ||
// Build a vector (varSlice of varSlice) containing the redeemScript | ||
@@ -185,3 +201,3 @@ const redeemScript = buildRedeemScript(input.pubkey, crypto); | ||
// Add witness data if needed | ||
if (isSegwitSpend === true) { | ||
if (spenderScriptType === scriptTypes.P2SH_P2WPKH) { | ||
const sigs = []; | ||
@@ -212,3 +228,3 @@ const pubkeys = []; | ||
const pubkeyhash = crypto.createHash('rmd160').update(shaHash).digest(); | ||
redeemScript.writeUInt8(OP['0']); | ||
redeemScript.writeUInt8(OP.ZERO); | ||
redeemScript.writeUInt8(pubkeyhash.length, 1); | ||
@@ -248,10 +264,15 @@ pubkeyhash.copy(redeemScript, 2); | ||
function buildLockingScript(address) { | ||
const versionByte = bs58.decode(address)[0]; | ||
const pubkeyhash = bs58check.decode(address).slice(1); | ||
if (versionByte === addressVersion.SEGWIT || versionByte === addressVersion.SEGWIT_TESTNET) { | ||
// Also works for p2sh | ||
return buildP2shLockingScript(pubkeyhash); | ||
} else { | ||
// We assume testnet uses p2pkh | ||
return buildP2pkhLockingScript(pubkeyhash); | ||
const dec = decodeAddress(address); | ||
switch (dec.versionByte) { | ||
case addressVersion.SEGWIT_NATIVE_V0: | ||
case addressVersion.SEGWIT_NATIVE_V0_TESTNET: | ||
return buildP2wpkhLockingScript(dec.pkh); | ||
case addressVersion.SEGWIT: | ||
case addressVersion.SEGWIT_TESTNET: | ||
return buildP2shLockingScript(dec.pkh); | ||
case addressVersion.LEGACY: | ||
case addressVersion.TESTNET: | ||
return buildP2pkhLockingScript(dec.pkh); | ||
default: | ||
throw new Error(`Unknown version byte: ${dec.versionByte}. Cannot build BTC transaction.`); | ||
} | ||
@@ -282,2 +303,10 @@ } | ||
function buildP2wpkhLockingScript(pubkeyhash) { | ||
const out = Buffer.alloc(2 + pubkeyhash.length); | ||
out.writeUInt8(OP.ZERO, 0); | ||
out.writeUInt8(pubkeyhash.length, 1); | ||
pubkeyhash.copy(out, 2); | ||
return out; | ||
} | ||
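buildP2wpkhLockingScript above produces the standard 22-byte native-segwit v0 output script: the OP_0 witness version followed by a length-prefixed 20-byte pubkey hash. A quick sanity-check sketch (assumes the function is callable in this scope):

// Sketch: byte layout of a P2WPKH output script (0x00 0x14 <20-byte pkh>)
const pkh = Buffer.alloc(20, 0xab);            // placeholder pubkey hash
const script = buildP2wpkhLockingScript(pkh);
// script[0] === 0x00 (OP.ZERO), script[1] === 0x14 (20), script.length === 22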
// Static Utils | ||
@@ -331,2 +360,26 @@ //---------------------- | ||
return preBuf; | ||
} | ||
function decodeAddress(address) { | ||
let versionByte, pkh; | ||
try { | ||
versionByte = bs58check.decode(address)[0]; | ||
pkh = bs58check.decode(address).slice(1); | ||
} catch (err) { | ||
try { | ||
const bech32Dec = bech32.decode(address); | ||
if (bech32Dec.prefix === 'bc') | ||
versionByte = 0xD0; | ||
else if (bech32Dec.prefix === 'tb') | ||
versionByte = 0xF0; | ||
else | ||
throw new Error('Unsupported prefix: must be bc or tb.'); | ||
if (bech32Dec.words[0] !== 0) | ||
throw new Error(`Unsupported segwit version: must be 0, got ${bech32Dec.words[0]}`); | ||
pkh = Buffer.from(bech32.fromWords(bech32Dec.words.slice(1))); | ||
} catch (err) { | ||
throw new Error(`Unable to decode address: ${address}: ${err.message}`) | ||
} | ||
} | ||
return {versionByte, pkh}; | ||
} |
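decodeAddress gives both address encodings a single return shape, which is what lets buildLockingScript above branch purely on versionByte. A usage sketch (the legacy address is the well-known genesis-block address and the bech32 one is the BIP-173 test vector; exact pkh bytes omitted):

// Base58check (legacy) address: mainnet P2PKH version byte is 0x00
const legacy = decodeAddress('1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa');
// => { versionByte: 0x00, pkh: <20-byte Buffer> }

// Bech32 (native segwit v0) address: the 'bc' prefix maps to the internal 0xD0 marker
const native = decodeAddress('bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4');
// => { versionByte: 0xD0 (SEGWIT_NATIVE_V0), pkh: <20-byte Buffer from the witness program> }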
@@ -509,3 +509,2 @@ const bitwise = require('bitwise'); | ||
.catch((err) => { | ||
console.log('request err', err) | ||
const isTimeout = err.code === 'ECONNABORTED' && err.errno === 'ETIME'; | ||
@@ -694,3 +693,3 @@ if (isTimeout) | ||
outputs: [], | ||
isSegwitSpend: req.origData.isSegwit, | ||
spenderScriptType: req.spenderScriptType, | ||
network: req.origData.network, | ||
@@ -697,0 +696,0 @@ crypto: this.crypto, |
@@ -189,68 +189,85 @@ // Consistent with Lattice's IV | ||
'uint16': 4, | ||
'uint32': 5, | ||
'uint64': 6, | ||
'uint128': 7, | ||
'uint256': 8, | ||
// 'int8': 9, // We do not support signed integer types in v1 because we can't display them | ||
// 'int16': 10, | ||
// 'int24': 11, | ||
// 'int64': 12, | ||
// 'int128': 13, | ||
// 'int256': 14, | ||
'uint': 15, | ||
// 'int': 16, | ||
'bytes1': 17, | ||
'bytes2': 18, | ||
'bytes3': 19, | ||
'bytes4': 20, | ||
'bytes5': 21, | ||
'bytes6': 22, | ||
'bytes7': 23, | ||
'bytes8': 24, | ||
'bytes9': 25, | ||
'bytes10': 26, | ||
'bytes11': 27, | ||
'bytes12': 28, | ||
'bytes13': 29, | ||
'bytes14': 30, | ||
'bytes15': 31, | ||
'bytes16': 32, | ||
'bytes17': 33, | ||
'bytes18': 34, | ||
'bytes19': 35, | ||
'bytes20': 36, | ||
'bytes21': 37, | ||
'bytes22': 38, | ||
'bytes23': 39, | ||
'bytes24': 40, | ||
'bytes25': 41, | ||
'bytes26': 42, | ||
'bytes27': 43, | ||
'bytes28': 44, | ||
'bytes29': 45, | ||
'bytes30': 46, | ||
'bytes31': 47, | ||
'bytes32': 48, | ||
'bytes': 49, | ||
'string': 50, | ||
'tuple1': 51, | ||
'tuple2': 52, | ||
'tuple3': 53, | ||
'tuple4': 54, | ||
'tuple5': 55, | ||
'tuple6': 56, | ||
'tuple7': 57, | ||
'tuple8': 58, | ||
'tuple9': 59, | ||
'tuple10': 60, | ||
'tuple11': 61, | ||
'tuple12': 62, | ||
'tuple13': 63, | ||
'tuple14': 64, | ||
'tuple15': 65, | ||
'tuple16': 66, | ||
'tuple17': 67, // Firmware currently cannot support tuples larger than this | ||
// 'tuple18': 68, | ||
// 'tuple19': 69, | ||
// 'tuple20': 70, | ||
'uint24': 5, | ||
'uint32': 6, | ||
'uint40': 7, | ||
'uint48': 8, | ||
'uint56': 9, | ||
'uint64': 10, | ||
'uint72': 11, | ||
'uint80': 12, | ||
'uint88': 13, | ||
'uint96': 14, | ||
'uint104': 15, | ||
'uint112': 16, | ||
'uint120': 17, | ||
'uint128': 18, | ||
'uint136': 19, | ||
'uint144': 20, | ||
'uint152': 21, | ||
'uint160': 22, | ||
'uint168': 23, | ||
'uint176': 24, | ||
'uint184': 25, | ||
'uint192': 26, | ||
'uint200': 27, | ||
'uint208': 28, | ||
'uint216': 29, | ||
'uint224': 30, | ||
'uint232': 31, | ||
'uint240': 32, | ||
'uint248': 33, | ||
'uint256': 34, | ||
// Lattice firmware does not currently support signed integer types | ||
'uint': 67, | ||
'bytes1': 69, | ||
'bytes2': 70, | ||
'bytes3': 71, | ||
'bytes4': 72, | ||
'bytes5': 73, | ||
'bytes6': 74, | ||
'bytes7': 75, | ||
'bytes8': 76, | ||
'bytes9': 77, | ||
'bytes10': 78, | ||
'bytes11': 79, | ||
'bytes12': 80, | ||
'bytes13': 81, | ||
'bytes14': 82, | ||
'bytes15': 83, | ||
'bytes16': 84, | ||
'bytes17': 85, | ||
'bytes18': 86, | ||
'bytes19': 87, | ||
'bytes20': 88, | ||
'bytes21': 89, | ||
'bytes22': 90, | ||
'bytes23': 91, | ||
'bytes24': 92, | ||
'bytes25': 93, | ||
'bytes26': 94, | ||
'bytes27': 95, | ||
'bytes28': 96, | ||
'bytes29': 97, | ||
'bytes30': 98, | ||
'bytes31': 99, | ||
'bytes32': 100, | ||
'bytes': 101, | ||
'string': 102, | ||
'tuple1': 103, | ||
'tuple2': 104, | ||
'tuple3': 105, | ||
'tuple4': 106, | ||
'tuple5': 107, | ||
'tuple6': 108, | ||
'tuple7': 109, | ||
'tuple8': 110, | ||
'tuple9': 111, | ||
'tuple10': 112, | ||
'tuple11': 113, | ||
'tuple12': 114, | ||
'tuple13': 115, | ||
'tuple14': 116, | ||
'tuple15': 117, | ||
'tuple16': 118, | ||
'tuple17': 119, // Firmware currently cannot support tuples larger than this | ||
}; | ||
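The renumbered codes leave room for the signed integer types (noted above as unsupported) and make the remaining families contiguous. Inferring from the table above, the mapping can be reproduced with a small helper (a sketch derived from the listed values, not part of the SDK's API):

// Sketch: recompute the codes above for the regular type families
function evmTypeCode(type) {
  let m;
  if ((m = /^uint(\d+)$/.exec(type)))  return 2 + (Number(m[1]) / 8);  // uint16 -> 4 ... uint256 -> 34
  if ((m = /^bytes(\d+)$/.exec(type))) return 68 + Number(m[1]);       // bytes1 -> 69 ... bytes32 -> 100
  if ((m = /^tuple(\d+)$/.exec(type))) return 102 + Number(m[1]);      // tuple1 -> 103 ... tuple17 -> 119
  return { uint: 67, bytes: 101, string: 102 }[type];                  // non-sized types from the table
}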
@@ -272,2 +289,7 @@ | ||
const legacy = (v.length === 0); | ||
// V0.10.8 allows a user to sign a prehashed transaction if the payload | ||
// is too big | ||
if (!legacy && gte(v, [0, 10, 8])) { | ||
c.prehashAllowed = true; | ||
} | ||
// V0.10.5 added the ability to use flexible address path sizes, which | ||
@@ -274,0 +296,0 @@ // changes the `getAddress` API. It also added support for EIP712 |
@@ -64,3 +64,3 @@ // Utils for Ethereum transactions. This is effectively a shim of ethereumjs-util, which | ||
const { signerPath, eip155=null, fwConstants } = data; | ||
const { extraDataFrameSz, extraDataMaxFrames } = fwConstants; | ||
const { extraDataFrameSz, extraDataMaxFrames, prehashAllowed } = fwConstants; | ||
const EXTRA_DATA_ALLOWED = extraDataFrameSz > 0 && extraDataMaxFrames > 0; | ||
@@ -173,2 +173,3 @@ const MAX_BASE_DATA_SZ = fwConstants.ethMaxDataSz; | ||
const extraDataPayloads = []; | ||
let prehash = null; | ||
if (dataBytes && dataBytes.length > MAX_BASE_DATA_SZ) { | ||
@@ -179,6 +180,4 @@ // Determine sizes and run through sanity checks | ||
const maxSzAllowed = MAX_BASE_DATA_SZ + (extraDataMaxFrames * extraDataFrameSz); | ||
if ((!EXTRA_DATA_ALLOWED) || (EXTRA_DATA_ALLOWED && totalSz > maxSzAllowed)) | ||
throw new Error(`Data field too large (got ${dataBytes.length}; must be <=${maxSzAllowed-chainIdExtraSz} bytes)`); | ||
// Copy over the data. Account for larger chain ID sizes if applicable. | ||
// Copy the data into a tmp buffer. Account for larger chain ID sizes if applicable. | ||
const dataToCopy = Buffer.alloc(dataBytes.length + chainIdExtraSz) | ||
@@ -192,9 +191,17 @@ if (chainIdExtraSz > 0) { | ||
} | ||
// Split overflow data into extraData frames | ||
const frames = splitFrames(dataToCopy.slice(MAX_BASE_DATA_SZ), extraDataFrameSz); | ||
frames.forEach((frame) => { | ||
const szLE = Buffer.alloc(4); | ||
szLE.writeUInt32LE(frame.length); | ||
extraDataPayloads.push(Buffer.concat([szLE, frame])); | ||
}) | ||
if (prehashAllowed && totalSz > maxSzAllowed) { | ||
// If this payload is too large to send, but the Lattice allows a prehashed message, do that | ||
prehash = Buffer.from(keccak256(rlp.encode(rawTx)), 'hex') | ||
} else { | ||
if ((!EXTRA_DATA_ALLOWED) || (EXTRA_DATA_ALLOWED && totalSz > maxSzAllowed)) | ||
throw new Error(`Data field too large (got ${dataBytes.length}; must be <=${maxSzAllowed-chainIdExtraSz} bytes)`); | ||
// Split overflow data into extraData frames | ||
const frames = splitFrames(dataToCopy.slice(MAX_BASE_DATA_SZ), extraDataFrameSz); | ||
frames.forEach((frame) => { | ||
const szLE = Buffer.alloc(4); | ||
szLE.writeUInt32LE(frame.length); | ||
extraDataPayloads.push(Buffer.concat([szLE, frame])); | ||
}) | ||
} | ||
} | ||
@@ -208,4 +215,9 @@ // Write the data size (does *NOT* include the chainId buffer, if that exists) | ||
} | ||
// Copy the first slice of the data itself | ||
dataBytes.slice(0, MAX_BASE_DATA_SZ).copy(txReqPayload, off); off += MAX_BASE_DATA_SZ; | ||
// Copy the first slice of the data itself. If this payload has been pre-hashed, include it | ||
// in the `data` field. This will result in a different Lattice screen being drawn. | ||
if (prehash) { | ||
prehash.copy(txReqPayload, off); off += MAX_BASE_DATA_SZ; | ||
} else { | ||
dataBytes.slice(0, MAX_BASE_DATA_SZ).copy(txReqPayload, off); off += MAX_BASE_DATA_SZ; | ||
} | ||
return { | ||
@@ -212,0 +224,0 @@ rawTx, |
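Taken together, the prehash changes mean an oversized data field is no longer a hard error on new firmware: if the serialized data cannot fit in the base payload plus extraData frames and the connected Lattice reports v0.10.8 or later, the SDK sends keccak256(rlp.encode(rawTx)) in the data slot instead, which causes the device to draw a different (prehash) signing screen. A sketch of the resulting decision order (names mirror the code above):

// Decision order when dataBytes.length > MAX_BASE_DATA_SZ (sketch)
const maxSzAllowed = MAX_BASE_DATA_SZ + (extraDataMaxFrames * extraDataFrameSz);
if (prehashAllowed && totalSz > maxSzAllowed) {
  // Firmware >= v0.10.8: send keccak256(rlp.encode(rawTx)) in place of the data
} else if (EXTRA_DATA_ALLOWED && totalSz <= maxSzAllowed) {
  // Data fits: overflow beyond MAX_BASE_DATA_SZ is chunked into extraData frames
} else {
  // Older firmware or data too large: reject client-side with "Data field too large"
}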
@@ -6,2 +6,3 @@ const Buffer = require('buffer/').Buffer | ||
const HEADER_SZ = 5 + NAME_MAX_SZ; // 4 byte sig + name + 1 byte param count | ||
const CATEGORY_SZ = 32; | ||
const PARAM_SZ = 26; // 20 byte name + 6 byte def | ||
@@ -18,3 +19,3 @@ const MAX_PARAMS = 18; | ||
throw new Error(`You may only add ${MAX_ABI_DEFS} ABI definitions per request.`); | ||
const b = Buffer.alloc(1 + (MAX_ABI_DEFS * (HEADER_SZ + (PARAM_SZ * MAX_PARAMS)))); | ||
const b = Buffer.alloc(1 + (MAX_ABI_DEFS * (HEADER_SZ + CATEGORY_SZ + (PARAM_SZ * MAX_PARAMS)))); | ||
let off = 0; | ||
@@ -25,3 +26,3 @@ b.writeUInt8(defs.length, off); off++; | ||
throw new Error('name, sig, and params must be present for every ABI definition.') | ||
// Header data | ||
// -- Header data -- | ||
const sig = Buffer.from(def.sig, 'hex'); | ||
@@ -33,3 +34,3 @@ if (sig.length !== 4) | ||
if (name.length > NAME_MAX_SZ - 1) // The -1 accounts for the null terminator | ||
throw new Error(`Only function names shorter than ${NAME_MAX_SZ} characters are supported.`); | ||
throw new Error(`Only function names of at most ${NAME_MAX_SZ-1} characters are supported.`); | ||
Buffer.from(def.name).slice(0, NAME_MAX_SZ).copy(b, off); off += NAME_MAX_SZ; | ||
@@ -39,6 +40,13 @@ // Number of parameters | ||
b.writeUInt8(numParams, off); off++; | ||
// Don't overflow the buffer | ||
// -- (optional) Category name -- | ||
if (def.category && typeof def.category === 'string') { | ||
const category = Buffer.from(def.category); | ||
if (category.length > CATEGORY_SZ - 1) // -1 accounts for null terminator | ||
throw new Error(`Category name must be no longer than ${CATEGORY_SZ - 1} characters. Got ${category.length}.`); | ||
category.copy(b, off); | ||
} | ||
off += CATEGORY_SZ; | ||
// -- Param data -- | ||
if (numParams > MAX_PARAMS) | ||
throw new Error(`Currently only ABI definitions with <=${MAX_PARAMS} parameters are supported.`); | ||
// Copy the params if needed | ||
if (numParams > 0) { | ||
@@ -45,0 +53,0 @@ // First copy param names (first 20 bytes) |
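The net effect on the ABI request buffer is a fixed 32-byte category slot per definition, written whether or not a category string is provided. A sizing sketch using the constants visible in this diff (NAME_MAX_SZ and MAX_ABI_DEFS are not shown here, so they stay symbolic):

// Per-definition record size after this change:
//   HEADER_SZ   = 5 + NAME_MAX_SZ      // 4-byte sig + name + 1-byte param count
//   CATEGORY_SZ = 32                   // new: optional null-terminated category name
//   PARAM_SZ * MAX_PARAMS = 26 * 18 = 468 bytes of parameter slots
const recordSz = HEADER_SZ + CATEGORY_SZ + (PARAM_SZ * MAX_PARAMS);
// Full request buffer: 1 count byte + MAX_ABI_DEFS records
const totalSz = 1 + (MAX_ABI_DEFS * recordSz);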
+ Added bech32@^2.0.0
+ Added bech32@2.0.0 (transitive)
- Removed bs58@^4.0.1