Comparing version 3.0.2 to 3.0.3
@@ -11,1 +11,7 @@ # 3.0.0 | ||
- Removed explicit hex methods (use `toString('hex')` on the Buffer) (#104) | ||
# 2.3.1 | ||
__breaking changes__ | ||
9-letter mnemonics can no longer be generated and calling `validateMnemonic` will always return false. This was [fixed in the BIP as a patch](https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki#generating-the-mnemonic), so we had to follow suit. |
{ | ||
"name": "bip39", | ||
"version": "3.0.2", | ||
"version": "3.0.3", | ||
"description": "Bitcoin BIP39: Mnemonic code for generating deterministic keys", | ||
@@ -47,10 +47,10 @@ "main": "src/index.js", | ||
"@types/randombytes": "2.0.0", | ||
"node-fetch": "^1.6.3", | ||
"nyc": "^13.1.0", | ||
"node-fetch": "^2.6.1", | ||
"nyc": "^15.0.0", | ||
"prettier": "1.16.4", | ||
"proxyquire": "^1.7.10", | ||
"tape": "^4.6.2", | ||
"tslint": "5.14.0", | ||
"tape": "^4.13.2", | ||
"tslint": "^6.1.0", | ||
"typescript": "3.3.4000" | ||
} | ||
} |
@@ -33,2 +33,3 @@ # BIP39 | ||
--exclude=./wordlists/korean.json \ | ||
--exclude=./wordlists/czech.json \ | ||
--exclude=./wordlists/chinese_traditional.json \ | ||
@@ -39,9 +40,9 @@ > bip39.browser.js | ||
This will create a bundle that only contains the chinese_simplified wordlist, and it will be the default wordlist for all calls without explicit wordlists. | ||
You can also do this in Webpack using the `IgnorePlugin`. Here is an example of excluding all non-English wordlists | ||
```javascript | ||
... | ||
plugins: [ | ||
new webpack.IgnorePlugin(/^\.\/(?!english)/, /bip39\/src\/wordlists$/), | ||
new webpack.IgnorePlugin(/^\.\/wordlists\/(?!english)/, /bip39\/src$/), | ||
], | ||
@@ -48,0 +49,0 @@ ... |
@@ -11,2 +11,7 @@ "use strict"; | ||
try { | ||
exports._default = _default = require('./wordlists/czech.json'); | ||
wordlists.czech = _default; | ||
} | ||
catch (err) { } | ||
try { | ||
exports._default = _default = require('./wordlists/chinese_simplified.json'); | ||
@@ -13,0 +18,0 @@ wordlists.chinese_simplified = _default; |
100
src/index.js
@@ -13,5 +13,22 @@ "use strict"; | ||
'Please explicitly pass a 2048 word array explicitly.'; | ||
function pbkdf2Promise(password, saltMixin, iterations, keylen, digest) { | ||
return Promise.resolve().then(() => new Promise((resolve, reject) => { | ||
const callback = (err, derivedKey) => { | ||
if (err) { | ||
return reject(err); | ||
} | ||
else { | ||
return resolve(derivedKey); | ||
} | ||
}; | ||
pbkdf2_1.pbkdf2(password, saltMixin, iterations, keylen, digest, callback); | ||
})); | ||
} | ||
function normalize(str) { | ||
return (str || '').normalize('NFKD'); | ||
} | ||
function lpad(str, padString, length) { | ||
while (str.length < length) | ||
while (str.length < length) { | ||
str = padString + str; | ||
} | ||
return str; | ||
@@ -23,3 +40,3 @@ } | ||
function bytesToBinary(bytes) { | ||
return bytes.map(x => lpad(x.toString(2), '0', 8)).join(''); | ||
return bytes.map((x) => lpad(x.toString(2), '0', 8)).join(''); | ||
} | ||
@@ -32,3 +49,3 @@ function deriveChecksumBits(entropyBuffer) { | ||
.digest(); | ||
return bytesToBinary([...hash]).slice(0, CS); | ||
return bytesToBinary(Array.from(hash)).slice(0, CS); | ||
} | ||
@@ -39,4 +56,4 @@ function salt(password) { | ||
function mnemonicToSeedSync(mnemonic, password) { | ||
const mnemonicBuffer = Buffer.from((mnemonic || '').normalize('NFKD'), 'utf8'); | ||
const saltBuffer = Buffer.from(salt((password || '').normalize('NFKD')), 'utf8'); | ||
const mnemonicBuffer = Buffer.from(normalize(mnemonic), 'utf8'); | ||
const saltBuffer = Buffer.from(salt(normalize(password)), 'utf8'); | ||
return pbkdf2_1.pbkdf2Sync(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512'); | ||
@@ -46,16 +63,6 @@ } | ||
function mnemonicToSeed(mnemonic, password) { | ||
return new Promise((resolve, reject) => { | ||
try { | ||
const mnemonicBuffer = Buffer.from((mnemonic || '').normalize('NFKD'), 'utf8'); | ||
const saltBuffer = Buffer.from(salt((password || '').normalize('NFKD')), 'utf8'); | ||
pbkdf2_1.pbkdf2(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512', (err, data) => { | ||
if (err) | ||
return reject(err); | ||
else | ||
return resolve(data); | ||
}); | ||
} | ||
catch (error) { | ||
return reject(error); | ||
} | ||
return Promise.resolve().then(() => { | ||
const mnemonicBuffer = Buffer.from(normalize(mnemonic), 'utf8'); | ||
const saltBuffer = Buffer.from(salt(normalize(password)), 'utf8'); | ||
return pbkdf2Promise(mnemonicBuffer, saltBuffer, 2048, 64, 'sha512'); | ||
}); | ||
@@ -69,11 +76,13 @@ } | ||
} | ||
const words = (mnemonic || '').normalize('NFKD').split(' '); | ||
if (words.length % 3 !== 0) | ||
const words = normalize(mnemonic).split(' '); | ||
if (words.length % 3 !== 0) { | ||
throw new Error(INVALID_MNEMONIC); | ||
} | ||
// convert word indices to 11 bit binary strings | ||
const bits = words | ||
.map(word => { | ||
.map((word) => { | ||
const index = wordlist.indexOf(word); | ||
if (index === -1) | ||
if (index === -1) { | ||
throw new Error(INVALID_MNEMONIC); | ||
} | ||
return lpad(index.toString(2), '0', 11); | ||
@@ -88,12 +97,16 @@ }) | ||
const entropyBytes = entropyBits.match(/(.{1,8})/g).map(binaryToByte); | ||
if (entropyBytes.length < 16) | ||
if (entropyBytes.length < 16) { | ||
throw new Error(INVALID_ENTROPY); | ||
if (entropyBytes.length > 32) | ||
} | ||
if (entropyBytes.length > 32) { | ||
throw new Error(INVALID_ENTROPY); | ||
if (entropyBytes.length % 4 !== 0) | ||
} | ||
if (entropyBytes.length % 4 !== 0) { | ||
throw new Error(INVALID_ENTROPY); | ||
} | ||
const entropy = Buffer.from(entropyBytes); | ||
const newChecksum = deriveChecksumBits(entropy); | ||
if (newChecksum !== checksumBits) | ||
if (newChecksum !== checksumBits) { | ||
throw new Error(INVALID_CHECKSUM); | ||
} | ||
return entropy.toString('hex'); | ||
@@ -103,4 +116,5 @@ } | ||
function entropyToMnemonic(entropy, wordlist) { | ||
if (!Buffer.isBuffer(entropy)) | ||
if (!Buffer.isBuffer(entropy)) { | ||
entropy = Buffer.from(entropy, 'hex'); | ||
} | ||
wordlist = wordlist || DEFAULT_WORDLIST; | ||
@@ -111,13 +125,16 @@ if (!wordlist) { | ||
// 128 <= ENT <= 256 | ||
if (entropy.length < 16) | ||
if (entropy.length < 16) { | ||
throw new TypeError(INVALID_ENTROPY); | ||
if (entropy.length > 32) | ||
} | ||
if (entropy.length > 32) { | ||
throw new TypeError(INVALID_ENTROPY); | ||
if (entropy.length % 4 !== 0) | ||
} | ||
if (entropy.length % 4 !== 0) { | ||
throw new TypeError(INVALID_ENTROPY); | ||
const entropyBits = bytesToBinary([...entropy]); | ||
} | ||
const entropyBits = bytesToBinary(Array.from(entropy)); | ||
const checksumBits = deriveChecksumBits(entropy); | ||
const bits = entropyBits + checksumBits; | ||
const chunks = bits.match(/(.{1,11})/g); | ||
const words = chunks.map(binary => { | ||
const words = chunks.map((binary) => { | ||
const index = binaryToByte(binary); | ||
@@ -133,4 +150,5 @@ return wordlist[index]; | ||
strength = strength || 128; | ||
if (strength % 32 !== 0) | ||
if (strength % 32 !== 0) { | ||
throw new TypeError(INVALID_ENTROPY); | ||
} | ||
rng = rng || randomBytes; | ||
@@ -152,14 +170,18 @@ return entropyToMnemonic(rng(strength / 8), wordlist); | ||
const result = _wordlists_1.wordlists[language]; | ||
if (result) | ||
if (result) { | ||
DEFAULT_WORDLIST = result; | ||
else | ||
} | ||
else { | ||
throw new Error('Could not find wordlist for language "' + language + '"'); | ||
} | ||
} | ||
exports.setDefaultWordlist = setDefaultWordlist; | ||
function getDefaultWordlist() { | ||
if (!DEFAULT_WORDLIST) | ||
if (!DEFAULT_WORDLIST) { | ||
throw new Error('No Default Wordlist set'); | ||
return Object.keys(_wordlists_1.wordlists).filter(lang => { | ||
if (lang === 'JA' || lang === 'EN') | ||
} | ||
return Object.keys(_wordlists_1.wordlists).filter((lang) => { | ||
if (lang === 'JA' || lang === 'EN') { | ||
return false; | ||
} | ||
return _wordlists_1.wordlists[lang].every((word, index) => word === DEFAULT_WORDLIST[index]); | ||
@@ -166,0 +188,0 @@ })[0]; |
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
302076
18
18707
134