multiformats
Comparing version 0.0.0 to 0.0.1
 'use strict'
-const { Buffer } = require('buffer')
+const { fromHex } = require('../bytes')
+const bytes = require('../bytes')
 const create = function base16 (alphabet) {
 return {
-encode: input => input.toString('hex'),
+encode: input => bytes.toHex(input),
 decode (input) {
@@ -13,3 +14,3 @@ for (const char of input) {
 }
-return Buffer.from(input, 'hex')
+return fromHex(input)
 }
@@ -16,0 +17,0 @@ }
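The base16 change, and most of the changes below, route binary handling through a small `../bytes` helper module that is not itself part of this comparison. As a rough orientation, here is a minimal sketch of what such helpers could look like, assuming plain Uint8Array throughout; the names (`toHex`, `fromHex`, `coerce`, `equals`, `isBinary`, `fromString`) come from the calls visible in this diff, while the bodies are illustrative guesses:

```js
// Hypothetical sketch of the ../bytes helpers referenced throughout this diff.
// Only the function names are taken from the diff; the implementations are assumptions.
const toHex = d => Array.from(d).map(b => b.toString(16).padStart(2, '0')).join('')

const fromHex = hex => {
  const result = new Uint8Array(hex.length / 2)
  for (let i = 0; i < result.length; i++) {
    result[i] = parseInt(hex.slice(i * 2, i * 2 + 2), 16)
  }
  return result
}

// Normalize Buffer, ArrayBuffer or any TypedArray view to a plain Uint8Array
const coerce = o => {
  if (o instanceof ArrayBuffer) return new Uint8Array(o)
  if (ArrayBuffer.isView(o)) return new Uint8Array(o.buffer, o.byteOffset, o.byteLength)
  throw new Error('Unknown type, must be binary type')
}

const isBinary = o => o instanceof ArrayBuffer || ArrayBuffer.isView(o)
const fromString = str => new TextEncoder().encode(str)
const equals = (a, b) => a.byteLength === b.byteLength && a.every((v, i) => v === b[i])

module.exports = { toHex, fromHex, coerce, isBinary, fromString, equals }
```

The error message thrown by `coerce` in this sketch matches the one asserted in the tests further down.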
 'use strict'
-const { Buffer } = require('buffer')
@@ -24,3 +23,3 @@ function decode (input, alphabet) {
-return Buffer.from(output.buffer)
+return output
 }
@@ -27,0 +26,0 @@
 'use strict'
 const baseX = require('base-x')
+const bytes = require('../bytes')
+const { Buffer } = require('buffer')
+const wrap = obj => ({
+encode: b => obj.encode(Buffer.from(b)),
+decode: s => bytes.coerce(obj.decode(s))
+})
+const btc = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
+const flickr = '123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ'
 module.exports = [
-{ name: 'base58btc', prefix: 'z', ...baseX('123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz') },
-{ name: 'base58flickr', prefix: 'Z', ...baseX('123456789abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ') }
+{ name: 'base58btc', prefix: 'z', ...wrap(baseX(btc)) },
+{ name: 'base58flickr', prefix: 'Z', ...wrap(baseX(flickr)) }
 ]
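`base-x` still works in terms of Node Buffers, so the new `wrap` helper converts at the boundary: bytes are copied into a Buffer before encoding and the decoded Buffer is coerced back to a Uint8Array. A round-trip illustration, written as if inside the module above (not part of the diff):

```js
// Illustrative round trip through the wrapped base58btc encoding
const { encode, decode } = wrap(baseX(btc))

const input = new TextEncoder().encode('hello')
const str = encode(input)    // 'Cn8eVZg'
const output = decode(str)   // Uint8Array with the same bytes as input
```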
 'use strict'
-const { Buffer } = require('buffer')
+const b64 = require('./_base64')
@@ -15,3 +15,3 @@ const create = alphabet => {
 encode (input) {
-let output = input.toString('base64')
+let output = b64.encode(input)
@@ -36,3 +36,3 @@ if (url) {
-return Buffer.from(input, 'base64')
+return b64.decode(input)
 }
@@ -39,0 +39,0 @@ }
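The base64 codec now delegates the raw conversion to a `./_base64` helper, which package.json (further down) swaps for a `_base64-browser.js` implementation in browsers. The helper itself is not shown in this comparison; the sketch below is a hypothetical browser-side version built on `btoa`/`atob`, included only to show the expected shape:

```js
// Hypothetical _base64-browser.js: convert between Uint8Array and a base64 string.
// Fine for illustration; large inputs would need chunking to avoid the argument limit.
module.exports = {
  encode: input => btoa(String.fromCharCode(...input)),
  decode: input => new Uint8Array(Array.from(atob(input), c => c.charCodeAt(0)))
}
```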
cid.js
 'use strict'
-const { Buffer } = require('buffer')
 const bytes = require('./bytes')
 const withIs = require('class-is')
@@ -21,15 +20,15 @@
 const encode = (version, codec, multihash) => {
-return Buffer.concat([varint.encode(version), varint.encode(codec), multihash])
+return Uint8Array.from([
+...varint.encode(version),
+...varint.encode(codec),
+...multihash
+])
 }
 class CID {
 constructor (cid, ...args) {
-Object.defineProperty(this, '_baseCache', {
-value: new Map(),
-writable: false,
-enumerable: false
-})
+readonly(this, '_baseCache', new Map())
 if (_CID.isCID(cid)) {
 readonly(this, 'version', cid.version)
-readonly(this, 'multihash', cid.multihash)
-readonly(this, 'buffer', cid.buffer)
+readonly(this, 'multihash', bytes.coerce(cid.multihash))
+readonly(this, 'buffer', bytes.coerce(cid.buffer))
 if (cid.code) readonly(this, 'code', cid.code)
@@ -65,2 +64,3 @@ else readonly(this, 'code', multiformats.get(cid.codec).code)
 }
+cid = bytes.coerce(cid)
 readonly(this, 'buffer', cid)
@@ -125,3 +125,3 @@ let code
 const { encode } = multibase.get('base58btc')
-return encode(this.buffer, 'base58btc')
+return encode(this.buffer)
 }
@@ -145,3 +145,3 @@ if (!base) base = 'base32'
 this.version === other.version &&
-this.multihash.equals(other.multihash)
+bytes.equals(this.multihash, other.multihash)
 }
@@ -148,0 +148,0 @@ }
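The new CID `encode` builds the byte prefix by spreading the varint-encoded version and codec in front of the multihash instead of calling `Buffer.concat`. Both values are below 128 for common CIDs, so each varint is a single byte. A worked example with the dag-cbor code (0x71) and an illustrative digest, matching the `0171...` hex fixture in the tests below:

```js
// Worked example of the CIDv1 byte layout: varint(version) ++ varint(codec) ++ multihash
const varint = require('varint')

const version = 1
const codec = 0x71                    // dag-cbor, for example
const multihash = new Uint8Array(34)  // illustrative sha2-256 multihash (34 bytes total)
multihash.set([0x12, 0x20])           // hash code 0x12, digest length 0x20

const cid = Uint8Array.from([
  ...varint.encode(version),          // [0x01]
  ...varint.encode(codec),            // [0x71], a single byte since 0x71 < 128
  ...multihash
])
// cid begins 0x01 0x71 0x12 0x20, i.e. the '01711220...' prefix seen in the test fixtures
```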
@@ -1,8 +0,6 @@
-const { Buffer } = require('buffer')
 module.exports = {
-encode: obj => Buffer.from(JSON.stringify(obj)),
-decode: buff => JSON.parse(buff.toString()),
+encode: obj => new TextEncoder().encode(JSON.stringify(obj)),
+decode: buff => JSON.parse(new TextDecoder().decode(buff)),
 name: 'json',
 code: 0x0200
 }
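With `TextEncoder`/`TextDecoder` the JSON codec produces and consumes plain Uint8Arrays in both Node and the browser. A round trip for illustration (the require path is assumed from the package layout):

```js
// Illustrative round trip through the updated json codec (path assumed)
const json = require('./codecs/json')

const buff = json.encode({ hello: 'world' })   // Uint8Array holding '{"hello":"world"}'
const obj = json.decode(buff)                  // { hello: 'world' }
```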
@@ -1,7 +0,4 @@
-const { Buffer } = require('buffer')
+const { coerce } = require('../bytes')
-const raw = buff => {
-if (!Buffer.isBuffer(buff)) throw new Error('Only buffer instances can be used w/ raw codec')
-return buff
-}
+const raw = buff => coerce(buff)
@@ -8,0 +5,0 @@ module.exports = {
@@ -1,3 +0,2 @@
-const { Buffer } = require('buffer')
-const sha = name => async data => Buffer.from(await window.crypto.subtle.digest(name, data))
+const sha = name => async data => new Uint8Array(await window.crypto.subtle.digest(name, data))
@@ -4,0 +3,0 @@ module.exports = [
 const crypto = require('crypto')
-const sha256 = async data => crypto.createHash('sha256').update(data).digest()
-const sha512 = async data => crypto.createHash('sha512').update(data).digest()
+const bufferToUint8Array = (buffer) => {
+return new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.byteLength)
+}
+const sha256 = async data => bufferToUint8Array(crypto.createHash('sha256').update(data).digest())
+const sha512 = async data => bufferToUint8Array(crypto.createHash('sha512').update(data).digest())
 module.exports = [
@@ -7,0 +11,0 @@ {
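`bufferToUint8Array` does not copy: a Node Buffer is a Uint8Array subclass that may sit at an offset inside a pooled allocation, so the wrapper constructs a view over the same memory using `buffer.buffer`, `byteOffset` and `byteLength`. The snippet below only demonstrates that shared-memory behaviour and is not part of the diff:

```js
// Demonstration: the returned Uint8Array shares memory with the source Buffer
const buf = Buffer.from('abc')
const view = new Uint8Array(buf.buffer, buf.byteOffset, buf.byteLength)

buf[0] = 0x7a          // mutate the Buffer...
console.log(view[0])   // 0x7a, because the view sees the same bytes
```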
index.js
@@ -1,4 +0,4 @@
-const { Buffer } = require('buffer')
 const varints = require('varint')
 const createCID = require('./cid')
+const bytes = require('./bytes')
@@ -13,3 +13,3 @@ const cache = new Map()
 if (cache.has(int)) return cache.get(int)
-const buff = Buffer.from(varints.encode(int))
+const buff = Uint8Array.from(varints.encode(int))
 cache.set(int, buff)
@@ -38,6 +38,6 @@ return buff
 const length = varint.encode(digest.length)
-return Buffer.concat([code, length, digest])
+return Uint8Array.from([...code, ...length, ...digest])
 }
 const hash = async (buff, key) => {
-if (!Buffer.isBuffer(buff)) throw new Error('Can only hash Buffer instances')
+buff = bytes.coerce(buff)
 const info = get(key)
@@ -48,2 +48,3 @@ if (!info || !info.encode) throw new Error(`Missing hash implementation for "${key}"`)
 const validate = async (_hash, buff) => {
+_hash = bytes.coerce(_hash)
 const { length, digest, code } = decode(_hash)
@@ -54,3 +55,3 @@ if (digest.length !== length) throw new Error('Incorrect length')
 buff = await encode(buff)
-if (buff.compare(digest)) throw new Error('Buffer does not match hash')
+if (!bytes.equals(buff, digest)) throw new Error('Buffer does not match hash')
 }
@@ -88,4 +89,10 @@ return true
 }
+const has = id => {
+if (id.length === 1) {
+return prefixMap.has(id)
+}
+return nameMap.has(id)
+}
 const encode = (buffer, id) => {
-if (!Buffer.isBuffer(buffer)) throw new Error('Can only multibase encode buffer instances')
+buffer = bytes.coerce(buffer)
 const { prefix, encode } = get(id)
@@ -98,2 +105,3 @@ return prefix + encode(buffer)
 string = string.slice(1)
+if (string.length === 0) return new Uint8Array(0)
 const { decode } = get(prefix)
@@ -103,3 +111,3 @@ return decode(string)
 const encoding = string => get(string[0])
-return { add, get, encode, decode, encoding }
+return { add, has, get, encode, decode, encoding }
 }
@@ -130,2 +138,3 @@
 const parse = buff => {
+buff = bytes.coerce(buff)
 const [code, len] = varint.decode(buff)
@@ -153,14 +162,24 @@ let name, encode, decode
 }
-if (Buffer.isBuffer(obj)) {
-return parse(obj)[0]
+if (bytes.isBinary(obj)) {
+return parse(bytes.coerce(obj))[0]
 }
 throw new Error('Unknown key type')
 }
+const has = id => {
+if (typeof id === 'string') {
+return nameMap.has(id)
+} else if (typeof id === 'number') {
+return intMap.has(id)
+}
+throw new Error('Unknown type')
+}
+// Ideally we can remove the coercion here once
+// all the codecs have been updated to use Uint8Array
 const encode = (value, id) => {
 const { encode } = get(id)
-return encode(value)
+return bytes.coerce(encode(value))
 }
 const decode = (value, id) => {
 const { decode } = get(id)
-return decode(value)
+return decode(bytes.coerce(value))
 }
@@ -178,11 +197,10 @@ const add = obj => {
-const multiformats = { parse, add, get, encode, decode }
-multiformats.varint = varint
-multiformats.multicodec = { add, get, encode, decode }
+const multiformats = { parse, add, get, has, encode, decode, varint, bytes }
+multiformats.multicodec = { add, get, has, encode, decode }
 multiformats.multibase = createMultibase()
 multiformats.multihash = createMultihash(multiformats)
 multiformats.CID = createCID(multiformats)
 return multiformats
 }
+module.exports.bytes = bytes
+module.exports.varint = varint
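The multihash `encode` change mirrors the CID one: the varint hash code, the varint digest length and the digest are spread into one Uint8Array. For sha2-256 the code is 0x12 and the length 0x20 (32 bytes), which is why the hex fixtures in the tests below start with `1220`. A worked example with an illustrative digest:

```js
// Worked example of the multihash layout: varint(code) ++ varint(length) ++ digest
const varint = require('varint')

const code = 0x12                  // sha2-256
const digest = new Uint8Array(32)  // illustrative all-zero digest

const multihash = Uint8Array.from([
  ...varint.encode(code),          // [0x12]
  ...varint.encode(digest.length), // [0x20]
  ...digest
])
// multihash.length === 34 and it starts 0x12 0x20, the '1220...' prefix in the fixtures below
```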
 const CID = require('cids')
+const bytes = require('./bytes')
+const { Buffer } = require('buffer')
 const legacy = (multiformats, name) => {
+const toLegacy = obj => {
+if (CID.isCID(obj)) {
+if (!obj.code) return obj
+const { name } = multiformats.multicodec.get(obj.code)
+return new CID(obj.version, name, Buffer.from(obj.multihash))
+}
+if (bytes.isBinary(obj)) return Buffer.from(obj)
+if (obj && typeof obj === 'object') {
+for (const [key, value] of Object.entries(obj)) {
+obj[key] = toLegacy(value)
+}
+}
+return obj
+}
+const fromLegacy = obj => {
+if (CID.isCID(obj)) return new multiformats.CID(obj)
+if (bytes.isBinary(obj)) return bytes.coerce(obj)
+if (obj && typeof obj === 'object') {
+for (const [key, value] of Object.entries(obj)) {
+obj[key] = fromLegacy(value)
+}
+}
+return obj
+}
 const format = multiformats.multicodec.get(name)
-const serialize = format.encode
-const deserialize = format.decode
+const serialize = o => Buffer.from(format.encode(fromLegacy(o)))
+const deserialize = b => toLegacy(format.decode(bytes.coerce(b)))
 const cid = async (buff, opts) => {
@@ -11,3 +37,3 @@ const defaults = { cidVersion: 1, hashAlg: 'sha2-256' }
 const hash = await multiformats.multihash.hash(buff, hashAlg)
-return new CID(cidVersion, name, hash)
+return new CID(cidVersion, name, Buffer.from(hash))
 }
@@ -14,0 +40,0 @@ const resolve = (buff, path) => {
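`legacy()` now converts in both directions at the boundary with the older Buffer/`cids` based interfaces: `serialize` runs `fromLegacy` over the input and returns a Buffer, while `deserialize` coerces incoming bytes to Uint8Array and maps the decoded value back through `toLegacy`. A usage sketch, assuming a `multiformats` instance from the index.js factory above and the same `util` shape the tests below rely on:

```js
// Usage sketch (inside an async function): wrap the 'json' codec in the legacy interface
const json = legacy(multiformats, 'json')

const buff = json.util.serialize({ hello: 'world' })   // Node Buffer, legacy shape
const obj = json.util.deserialize(buff)                // { hello: 'world' } again
const cid = await json.util.cid(buff)                  // old-style cids CID (CIDv1, sha2-256)
```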
 {
 "name": "multiformats",
-"version": "0.0.0",
-"description": "",
+"version": "0.0.1",
+"description": "Interface for multihash, multicodec, multibase and CID.",
 "main": "index.js",
@@ -17,3 +17,4 @@ "scripts": {
 "browser": {
-"./hashes/sha2.js": "./hashes/sha2-browser.js"
+"./hashes/sha2.js": "./hashes/sha2-browser.js",
+"./bases/_base64.js": "./bases/_base64-browser.js"
 },
@@ -27,8 +28,19 @@ "devDependencies": {
 "dependencies": {
-"cids": "^0.8.0",
+"base-x": "^3.0.8",
 "buffer": "^5.6.0",
+"cids": "^0.8.0",
 "class-is": "^1.1.0",
 "varint": "^5.0.0"
-}
+},
+"directories": {
+"test": "test"
+},
+"repository": {
+"type": "git",
+"url": "git+https://github.com/multiformats/js-multiformats.git"
+},
+"bugs": {
+"url": "https://github.com/multiformats/js-multiformats/issues"
+},
+"homepage": "https://github.com/multiformats/js-multiformats#readme"
 }
@@ -1,5 +0,1 @@
-# WIP
-Most of what is documented below is not yet implemented.
 # multiformats
@@ -36,8 +32,7 @@
 ```js
-const multiformats = require('multiformats')()
-const sha2 = require('@multiformats/sha2')
+// Import basics package with dep-free codecs, hashes, and base encodings
+const multiformats = require('multiformats/basics')
 const dagcbor = require('@ipld/dag-cbor')
-multiformats.multihash.add(sha2)
 multiformats.multicodec.add(dagcbor)
-const Block = require('@ipld/block/bare')(multiformats)
+const Block = require('@ipld/block')(multiformats)
 const block = Block.encoder({ hello: 'world' }, 'dag-cbor')
@@ -44,0 +39,0 @@ const cid = await block.cid()
@@ -8,2 +8,3 @@ /* globals before, describe, it */
 const same = assert.deepStrictEqual
+const { toHex } = require('../bytes')
@@ -102,3 +103,3 @@ const testThrow = async (fn, message) => {
 assert.ok(buffer)
-const str = buffer.toString('hex')
+const str = toHex(buffer)
 same(str, '1220ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad')
@@ -172,3 +173,3 @@ })
 assert.ok(buffer)
-const str = buffer.toString('hex')
+const str = toHex(buffer)
 same(str, '01711220ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad')
@@ -332,3 +333,3 @@ })
 test('new CID from old CID', () => {
-const cid = new CID(new OLDCID(1, 'raw', hash))
+const cid = new CID(new OLDCID(1, 'raw', Buffer.from(hash)))
 same(cid.version, 1)
@@ -335,0 +336,0 @@ same(cid.multihash, hash)
@@ -1,2 +0,2 @@
-/* globals describe, it */
+/* globals before, describe, it */
 'use strict'
@@ -19,17 +19,31 @@ const { Buffer } = require('buffer')
 }
-describe('multicodec', async () => {
-const raw = legacy(multiformats, 'raw')
-const json = legacy(multiformats, 'json')
-const link = await raw.util.cid(Buffer.from('test'))
-multiformats.multicodec.add({
-name: 'custom',
-code: 6787678,
-encode: o => json.util.serialize({ o, l: link.toString() }),
-decode: buff => {
-const obj = json.util.deserialize(buff)
-obj.l = link
-return obj
-}
+describe('multicodec', () => {
+let raw
+let json
+let custom
+let link
+before(async () => {
+raw = legacy(multiformats, 'raw')
+json = legacy(multiformats, 'json')
+link = await raw.util.cid(Buffer.from('test'))
+multiformats.multicodec.add({
+name: 'custom',
+code: 6787678,
+encode: o => {
+if (o.link) {
+assert.ok(o.link.code)
+o.link = true
+}
+return json.util.serialize({ o, l: link.toString() })
+},
+decode: buff => {
+const obj = json.util.deserialize(buff)
+obj.l = link
+if (obj.o.link) obj.link = new multiformats.CID(link)
+return obj
+}
+})
+custom = legacy(multiformats, 'custom')
 })
-const custom = legacy(multiformats, 'custom')
 test('encode/decode raw', () => {
@@ -49,13 +63,13 @@ const buff = raw.util.serialize(Buffer.from('test'))
 same(cid.codec, 'raw')
-same(cid.multihash, await multiformats.multihash.hash(Buffer.from('test'), 'sha2-256'))
+same(cid.multihash, Buffer.from(await multiformats.multihash.hash(Buffer.from('test'), 'sha2-256')))
 })
-const fixture = custom.util.serialize({
-one: {
-two: {
-hello: 'world'
-},
-three: 3
-}
-})
 test('resolve', () => {
+const fixture = custom.util.serialize({
+one: {
+two: {
+hello: 'world'
+},
+three: 3
+}
+})
 let value = { hello: 'world' }
@@ -70,2 +84,10 @@ same(custom.resolver.resolve(fixture, 'o/one/two'), { value })
 test('tree', () => {
+const fixture = custom.util.serialize({
+one: {
+two: {
+hello: 'world'
+},
+three: 3
+}
+})
 const arr = a => Array.from(a)
@@ -76,2 +98,8 @@ const links = ['/o', '/o/one', '/o/one/two', '/o/one/two/hello', '/o/one/three', '/l']
 })
+test('cid API change', () => {
+const fixture = { link }
+const buff = custom.util.serialize(fixture)
+const decoded = custom.util.deserialize(buff)
+same(decoded.link, link)
+})
 })
 /* globals describe, it */
 'use strict'
-const { Buffer } = require('buffer')
+const bytes = require('../bytes')
 const assert = require('assert')
@@ -26,14 +26,14 @@ const same = assert.deepStrictEqual
-describe('basics', () => {
-for (const base of ['base16', 'base32', 'base58btc', 'base64']) {
+for (const base of ['base16', 'base32', 'base58btc', 'base64']) {
+describe(`basics ${base}`, () => {
 test('encode/decode', () => {
-const string = multibase.encode(Buffer.from('test'), base)
+const string = multibase.encode(bytes.fromString('test'), base)
 same(string[0], multibase.get(base).prefix)
 const buffer = multibase.decode(string)
-same(buffer, Buffer.from('test'))
+same(buffer, bytes.fromString('test'))
 })
 test('empty', () => {
-const str = multibase.encode(Buffer.from(''), base)
+const str = multibase.encode(bytes.fromString(''), base)
 same(str, multibase.get(base).prefix)
-same(multibase.decode(str), Buffer.from(''))
+same(multibase.decode(str), bytes.fromString(''))
 })
@@ -45,4 +45,4 @@ test('bad chars', () => {
 })
-}
-})
+})
+}
@@ -56,3 +56,3 @@ test('get fails', () => {
 test('encode string failure', () => {
-const msg = 'Can only multibase encode buffer instances'
+const msg = 'Unknown type, must be binary type'
 testThrow(() => multibase.encode('asdf'), msg)
@@ -64,3 +64,3 @@ })
 })
-const buff = Buffer.from('test')
+const buff = bytes.fromString('test')
 const baseTest = obj => {
@@ -88,2 +88,9 @@ if (Array.isArray(obj)) return obj.forEach(o => baseTest(o))
 })
+test('has', () => {
+const { multibase } = require('../basics')
+same(multibase.has('E'), false)
+same(multibase.has('baseNope'), false)
+same(multibase.has('base32'), true)
+same(multibase.has('c'), true)
+})
 })
 /* globals describe, it */
 'use strict'
-const { Buffer } = require('buffer')
+const bytes = require('../bytes')
 const assert = require('assert')
@@ -9,2 +9,12 @@ const same = assert.deepStrictEqual
+const testThrow = async (fn, message) => {
+try {
+await fn()
+} catch (e) {
+if (e.message !== message) throw e
+return
+}
+throw new Error('Test failed to throw')
+}
 describe('multicodec', () => {
@@ -14,5 +24,5 @@ const { multicodec } = multiformats
 test('encode/decode raw', () => {
-const buff = multicodec.encode(Buffer.from('test'), 'raw')
-same(buff, Buffer.from('test'))
-same(multicodec.decode(buff, 'raw'), Buffer.from('test'))
+const buff = multicodec.encode(bytes.fromString('test'), 'raw')
+same(buff, bytes.fromString('test'))
+same(multicodec.decode(buff, 'raw'), bytes.fromString('test'))
 })
@@ -22,16 +32,16 @@
 const buff = multicodec.encode({ hello: 'world' }, 'json')
-same(buff, Buffer.from(JSON.stringify({ hello: 'world' })))
+same(buff, bytes.fromString(JSON.stringify({ hello: 'world' })))
 same(multicodec.decode(buff, 'json'), { hello: 'world' })
 })
-test('raw cannot encode string', () => {
-assert.throws(() => multicodec.encode('asdf', 'raw'), /^Error: Only buffer instances can be used w\/ raw codec$/)
+test('raw cannot encode string', async () => {
+await testThrow(() => multicodec.encode('asdf', 'raw'), 'Unknown type, must be binary type')
 })
-test('get failure', () => {
-assert.throws(() => multicodec.get(true), /^Error: Unknown key type$/)
-let msg = /^Error: Do not have multiformat entry for "8237440"$/
-assert.throws(() => multicodec.get(8237440), msg)
-msg = /^Error: Do not have multiformat entry for "notfound"$/
-assert.throws(() => multicodec.get('notfound'), msg)
+test('get failure', async () => {
+await testThrow(() => multicodec.get(true), 'Unknown key type')
+let msg = 'Do not have multiformat entry for "8237440"'
+await testThrow(() => multicodec.get(8237440), msg)
+msg = 'Do not have multiformat entry for "notfound"'
+await testThrow(() => multicodec.get('notfound'), msg)
 })
@@ -45,8 +55,15 @@
 assert(args[0] === multiformats, 'called with multiformats as argument')
-return { code: 200, name: 'blip', encode: (a) => a[1], decode: (a) => a[2] }
+return { code: 200, name: 'blip', encode: (a) => a[1], decode: (a) => a }
 })
 same(calls, 1, 'called exactly once')
-same(multicodec.encode(['one', 'two', 'three'], 'blip'), 'two', 'new codec encoder was added')
-same(multicodec.decode(['one', 'two', 'three'], 200), 'three', 'new codec decoder was added')
+const two = bytes.fromString('two')
+const three = bytes.fromString('three')
+same(multicodec.encode(['one', two, three], 'blip'), two, 'new codec encoder was added')
+same(multicodec.decode(three, 200), three, 'new codec decoder was added')
 })
+test('has', async () => {
+same(multicodec.has('json'), true)
+same(multicodec.has(0x0200), true)
+await testThrow(() => multicodec.has({}), 'Unknown type')
+})
 })
 /* globals describe, it */
 'use strict'
+const bytes = require('../bytes')
 const assert = require('assert')
@@ -19,3 +20,3 @@ const same = assert.deepStrictEqual
 }
-return Buffer.from(`${toHex(code)}${toHex(size)}${hex}`, 'hex')
+return bytes.fromHex(`${toHex(code)}${toHex(size)}${hex}`)
 }
@@ -34,3 +35,3 @@
 const crypto = require('crypto')
-const encode = name => data => crypto.createHash(name).update(data).digest()
+const encode = name => data => bytes.coerce(crypto.createHash(name).update(data).digest())
@@ -41,2 +42,3 @@ describe('multihash', () => {
 const { validate } = multihash
+const empty = new Uint8Array(0)
@@ -49,27 +51,27 @@ describe('encode', () => {
 const buf = sample(varint || code, size, hex)
-same(multihash.encode(Buffer.from(hex, 'hex'), code), buf)
-same(multihash.encode(Buffer.from(hex, 'hex'), name), buf)
+same(multihash.encode(hex ? bytes.fromHex(hex) : empty, code), buf)
+same(multihash.encode(hex ? bytes.fromHex(hex) : empty, name), buf)
 }
 })
 test('hash sha2-256', async () => {
-const hash = await multihash.hash(Buffer.from('test'), 'sha2-256')
+const hash = await multihash.hash(bytes.fromString('test'), 'sha2-256')
 const { digest, code } = multihash.decode(hash)
 same(code, multihash.get('sha2-256').code)
-same(encode('sha256')(Buffer.from('test')).compare(digest), 0)
+same(digest, encode('sha256')(bytes.fromString('test')))
 same(await validate(hash), true)
-same(await validate(hash, Buffer.from('test')), true)
+same(await validate(hash, bytes.fromString('test')), true)
 })
 test('hash sha2-512', async () => {
-const hash = await multihash.hash(Buffer.from('test'), 'sha2-512')
+const hash = await multihash.hash(bytes.fromString('test'), 'sha2-512')
 const { digest, code } = multihash.decode(hash)
 same(code, multihash.get('sha2-512').code)
-same(encode('sha512')(Buffer.from('test')).compare(digest), 0)
+same(digest, encode('sha512')(bytes.fromString('test')))
 same(await validate(hash), true)
-same(await validate(hash, Buffer.from('test')), true)
+same(await validate(hash, bytes.fromString('test')), true)
 })
 test('no such hash', async () => {
 let msg = 'Do not have multiformat entry for "notfound"'
-await testThrowAsync(() => multihash.hash(Buffer.from('test'), 'notfound'), msg)
+await testThrowAsync(() => multihash.hash(bytes.fromString('test'), 'notfound'), msg)
 msg = 'Missing hash implementation for "json"'
-await testThrowAsync(() => multihash.hash(Buffer.from('test'), 'json'), msg)
+await testThrowAsync(() => multihash.hash(bytes.fromString('test'), 'json'), msg)
 })
@@ -83,3 +85,3 @@ })
 const buf = sample(varint || code, size, hex)
-const digest = Buffer.from(hex, 'hex')
+const digest = hex ? bytes.fromHex(hex) : empty
 same(multihash.decode(buf), { code, name, digest, length: size })
@@ -89,3 +91,3 @@ }
 test('get from buffer', async () => {
-const hash = await multihash.hash(Buffer.from('test'), 'sha2-256')
+const hash = await multihash.hash(bytes.fromString('test'), 'sha2-256')
 const { code, name } = multihash.get(hash)
@@ -97,9 +99,9 @@ same({ code, name }, { code: 18, name: 'sha2-256' })
 test('invalid hash sha2-256', async () => {
-const hash = await multihash.hash(Buffer.from('test'), 'sha2-256')
+const hash = await multihash.hash(bytes.fromString('test'), 'sha2-256')
 const msg = 'Buffer does not match hash'
-await testThrowAsync(() => validate(hash, Buffer.from('tes2t')), msg)
+await testThrowAsync(() => validate(hash, bytes.fromString('tes2t')), msg)
 })
 test('invalid fixtures', async () => {
 for (const test of invalid) {
-const buff = Buffer.from(test.hex, 'hex')
+const buff = bytes.fromHex(test.hex)
 await testThrowAsync(() => validate(buff), test.message)
@@ -110,3 +112,3 @@ }
 test('throw on hashing non-buffer', async () => {
-await testThrowAsync(() => multihash.hash('asdf'), 'Can only hash Buffer instances')
+await testThrowAsync(() => multihash.hash('asdf'), 'Unknown type, must be binary type')
 })
@@ -113,0 +115,0 @@ if (process.browser) {