@ipld/dag-cbor
Advanced tools
Comparing version 2.0.3 to 3.0.0
@@ -5,4 +5,4 @@ 'use strict'; | ||
var index = require('../index.js'); | ||
var garbage = require('garbage'); | ||
var assert = require('assert'); | ||
var garbage = require('ipld-garbage'); | ||
var chai = require('chai'); | ||
@@ -12,36 +12,8 @@ function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } | ||
var garbage__default = /*#__PURE__*/_interopDefaultLegacy(garbage); | ||
var assert__default = /*#__PURE__*/_interopDefaultLegacy(assert); | ||
var chai__default = /*#__PURE__*/_interopDefaultLegacy(chai); | ||
'use strict'; | ||
const {assert} = chai__default['default']; | ||
const test = it; | ||
const _same = assert__default['default'].deepStrictEqual; | ||
const same = (x, y) => { | ||
if (typeof x !== 'object') | ||
return _same(x, y); | ||
const skip = { | ||
nested: null, | ||
bytes: null, | ||
multihash: null, | ||
digest: null, | ||
link: null | ||
}; | ||
for (const prop of Object.keys(skip)) { | ||
if (x[prop]) | ||
same(x[prop], y[prop]); | ||
} | ||
if (x.links) { | ||
same(x.links.length, y.links.length); | ||
for (let i = 0; i < x.links.length; i++) { | ||
same(x[i], y[i]); | ||
} | ||
} | ||
skip.links = null; | ||
_same({ | ||
...x, | ||
...skip | ||
}, { | ||
...y, | ||
...skip | ||
}); | ||
}; | ||
const same = assert.deepStrictEqual; | ||
describe('dag-cbor', () => { | ||
@@ -59,3 +31,3 @@ const obj = { | ||
}, | ||
bytes: Buffer.from('asdf') | ||
bytes: new TextEncoder().encode('asdf') | ||
}; | ||
@@ -67,3 +39,3 @@ const serializedObj = index.encode(obj); | ||
const deserializedObj = index.decode(serializedObj); | ||
same(obj, deserializedObj); | ||
same(deserializedObj, obj); | ||
}); | ||
@@ -77,25 +49,3 @@ test('.serialize and .deserialize large objects', () => { | ||
same(largeObj, deserialized); | ||
index.configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
index.configureDecoder({ | ||
size: 64 * 1024, | ||
maxSize: 128 * 1024 | ||
}); | ||
const serialized = index.encode(largeObj); | ||
same(multiformats.bytes.isBinary(serialized), true); | ||
assert__default['default'].throws(() => index.decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
index.configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize - omit size', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
index.configureDecoder({ maxSize: 128 * 1024 }); | ||
const serialized = index.encode(largeObj); | ||
same(multiformats.bytes.isBinary(serialized), true); | ||
assert__default['default'].throws(() => index.decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
index.configureDecoder(); | ||
}); | ||
test('.serialize and .deserialize object with slash as property', () => { | ||
@@ -113,10 +63,114 @@ const slashObject = { '/': true }; | ||
}); | ||
test('error catching', () => { | ||
const circlarObj = {}; | ||
circlarObj.a = circlarObj; | ||
assert__default['default'].throws(() => index.encode(circlarObj), /^Error: The object passed has circular references$/); | ||
test('error on circular references', () => { | ||
const circularObj = {}; | ||
circularObj.a = circularObj; | ||
assert.throws(() => index.encode(circularObj), /object contains circular references/); | ||
const circularArr = [circularObj]; | ||
circularObj.a = circularArr; | ||
assert.throws(() => index.encode(circularArr), /object contains circular references/); | ||
}); | ||
test('fuzz serialize and deserialize with garbage', () => { | ||
test('error on encoding undefined', () => { | ||
assert.throws(() => index.encode(undefined), /\Wundefined\W.*not supported/); | ||
const objWithUndefined = { | ||
a: 'a', | ||
b: undefined | ||
}; | ||
assert.throws(() => index.encode(objWithUndefined), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on decoding undefined', () => { | ||
assert.throws(() => index.decode(multiformats.bytes.fromHex('f7')), /\Wundefined\W.*not supported/); | ||
assert.throws(() => index.decode(multiformats.bytes.fromHex('a2616161616162f7')), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on encoding IEEE 754 specials', () => { | ||
for (const special of [ | ||
NaN, | ||
Infinity, | ||
-Infinity | ||
]) { | ||
assert.throws(() => index.encode(special), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const objWithSpecial = { | ||
a: 'a', | ||
b: special | ||
}; | ||
assert.throws(() => index.encode(objWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const arrWithSpecial = [ | ||
1, | ||
1.1, | ||
-1, | ||
-1.1, | ||
Number.MAX_SAFE_INTEGER, | ||
special, | ||
Number.MIN_SAFE_INTEGER | ||
]; | ||
assert.throws(() => index.encode(arrWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
} | ||
}); | ||
test('error on decoding IEEE 754 specials', () => { | ||
const cases = [ | ||
[ | ||
'NaN', | ||
'f97e00' | ||
], | ||
[ | ||
'NaN', | ||
'f97ff8' | ||
], | ||
[ | ||
'NaN', | ||
'fa7ff80000' | ||
], | ||
[ | ||
'NaN', | ||
'fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'a2616161616162fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff80000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'Infinity', | ||
'f97c00' | ||
], | ||
[ | ||
'Infinity', | ||
'fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'a2616161616162fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff00000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'-Infinity', | ||
'f9fc00' | ||
], | ||
[ | ||
'-Infinity', | ||
'fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'a2616161616162fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffbfff00000000000003b001ffffffffffffe' | ||
] | ||
]; | ||
for (const [typ, hex] of cases) { | ||
const byts = multiformats.bytes.fromHex(hex); | ||
assert.throws(() => index.decode(byts), new RegExp(`\\W${ typ.replace(/^-/, '') }\\W.*not supported`)); | ||
} | ||
}); | ||
test('fuzz serialize and deserialize with garbage', function () { | ||
this.timeout(5000); | ||
for (let ii = 0; ii < 1000; ii++) { | ||
const original = { in: garbage__default['default'](100) }; | ||
const original = garbage__default['default'](100); | ||
const encoded = index.encode(original); | ||
@@ -133,3 +187,3 @@ const decoded = index.decode(encoded); | ||
}); | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
const buffer = Buffer.from('some data'); | ||
@@ -143,3 +197,3 @@ const bytes = Uint8Array.from(buffer); | ||
same(Object.keys(s), ['data']); | ||
assert__default['default'](s.data instanceof Uint8Array); | ||
assert(s.data instanceof Uint8Array); | ||
same(s.data.buffer, bytes.buffer); | ||
@@ -151,3 +205,3 @@ }; | ||
test('reject extraneous, but valid CBOR data after initial top-level object', () => { | ||
assert__default['default'].throws(() => { | ||
assert.throws(() => { | ||
index.decode(Buffer.concat([ | ||
@@ -157,4 +211,8 @@ Buffer.from(serializedObj), | ||
])); | ||
}, /^Error: Extraneous CBOR data found beyond initial top-level object/); | ||
}, /too many terminals/); | ||
}); | ||
test('reject bad CID lead-in', () => { | ||
const encoded = multiformats.bytes.fromHex('a1646c696e6bd82a582501017012207252523e6591fb8fe553d67ff55a86f84044b46a3e4176e10c58fa529a4aabd5'); | ||
assert.throws(() => index.decode(encoded), /Invalid CID for CBOR tag 42; expected leading 0x00/); | ||
}); | ||
}); |
145
cjs/index.js
@@ -5,114 +5,69 @@ 'use strict'; | ||
var cbor = require('borc'); | ||
var isCircular = require('@ipld/is-circular'); | ||
var cborg = require('cborg'); | ||
var multiformats = require('multiformats'); | ||
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } | ||
var cbor__default = /*#__PURE__*/_interopDefaultLegacy(cbor); | ||
var isCircular__default = /*#__PURE__*/_interopDefaultLegacy(isCircular); | ||
const {asCID} = multiformats.CID; | ||
const decodeCID = multiformats.CID.decode; | ||
const CID_CBOR_TAG = 42; | ||
const code = 113; | ||
const name = 'dag-cbor'; | ||
function tagCID(cid) { | ||
const tag = multiformats.bytes.fromHex('00'); | ||
const buffer = new Uint8Array(tag.byteLength + cid.bytes.byteLength); | ||
buffer.set(tag); | ||
buffer.set(cid.bytes, tag.byteLength); | ||
const tagged = new cbor__default['default'].Tagged(CID_CBOR_TAG, buffer, null); | ||
return tagged; | ||
function cidEncoder(obj) { | ||
if (obj.asCID !== obj) { | ||
return null; | ||
} | ||
const cid = multiformats.CID.asCID(obj); | ||
if (!cid) { | ||
return null; | ||
} | ||
const bytes = new Uint8Array(cid.bytes.byteLength + 1); | ||
bytes.set(cid.bytes, 1); | ||
return [ | ||
new cborg.Token(cborg.Type.tag, CID_CBOR_TAG), | ||
new cborg.Token(cborg.Type.bytes, bytes) | ||
]; | ||
} | ||
function replaceCIDbyTAG(dagNode, config) { | ||
if (dagNode && typeof dagNode === 'object' && isCircular__default['default'](dagNode, { asCID: true })) { | ||
throw new Error('The object passed has circular references'); | ||
function undefinedEncoder() { | ||
throw new Error('`undefined` is not supported by the IPLD Data Model and cannot be encoded'); | ||
} | ||
function numberEncoder(num) { | ||
if (Number.isNaN(num)) { | ||
throw new Error('`NaN` is not supported by the IPLD Data Model and cannot be encoded'); | ||
} | ||
function transform(obj) { | ||
if (multiformats.bytes.isBinary(obj)) | ||
return multiformats.bytes.coerce(obj); | ||
if (!obj || typeof obj === 'string') { | ||
return obj; | ||
} | ||
if (Array.isArray(obj)) { | ||
return obj.map(transform); | ||
} | ||
const cid = asCID(obj, config); | ||
if (cid) { | ||
return tagCID(cid); | ||
} | ||
const keys = Object.keys(obj); | ||
if (keys.length > 0) { | ||
const out = {}; | ||
keys.forEach(key => { | ||
if (typeof obj[key] === 'object') { | ||
out[key] = transform(obj[key]); | ||
} else { | ||
out[key] = obj[key]; | ||
} | ||
}); | ||
return out; | ||
} else { | ||
return obj; | ||
} | ||
if (num === Infinity || num === -Infinity) { | ||
throw new Error('`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded'); | ||
} | ||
return transform(dagNode); | ||
} | ||
const defaultTags = { | ||
[CID_CBOR_TAG]: val => { | ||
return decodeCID(val.subarray(1), cidConfig); | ||
const encodeOptions = { | ||
float64: true, | ||
typeEncoders: { | ||
Object: cidEncoder, | ||
undefined: undefinedEncoder, | ||
number: numberEncoder | ||
} | ||
}; | ||
const defaultSize = 64 * 1024; | ||
const defaultMaxSize = 64 * 1024 * 1024; | ||
let currentSize = defaultSize; | ||
let maxSize = defaultMaxSize; | ||
let decoder = null; | ||
let cidConfig = null; | ||
const configureDecoder = options => { | ||
const tags = defaultTags; | ||
if (options) { | ||
if (typeof options.size === 'number') { | ||
currentSize = options.size; | ||
} | ||
if (typeof options.maxSize === 'number') { | ||
maxSize = options.maxSize; | ||
} | ||
} else { | ||
currentSize = defaultSize; | ||
maxSize = defaultMaxSize; | ||
function encode(node) { | ||
return cborg.encode(node, encodeOptions); | ||
} | ||
function cidDecoder(bytes) { | ||
if (bytes[0] !== 0) { | ||
throw new Error('Invalid CID for CBOR tag 42; expected leading 0x00'); | ||
} | ||
const decoderOptions = { | ||
tags, | ||
size: currentSize | ||
}; | ||
decoder = new cbor__default['default'].Decoder(decoderOptions); | ||
currentSize = decoderOptions.size; | ||
return multiformats.CID.decode(bytes.subarray(1)); | ||
} | ||
const decodeOptions = { | ||
allowIndefinite: false, | ||
allowUndefined: false, | ||
allowNaN: false, | ||
allowInfinity: false, | ||
allowBigInt: true, | ||
strict: true, | ||
useMaps: false, | ||
tags: [] | ||
}; | ||
configureDecoder(); | ||
const encode = (node, config) => { | ||
const nodeTagged = replaceCIDbyTAG(node, config); | ||
const serialized = cbor__default['default'].encode(nodeTagged); | ||
return multiformats.bytes.coerce(serialized); | ||
}; | ||
const decode = (data, config) => { | ||
cidConfig = config; | ||
if (data.length > currentSize && data.length <= maxSize) { | ||
configureDecoder({ size: data.length }); | ||
} | ||
if (data.length > currentSize) { | ||
throw new Error('Data is too large to deserialize with current decoder'); | ||
} | ||
const all = decoder.decodeAll(data); | ||
if (all.length !== 1) { | ||
throw new Error('Extraneous CBOR data found beyond initial top-level object'); | ||
} | ||
return all[0]; | ||
}; | ||
decodeOptions.tags[CID_CBOR_TAG] = cidDecoder; | ||
function decode(data) { | ||
return cborg.decode(data, decodeOptions); | ||
} | ||
exports.code = code; | ||
exports.configureDecoder = configureDecoder; | ||
exports.decode = decode; | ||
exports.encode = encode; | ||
exports.name = name; |
@@ -5,4 +5,4 @@ 'use strict'; | ||
var index = require('../index.js'); | ||
var garbage = require('garbage'); | ||
var assert = require('assert'); | ||
var garbage = require('ipld-garbage'); | ||
var chai = require('chai'); | ||
@@ -12,36 +12,8 @@ function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } | ||
var garbage__default = /*#__PURE__*/_interopDefaultLegacy(garbage); | ||
var assert__default = /*#__PURE__*/_interopDefaultLegacy(assert); | ||
var chai__default = /*#__PURE__*/_interopDefaultLegacy(chai); | ||
'use strict'; | ||
const {assert} = chai__default['default']; | ||
const test = it; | ||
const _same = assert__default['default'].deepStrictEqual; | ||
const same = (x, y) => { | ||
if (typeof x !== 'object') | ||
return _same(x, y); | ||
const skip = { | ||
nested: null, | ||
bytes: null, | ||
multihash: null, | ||
digest: null, | ||
link: null | ||
}; | ||
for (const prop of Object.keys(skip)) { | ||
if (x[prop]) | ||
same(x[prop], y[prop]); | ||
} | ||
if (x.links) { | ||
same(x.links.length, y.links.length); | ||
for (let i = 0; i < x.links.length; i++) { | ||
same(x[i], y[i]); | ||
} | ||
} | ||
skip.links = null; | ||
_same({ | ||
...x, | ||
...skip | ||
}, { | ||
...y, | ||
...skip | ||
}); | ||
}; | ||
const same = assert.deepStrictEqual; | ||
describe('dag-cbor', () => { | ||
@@ -59,3 +31,3 @@ const obj = { | ||
}, | ||
bytes: Buffer.from('asdf') | ||
bytes: new TextEncoder().encode('asdf') | ||
}; | ||
@@ -67,3 +39,3 @@ const serializedObj = index.encode(obj); | ||
const deserializedObj = index.decode(serializedObj); | ||
same(obj, deserializedObj); | ||
same(deserializedObj, obj); | ||
}); | ||
@@ -77,25 +49,3 @@ test('.serialize and .deserialize large objects', () => { | ||
same(largeObj, deserialized); | ||
index.configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
index.configureDecoder({ | ||
size: 64 * 1024, | ||
maxSize: 128 * 1024 | ||
}); | ||
const serialized = index.encode(largeObj); | ||
same(multiformats.bytes.isBinary(serialized), true); | ||
assert__default['default'].throws(() => index.decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
index.configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize - omit size', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
index.configureDecoder({ maxSize: 128 * 1024 }); | ||
const serialized = index.encode(largeObj); | ||
same(multiformats.bytes.isBinary(serialized), true); | ||
assert__default['default'].throws(() => index.decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
index.configureDecoder(); | ||
}); | ||
test('.serialize and .deserialize object with slash as property', () => { | ||
@@ -113,10 +63,114 @@ const slashObject = { '/': true }; | ||
}); | ||
test('error catching', () => { | ||
const circlarObj = {}; | ||
circlarObj.a = circlarObj; | ||
assert__default['default'].throws(() => index.encode(circlarObj), /^Error: The object passed has circular references$/); | ||
test('error on circular references', () => { | ||
const circularObj = {}; | ||
circularObj.a = circularObj; | ||
assert.throws(() => index.encode(circularObj), /object contains circular references/); | ||
const circularArr = [circularObj]; | ||
circularObj.a = circularArr; | ||
assert.throws(() => index.encode(circularArr), /object contains circular references/); | ||
}); | ||
test('fuzz serialize and deserialize with garbage', () => { | ||
test('error on encoding undefined', () => { | ||
assert.throws(() => index.encode(undefined), /\Wundefined\W.*not supported/); | ||
const objWithUndefined = { | ||
a: 'a', | ||
b: undefined | ||
}; | ||
assert.throws(() => index.encode(objWithUndefined), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on decoding undefined', () => { | ||
assert.throws(() => index.decode(multiformats.bytes.fromHex('f7')), /\Wundefined\W.*not supported/); | ||
assert.throws(() => index.decode(multiformats.bytes.fromHex('a2616161616162f7')), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on encoding IEEE 754 specials', () => { | ||
for (const special of [ | ||
NaN, | ||
Infinity, | ||
-Infinity | ||
]) { | ||
assert.throws(() => index.encode(special), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const objWithSpecial = { | ||
a: 'a', | ||
b: special | ||
}; | ||
assert.throws(() => index.encode(objWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const arrWithSpecial = [ | ||
1, | ||
1.1, | ||
-1, | ||
-1.1, | ||
Number.MAX_SAFE_INTEGER, | ||
special, | ||
Number.MIN_SAFE_INTEGER | ||
]; | ||
assert.throws(() => index.encode(arrWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
} | ||
}); | ||
test('error on decoding IEEE 754 specials', () => { | ||
const cases = [ | ||
[ | ||
'NaN', | ||
'f97e00' | ||
], | ||
[ | ||
'NaN', | ||
'f97ff8' | ||
], | ||
[ | ||
'NaN', | ||
'fa7ff80000' | ||
], | ||
[ | ||
'NaN', | ||
'fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'a2616161616162fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff80000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'Infinity', | ||
'f97c00' | ||
], | ||
[ | ||
'Infinity', | ||
'fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'a2616161616162fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff00000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'-Infinity', | ||
'f9fc00' | ||
], | ||
[ | ||
'-Infinity', | ||
'fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'a2616161616162fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffbfff00000000000003b001ffffffffffffe' | ||
] | ||
]; | ||
for (const [typ, hex] of cases) { | ||
const byts = multiformats.bytes.fromHex(hex); | ||
assert.throws(() => index.decode(byts), new RegExp(`\\W${ typ.replace(/^-/, '') }\\W.*not supported`)); | ||
} | ||
}); | ||
test('fuzz serialize and deserialize with garbage', function () { | ||
this.timeout(5000); | ||
for (let ii = 0; ii < 1000; ii++) { | ||
const original = { in: garbage__default['default'](100) }; | ||
const original = garbage__default['default'](100); | ||
const encoded = index.encode(original); | ||
@@ -133,3 +187,3 @@ const decoded = index.decode(encoded); | ||
}); | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
const buffer = Buffer.from('some data'); | ||
@@ -143,3 +197,3 @@ const bytes = Uint8Array.from(buffer); | ||
same(Object.keys(s), ['data']); | ||
assert__default['default'](s.data instanceof Uint8Array); | ||
assert(s.data instanceof Uint8Array); | ||
same(s.data.buffer, bytes.buffer); | ||
@@ -151,3 +205,3 @@ }; | ||
test('reject extraneous, but valid CBOR data after initial top-level object', () => { | ||
assert__default['default'].throws(() => { | ||
assert.throws(() => { | ||
index.decode(Buffer.concat([ | ||
@@ -157,4 +211,8 @@ Buffer.from(serializedObj), | ||
])); | ||
}, /^Error: Extraneous CBOR data found beyond initial top-level object/); | ||
}, /too many terminals/); | ||
}); | ||
test('reject bad CID lead-in', () => { | ||
const encoded = multiformats.bytes.fromHex('a1646c696e6bd82a582501017012207252523e6591fb8fe553d67ff55a86f84044b46a3e4176e10c58fa529a4aabd5'); | ||
assert.throws(() => index.decode(encoded), /Invalid CID for CBOR tag 42; expected leading 0x00/); | ||
}); | ||
}); |
'use strict'; | ||
import garbage from 'garbage'; | ||
import assert from 'assert'; | ||
import garbage from 'ipld-garbage'; | ||
import chai from 'chai'; | ||
import { | ||
encode, | ||
decode, | ||
configureDecoder | ||
decode | ||
} from '../index.js'; | ||
@@ -13,33 +12,5 @@ import { | ||
} from 'multiformats'; | ||
const {assert} = chai; | ||
const test = it; | ||
const _same = assert.deepStrictEqual; | ||
const same = (x, y) => { | ||
if (typeof x !== 'object') | ||
return _same(x, y); | ||
const skip = { | ||
nested: null, | ||
bytes: null, | ||
multihash: null, | ||
digest: null, | ||
link: null | ||
}; | ||
for (const prop of Object.keys(skip)) { | ||
if (x[prop]) | ||
same(x[prop], y[prop]); | ||
} | ||
if (x.links) { | ||
same(x.links.length, y.links.length); | ||
for (let i = 0; i < x.links.length; i++) { | ||
same(x[i], y[i]); | ||
} | ||
} | ||
skip.links = null; | ||
_same({ | ||
...x, | ||
...skip | ||
}, { | ||
...y, | ||
...skip | ||
}); | ||
}; | ||
const same = assert.deepStrictEqual; | ||
describe('dag-cbor', () => { | ||
@@ -57,3 +28,3 @@ const obj = { | ||
}, | ||
bytes: Buffer.from('asdf') | ||
bytes: new TextEncoder().encode('asdf') | ||
}; | ||
@@ -65,3 +36,3 @@ const serializedObj = encode(obj); | ||
const deserializedObj = decode(serializedObj); | ||
same(obj, deserializedObj); | ||
same(deserializedObj, obj); | ||
}); | ||
@@ -75,25 +46,3 @@ test('.serialize and .deserialize large objects', () => { | ||
same(largeObj, deserialized); | ||
configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
configureDecoder({ | ||
size: 64 * 1024, | ||
maxSize: 128 * 1024 | ||
}); | ||
const serialized = encode(largeObj); | ||
same(bytes.isBinary(serialized), true); | ||
assert.throws(() => decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize - omit size', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
configureDecoder({ maxSize: 128 * 1024 }); | ||
const serialized = encode(largeObj); | ||
same(bytes.isBinary(serialized), true); | ||
assert.throws(() => decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
configureDecoder(); | ||
}); | ||
test('.serialize and .deserialize object with slash as property', () => { | ||
@@ -111,10 +60,114 @@ const slashObject = { '/': true }; | ||
}); | ||
test('error catching', () => { | ||
const circlarObj = {}; | ||
circlarObj.a = circlarObj; | ||
assert.throws(() => encode(circlarObj), /^Error: The object passed has circular references$/); | ||
test('error on circular references', () => { | ||
const circularObj = {}; | ||
circularObj.a = circularObj; | ||
assert.throws(() => encode(circularObj), /object contains circular references/); | ||
const circularArr = [circularObj]; | ||
circularObj.a = circularArr; | ||
assert.throws(() => encode(circularArr), /object contains circular references/); | ||
}); | ||
test('fuzz serialize and deserialize with garbage', () => { | ||
test('error on encoding undefined', () => { | ||
assert.throws(() => encode(undefined), /\Wundefined\W.*not supported/); | ||
const objWithUndefined = { | ||
a: 'a', | ||
b: undefined | ||
}; | ||
assert.throws(() => encode(objWithUndefined), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on decoding undefined', () => { | ||
assert.throws(() => decode(bytes.fromHex('f7')), /\Wundefined\W.*not supported/); | ||
assert.throws(() => decode(bytes.fromHex('a2616161616162f7')), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on encoding IEEE 754 specials', () => { | ||
for (const special of [ | ||
NaN, | ||
Infinity, | ||
-Infinity | ||
]) { | ||
assert.throws(() => encode(special), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const objWithSpecial = { | ||
a: 'a', | ||
b: special | ||
}; | ||
assert.throws(() => encode(objWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const arrWithSpecial = [ | ||
1, | ||
1.1, | ||
-1, | ||
-1.1, | ||
Number.MAX_SAFE_INTEGER, | ||
special, | ||
Number.MIN_SAFE_INTEGER | ||
]; | ||
assert.throws(() => encode(arrWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
} | ||
}); | ||
test('error on decoding IEEE 754 specials', () => { | ||
const cases = [ | ||
[ | ||
'NaN', | ||
'f97e00' | ||
], | ||
[ | ||
'NaN', | ||
'f97ff8' | ||
], | ||
[ | ||
'NaN', | ||
'fa7ff80000' | ||
], | ||
[ | ||
'NaN', | ||
'fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'a2616161616162fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff80000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'Infinity', | ||
'f97c00' | ||
], | ||
[ | ||
'Infinity', | ||
'fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'a2616161616162fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff00000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'-Infinity', | ||
'f9fc00' | ||
], | ||
[ | ||
'-Infinity', | ||
'fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'a2616161616162fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffbfff00000000000003b001ffffffffffffe' | ||
] | ||
]; | ||
for (const [typ, hex] of cases) { | ||
const byts = bytes.fromHex(hex); | ||
assert.throws(() => decode(byts), new RegExp(`\\W${ typ.replace(/^-/, '') }\\W.*not supported`)); | ||
} | ||
}); | ||
test('fuzz serialize and deserialize with garbage', function () { | ||
this.timeout(5000); | ||
for (let ii = 0; ii < 1000; ii++) { | ||
const original = { in: garbage(100) }; | ||
const original = garbage(100); | ||
const encoded = encode(original); | ||
@@ -131,3 +184,3 @@ const decoded = decode(encoded); | ||
}); | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
const buffer = Buffer.from('some data'); | ||
@@ -153,4 +206,8 @@ const bytes = Uint8Array.from(buffer); | ||
])); | ||
}, /^Error: Extraneous CBOR data found beyond initial top-level object/); | ||
}, /too many terminals/); | ||
}); | ||
test('reject bad CID lead-in', () => { | ||
const encoded = bytes.fromHex('a1646c696e6bd82a582501017012207252523e6591fb8fe553d67ff55a86f84044b46a3e4176e10c58fa529a4aabd5'); | ||
assert.throws(() => decode(encoded), /Invalid CID for CBOR tag 42; expected leading 0x00/); | ||
}); | ||
}); |
147
esm/index.js
@@ -1,105 +0,63 @@ | ||
import cbor from 'borc'; | ||
import isCircular from '@ipld/is-circular'; | ||
import { | ||
bytes, | ||
CID | ||
} from 'multiformats'; | ||
const {asCID} = CID; | ||
const decodeCID = CID.decode; | ||
import * as cborg from 'cborg'; | ||
import { CID } from 'multiformats'; | ||
const CID_CBOR_TAG = 42; | ||
const code = 113; | ||
const name = 'dag-cbor'; | ||
function tagCID(cid) { | ||
const tag = bytes.fromHex('00'); | ||
const buffer = new Uint8Array(tag.byteLength + cid.bytes.byteLength); | ||
buffer.set(tag); | ||
buffer.set(cid.bytes, tag.byteLength); | ||
const tagged = new cbor.Tagged(CID_CBOR_TAG, buffer, null); | ||
return tagged; | ||
function cidEncoder(obj) { | ||
if (obj.asCID !== obj) { | ||
return null; | ||
} | ||
const cid = CID.asCID(obj); | ||
if (!cid) { | ||
return null; | ||
} | ||
const bytes = new Uint8Array(cid.bytes.byteLength + 1); | ||
bytes.set(cid.bytes, 1); | ||
return [ | ||
new cborg.Token(cborg.Type.tag, CID_CBOR_TAG), | ||
new cborg.Token(cborg.Type.bytes, bytes) | ||
]; | ||
} | ||
function replaceCIDbyTAG(dagNode, config) { | ||
if (dagNode && typeof dagNode === 'object' && isCircular(dagNode, { asCID: true })) { | ||
throw new Error('The object passed has circular references'); | ||
function undefinedEncoder() { | ||
throw new Error('`undefined` is not supported by the IPLD Data Model and cannot be encoded'); | ||
} | ||
function numberEncoder(num) { | ||
if (Number.isNaN(num)) { | ||
throw new Error('`NaN` is not supported by the IPLD Data Model and cannot be encoded'); | ||
} | ||
function transform(obj) { | ||
if (bytes.isBinary(obj)) | ||
return bytes.coerce(obj); | ||
if (!obj || typeof obj === 'string') { | ||
return obj; | ||
} | ||
if (Array.isArray(obj)) { | ||
return obj.map(transform); | ||
} | ||
const cid = asCID(obj, config); | ||
if (cid) { | ||
return tagCID(cid); | ||
} | ||
const keys = Object.keys(obj); | ||
if (keys.length > 0) { | ||
const out = {}; | ||
keys.forEach(key => { | ||
if (typeof obj[key] === 'object') { | ||
out[key] = transform(obj[key]); | ||
} else { | ||
out[key] = obj[key]; | ||
} | ||
}); | ||
return out; | ||
} else { | ||
return obj; | ||
} | ||
if (num === Infinity || num === -Infinity) { | ||
throw new Error('`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded'); | ||
} | ||
return transform(dagNode); | ||
} | ||
const defaultTags = { | ||
[CID_CBOR_TAG]: val => { | ||
return decodeCID(val.subarray(1), cidConfig); | ||
const encodeOptions = { | ||
float64: true, | ||
typeEncoders: { | ||
Object: cidEncoder, | ||
undefined: undefinedEncoder, | ||
number: numberEncoder | ||
} | ||
}; | ||
const defaultSize = 64 * 1024; | ||
const defaultMaxSize = 64 * 1024 * 1024; | ||
let currentSize = defaultSize; | ||
let maxSize = defaultMaxSize; | ||
let decoder = null; | ||
let cidConfig = null; | ||
const configureDecoder = options => { | ||
const tags = defaultTags; | ||
if (options) { | ||
if (typeof options.size === 'number') { | ||
currentSize = options.size; | ||
} | ||
if (typeof options.maxSize === 'number') { | ||
maxSize = options.maxSize; | ||
} | ||
} else { | ||
currentSize = defaultSize; | ||
maxSize = defaultMaxSize; | ||
function encode(node) { | ||
return cborg.encode(node, encodeOptions); | ||
} | ||
function cidDecoder(bytes) { | ||
if (bytes[0] !== 0) { | ||
throw new Error('Invalid CID for CBOR tag 42; expected leading 0x00'); | ||
} | ||
const decoderOptions = { | ||
tags, | ||
size: currentSize | ||
}; | ||
decoder = new cbor.Decoder(decoderOptions); | ||
currentSize = decoderOptions.size; | ||
return CID.decode(bytes.subarray(1)); | ||
} | ||
const decodeOptions = { | ||
allowIndefinite: false, | ||
allowUndefined: false, | ||
allowNaN: false, | ||
allowInfinity: false, | ||
allowBigInt: true, | ||
strict: true, | ||
useMaps: false, | ||
tags: [] | ||
}; | ||
configureDecoder(); | ||
const encode = (node, config) => { | ||
const nodeTagged = replaceCIDbyTAG(node, config); | ||
const serialized = cbor.encode(nodeTagged); | ||
return bytes.coerce(serialized); | ||
}; | ||
const decode = (data, config) => { | ||
cidConfig = config; | ||
if (data.length > currentSize && data.length <= maxSize) { | ||
configureDecoder({ size: data.length }); | ||
} | ||
if (data.length > currentSize) { | ||
throw new Error('Data is too large to deserialize with current decoder'); | ||
} | ||
const all = decoder.decodeAll(data); | ||
if (all.length !== 1) { | ||
throw new Error('Extraneous CBOR data found beyond initial top-level object'); | ||
} | ||
return all[0]; | ||
}; | ||
decodeOptions.tags[CID_CBOR_TAG] = cidDecoder; | ||
function decode(data) { | ||
return cborg.decode(data, decodeOptions); | ||
} | ||
export { | ||
@@ -109,4 +67,3 @@ name, | ||
encode, | ||
decode, | ||
configureDecoder | ||
decode | ||
}; |
'use strict'; | ||
import garbage from 'garbage'; | ||
import assert from 'assert'; | ||
import garbage from 'ipld-garbage'; | ||
import chai from 'chai'; | ||
import { | ||
encode, | ||
decode, | ||
configureDecoder | ||
decode | ||
} from '../index.js'; | ||
@@ -13,33 +12,5 @@ import { | ||
} from 'multiformats'; | ||
const {assert} = chai; | ||
const test = it; | ||
const _same = assert.deepStrictEqual; | ||
const same = (x, y) => { | ||
if (typeof x !== 'object') | ||
return _same(x, y); | ||
const skip = { | ||
nested: null, | ||
bytes: null, | ||
multihash: null, | ||
digest: null, | ||
link: null | ||
}; | ||
for (const prop of Object.keys(skip)) { | ||
if (x[prop]) | ||
same(x[prop], y[prop]); | ||
} | ||
if (x.links) { | ||
same(x.links.length, y.links.length); | ||
for (let i = 0; i < x.links.length; i++) { | ||
same(x[i], y[i]); | ||
} | ||
} | ||
skip.links = null; | ||
_same({ | ||
...x, | ||
...skip | ||
}, { | ||
...y, | ||
...skip | ||
}); | ||
}; | ||
const same = assert.deepStrictEqual; | ||
describe('dag-cbor', () => { | ||
@@ -57,3 +28,3 @@ const obj = { | ||
}, | ||
bytes: Buffer.from('asdf') | ||
bytes: new TextEncoder().encode('asdf') | ||
}; | ||
@@ -65,3 +36,3 @@ const serializedObj = encode(obj); | ||
const deserializedObj = decode(serializedObj); | ||
same(obj, deserializedObj); | ||
same(deserializedObj, obj); | ||
}); | ||
@@ -75,25 +46,3 @@ test('.serialize and .deserialize large objects', () => { | ||
same(largeObj, deserialized); | ||
configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
configureDecoder({ | ||
size: 64 * 1024, | ||
maxSize: 128 * 1024 | ||
}); | ||
const serialized = encode(largeObj); | ||
same(bytes.isBinary(serialized), true); | ||
assert.throws(() => decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
configureDecoder(); | ||
}); | ||
test('.deserialize fail on large objects beyond maxSize - omit size', () => { | ||
const dataSize = 128 * 1024 + 1; | ||
const largeObj = { someKey: [].slice.call(new Uint8Array(dataSize)) }; | ||
configureDecoder({ maxSize: 128 * 1024 }); | ||
const serialized = encode(largeObj); | ||
same(bytes.isBinary(serialized), true); | ||
assert.throws(() => decode(serialized), /^Error: Data is too large to deserialize with current decoder$/); | ||
configureDecoder(); | ||
}); | ||
test('.serialize and .deserialize object with slash as property', () => { | ||
@@ -111,10 +60,114 @@ const slashObject = { '/': true }; | ||
}); | ||
test('error catching', () => { | ||
const circlarObj = {}; | ||
circlarObj.a = circlarObj; | ||
assert.throws(() => encode(circlarObj), /^Error: The object passed has circular references$/); | ||
test('error on circular references', () => { | ||
const circularObj = {}; | ||
circularObj.a = circularObj; | ||
assert.throws(() => encode(circularObj), /object contains circular references/); | ||
const circularArr = [circularObj]; | ||
circularObj.a = circularArr; | ||
assert.throws(() => encode(circularArr), /object contains circular references/); | ||
}); | ||
test('fuzz serialize and deserialize with garbage', () => { | ||
test('error on encoding undefined', () => { | ||
assert.throws(() => encode(undefined), /\Wundefined\W.*not supported/); | ||
const objWithUndefined = { | ||
a: 'a', | ||
b: undefined | ||
}; | ||
assert.throws(() => encode(objWithUndefined), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on decoding undefined', () => { | ||
assert.throws(() => decode(bytes.fromHex('f7')), /\Wundefined\W.*not supported/); | ||
assert.throws(() => decode(bytes.fromHex('a2616161616162f7')), /\Wundefined\W.*not supported/); | ||
}); | ||
test('error on encoding IEEE 754 specials', () => { | ||
for (const special of [ | ||
NaN, | ||
Infinity, | ||
-Infinity | ||
]) { | ||
assert.throws(() => encode(special), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const objWithSpecial = { | ||
a: 'a', | ||
b: special | ||
}; | ||
assert.throws(() => encode(objWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
const arrWithSpecial = [ | ||
1, | ||
1.1, | ||
-1, | ||
-1.1, | ||
Number.MAX_SAFE_INTEGER, | ||
special, | ||
Number.MIN_SAFE_INTEGER | ||
]; | ||
assert.throws(() => encode(arrWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`)); | ||
} | ||
}); | ||
test('error on decoding IEEE 754 specials', () => { | ||
const cases = [ | ||
[ | ||
'NaN', | ||
'f97e00' | ||
], | ||
[ | ||
'NaN', | ||
'f97ff8' | ||
], | ||
[ | ||
'NaN', | ||
'fa7ff80000' | ||
], | ||
[ | ||
'NaN', | ||
'fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'a2616161616162fb7ff8000000000000' | ||
], | ||
[ | ||
'NaN', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff80000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'Infinity', | ||
'f97c00' | ||
], | ||
[ | ||
'Infinity', | ||
'fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'a2616161616162fb7ff0000000000000' | ||
], | ||
[ | ||
'Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffb7ff00000000000003b001ffffffffffffe' | ||
], | ||
[ | ||
'-Infinity', | ||
'f9fc00' | ||
], | ||
[ | ||
'-Infinity', | ||
'fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'a2616161616162fbfff0000000000000' | ||
], | ||
[ | ||
'-Infinity', | ||
'8701fb3ff199999999999a20fbbff199999999999a1b001ffffffffffffffbfff00000000000003b001ffffffffffffe' | ||
] | ||
]; | ||
for (const [typ, hex] of cases) { | ||
const byts = bytes.fromHex(hex); | ||
assert.throws(() => decode(byts), new RegExp(`\\W${ typ.replace(/^-/, '') }\\W.*not supported`)); | ||
} | ||
}); | ||
test('fuzz serialize and deserialize with garbage', function () { | ||
this.timeout(5000); | ||
for (let ii = 0; ii < 1000; ii++) { | ||
const original = { in: garbage(100) }; | ||
const original = garbage(100); | ||
const encoded = encode(original); | ||
@@ -131,3 +184,3 @@ const decoded = decode(encoded); | ||
}); | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
test('encode and decode consistency with Uint8Array and Buffer fields', () => { | ||
const buffer = Buffer.from('some data'); | ||
@@ -153,4 +206,8 @@ const bytes = Uint8Array.from(buffer); | ||
])); | ||
}, /^Error: Extraneous CBOR data found beyond initial top-level object/); | ||
}, /too many terminals/); | ||
}); | ||
test('reject bad CID lead-in', () => { | ||
const encoded = bytes.fromHex('a1646c696e6bd82a582501017012207252523e6591fb8fe553d67ff55a86f84044b46a3e4176e10c58fa529a4aabd5'); | ||
assert.throws(() => decode(encoded), /Invalid CID for CBOR tag 42; expected leading 0x00/); | ||
}); | ||
}); |
{ | ||
"name": "@ipld/dag-cbor", | ||
"version": "2.0.3", | ||
"version": "3.0.0", | ||
"description": "JS implementation of dag-cbor", | ||
@@ -33,12 +33,12 @@ "scripts": { | ||
"dependencies": { | ||
"@ipld/is-circular": "^2.0.0", | ||
"borc": "^2.1.2", | ||
"multiformats": "^4.0.0" | ||
"cborg": "^1.0.3", | ||
"multiformats": "^4.4.3" | ||
}, | ||
"devDependencies": { | ||
"garbage": "0.0.0", | ||
"hundreds": "0.0.8", | ||
"mocha": "^8.1.3", | ||
"chai": "^4.2.0", | ||
"hundreds": "^0.0.9", | ||
"ipld-garbage": "^2.0.0", | ||
"mocha": "^8.2.1", | ||
"polendina": "^1.1.0", | ||
"standard": "^14.3.4" | ||
"standard": "^16.0.3" | ||
}, | ||
@@ -45,0 +45,0 @@ "directories": { |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
No README
Quality: Package does not have a README. This may indicate a failed publish or a low-quality package.
Found 1 instance in 1 package
33557
2
10
953
0
35
6
+ Added cborg@^1.0.3
+ Added cborg@1.10.2 (transitive)
- Removed @ipld/is-circular@^2.0.0
- Removed borc@^2.1.2
- Removed @ipld/is-circular@2.0.0 (transitive)
- Removed bignumber.js@9.1.2 (transitive)
- Removed borc@2.1.2 (transitive)
- Removed commander@2.20.3 (transitive)
- Removed delimit-stream@0.1.0 (transitive)
- Removed inherits@2.0.4 (transitive)
- Removed iso-url@0.4.7 (transitive)
- Removed json-text-sequence@0.1.1 (transitive)
- Removed readable-stream@3.6.2 (transitive)
- Removed safe-buffer@5.2.1 (transitive)
- Removed string_decoder@1.3.0 (transitive)
- Removed util-deprecate@1.0.2 (transitive)
Updated multiformats@^4.4.3