@ipld/dag-json - npm package version comparison

Comparing version 5.0.3 to 6.0.0
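The headline change in 6.0.0 is the encoder/decoder internals: the fast-json-stable-stringify / lodash.transform / @ipld/is-circular pipeline is replaced with token-based encoding and decoding built on cborg's JSON support, while the public encode/decode API stays the same. As a quick orientation before the diffs, here is a minimal roundtrip sketch mirroring the tests below (assumes Node.js with @ipld/dag-json 6.0.0 and multiformats installed):

import { encode, decode } from '@ipld/dag-json'
import { bytes, CID } from 'multiformats'

const link = CID.parse('bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu')
const node = { hello: 'world', link, byts: bytes.fromString('asdf'), n: null }

const encoded = encode(node)                // Uint8Array of deterministic JSON text
console.log(bytes.toString(encoded))        // CIDs encode as {"/":"bafy..."}, bytes as {"/":{"bytes":"..."}}

const decoded = decode(encoded)             // CIDs and byte arrays are revived on decode
console.log(decoded.link.equals(link))      // true
console.log(bytes.isBinary(decoded.byts))   // true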


cjs/browser-test/test-basics.js
'use strict';
var assert = require('assert');
var ipldGarbage = require('ipld-garbage');
var chai = require('chai');
var index = require('../index.js');

@@ -9,39 +10,81 @@ var multiformats = require('multiformats');

var assert__default = /*#__PURE__*/_interopDefaultLegacy(assert);
var chai__default = /*#__PURE__*/_interopDefaultLegacy(chai);
'use strict';
const same = assert__default['default'].deepStrictEqual;
const {assert} = chai__default['default'];
const same = assert.deepStrictEqual;
const test = it;
const recode = buffer => index.encode(index.decode(buffer));
const recode = byts => index.encode(index.decode(byts));
const link = multiformats.CID.parse('bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu');
describe('basic dag-json', () => {
test('encode decode', () => {
let buffer = index.encode({ hello: 'world' });
same(JSON.parse(multiformats.bytes.toString(recode(buffer))), { hello: 'world' });
let byts = index.encode({ hello: 'world' });
same(JSON.parse(multiformats.bytes.toString(recode(byts))), { hello: 'world' });
const o = {
link,
buffer: multiformats.bytes.fromString('asdf'),
byts: multiformats.bytes.fromString('asdf'),
n: null,
o: {}
};
buffer = index.encode(o);
same(index.decode(buffer), o);
same(multiformats.bytes.isBinary(index.decode(buffer).buffer), true);
byts = index.encode(o);
same(index.decode(byts), o);
same(multiformats.bytes.isBinary(index.decode(byts).byts), true);
});
test('circular failure', () => {
const o1 = { hello: 'world' };
const o2 = { o1 };
o1.o2 = o2;
try {
index.encode(o2);
assert__default['default'].ok(false);
} catch (e) {
same(e.message, 'Object contains circular references');
}
test('encode decode 2', () => {
const obj = {
plain: 'olde string',
bytes: new TextEncoder().encode('deadbeef')
};
const expected = '{"bytes":{"/":{"bytes":"ZGVhZGJlZWY"}},"plain":"olde string"}';
const byts = index.encode(obj);
same(JSON.parse(multiformats.bytes.toString(recode(byts))), JSON.parse(expected));
same(multiformats.bytes.toString(recode(byts)), expected);
});
test('use reserved space', () => {
const decoded = index.decode(index.encode({ '/': { type: 'stringName' } }));
same(decoded['/'].type, 'stringName');
describe('reserved space', () => {
test('allow alternative types', () => {
for (const obj of [
true,
false,
null,
1,
-1,
1.1,
{ blip: 'bop' },
['foo']
]) {
same(index.decode(index.encode({ '/': obj })), { '/': obj });
same(index.decode(index.encode({ '/': { bytes: obj } })), { '/': { bytes: obj } });
}
});
test('allow specials within reserved space', () => {
same(index.decode(index.encode({ '/': multiformats.bytes.fromString('asdf') })), { '/': multiformats.bytes.fromString('asdf') });
same(new TextDecoder().decode(index.encode({ '/': multiformats.bytes.fromString('asdf') })), '{"/":{"/":{"bytes":"YXNkZg"}}}');
same(index.decode(index.encode({ '/': link })), { '/': link });
same(new TextDecoder().decode(index.encode({ '/': link })), '{"/":{"/":"bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu"}}');
});
test('disallow extraneous tokens', () => {
assert.throws(() => index.decode(index.encode({
'/': link.toString(),
x: 'bip'
})));
assert.throws(() => index.decode(index.encode({
'/': {
bytes: 'mS7ldeA',
x: 'bip'
}
})));
assert.throws(() => index.decode(index.encode({
'/': { bytes: 'mS7ldeA' },
x: 'bip'
})));
assert.throws(() => index.decode(index.encode({
'/': {
bytes: 'mS7ldeA',
x: 'bip'
},
bop: 'bip'
})));
});
});
test('native types', done => {
test('native types', () => {
const flip = obj => index.decode(index.encode(obj));

@@ -57,4 +100,106 @@ same(flip('test'), 'test');

same(flip(['asdf']), ['asdf']);
done();
same(index.decode(new TextEncoder().encode('10.0')), 10);
same(index.decode(new TextEncoder().encode('[-10.0, 1.0, 0.0, 100.0]')), [
-10,
1,
0,
100
]);
});
test('stable map key sorting', () => {
const s1 = multiformats.bytes.toString(index.encode({
a: 1,
b: 2,
bb: 2.2,
c: 3,
c_: 3.3
}));
const s2 = multiformats.bytes.toString(index.encode({
c_: 3.3,
bb: 2.2,
b: 2,
c: 3,
a: 1
}));
same('{"a":1,"b":2,"bb":2.2,"c":3,"c_":3.3}', s1);
same('{"a":1,"b":2,"bb":2.2,"c":3,"c_":3.3}', s2);
});
test('error on circular references', () => {
const circularObj = {};
circularObj.a = circularObj;
assert.throws(() => index.encode(circularObj), /object contains circular references/);
const circularArr = [circularObj];
circularObj.a = circularArr;
assert.throws(() => index.encode(circularArr), /object contains circular references/);
});
test('error on encoding undefined', () => {
assert.throws(() => index.encode(undefined), /\Wundefined\W.*not supported/);
const objWithUndefined = {
a: 'a',
b: undefined
};
assert.throws(() => index.encode(objWithUndefined), /\Wundefined\W.*not supported/);
});
test('error on encoding IEEE 754 specials', () => {
for (const special of [
NaN,
Infinity,
-Infinity
]) {
assert.throws(() => index.encode(special), new RegExp(`\\W${ String(special) }\\W.*not supported`));
const objWithSpecial = {
a: 'a',
b: special
};
assert.throws(() => index.encode(objWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`));
const arrWithSpecial = [
1,
1.1,
-1,
-1.1,
Number.MAX_SAFE_INTEGER,
special,
Number.MIN_SAFE_INTEGER
];
assert.throws(() => index.encode(arrWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`));
}
});
test('fuzz serialize and deserialize with garbage', function () {
const checkObj = obj => {
if (Array.isArray(obj)) {
return obj.every(checkObj);
}
if (obj && typeof obj === 'object') {
for (const [key, value] of Object.entries(obj)) {
if (key === '/') {
if (typeof value === 'string') {
return false;
}
if (value && typeof value === 'object' && value.bytes !== undefined) {
return false;
}
}
if (!checkObj(value)) {
return false;
}
}
}
return true;
};
this.timeout(5000);
for (let ii = 0; ii < 1000; ii++) {
const original = ipldGarbage.garbage(300);
if (!checkObj(original)) {
continue;
}
try {
const encoded = index.encode(original);
const decoded = index.decode(encoded);
same(decoded, original);
} catch (err) {
console.log('Failed on fuzz object:', original);
throw err;
}
}
});
});
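The reworked tests above replace the single 'use reserved space' case with a 'reserved space' suite: ordinary values under a '/' key still roundtrip, but decode now rejects maps where a CID string or bytes object under '/' is accompanied by extra keys. A small sketch of that stricter behaviour, following the 'disallow extraneous tokens' tests (CID string reused from the tests):

import { decode } from '@ipld/dag-json'

const ok = new TextEncoder().encode('{"/":"bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu"}')
console.log(decode(ok))    // decodes to a CID instance

const bad = new TextEncoder().encode('{"/":"bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu","x":"bip"}')
try {
  decode(bad)              // sibling keys next to a "/" CID form are rejected
} catch (err) {
  console.log(err.message) // 'Invalid encoded CID form' (thrown by the tokenizer shown in cjs/index.js below)
}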


cjs/index.js

@@ -5,62 +5,148 @@ 'use strict';

var json = require('fast-json-stable-stringify');
var isCircular = require('@ipld/is-circular');
var transform = require('lodash.transform');
var multiformats = require('multiformats');
var base64 = require('multiformats/bases/base64');
var cborg = require('cborg');
var cborgJson = require('cborg/json');
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
function _interopNamespace(e) {
if (e && e.__esModule) return e;
var n = Object.create(null);
if (e) {
Object.keys(e).forEach(function (k) {
if (k !== 'default') {
var d = Object.getOwnPropertyDescriptor(e, k);
Object.defineProperty(n, k, d.get ? d : {
enumerable: true,
get: function () {
return e[k];
}
});
}
});
}
n['default'] = e;
return Object.freeze(n);
}
var json__default = /*#__PURE__*/_interopDefaultLegacy(json);
var isCircular__default = /*#__PURE__*/_interopDefaultLegacy(isCircular);
var transform__default = /*#__PURE__*/_interopDefaultLegacy(transform);
var cborgJson__namespace = /*#__PURE__*/_interopNamespace(cborgJson);
const transformEncode = obj => transform__default['default'](obj, (result, value, key) => {
const cid = multiformats.CID.asCID(value);
if (cid) {
result[key] = { '/': cid.toString() };
} else if (multiformats.bytes.isBinary(value)) {
value = multiformats.bytes.coerce(value);
result[key] = { '/': { bytes: base64.base64.encode(value) } };
} else if (typeof value === 'object' && value !== null) {
result[key] = transformEncode(value);
} else {
result[key] = value;
function cidEncoder(obj) {
if (obj.asCID !== obj) {
return null;
}
});
const _encode = obj => {
if (typeof obj === 'object' && !multiformats.bytes.isBinary(obj) && !multiformats.CID.asCID(obj) && obj) {
if (isCircular__default['default'](obj, { asCID: true })) {
throw new Error('Object contains circular references');
const cid = multiformats.CID.asCID(obj);
if (!cid) {
return null;
}
const cidString = cid.toString();
return [
new cborg.Token(cborg.Type.map, Infinity, 1),
new cborg.Token(cborg.Type.string, '/', 1),
new cborg.Token(cborg.Type.string, cidString, cidString.length),
new cborg.Token(cborg.Type.break, undefined, 1)
];
}
function bytesEncoder(bytes) {
const bytesString = base64.base64.encode(bytes).slice(1);
return [
new cborg.Token(cborg.Type.map, Infinity, 1),
new cborg.Token(cborg.Type.string, '/', 1),
new cborg.Token(cborg.Type.map, Infinity, 1),
new cborg.Token(cborg.Type.string, 'bytes', 5),
new cborg.Token(cborg.Type.string, bytesString, bytesString.length),
new cborg.Token(cborg.Type.break, undefined, 1),
new cborg.Token(cborg.Type.break, undefined, 1)
];
}
function undefinedEncoder() {
throw new Error('`undefined` is not supported by the IPLD Data Model and cannot be encoded');
}
function numberEncoder(num) {
if (Number.isNaN(num)) {
throw new Error('`NaN` is not supported by the IPLD Data Model and cannot be encoded');
}
if (num === Infinity || num === -Infinity) {
throw new Error('`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded');
}
return null;
}
const encodeOptions = {
typeEncoders: {
Object: cidEncoder,
Uint8Array: bytesEncoder,
Buffer: bytesEncoder,
undefined: undefinedEncoder,
number: numberEncoder
}
};
class DagJsonTokenizer extends cborgJson__namespace.Tokenizer {
constructor(data, options) {
super(data, options);
this.tokenBuffer = [];
}
done() {
return this.tokenBuffer.length === 0 && super.done();
}
_next() {
if (this.tokenBuffer.length > 0) {
return this.tokenBuffer.pop();
}
obj = transformEncode(obj);
return super.next();
}
return multiformats.bytes.fromString(json__default['default'](obj));
};
const transformDecode = obj => transform__default['default'](obj, (result, value, key) => {
if (typeof value === 'object' && value !== null) {
if (value['/']) {
if (typeof value['/'] === 'string') {
result[key] = multiformats.CID.parse(value['/']);
} else if (typeof value['/'] === 'object' && value['/'].bytes) {
result[key] = base64.base64.decode(value['/'].bytes);
} else {
result[key] = transformDecode(value);
next() {
const token = this._next();
if (token.type === cborg.Type.map) {
const keyToken = this._next();
if (keyToken.type === cborg.Type.string && keyToken.value === '/') {
const valueToken = this._next();
if (valueToken.type === cborg.Type.string) {
const breakToken = this._next();
if (breakToken.type !== cborg.Type.break) {
throw new Error('Invalid encoded CID form');
}
this.tokenBuffer.push(valueToken);
return new cborg.Token(cborg.Type.tag, 42, 0);
}
if (valueToken.type === cborg.Type.map) {
const innerKeyToken = this._next();
if (innerKeyToken.type === cborg.Type.string && innerKeyToken.value === 'bytes') {
const innerValueToken = this._next();
if (innerValueToken.type === cborg.Type.string) {
for (let i = 0; i < 2; i++) {
const breakToken = this._next();
if (breakToken.type !== cborg.Type.break) {
throw new Error('Invalid encoded Bytes form');
}
}
const bytes = base64.base64.decode(`m${ innerValueToken.value }`);
return new cborg.Token(cborg.Type.bytes, bytes, innerValueToken.value.length);
}
this.tokenBuffer.push(innerValueToken);
}
this.tokenBuffer.push(innerKeyToken);
}
this.tokenBuffer.push(valueToken);
}
} else {
result[key] = transformDecode(value);
this.tokenBuffer.push(keyToken);
}
} else {
result[key] = value;
return token;
}
});
const _decode = data => {
const obj = JSON.parse(multiformats.bytes.toString(data));
return transformDecode({ value: obj }).value;
}
const decodeOptions = {
allowIndefinite: false,
allowUndefined: false,
allowNaN: false,
allowInfinity: false,
allowBigInt: true,
strict: true,
useMaps: false,
tags: []
};
const {name, code, decode, encode} = {
name: 'dag-json',
code: 297,
encode: _encode,
decode: _decode
decodeOptions.tags[42] = multiformats.CID.parse;
const name = 'dag-json';
const code = 297;
const encode = node => cborgJson__namespace.encode(node, encodeOptions);
const decode = data => {
const options = Object.assign(decodeOptions, { tokenizer: new DagJsonTokenizer(data) });
return cborgJson__namespace.decode(data, options);
};
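The new cjs/index.js above wires per-type encoders into cborg (cidEncoder, bytesEncoder, undefinedEncoder, numberEncoder), so values the IPLD Data Model cannot represent are rejected at encode time, and circular references are now caught during encoding itself rather than by a separate @ipld/is-circular pass. A minimal sketch of those failures, with inputs taken from the tests:

import { encode } from '@ipld/dag-json'

for (const bad of [undefined, NaN, Infinity, -Infinity]) {
  try {
    encode({ a: 'a', b: bad })
  } catch (err) {
    console.log(err.message)  // '`undefined` is not supported...', '`NaN` is not supported...', etc.
  }
}

const circular = { hello: 'world' }
circular.self = circular
try {
  encode(circular)            // throws: message matches /object contains circular references/
} catch (err) {
  console.log(err.message)
}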

'use strict';
import assert from 'assert';
import { garbage } from 'ipld-garbage';
import chai from 'chai';
import {

@@ -11,36 +12,78 @@ encode,

} from 'multiformats';
const {assert} = chai;
const same = assert.deepStrictEqual;
const test = it;
const recode = buffer => encode(decode(buffer));
const recode = byts => encode(decode(byts));
const link = CID.parse('bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu');
describe('basic dag-json', () => {
test('encode decode', () => {
let buffer = encode({ hello: 'world' });
same(JSON.parse(bytes.toString(recode(buffer))), { hello: 'world' });
let byts = encode({ hello: 'world' });
same(JSON.parse(bytes.toString(recode(byts))), { hello: 'world' });
const o = {
link,
buffer: bytes.fromString('asdf'),
byts: bytes.fromString('asdf'),
n: null,
o: {}
};
buffer = encode(o);
same(decode(buffer), o);
same(bytes.isBinary(decode(buffer).buffer), true);
byts = encode(o);
same(decode(byts), o);
same(bytes.isBinary(decode(byts).byts), true);
});
test('circular failure', () => {
const o1 = { hello: 'world' };
const o2 = { o1 };
o1.o2 = o2;
try {
encode(o2);
assert.ok(false);
} catch (e) {
same(e.message, 'Object contains circular references');
}
test('encode decode 2', () => {
const obj = {
plain: 'olde string',
bytes: new TextEncoder().encode('deadbeef')
};
const expected = '{"bytes":{"/":{"bytes":"ZGVhZGJlZWY"}},"plain":"olde string"}';
const byts = encode(obj);
same(JSON.parse(bytes.toString(recode(byts))), JSON.parse(expected));
same(bytes.toString(recode(byts)), expected);
});
test('use reserved space', () => {
const decoded = decode(encode({ '/': { type: 'stringName' } }));
same(decoded['/'].type, 'stringName');
describe('reserved space', () => {
test('allow alternative types', () => {
for (const obj of [
true,
false,
null,
1,
-1,
1.1,
{ blip: 'bop' },
['foo']
]) {
same(decode(encode({ '/': obj })), { '/': obj });
same(decode(encode({ '/': { bytes: obj } })), { '/': { bytes: obj } });
}
});
test('allow specials within reserved space', () => {
same(decode(encode({ '/': bytes.fromString('asdf') })), { '/': bytes.fromString('asdf') });
same(new TextDecoder().decode(encode({ '/': bytes.fromString('asdf') })), '{"/":{"/":{"bytes":"YXNkZg"}}}');
same(decode(encode({ '/': link })), { '/': link });
same(new TextDecoder().decode(encode({ '/': link })), '{"/":{"/":"bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu"}}');
});
test('disallow extraneous tokens', () => {
assert.throws(() => decode(encode({
'/': link.toString(),
x: 'bip'
})));
assert.throws(() => decode(encode({
'/': {
bytes: 'mS7ldeA',
x: 'bip'
}
})));
assert.throws(() => decode(encode({
'/': { bytes: 'mS7ldeA' },
x: 'bip'
})));
assert.throws(() => decode(encode({
'/': {
bytes: 'mS7ldeA',
x: 'bip'
},
bop: 'bip'
})));
});
});
test('native types', done => {
test('native types', () => {
const flip = obj => decode(encode(obj));

@@ -56,4 +99,106 @@ same(flip('test'), 'test');

same(flip(['asdf']), ['asdf']);
done();
same(decode(new TextEncoder().encode('10.0')), 10);
same(decode(new TextEncoder().encode('[-10.0, 1.0, 0.0, 100.0]')), [
-10,
1,
0,
100
]);
});
test('stable map key sorting', () => {
const s1 = bytes.toString(encode({
a: 1,
b: 2,
bb: 2.2,
c: 3,
c_: 3.3
}));
const s2 = bytes.toString(encode({
c_: 3.3,
bb: 2.2,
b: 2,
c: 3,
a: 1
}));
same('{"a":1,"b":2,"bb":2.2,"c":3,"c_":3.3}', s1);
same('{"a":1,"b":2,"bb":2.2,"c":3,"c_":3.3}', s2);
});
test('error on circular references', () => {
const circularObj = {};
circularObj.a = circularObj;
assert.throws(() => encode(circularObj), /object contains circular references/);
const circularArr = [circularObj];
circularObj.a = circularArr;
assert.throws(() => encode(circularArr), /object contains circular references/);
});
test('error on encoding undefined', () => {
assert.throws(() => encode(undefined), /\Wundefined\W.*not supported/);
const objWithUndefined = {
a: 'a',
b: undefined
};
assert.throws(() => encode(objWithUndefined), /\Wundefined\W.*not supported/);
});
test('error on encoding IEEE 754 specials', () => {
for (const special of [
NaN,
Infinity,
-Infinity
]) {
assert.throws(() => encode(special), new RegExp(`\\W${ String(special) }\\W.*not supported`));
const objWithSpecial = {
a: 'a',
b: special
};
assert.throws(() => encode(objWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`));
const arrWithSpecial = [
1,
1.1,
-1,
-1.1,
Number.MAX_SAFE_INTEGER,
special,
Number.MIN_SAFE_INTEGER
];
assert.throws(() => encode(arrWithSpecial), new RegExp(`\\W${ String(special) }\\W.*not supported`));
}
});
test('fuzz serialize and deserialize with garbage', function () {
const checkObj = obj => {
if (Array.isArray(obj)) {
return obj.every(checkObj);
}
if (obj && typeof obj === 'object') {
for (const [key, value] of Object.entries(obj)) {
if (key === '/') {
if (typeof value === 'string') {
return false;
}
if (value && typeof value === 'object' && value.bytes !== undefined) {
return false;
}
}
if (!checkObj(value)) {
return false;
}
}
}
return true;
};
this.timeout(5000);
for (let ii = 0; ii < 1000; ii++) {
const original = garbage(300);
if (!checkObj(original)) {
continue;
}
try {
const encoded = encode(original);
const decoded = decode(encoded);
same(decoded, original);
} catch (err) {
console.log('Failed on fuzz object:', original);
throw err;
}
}
});
});

@@ -1,57 +0,127 @@

import json from 'fast-json-stable-stringify';
import isCircular from '@ipld/is-circular';
import transform from 'lodash.transform';
import { CID } from 'multiformats';
import { base64 } from 'multiformats/bases/base64';
import {
bytes,
CID
} from 'multiformats';
import { base64 } from 'multiformats/bases/base64';
const transformEncode = obj => transform(obj, (result, value, key) => {
const cid = CID.asCID(value);
if (cid) {
result[key] = { '/': cid.toString() };
} else if (bytes.isBinary(value)) {
value = bytes.coerce(value);
result[key] = { '/': { bytes: base64.encode(value) } };
} else if (typeof value === 'object' && value !== null) {
result[key] = transformEncode(value);
} else {
result[key] = value;
Token,
Type
} from 'cborg';
import * as cborgJson from 'cborg/json';
function cidEncoder(obj) {
if (obj.asCID !== obj) {
return null;
}
});
const _encode = obj => {
if (typeof obj === 'object' && !bytes.isBinary(obj) && !CID.asCID(obj) && obj) {
if (isCircular(obj, { asCID: true })) {
throw new Error('Object contains circular references');
const cid = CID.asCID(obj);
if (!cid) {
return null;
}
const cidString = cid.toString();
return [
new Token(Type.map, Infinity, 1),
new Token(Type.string, '/', 1),
new Token(Type.string, cidString, cidString.length),
new Token(Type.break, undefined, 1)
];
}
function bytesEncoder(bytes) {
const bytesString = base64.encode(bytes).slice(1);
return [
new Token(Type.map, Infinity, 1),
new Token(Type.string, '/', 1),
new Token(Type.map, Infinity, 1),
new Token(Type.string, 'bytes', 5),
new Token(Type.string, bytesString, bytesString.length),
new Token(Type.break, undefined, 1),
new Token(Type.break, undefined, 1)
];
}
function undefinedEncoder() {
throw new Error('`undefined` is not supported by the IPLD Data Model and cannot be encoded');
}
function numberEncoder(num) {
if (Number.isNaN(num)) {
throw new Error('`NaN` is not supported by the IPLD Data Model and cannot be encoded');
}
if (num === Infinity || num === -Infinity) {
throw new Error('`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded');
}
return null;
}
const encodeOptions = {
typeEncoders: {
Object: cidEncoder,
Uint8Array: bytesEncoder,
Buffer: bytesEncoder,
undefined: undefinedEncoder,
number: numberEncoder
}
};
class DagJsonTokenizer extends cborgJson.Tokenizer {
constructor(data, options) {
super(data, options);
this.tokenBuffer = [];
}
done() {
return this.tokenBuffer.length === 0 && super.done();
}
_next() {
if (this.tokenBuffer.length > 0) {
return this.tokenBuffer.pop();
}
obj = transformEncode(obj);
return super.next();
}
return bytes.fromString(json(obj));
};
const transformDecode = obj => transform(obj, (result, value, key) => {
if (typeof value === 'object' && value !== null) {
if (value['/']) {
if (typeof value['/'] === 'string') {
result[key] = CID.parse(value['/']);
} else if (typeof value['/'] === 'object' && value['/'].bytes) {
result[key] = base64.decode(value['/'].bytes);
} else {
result[key] = transformDecode(value);
next() {
const token = this._next();
if (token.type === Type.map) {
const keyToken = this._next();
if (keyToken.type === Type.string && keyToken.value === '/') {
const valueToken = this._next();
if (valueToken.type === Type.string) {
const breakToken = this._next();
if (breakToken.type !== Type.break) {
throw new Error('Invalid encoded CID form');
}
this.tokenBuffer.push(valueToken);
return new Token(Type.tag, 42, 0);
}
if (valueToken.type === Type.map) {
const innerKeyToken = this._next();
if (innerKeyToken.type === Type.string && innerKeyToken.value === 'bytes') {
const innerValueToken = this._next();
if (innerValueToken.type === Type.string) {
for (let i = 0; i < 2; i++) {
const breakToken = this._next();
if (breakToken.type !== Type.break) {
throw new Error('Invalid encoded Bytes form');
}
}
const bytes = base64.decode(`m${ innerValueToken.value }`);
return new Token(Type.bytes, bytes, innerValueToken.value.length);
}
this.tokenBuffer.push(innerValueToken);
}
this.tokenBuffer.push(innerKeyToken);
}
this.tokenBuffer.push(valueToken);
}
} else {
result[key] = transformDecode(value);
this.tokenBuffer.push(keyToken);
}
} else {
result[key] = value;
return token;
}
});
const _decode = data => {
const obj = JSON.parse(bytes.toString(data));
return transformDecode({ value: obj }).value;
}
const decodeOptions = {
allowIndefinite: false,
allowUndefined: false,
allowNaN: false,
allowInfinity: false,
allowBigInt: true,
strict: true,
useMaps: false,
tags: []
};
export const {name, code, decode, encode} = {
name: 'dag-json',
code: 297,
encode: _encode,
decode: _decode
decodeOptions.tags[42] = CID.parse;
export const name = 'dag-json';
export const code = 297;
export const encode = node => cborgJson.encode(node, encodeOptions);
export const decode = data => {
const options = Object.assign(decodeOptions, { tokenizer: new DagJsonTokenizer(data) });
return cborgJson.decode(data, options);
};
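The ESM variant of index.js above is the same implementation as cjs/index.js. Two behaviours worth calling out, exercised by the 'stable map key sorting' and 'native types' tests: encoded maps always come out with sorted keys regardless of input order, and JSON numbers written with a zero fractional part (10.0) decode to plain integers. A short sketch:

import { encode, decode } from '@ipld/dag-json'
import { bytes } from 'multiformats'

// key order in the source object does not affect the encoded bytes
const s1 = bytes.toString(encode({ a: 1, b: 2, bb: 2.2, c: 3, c_: 3.3 }))
const s2 = bytes.toString(encode({ c_: 3.3, bb: 2.2, b: 2, c: 3, a: 1 }))
console.log(s1 === s2)   // true: both are '{"a":1,"b":2,"bb":2.2,"c":3,"c_":3.3}'

// whole-valued floats decode to integers
console.log(decode(new TextEncoder().encode('[-10.0, 1.0, 0.0, 100.0]')))   // [ -10, 1, 0, 100 ]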

@@ -1,102 +0,216 @@

import json from 'fast-json-stable-stringify'
// @ts-ignore
import isCircular from '@ipld/is-circular'
import transform from 'lodash.transform'
import { bytes, CID } from 'multiformats'
import { CID } from 'multiformats'
import { base64 } from 'multiformats/bases/base64'
import { Token, Type } from 'cborg'
import * as cborgJson from 'cborg/json'
/**
* @template {number} Code
* @template T
* @typedef {import('multiformats/codecs/interface').BlockCodec<Code, T>} BlockCodec
* @typedef {import('multiformats/codecs/interface').ByteView<T>} ByteView
*/
/**
* @typedef {import('cborg/interface').DecodeTokenizer} DecodeTokenizer
*/
/**
* @template T
* @param {T} obj
* @returns {T}
* cidEncoder will receive all Objects during encode, it needs to filter out
* anything that's not a CID and return `null` for that so it's encoded as
* normal. Encoding a CID means replacing it with a `{"/":"<CidString>}`
* object as per the DAG-JSON spec.
*
* @param {any} obj
* @returns {Token[]|null}
*/
const transformEncode = (obj) => transform(obj,
/**
* @param {any} result
* @param {any} value
* @param {string} key
*/
(result, value, key) => {
const cid = CID.asCID(value)
if (cid) {
result[key] = { '/': cid.toString() }
} else if (bytes.isBinary(value)) {
value = bytes.coerce(value)
result[key] = { '/': { bytes: base64.encode(value) } }
} else if (typeof value === 'object' && value !== null) {
result[key] = transformEncode(value)
} else {
result[key] = value
}
})
function cidEncoder (obj) {
if (obj.asCID !== obj) {
return null // any other kind of object
}
const cid = CID.asCID(obj)
/* c8 ignore next 4 */
// very unlikely case, and it'll probably throw a recursion error in cborg
if (!cid) {
return null
}
const cidString = cid.toString()
return [
new Token(Type.map, Infinity, 1),
new Token(Type.string, '/', 1), // key
new Token(Type.string, cidString, cidString.length), // value
new Token(Type.break, undefined, 1)
]
}
/**
* @template T
* @param {T} obj
* @returns {Uint8Array}
* bytesEncoder will receive all Uint8Arrays (and friends) during encode, it
* needs to replace it with a `{"/":{"bytes":"Base64ByteString"}}` object as
* per the DAG-JSON spec.
*
* @param {Uint8Array} bytes
* @returns {Token[]|null}
*/
const _encode = (obj) => {
if (typeof obj === 'object' && !bytes.isBinary(obj) && !CID.asCID(obj) && obj) {
if (isCircular(obj, { asCID: true })) {
throw new Error('Object contains circular references')
}
obj = transformEncode(obj)
function bytesEncoder (bytes) {
const bytesString = base64.encode(bytes).slice(1) // no mbase prefix
return [
new Token(Type.map, Infinity, 1),
new Token(Type.string, '/', 1), // key
new Token(Type.map, Infinity, 1), // value
new Token(Type.string, 'bytes', 5), // inner key
new Token(Type.string, bytesString, bytesString.length), // inner value
new Token(Type.break, undefined, 1),
new Token(Type.break, undefined, 1)
]
}
/**
* Intercept all `undefined` values from an object walk and reject the entire
* object if we find one.
*
* @returns {null}
*/
function undefinedEncoder () {
throw new Error('`undefined` is not supported by the IPLD Data Model and cannot be encoded')
}
/**
* Intercept all `number` values from an object walk and reject the entire
* object if we find something that doesn't fit the IPLD data model (NaN &
* Infinity).
*
* @param {number} num
* @returns {null}
*/
function numberEncoder (num) {
if (Number.isNaN(num)) {
throw new Error('`NaN` is not supported by the IPLD Data Model and cannot be encoded')
}
return bytes.fromString(json(obj))
if (num === Infinity || num === -Infinity) {
throw new Error('`Infinity` and `-Infinity` is not supported by the IPLD Data Model and cannot be encoded')
}
return null // process with standard number encoder
}
const encodeOptions = {
typeEncoders: {
Object: cidEncoder,
Uint8Array: bytesEncoder, // TODO: all the typedarrays
Buffer: bytesEncoder, // TODO: all the typedarrays
undefined: undefinedEncoder,
number: numberEncoder
}
}
/**
* @param {object} obj
* @returns {any}
* @implements {DecodeTokenizer}
*/
const transformDecode = (obj) => transform(obj,
class DagJsonTokenizer extends cborgJson.Tokenizer {
/**
* @param {any} result
* @param {any} value
* @param {string} key
* @returns {any}
* @param {Uint8Array} data
* @param {object} [options]
*/
(result, value, key) => {
if (typeof value === 'object' && value !== null) {
if (value['/']) {
if (typeof value['/'] === 'string') {
result[key] = CID.parse(value['/'])
} else if (typeof value['/'] === 'object' && value['/'].bytes) {
result[key] = base64.decode(value['/'].bytes)
} else {
result[key] = transformDecode(value)
constructor (data, options) {
super(data, options)
/** @type {Token[]} */
this.tokenBuffer = []
}
/**
* @returns {boolean}
*/
done () {
return this.tokenBuffer.length === 0 && super.done()
}
/**
* @returns {Token}
*/
_next () {
if (this.tokenBuffer.length > 0) {
// @ts-ignore https://github.com/Microsoft/TypeScript/issues/30406
return this.tokenBuffer.pop()
}
return super.next()
}
/**
* Implements rules outlined in https://github.com/ipld/specs/pull/356
*
* @returns {Token}
*/
next () {
const token = this._next()
if (token.type === Type.map) {
const keyToken = this._next()
if (keyToken.type === Type.string && keyToken.value === '/') {
const valueToken = this._next()
if (valueToken.type === Type.string) { // *must* be a CID
const breakToken = this._next() // swallow the end-of-map token
if (breakToken.type !== Type.break) {
throw new Error('Invalid encoded CID form')
}
this.tokenBuffer.push(valueToken) // CID.parse will pick this up after our tag token
return new Token(Type.tag, 42, 0)
}
} else {
result[key] = transformDecode(value)
if (valueToken.type === Type.map) {
const innerKeyToken = this._next()
if (innerKeyToken.type === Type.string && innerKeyToken.value === 'bytes') {
const innerValueToken = this._next()
if (innerValueToken.type === Type.string) { // *must* be Bytes
for (let i = 0; i < 2; i++) {
const breakToken = this._next() // swallow two end-of-map tokens
if (breakToken.type !== Type.break) {
throw new Error('Invalid encoded Bytes form')
}
}
const bytes = base64.decode(`m${innerValueToken.value}`)
return new Token(Type.bytes, bytes, innerValueToken.value.length)
}
this.tokenBuffer.push(innerValueToken) // bail
}
this.tokenBuffer.push(innerKeyToken) // bail
}
this.tokenBuffer.push(valueToken) // bail
}
} else {
result[key] = value
this.tokenBuffer.push(keyToken) // bail
}
})
return token
}
}
const decodeOptions = {
allowIndefinite: false,
allowUndefined: false,
allowNaN: false,
allowInfinity: false,
allowBigInt: true, // this will lead to BigInt for ints outside of
// safe-integer range, which may surprise users
strict: true,
useMaps: false,
/** @type {import('cborg').TagDecoder[]} */
tags: []
}
// we're going to get TAG(42)STRING("bafy...") from the tokenizer so we only need
// to deal with the STRING("bafy...") at this point
decodeOptions.tags[42] = CID.parse
export const name = 'dag-json'
export const code = 0x0129
/**
* @template T
* @param {Uint8Array} data
* @returns {T}
* @param {T} node
* @returns {ByteView<T>}
*/
const _decode = (data) => {
const obj = JSON.parse(bytes.toString(data))
return transformDecode({ value: obj }).value
}
export const encode = (node) => cborgJson.encode(node, encodeOptions)
/**
* @template T
* @type {BlockCodec<0x0129, T>}
* @param {ByteView<T>} data
* @returns {T}
*/
export const { name, code, decode, encode } = {
name: 'dag-json',
code: 0x0129,
encode: _encode,
decode: _decode
export const decode = (data) => {
// the tokenizer is stateful so we need a single instance of it
const options = Object.assign(decodeOptions, { tokenizer: new DagJsonTokenizer(data) })
return cborgJson.decode(data, options)
}
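The annotated source index.js above shows the decode path most clearly: DagJsonTokenizer rewrites a {"/":"<cid>"} token sequence into a CBOR tag-42 token followed by the CID string, and decodeOptions.tags[42] = CID.parse turns that back into a CID; a {"/":{"bytes":"<base64>"}} sequence becomes a bytes token decoded with the multibase 'm' prefix. On the encode side, CIDs and bytes sitting under the reserved "/" key are simply wrapped one level deeper. A sketch of the visible encoded forms, matching the 'allow specials within reserved space' test:

import { encode, decode } from '@ipld/dag-json'
import { bytes, CID } from 'multiformats'

const link = CID.parse('bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu')

// a CID stored under the reserved "/" key is wrapped in its own envelope
console.log(new TextDecoder().decode(encode({ '/': link })))
// {"/":{"/":"bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu"}}

// bytes under "/" get the same treatment
console.log(new TextDecoder().decode(encode({ '/': bytes.fromString('asdf') })))
// {"/":{"/":{"bytes":"YXNkZg"}}}

console.log(decode(encode({ '/': link }))['/'].equals(link))   // true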
package.json

{
"name": "@ipld/dag-json",
"version": "5.0.3",
"version": "6.0.0",
"description": "JSON Directed Acrylic Graph (DAG-JSON) for IPLD",

@@ -40,12 +40,11 @@ "main": "./cjs/index.js",

"dependencies": {
"@ipld/is-circular": "^2.0.0",
"@types/lodash.transform": "^4.6.6",
"fast-json-stable-stringify": "^2.1.0",
"lodash.transform": "^4.6.0",
"cborg": "^1.3.1",
"multiformats": "^8.0.3"
},
"devDependencies": {
"chai": "^4.3.4",
"hundreds": "^0.0.9",
"ipjs": "^5.0.0",
"mocha": "^8.3.2",
"ipld-garbage": "^3.0.3",
"mocha": "^8.4.0",
"polendina": "^1.1.0",

@@ -52,0 +51,0 @@ "standard": "^16.0.3",

'use strict'
/* globals describe, it */
import assert from 'assert'
/* eslint-env mocha */
import { garbage } from 'ipld-garbage'
import chai from 'chai'
import { encode, decode } from '@ipld/dag-json'
import { bytes, CID } from 'multiformats'
const { assert } = chai
const same = assert.deepStrictEqual
const test = it
const recode = buffer => encode(decode(buffer))
const recode = byts => encode(decode(byts))

@@ -16,28 +18,46 @@ const link = CID.parse('bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu')

test('encode decode', () => {
let buffer = encode({ hello: 'world' })
same(JSON.parse(bytes.toString(recode(buffer))), { hello: 'world' })
const o = { link, buffer: bytes.fromString('asdf'), n: null, o: {} }
buffer = encode(o)
same(decode(buffer), o)
same(bytes.isBinary(decode(buffer).buffer), true)
let byts = encode({ hello: 'world' })
same(JSON.parse(bytes.toString(recode(byts))), { hello: 'world' })
const o = { link, byts: bytes.fromString('asdf'), n: null, o: {} }
byts = encode(o)
same(decode(byts), o)
same(bytes.isBinary(decode(byts).byts), true)
})
test('circular failure', () => {
const o1 = { hello: 'world' }
const o2 = { o1 }
o1.o2 = o2
try {
encode(o2)
assert.ok(false)
} catch (e) {
same(e.message, 'Object contains circular references')
}
test('encode decode 2', () => {
// mirrors a go-ipld-prime test, but with sorted keys
const obj = { plain: 'olde string', bytes: new TextEncoder().encode('deadbeef') }
const expected = '{"bytes":{"/":{"bytes":"ZGVhZGJlZWY"}},"plain":"olde string"}'
const byts = encode(obj)
same(JSON.parse(bytes.toString(recode(byts))), JSON.parse(expected))
same(bytes.toString(recode(byts)), expected)
})
test('use reserved space', () => {
const decoded = decode(encode({ '/': { type: 'stringName' } }))
same(decoded['/'].type, 'stringName')
describe('reserved space', () => {
test('allow alternative types', () => {
// wrong types
for (const obj of [true, false, null, 1, -1, 1.1, { blip: 'bop' }, ['foo']]) {
same(decode(encode({ '/': obj })), { '/': obj })
same(decode(encode({ '/': { bytes: obj } })), { '/': { bytes: obj } })
}
})
test('allow specials within reserved space', () => {
// can we put slash-objects within slashes?
same(decode(encode({ '/': bytes.fromString('asdf') })), { '/': bytes.fromString('asdf') })
same(new TextDecoder().decode(encode({ '/': bytes.fromString('asdf') })), '{"/":{"/":{"bytes":"YXNkZg"}}}')
same(decode(encode({ '/': link })), { '/': link })
same(new TextDecoder().decode(encode({ '/': link })), '{"/":{"/":"bafyreifepiu23okq5zuyvyhsoiazv2icw2van3s7ko6d3ixl5jx2yj2yhu"}}')
})
test('disallow extraneous tokens', () => {
// TODO: test encode() doesn't allow this
assert.throws(() => decode(encode({ '/': link.toString(), x: 'bip' })))
assert.throws(() => decode(encode({ '/': { bytes: 'mS7ldeA', x: 'bip' } })))
assert.throws(() => decode(encode({ '/': { bytes: 'mS7ldeA' }, x: 'bip' })))
assert.throws(() => decode(encode({ '/': { bytes: 'mS7ldeA', x: 'bip' }, bop: 'bip' })))
})
})
test('native types', done => {
test('native types', () => {
const flip = obj => decode(encode(obj))

@@ -53,4 +73,78 @@ same(flip('test'), 'test')

same(flip(['asdf']), ['asdf'])
done()
same(decode(new TextEncoder().encode('10.0')), 10)
same(decode(new TextEncoder().encode('[-10.0, 1.0, 0.0, 100.0]')), [-10, 1, 0, 100])
})
test('stable map key sorting', () => {
const s1 = bytes.toString(encode({ a: 1, b: 2, bb: 2.2, c: 3, c_: 3.3 }))
const s2 = bytes.toString(encode({ c_: 3.3, bb: 2.2, b: 2, c: 3, a: 1 }))
same('{"a":1,"b":2,"bb":2.2,"c":3,"c_":3.3}', s1)
same('{"a":1,"b":2,"bb":2.2,"c":3,"c_":3.3}', s2)
})
test('error on circular references', () => {
const circularObj = {}
circularObj.a = circularObj
assert.throws(() => encode(circularObj), /object contains circular references/)
const circularArr = [circularObj]
circularObj.a = circularArr
assert.throws(() => encode(circularArr), /object contains circular references/)
})
test('error on encoding undefined', () => {
assert.throws(() => encode(undefined), /\Wundefined\W.*not supported/)
const objWithUndefined = { a: 'a', b: undefined }
assert.throws(() => encode(objWithUndefined), /\Wundefined\W.*not supported/)
})
test('error on encoding IEEE 754 specials', () => {
for (const special of [NaN, Infinity, -Infinity]) {
assert.throws(() => encode(special), new RegExp(`\\W${String(special)}\\W.*not supported`))
const objWithSpecial = { a: 'a', b: special }
assert.throws(() => encode(objWithSpecial), new RegExp(`\\W${String(special)}\\W.*not supported`))
const arrWithSpecial = [1, 1.1, -1, -1.1, Number.MAX_SAFE_INTEGER, special, Number.MIN_SAFE_INTEGER]
assert.throws(() => encode(arrWithSpecial), new RegExp(`\\W${String(special)}\\W.*not supported`))
}
})
test('fuzz serialize and deserialize with garbage', function () {
// filter out fuzz garbage for objects that are disqualified by DAG-JSON rules
const checkObj = (obj) => {
if (Array.isArray(obj)) {
return obj.every(checkObj)
}
if (obj && typeof obj === 'object') {
for (const [key, value] of Object.entries(obj)) {
if (key === '/') {
if (typeof value === 'string') {
return false
}
if (value && typeof value === 'object' && value.bytes !== undefined) {
return false
}
}
if (!checkObj(value)) {
return false
}
}
}
return true
}
this.timeout(5000)
for (let ii = 0; ii < 1000; ii++) {
const original = garbage(300)
if (!checkObj(original)) {
continue
}
try {
const encoded = encode(original)
const decoded = decode(encoded)
same(decoded, original)
} catch (err) {
console.log('Failed on fuzz object:', original)
throw err
}
}
})
})

index.d.ts

@@ -1,6 +0,7 @@

export const name: string;
export const name: "dag-json";
export const code: 297;
export const decode: (bytes: import("multiformats/codecs/interface").ByteView<T>) => T;
export const encode: (data: T) => import("multiformats/codecs/interface").ByteView<T>;
export type BlockCodec<Code extends number, T_1> = import('multiformats/codecs/interface').BlockCodec<Code, T_1>;
export function encode<T>(node: T): ByteView<T>;
export function decode<T>(data: ByteView<T>): T;
export type ByteView<T> = import('multiformats/codecs/interface').ByteView<T>;
export type DecodeTokenizer = import('cborg/interface').DecodeTokenizer;
//# sourceMappingURL=index.d.ts.map
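The rewritten declarations pin name to the literal "dag-json", code to 297 (0x0129), and give encode/decode proper generic signatures over ByteView<T>. A quick check of those exports from JavaScript:

import * as dagJson from '@ipld/dag-json'

console.log(dagJson.name)                        // 'dag-json'
console.log(dagJson.code === 0x0129)             // true (297)
const data = dagJson.encode({ hello: 'world' })  // ByteView<{ hello: string }>, a Uint8Array
console.log(dagJson.decode(data))                // { hello: 'world' }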

