Comparing version 0.2.0 to 0.3.0
decode.js
@@ -24,5 +24,2 @@ "use strict"
}
class C1Type {}
const C1 = new C1Type()
C1.name = 'MessagePack 0xC1'
@@ -99,8 +96,85 @@ class Decoder {
let token = src[position++]
if (token < 0xa0) {
if (token < 0x80) {
if (token < 0x40)
return token
else {
let structure = currentStructures[token & 0x3f]
let majorType = token >> 5
token = token & 0x1f
if (token > 0x17) {
switch (token) {
case 0x18:
token = src[position++]
break
case 0x19:
token = dataView.getUint16(position)
position += 2
break
case 0x1a:
if (majorType == 7) {
let value = dataView.getFloat32(position)
if (currentDecoder.useFloat32 > 2) {
// this does rounding of numbers that were encoded in 32-bit float to nearest significant decimal digit that could be preserved
let multiplier = mult10[((src[position] & 0x7f) << 1) | (src[position + 1] >> 7)]
position += 4
return ((multiplier * value + (value > 0 ? 0.5 : -0.5)) >> 0) / multiplier
}
position += 4
return value
}
token = dataView.getUint32(position)
position += 4
break
case 0x1b:
if (majorType == 7) {
let value = dataView.getFloat64(position)
position += 8
return value
}
if (currentDecoder.uint64AsNumber)
return src[position++] * 0x100000000000000 + src[position++] * 0x1000000000000 + src[position++] * 0x10000000000 + src[position++] * 0x100000000 +
src[position++] * 0x1000000 + (src[position++] << 16) + (src[position++] << 8) + src[position++]
token = dataView.getBigUint64(position)
position += 8
break
default:
throw new Error('Unknown token ' + token)
}
}
switch (majorType) {
case 0: // positive int
return token
case 1: // negative int
return ~token
case 2: // buffer
return readBin(token)
case 3: // string
if (srcStringEnd >= position) {
return srcString.slice(position - srcStringStart, (position += token) - srcStringStart)
}
if (srcStringEnd == 0 && srcEnd < 120 && token < 16) {
// for small blocks, avoiding the overhead of the extract call is helpful
let string = /*length < 16 ? */shortStringInJS(token)// : longStringInJS(length)
if (string != null)
return string
}
return readFixedString(token)
case 4: // array
let array = new Array(token)
for (let i = 0; i < token; i++) {
array[i] = read()
}
return array
case 5: // map
if (currentDecoder.mapsAsObjects) {
let object = {}
for (let i = 0; i < token; i++) {
object[read()] = read()
}
return object
} else {
let map = new Map()
for (let i = 0; i < token; i++) {
map.set(read(), read())
}
return map
}
case 6: // extension
if (token >= 0x40 && token < 0x100) { // record structures
let structure = currentStructures[token - 0x40]
if (structure) {
@@ -120,3 +194,3 @@ if (!structure.read)
currentStructures.splice.apply(currentStructures, [0, updatedStructures.length].concat(updatedStructures))
structure = currentStructures[token & 0x3f]
structure = currentStructures[token - 0x40]
if (structure) {
@@ -130,193 +204,30 @@ if (!structure.read)
return token
}
} else if (token < 0x90) {
// map
token -= 0x80
if (currentDecoder.mapsAsObjects) {
let object = {}
for (let i = 0; i < token; i++) {
object[read()] = read()
}
return object
} else {
let map = new Map()
for (let i = 0; i < token; i++) {
map.set(read(), read())
}
return map
}
} else {
token -= 0x90
let array = new Array(token)
for (let i = 0; i < token; i++) {
array[i] = read()
}
return array
}
} else if (token < 0xc0) {
// fixstr
let length = token - 0xa0
if (srcStringEnd >= position) {
return srcString.slice(position - srcStringStart, (position += length) - srcStringStart)
}
if (srcStringEnd == 0 && srcEnd < 120 && length < 16) {
// for small blocks, avoiding the overhead of the extract call is helpful
let string = /*length < 16 ? */shortStringInJS(length)// : longStringInJS(length)
if (string != null)
return string
}
return readFixedString(length)
} else {
let value
switch (token) {
case 0xc0: return null
case 0xc1: return C1; // "never-used", return special object to denote that
case 0xc2: return false
case 0xc3: return true
case 0xc4:
// bin 8
return readBin(src[position++])
case 0xc5:
// bin 16
value = dataView.getUint16(position)
position += 2
return readBin(value)
case 0xc6:
// bin 32
value = dataView.getUint32(position)
position += 4
return readBin(value)
case 0xc7:
// ext 8
return readExt(src[position++])
case 0xc8:
// ext 16
value = dataView.getUint16(position)
position += 2
return readExt(value)
case 0xc9:
// ext 32
value = dataView.getUint32(position)
position += 4
return readExt(value)
case 0xca:
value = dataView.getFloat32(position)
if (currentUnpackr.useFloat32 > 2) {
// this does rounding of numbers that were encoded in 32-bit float to nearest significant decimal digit that could be preserved
let multiplier = mult10[((src[position] & 0x7f) << 1) | (src[position + 1] >> 7)]
position += 4
return ((multiplier * value + (value > 0 ? 0.5 : -0.5)) >> 0) / multiplier
}
position += 4
return value
case 0xcb:
value = dataView.getFloat64(position)
position += 8
return value
// uint handlers
case 0xcc:
return src[position++]
case 0xcd:
value = dataView.getUint16(position)
position += 2
return value
case 0xce:
value = dataView.getUint32(position)
position += 4
return value
case 0xcf:
if (currentUnpackr.uint64AsNumber)
return src[position++] * 0x100000000000000 + src[position++] * 0x1000000000000 + src[position++] * 0x10000000000 + src[position++] * 0x100000000 +
src[position++] * 0x1000000 + (src[position++] << 16) + (src[position++] << 8) + src[position++]
value = dataView.getBigUint64(position)
position += 8
return value
// int handlers
case 0xd0:
return dataView.getInt8(position++)
case 0xd1:
value = dataView.getInt16(position)
position += 2
return value
case 0xd2:
value = dataView.getInt32(position)
position += 4
return value
case 0xd3:
value = dataView.getBigInt64(position)
position += 8
return value
case 0xd4:
// fixext 1
value = src[position++]
if (value == 0x72) {
if (token == 6) {
return recordDefinition(src[position++])
} else {
if (currentExtensions[value])
return currentExtensions[value]([src[position++]])
if (currentExtensions[token])
return currentExtensions[token]()
else
throw new Error('Unknown extension ' + value)
throw new Error('Unknown extension ' + token)
}
case 0xd5:
// fixext 2
return readExt(2)
case 0xd6:
// fixext 4
return readExt(4)
case 0xd7:
// fixext 8
return readExt(8)
case 0xd8:
// fixext 16
return readExt(16)
case 0xd9:
// str 8
value = src[position++]
if (srcStringEnd >= position) {
return srcString.slice(position - srcStringStart, (position += value) - srcStringStart)
}
return readString8(value)
case 0xda:
// str 16
value = dataView.getUint16(position)
position += 2
return readString16(value)
case 0xdb:
// str 32
value = dataView.getUint32(position)
position += 4
return readString32(value)
case 0xdc:
// array 16
value = dataView.getUint16(position)
position += 2
return readArray(value)
case 0xdd:
// array 32
value = dataView.getUint32(position)
position += 4
return readArray(value)
case 0xde:
// map 16
value = dataView.getUint16(position)
position += 2
return readMap(value)
case 0xdf:
// map 32
value = dataView.getUint32(position)
position += 4
return readMap(value)
default: // negative int
if (token >= 0xe0)
return token - 0x100
if (token === undefined) {
let error = new Error('Unexpected end of MessagePack data')
error.incomplete = true
throw error
}
throw new Error('Unknown MessagePack token ' + token)
}
}
case 7: // fixed value
switch (token) {
case 0x14: return false
case 0x15: return true
case 0x16: return null
case 0x17: return; // undefined
// case 0x19: // half-precision float
default:
throw new Error('Unknown token ' + token)
}
default: // negative int
if (isNaN(token)) {
let error = new Error('Unexpected end of CBOR data')
error.incomplete = true
throw error
}
throw new Error('Unknown CBOR token ' + token)
}
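The heart of the port is visible in this hunk: where the msgpack decoder switched on whole token bytes, the CBOR decoder splits the initial byte into a 3-bit major type and a 5-bit additional-info field, then reads a wider length when the info field is 0x18-0x1b. A minimal standalone sketch of that header split (`parseHeader` is an illustrative helper, not part of cbor-x):
```
// How the new read() interprets a CBOR initial byte
function parseHeader(byte) {
  let majorType = byte >> 5 // top 3 bits: 0=uint, 1=negint, 2=bytes, 3=text, 4=array, 5=map, 6=tag, 7=simple/float
  let info = byte & 0x1f    // bottom 5 bits: the value itself if < 0x18, otherwise the width of the length that follows
  return { majorType, info }
}
parseHeader(0x82) // { majorType: 4, info: 2 }    - an array of two elements
parseHeader(0x19) // { majorType: 0, info: 0x19 } - a uint16 value follows
```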
@@ -357,3 +268,3 @@ }
if (string == null) {
strings = extractStrings(position - headerLength, srcEnd, src)
strings = extractStrings(position, srcEnd, length, src)
stringPosition = 0
@@ -446,26 +357,2 @@ string = strings[stringPosition++]
}*/
function readArray(length) {
let array = new Array(length)
for (let i = 0; i < length; i++) {
array[i] = read()
}
return array
}
function readMap(length) {
if (currentDecoder.mapsAsObjects) {
let object = {}
for (let i = 0; i < length; i++) {
object[read()] = read()
}
return object
} else {
let map = new Map()
for (let i = 0; i < length; i++) {
map.set(read(), read())
}
return map
}
}
let fromCharCode = String.fromCharCode
@@ -626,3 +513,3 @@ function longStringInJS(length) {
function readBin(length) {
return currentUnpackr.copyBuffers ?
return currentDecoder.copyBuffers ?
// specifically use the copying slice (not the node one)
@@ -647,5 +534,4 @@ Uint8Array.prototype.slice.call(src, position, position += length) :
let glbl = typeof window == 'object' ? window : global
currentExtensions[0] = (data) => {} // notepack defines extension 0 to mean undefined, so use that as the default here
currentExtensions[0x65] = () => {
currentExtensions[8] = () => {
let data = read()
@@ -655,3 +541,3 @@ return (glbl[data[0]] || Error)(data[1])
currentExtensions[0x69] = (data) => {
currentExtensions[9] = (data) => {
// id extension (for structured clones)
@@ -679,3 +565,3 @@ let id = dataView.getUint32(position - 4)
currentExtensions[0x70] = (data) => {
currentExtensions[10] = (data) => {
// pointer extension (for structured clones)
@@ -688,7 +574,7 @@ let id = dataView.getUint32(position - 4)
currentExtensions[0x73] = () => new Set(read())
currentExtensions[11] = () => new Set(read())
const typedArrays = ['Int8','Uint8','Uint8Clamped','Int16','Uint16','Int32','Uint32','Float32','Float64','BigInt64','BigUint64'].map(type => type + 'Array')
currentExtensions[0x74] = (data) => {
currentExtensions[12] = (data) => {
let typeCode = data[0]
@@ -701,3 +587,3 @@ let typedArrayName = typedArrays[typeCode]
}
currentExtensions[0x78] = () => {
currentExtensions[13] = () => {
let data = read()
@@ -707,16 +593,5 @@ return new RegExp(data[0], data[1])
currentExtensions[0xff] = (data) => {
currentExtensions[1] = (data) => {
// 32-bit date extension
if (data.length == 4)
return new Date((data[0] * 0x1000000 + (data[1] << 16) + (data[2] << 8) + data[3]) * 1000)
else if (data.length == 8)
return new Date(
((data[0] << 22) + (data[1] << 14) + (data[2] << 6) + (data[3] >> 2)) / 1000000 +
((data[3] & 0x3) * 0x100000000 + data[4] * 0x1000000 + (data[5] << 16) + (data[6] << 8) + data[7]) * 1000)
else if (data.length == 12)// TODO: Implement support for negative
return new Date(
((data[0] << 24) + (data[1] << 16) + (data[2] << 8) + data[3]) / 1000000 +
(((data[4] & 0x80) ? -0x1000000000000 : 0) + data[6] * 0x10000000000 + data[7] * 0x100000000 + data[8] * 0x1000000 + (data[9] << 16) + (data[10] << 8) + data[11]) * 1000)
else
throw new Error('Invalid timestamp length')
return new Date(read() * 1000)
} // notepack defines extension 0 to mean undefined, so use that as the default here
@@ -768,3 +643,1 @@ // registration of bulk record definition?
exports.typedArrays = typedArrays
exports.C1 = C1
exports.C1Type = C1Type
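One detail that survives the port unchanged is the `useFloat32` rounding step; it appears in both the new CBOR float path and the removed msgpack 0xca case above. A sketch of what it does, with `mult10` standing in for the module's precomputed exponent-to-power-of-ten table (treat that table as an assumption here):
```
// Sketch of the shared float32 decimal-rounding step
function roundFloat32(src, position, value, mult10) {
  // src[position] and src[position + 1] hold the sign bit and the 8-bit
  // IEEE-754 exponent of the big-endian float32 that was just read
  let exponent = ((src[position] & 0x7f) << 1) | (src[position + 1] >> 7)
  let multiplier = mult10[exponent] // ~10^(decimal digits a float32 can preserve at this magnitude)
  // scale up, round half away from zero (>> 0 truncates), then scale back down
  return ((multiplier * value + (value > 0 ? 0.5 : -0.5)) >> 0) / multiplier
}
```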
encode.js
@@ -36,3 +36,3 @@ "use strict"
let encoder = this
let maxSharedStructures = 32
let maxSharedStructures = 64
let isSequential = options && options.sequential
@@ -65,3 +65,3 @@ if (isSequential) {
if (sharedStructures.uninitialized)
packr.structures = sharedStructures = packr.getStructures()
encoder.structures = sharedStructures = encoder.getStructures()
let sharedStructuresLength = sharedStructures.length
@@ -219,3 +219,2 @@ if (sharedStructuresLength > maxSharedStructures && !isSequential)
} else if (type === 'number') {
if (value >>> 0 === value) {// positive integer, 32-bit or less
@@ -239,13 +238,13 @@ // positive uint
if (value >= -0x18) {
target[position++] = 0x38 + value
target[position++] = 0x1f - value
} else if (value >= -0x100) {
target[position++] = 0x38
target[position++] = value + 0x100
target[position++] = ~value
} else if (value >= -0x10000) {
target[position++] = 0x39
targetView.setUint16(position, -value)
targetView.setUint16(position, ~value)
position += 2
} else {
target[position++] = 0x3a
targetView.setUint32(position, -value)
targetView.setUint32(position, ~value)
position += 4
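The negative-integer rewrite above follows directly from the CBOR spec: major type 1 encodes a negative integer n as the unsigned value -1 - n, which for 32-bit integers is exactly `~n`. A small illustration:
```
// Why ~value works: CBOR major type 1 stores -1 - n, and -1 - n === ~n
let n = -500
let encodedArg = ~n       // 499, written after the 0x39 (negint, uint16 follows) header
let decoded = ~encodedArg // -500 again, matching `return ~token` in decode.js
```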
@@ -340,3 +339,3 @@ }
let extension = extensions[i]
let result = extension.pack.call(this, value, (size) => {
let result = extension.encode.call(this, value, (size) => {
position += size
@@ -348,3 +347,3 @@ if (position > safeEnd)
}
}, pack)
}, encode)
if (result) {
@@ -380,10 +379,13 @@ position = writeExtensionData(result, target, position, extension.type)
let length = keys.length
if (length < 0x10) {
target[position++] = 0x80 | length
if (length < 0x18) {
target[position++] = 0xa0 | length
} else if (length < 0x100) {
target[position++] = 0xb8
target[position++] = length
} else if (length < 0x10000) {
target[position++] = 0xde
target[position++] = 0xb9
target[position++] = length >> 8
target[position++] = length & 0xff
} else {
target[position++] = 0xdf
target[position++] = 0xba
targetView.setUint32(position, length)
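The new object-header bytes come from CBOR's map encoding (major type 5: 0xa0 plus the count for small maps, then 0xb8/0xb9/0xba for 1-, 2- and 4-byte counts). The same selection logic as a standalone sketch:
```
// CBOR map header for a given entry count (mirrors the branch above)
function mapHeader(length) {
  if (length < 0x18) return [0xa0 | length]                  // count fits in the initial byte
  if (length < 0x100) return [0xb8, length]                  // 1-byte count
  if (length < 0x10000) return [0xb9, length >> 8, length & 0xff] // 2-byte count
  return [0xba, length >>> 24, (length >> 16) & 0xff, (length >> 8) & 0xff, length & 0xff] // 4-byte count
}
mapHeader(3)   // [0xa3]
mapHeader(300) // [0xb9, 0x01, 0x2c]
```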
@@ -394,8 +396,8 @@ position += 4
for (let i = 0; i < length; i++) {
pack(key = keys[i])
pack(object[key])
encode(key = keys[i])
encode(object[key])
}
} :
(object, safePrototype) => {
target[position++] = 0xde // always use map 16, so we can preallocate and set the length afterwards
target[position++] = 0xb9 // always use map 16, so we can preallocate and set the length afterwards
let objectOffset = position - start
@@ -469,2 +471,3 @@ position += 2
if (recordId) {
target[position++] = 0xd8
target[position++] = recordId
@@ -477,3 +480,3 @@ } else {
}
if (recordId >= 0x80) {// cycle back around
if (recordId >= 0x100) {// cycle back around
structures.nextId = (recordId = maxSharedStructures + 0x40) + 1
@@ -484,7 +487,7 @@ }
if (sharedStructures && sharedStructures.length <= maxSharedStructures) {
target[position++] = recordId
target[position++] = 0xd8 // tag one byte
target[position++] = recordId // tag number
hasSharedUpdate = true
} else {
target[position++] = 0xd4 // fixext 1
target[position++] = 0x72 // "r" record definition extension type
target[position++] = 0xc6 // tag 6
target[position++] = recordId
@@ -494,3 +497,3 @@ if (hasNewTransition)
// record the removal of the id, we can maintain our shared structure
if (recordIdsToRemove.length >= 0x40 - maxSharedStructures)
if (recordIdsToRemove.length >= 0xc0 - maxSharedStructures)
recordIdsToRemove.shift()[RECORD_SYMBOL] = 0 // we are cycling back through, and have to remove old ones
@@ -519,5 +522,2 @@ recordIdsToRemove.push(transition)
}
encode(value) {
return this.pack(value)
}
resetMemory() {
@@ -541,3 +541,3 @@ // this means we are finished using our local buffer and we can write over it safely
extensions = [{
pack(date, allocateForWrite) {
encode(date, allocateForWrite) {
let seconds = date.getTime() / 1000
@@ -547,55 +547,40 @@ if ((this.useTimestamp32 || date.getMilliseconds() === 0) && seconds >= 0 && seconds < 0x100000000) {
let { target, targetView, position} = allocateForWrite(6)
target[position++] = 0xd6
target[position++] = 0xff
target[position++] = 0xc1
target[position++] = 0x1a
targetView.setUint32(position, seconds)
} else if (seconds > 0 && seconds < 0x400000000) {
// Timestamp 64
} else {
// Timestamp float64
let { target, targetView, position} = allocateForWrite(10)
target[position++] = 0xd7
target[position++] = 0xff
targetView.setUint32(position, date.getMilliseconds() * 4000000 + ((seconds / 1000 / 0x100000000) >> 0))
targetView.setUint32(position + 4, seconds)
} else {
// Timestamp 96
let { target, targetView, position} = allocateForWrite(15)
target[position++] = 0xc7
target[position++] = 12
target[position++] = 0xff
targetView.setUint32(position, date.getMilliseconds() * 1000000)
targetView.setBigInt64(position + 4, BigInt(Math.floor(seconds)))
target[position++] = 0xc1
target[position++] = 0xfb
targetView.setFloat64(position, seconds)
}
}
}, {
pack(set, allocateForWrite, pack) {
encode(set, allocateForWrite, encode) {
let array = Array.from(set)
if (this.structuredClone) {
let { target, position} = allocateForWrite(3)
target[position++] = 0xd4
target[position++] = 0x73 // 's' for Set
target[position++] = 0
let { target, position} = allocateForWrite(1)
target[position++] = 0xd1 // 's' for Set
}
pack(array)
encode(array)
}
}, {
pack(error, allocateForWrite, pack) {
encode(error, allocateForWrite, encode) {
if (this.structuredClone) {
let { target, position} = allocateForWrite(3)
target[position++] = 0xd4
target[position++] = 0x65 // 'e' for error
target[position++] = 0
let { target, position} = allocateForWrite(1)
target[position++] = 0xce // 'e' for error
}
pack([ error.name, error.message ])
encode([ error.name, error.message ])
}
}, {
pack(regex, allocateForWrite, pack) {
encode(regex, allocateForWrite, encode) {
if (this.structuredClone) {
let { target, position} = allocateForWrite(3)
target[position++] = 0xd4
target[position++] = 0x78 // 'x' for regeXp
target[position++] = 0
let { target, position} = allocateForWrite(1)
target[position++] = 0xd3
}
pack([ regex.source, regex.flags ])
encode([ regex.source, regex.flags ])
}
}, {
pack(arrayBuffer, allocateForWrite) {
encode(arrayBuffer, allocateForWrite) {
if (this.structuredClone)
@@ -607,3 +592,3 @@ writeExtBuffer(arrayBuffer, 0x10, allocateForWrite)
}, {
pack(typedArray, allocateForWrite) {
encode(typedArray, allocateForWrite) {
let constructor = typedArray.constructor
@@ -616,3 +601,3 @@ if (constructor !== ByteArray && this.structuredClone)
}, {
pack(c1, allocateForWrite) { // specific 0xC1 object
encode(c1, allocateForWrite) { // specific 0xC1 object
let { target, position} = allocateForWrite(1)
@@ -737,8 +722,8 @@ target[position] = 0xc1
if (extension.Class) {
if (!extension.pack)
throw new Error('Extension has no pack function')
if (!extension.encode)
throw new Error('Extension has no encode function')
extensionClasses.unshift(extension.Class)
extensions.unshift(extension)
}
unpackModule.addExtension(extension)
}
decoderModule.addExtension(extension)
}
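The date extension above replaces msgpack's timestamp ext with CBOR tag 1 (epoch time): 0xc1 followed by a uint32 (0x1a) for whole seconds in range, or a float64 (0xfb) otherwise. A simplified sketch that allocates its own Buffer instead of using allocateForWrite (and checks for whole seconds rather than the useTimestamp32 option):
```
// Simplified CBOR tag-1 date encoding (a sketch, not the exact extension)
function encodeDate(date) {
  let seconds = date.getTime() / 1000
  if (Number.isInteger(seconds) && seconds >= 0 && seconds < 0x100000000) {
    let buf = Buffer.alloc(6)
    buf[0] = 0xc1; buf[1] = 0x1a // tag 1, uint32 follows
    buf.writeUInt32BE(seconds, 2)
    return buf
  }
  let buf = Buffer.alloc(10)
  buf[0] = 0xc1; buf[1] = 0xfb   // tag 1, float64 follows
  buf.writeDoubleBE(seconds, 2)
  return buf
}
```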
index.js
@@ -1,10 +0,10 @@
exports.encoder = require('./encode').encoder
exports.Encoder = require('./encode').Encoder
let decodeModule = require('./decode')
let extractor = tryRequire('msgpackr-extract')
let extractor = tryRequire('cbor-extract')
if (extractor)
decodeModule.setExtractor(extractor.extractStrings)
exports.decoder = decodeModule.decoder
exports.Decoder = decodeModule.Decoder
exports.EncoderStream = require('./stream').EncoderStream
exports.DecoderStream = require('./stream').DecoderStream
let encoder = new exports.encoder({ objectsAsMaps: true })
let encoder = new exports.Encoder({ useRecords: false })
exports.decode = encoder.decode
@@ -28,2 +28,2 @@ exports.encode = encoder.encode
}
}
}
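The `tryRequire` guard above makes cbor-extract an optional native dependency; a plausible sketch of that helper (assumed shape, the real implementation may differ):
```
// Requiring an optional native module must not throw when it is not installed
function tryRequire(moduleId) {
  try {
    return require(moduleId)
  } catch (error) {
    // fall back to the pure-JS string extraction path
  }
}
```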
package.json
{
"name": "cbor-x",
"author": "Kris Zyp",
"version": "0.2.0",
"version": "0.3.0",
"description": "Ultra-fast CBOR implementation with extensions for records and structured cloning",
@@ -30,5 +30,6 @@ "license": "MIT",
"optionalDependencies": {
"msgpackr-extract": "^0.3.4"
"cbor-extract": "^0.1.0"
},
"devDependencies": {
"cbor": "^5",
"chai": "^4",
@@ -35,0 +36,0 @@ "mocha": "^4",
README.md
@@ -1,2 +0,2 @@
# cbor-x (Uncompleted, not ready for use, being ported from msgpack)
# cbor-x
[![license](https://img.shields.io/badge/license-MIT-brightgreen)](LICENSE)
@@ -9,3 +9,3 @@ [![npm version](https://img.shields.io/npm/v/cbor-x.svg?style=flat-square)](https://www.npmjs.org/package/cbor-x)
The cbor-x package is an extremely fast CBOR NodeJS/JavaScript implementation. Currently, it is significantly faster than any other known implementations, faster than Avro (for JS), and generally faster than native V8 JSON.stringify/parse. It also includes an optional record extension (the `r` in cbor-x), for defining record structures that makes MessagePack even faster and more compact, often over twice as fast as even native JSON functions, several times faster than other JS implementations, and 15-50% more compact. See the performance section for more details. Structured cloning (with support for cyclical references) is also supported through optional extensions.
The cbor-x package is an extremely fast CBOR NodeJS/JavaScript implementation. Currently, it is significantly faster than any other known implementations, faster than Avro (for JS), and generally faster than native V8 JSON.stringify/parse. It also includes an optional record extension (the `x` in cbor-x), for defining record structures that makes CBOR even faster and more compact, often over twice as fast as even native JSON functions, several times faster than other JS implementations, and 15-50% more compact. See the performance section for more details. Structured cloning (with support for cyclical references) is also supported through optional extensions.
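The Basic Usage section that follows is only partially visible in this diff; for orientation, here is a minimal encode/decode round trip, a sketch consistent with the index.js exports above rather than text from the original README:
```
import { encode, decode } from 'cbor-x';
let serialized = encode({ greeting: 'hello world' });
let data = decode(serialized); // { greeting: 'hello world' }
```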
@@ -35,3 +35,3 @@ ## Basic Usage
import { EncoderStream } from 'cbor-x';
let stream = EncoderStream();
let stream = new EncoderStream();
stream.write(myData);
@@ -43,4 +43,4 @@
import { EncoderStream } from 'cbor-x';
let sendingStream = EncoderStream();
let receivingStream = DecoderStream();
let sendingStream = new EncoderStream();
let receivingStream = new DecoderStream();
// we're just piping to our own stream, but normally you would send and
@@ -70,5 +70,5 @@ // receive over some type of inter-process or network connection.
obj.self = obj;
let packr = new Packr({ structuredClone: true });
let serialized = packr.pack(obj);
let copy = packr.unpack(serialized);
let encoder = new Encoder({ structuredClone: true });
let serialized = encoder.encode(obj);
let copy = encoder.decode(serialized);
copy.self === copy // true
@@ -83,6 +83,6 @@ copy.set.has('a') // true
## Record / Object Structures
There is a critical difference between maps (or dictionaries) that hold an arbitrary set of keys and values (JavaScript `Map` is designed for these), and records or object structures that have a well-defined set of fields. Typical JS objects/records may have many instances that (re)use the same structure. By using the record extension, this distinction is preserved in MessagePack, and the encoding can reuse structures, which not only provides better type preservation, but yields much more compact encodings and increases decoding performance by 2-3x. cbor-x automatically generates record definitions that are reused and referenced by objects with the same structure. There are a number of ways to use this to our advantage. For large object structures with repeating nested objects with similar structures, simply serializing with the record extension can yield significant benefits. To use the record structures extension, we create a new `Packr` instance. By default a new `Packr` instance will have the record extension enabled:
There is a critical difference between maps (or dictionaries) that hold an arbitrary set of keys and values (JavaScript `Map` is designed for these), and records or object structures that have a well-defined set of fields. Typical JS objects/records may have many instances that (re)use the same structure. By using the record extension, this distinction is preserved in CBOR, and the encoding can reuse structures, which not only provides better type preservation, but yields much more compact encodings and increases decoding performance by 2-3x. cbor-x automatically generates record definitions that are reused and referenced by objects with the same structure. There are a number of ways to use this to our advantage. For large object structures with repeating nested objects with similar structures, simply serializing with the record extension can yield significant benefits. To use the record structures extension, we create a new `Encoder` instance. By default a new `Encoder` instance will have the record extension enabled:
```
import { Encoder } from 'cbor-x';
let encoder = Encoder();
let encoder = new Encoder();
encoder.encode(myBigData);
@@ -101,10 +101,10 @@
import { Encoder } from 'cbor-x';
let encoder = Encoder({
let encoder = new Encoder({
structures: [... structures that were last generated ...]
});
```
If you are working with persisted data, you will need to persist the `structures` data when it is updated. Msgpackr provides an API for loading and saving the `structures` on demand (which is robust and can be used in multiple-process situations where other processes may be updating this same `structures` array); we just need to provide a way to store the generated shared structure so it is available to deserialize stored data in the future:
If you are working with persisted data, you will need to persist the `structures` data when it is updated. Cbor-x provides an API for loading and saving the `structures` on demand (which is robust and can be used in multiple-process situations where other processes may be updating this same `structures` array); we just need to provide a way to store the generated shared structure so it is available to deserialize stored data in the future:
```
import { Encoder } from 'cbor-x';
let encoder = Encoder({
let encoder = new Encoder({
getStructures() {
@@ -123,11 +123,11 @@ // storing our data in file (but we could also store in a db or key-value store)
## Options
The following options properties can be provided to the Packr or Unpackr constructor:
The following options properties can be provided to the Encoder or Decoder constructor:
* `useRecords` - Setting this to `false` disables the record extension and stores JavaScript objects as MessagePack maps, and unpacks maps as JavaScript `Object`s, which ensures compatibility with other decoders.
* `useRecords` - Setting this to `false` disables the record extension and stores JavaScript objects as CBOR maps, and decodes maps as JavaScript `Object`s, which ensures compatibility with other decoders.
* `structures` - Provides the array of structures that is to be used for record extension, if you want the structures saved and used again. This array will be modified in place with new record structures that are serialized (if less than 32 structures are in the array).
* `structuredClone` - This enables the structured cloning extensions that will encode object/cyclic references and additional built-in types/classes.
* `mapsAsObjects` - If `true`, this will decode MessagePack maps and JS `Object`s with the map entries decoded to object properties. If `false`, maps are decoded as JavaScript `Map`s. This is disabled by default if `useRecords` is enabled (which allows `Map`s to be preserved), and is enabled by default if `useRecords` is disabled.
* `mapsAsObjects` - If `true`, this will decode CBOR maps and JS `Object`s with the map entries decoded to object properties. If `false`, maps are decoded as JavaScript `Map`s. This is disabled by default if `useRecords` is enabled (which allows `Map`s to be preserved), and is enabled by default if `useRecords` is disabled.
* `useFloat32` - This will enable cbor-x to encode non-integer numbers as `float32`. See next section for possible values.
* `variableMapSize` - This will use varying map size definition (fixmap, map16, map32) based on the number of keys when encoding objects, which yields slightly more compact encodings (for small objects), but is typically 5-10% slower during encoding. This is only relevant when record extension is disabled.
* `copyBuffers` - When decoding a MessagePack with binary data (Buffers are encoded as binary data), copy the buffer rather than providing a slice/view of the buffer. If you want your input data to be collected or modified while the decoded embedded buffer continues to live on, you can use this option (there is extra overhead to copying).
* `copyBuffers` - When decoding a CBOR with binary data (Buffers are encoded as binary data), copy the buffer rather than providing a slice/view of the buffer. If you want your input data to be collected or modified while the decoded embedded buffer continues to live on, you can use this option (there is extra overhead to copying).
* `useTimestamp32` - Encode JS `Date`s in 32-bit format when possible by dropping the milliseconds. This is a more efficient encoding of dates. You can also cause dates to use 32-bit format by manually setting the milliseconds to zero (`date.setMilliseconds(0)`).
@@ -143,3 +143,3 @@
* `DECIMAL_ROUND` (3) - Always will encode non-integers as 32-bit float, and when decoding 32-bit float, round to the significant decimal digits (usually 7, but 6 or 8 digits for some ranges).
* `DECIMAL_FIT` (4) - Only encode non-integers as 32-bit float if all significant digits (usually up to 7) can be unambiguously encoded as a 32-bit float, and decode/unpack with decimal rounding (same as above). This will ensure round-trip encoding/decoding without loss in precision and use 32-bit when possible.
* `DECIMAL_FIT` (4) - Only encode non-integers as 32-bit float if all significant digits (usually up to 7) can be unambiguously encoded as a 32-bit float, and decode with decimal rounding (same as above). This will ensure round-trip encoding/decoding without loss in precision and use 32-bit when possible.
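Putting these options together, a small usage sketch (assuming `DECIMAL_FIT` is exported directly from the package, as the test file below suggests):
```
import { Encoder, DECIMAL_FIT } from 'cbor-x';
let encoder = new Encoder({ useFloat32: DECIMAL_FIT });
let buffer = encoder.encode({ reading: 2.25 }); // 2.25 fits a float32 exactly, so it is encoded in 4 bytes
encoder.decode(buffer).reading // 2.25, restored with decimal rounding
```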
@@ -189,20 +189,20 @@ Note that the performance is decreased with decimal rounding by about 20-25%, although if only 5% of your values are floating point, that will only have about a 1% impact overall.
## Custom Extensions
You can add your own custom extensions, which can be used to encode specific types/classes in certain ways. This is done by using the `addExtension` function, and specifying the class, extension type code (should be a number from 1-100, reserving negatives for MessagePack, 101-127 for cbor-x), and your pack and unpack functions (or just the one you need). You can use cbor-x encoding and decoding within your extensions, but if you do so, you must create a separate Packr instance, otherwise you could override data in the same encoding buffer:
You can add your own custom extensions, which can be used to encode specific types/classes in certain ways. This is done by using the `addExtension` function, and specifying the class, extension type code (should be a number from 1-100, reserving negatives for CBOR, 101-127 for cbor-x), and your encode and decode functions (or just the one you need). You can use cbor-x encoding and decoding within your extensions, but if you do so, you must create a separate Encoder instance, otherwise you could override data in the same encoding buffer:
```
import { addExtension, Packr } from 'cbor-x';
import { addExtension, Encoder } from 'cbor-x';
class MyCustomClass {...}
let extPackr = new Packr();
let extEncoder = new Encoder();
addExtension({
Class: MyCustomClass,
type: 11, // register our own extension code (a type code from 1-100)
pack(instance) {
encode(instance) {
// define how your custom class should be encoded
return extPackr.pack(instance.myData); // return a buffer
return extEncoder.encode(instance.myData); // return a buffer
}
unpack(buffer) {
decode(buffer) {
// define how your custom class should be decoded
let instance = new MyCustomClass();
instance.myData = extPackr.unpack(buffer);
instance.myData = extEncoder.decode(buffer);
return instance; // decoded value from buffer
@@ -245,3 +245,3 @@ }
### Dates
cbor-x saves all JavaScript `Date`s using the standard MessagePack date extension (type -1), using the smallest of 32-bit, 64-bit or 96-bit format needed to store the date without data loss (or using 32-bit if the useTimestamp32 option is specified).
cbor-x saves all JavaScript `Date`s using the standard CBOR date extension (tag 1), using the smallest of 32-bit, 64-bit or 96-bit format needed to store the date without data loss (or using 32-bit if the useTimestamp32 option is specified).
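A short usage sketch of the compact date format (hypothetical data, consistent with the `useTimestamp32` option described above):
```
import { Encoder } from 'cbor-x';
let encoder = new Encoder({ useTimestamp32: true });
let date = new Date();
date.setMilliseconds(0); // whole seconds also trigger the compact format without the option
let copy = encoder.decode(encoder.encode({ date })).date;
copy.getTime() === date.getTime() // true
```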
@@ -256,3 +256,3 @@ ### Structured Cloning
### Browser Consideration
It is worth noting that while cbor-x works well in modern browsers, the MessagePack format itself is often not an ideal format for web use. If you want compact data, brotli or gzip are most effective in compressing, and MessagePack's character frequency tends to defeat Huffman encoding used by these standard compression algorithms, resulting in less compact data than compressed JSON. The modern browser architecture is heavily optimized for parsing JSON from HTTP traffic, and it is difficult to achieve the same level of overall efficiency and ease with MessagePack.
It is worth noting that while cbor-x works well in modern browsers, the CBOR format itself is often not an ideal format for web use. If you want compact data, brotli or gzip are most effective in compressing, and CBOR's character frequency tends to defeat Huffman encoding used by these standard compression algorithms, resulting in less compact data than compressed JSON. The modern browser architecture is heavily optimized for parsing JSON from HTTP traffic, and it is difficult to achieve the same level of overall efficiency and ease with CBOR.
@@ -259,0 +259,0 @@ ### Credits
@@ -1,2 +0,2 @@
var msgpackr = tryRequire("..");
var cborX = tryRequire("..");
var msgpack_node = tryRequire("msgpack");
@@ -51,15 +51,15 @@ var msgpack_msgpack = tryRequire("@msgpack/msgpack");
if (msgpackr) {
if (cborX) {
// let packr = new msgpackr.Packr({ objectsAsMaps: true })
buf = bench('require("msgpackr").pack(obj);', msgpackr.pack, data);
buf = bench('require("cbor-x").encode(obj);', cborX.encode, data);
// buf = bench('require("msgpack").serialize(obj);', data => {let result = packr.serialize(data); packr.resetMemory(); return result;}, data);
obj = bench('require("msgpackr").unpack(buf);', msgpackr.unpack, buf);
obj = bench('require("cbor-x").decode(buf);', cborX.decode, buf);
test(obj);
packr = new msgpackr.Packr({ structures: [] })
buf = bench('msgpackr w/ shared structures: packr.pack(obj);', packr.pack.bind(packr), data);
encoder = new cborX.Encoder({ structures: [] })
buf = bench('cbor-x w/ shared structures: packr.encode(obj);', encoder.encode.bind(encoder), data);
// buf = bench('msgpackr w/ shared structures: packr.pack(obj);', data => {let result = packr.pack(data); packr.resetMemory(); return result;}, data);
obj = bench('msgpackr w/ shared structures: packr.unpack(buf);', packr.unpack.bind(packr), buf);
obj = bench('cbor-x w/ shared structures: packr.decode(buf);', encoder.decode.bind(encoder), buf);
test(obj);
@@ -66,0 +66,0 @@ }
@@ -12,39 +12,25 @@ var data = require('./example4.json');
assert = chai.assert
if (typeof msgpackr === 'undefined') { msgpackr = require('..') }
var msgpack_msgpack = tryRequire('@msgpack/msgpack');
var msgpack_lite = tryRequire('msgpack-lite');
var unpack = msgpackr.unpack
var pack = msgpackr.pack
if (typeof cborX === 'undefined') { cborX = require('..') }
var cbor_module = tryRequire('cbor');
var decode = cborX.decode
var encode = cborX.encode
addCompatibilitySuite = (data) => () => {
if (msgpack_msgpack) {
test('from @msgpack/msgpack', function(){
var serialized = msgpack_msgpack.encode(data)
var deserialized = unpack(serialized)
if (cbor_module) {
test('from cbor', function(){
var serialized = cbor_module.encode(data)
var deserialized = decode(serialized)
assert.deepEqual(deserialized, data)
})
test('to @msgpack/msgpack', function(){
var serialized = pack(data)
var deserialized = msgpack_msgpack.decode(serialized)
test('to cbor', function(){
var serialized = encode(data)
var deserialized = cbor_module.decodeFirstSync(serialized)
assert.deepEqual(deserialized, data)
})
}
if (msgpack_lite) {
test('from msgpack-lite', function(){
var serialized = msgpack_lite.encode(data)
var deserialized = unpack(serialized)
assert.deepEqual(deserialized, data)
})
test('to msgpack-lite', function(){
var serialized = pack(data)
var deserialized = msgpack_lite.decode(serialized)
assert.deepEqual(deserialized, data)
})
}
}
suite('msgpackr compatibility tests (example)', addCompatibilitySuite(require('./example.json')))
suite('msgpackr compatibility tests (example4)', addCompatibilitySuite(require('./example4.json')))
suite('msgpackr compatibility tests (example5)', addCompatibilitySuite(require('./example5.json')))
suite('cbor-x compatibility tests (example)', addCompatibilitySuite(require('./example.json')))
suite('cbor-x compatibility tests (example4)', addCompatibilitySuite(require('./example4.json')))
suite('cbor-x compatibility tests (example5)', addCompatibilitySuite(require('./example5.json')))
@@ -1,3 +0,3 @@
//var inspector = require('inspector')
//inspector.open(9330, null, true)
var inspector = require('inspector')
inspector.open(9330, null, true)
@@ -13,11 +13,11 @@ function tryRequire(module) {
assert = chai.assert
if (typeof msgpackr === 'undefined') { msgpackr = require('..') }
var Packr = msgpackr.Packr
var PackrStream = msgpackr.PackrStream
var UnpackrStream = msgpackr.UnpackrStream
var unpack = msgpackr.unpack
var pack = msgpackr.pack
var DECIMAL_FIT = msgpackr.DECIMAL_FIT
if (typeof cborX === 'undefined') { cborX = require('..') }
var Encoder = cborX.Encoder
var EncoderStream = cborX.EncoderStream
var DecoderStream = cborX.DecoderStream
var decode = cborX.decode
var encode = cborX.encode
var DECIMAL_FIT = cborX.DECIMAL_FIT
var addExtension = msgpackr.addExtension
var addExtension = cborX.addExtension
@@ -32,3 +32,3 @@ var zlib = tryRequire('zlib')
try {
// var { decode, encode } = require('msgpack-lite')
// var { decode, encode } = require('msgencode-lite')
} catch (error) {}
@@ -47,4 +47,4 @@
suite('msgpackr basic tests', function(){
test('pack/unpack data', function(){
suite('cborX basic tests', function(){
test('encode/decode data', function(){
var data = {
@@ -72,5 +72,5 @@ data: [
let structures = []
let packr = new Packr({ structures })
var serialized = packr.pack(data)
var deserialized = packr.unpack(serialized)
let encoder = new Encoder({ structures })
var serialized = encoder.encode(data)
var deserialized = encoder.decode(serialized)
assert.deepEqual(deserialized, data)
@@ -95,14 +95,14 @@ })
let structures = []
let packr = new Packr({ structures })
var serialized = packr.pack(data)
var deserialized = packr.unpack(serialized)
let encoder = new Encoder({ structures })
var serialized = encoder.encode(data)
var deserialized = encoder.decode(serialized)
assert.deepEqual(deserialized, data)
})
test('pack/unpack sample data', function(){
test('encode/decode sample data', function(){
var data = sampleData
let structures = []
let packr = new Packr({ structures, useRecords: true })
var serialized = packr.pack(data)
var deserialized = packr.unpack(serialized)
let encoder = new Encoder({ structures, useRecords: true })
var serialized = encoder.encode(data)
var deserialized = encoder.decode(serialized)
assert.deepEqual(deserialized, data)
@@ -127,9 +127,9 @@ })
}
let packr = new Packr()
let encoder = new Encoder()
addExtension({
Class: Extended,
type: 11,
unpack: function(buffer) {
decode: function(buffer) {
let e = new Extended()
let data = packr.unpack(buffer)
let data = encoder.decode(buffer)
e.value = data[0]
@@ -139,8 +139,8 @@ e.string = data[1]
},
pack: function(instance) {
return packr.pack([instance.value, instance.string])
encode: function(instance) {
return encoder.encode([instance.value, instance.string])
}
})
var serialized = pack(data)
var deserialized = unpack(serialized)
var serialized = encode(data)
var deserialized = decode(serialized)
assert.deepEqual(data, deserialized)
@@ -158,3 +158,3 @@ assert.equal(deserialized.extendedInstance.getDouble(), 8)
let s = td.decode(b)
if (!require('msgpackr-extract').isOneByte(s)) {
if (!require('cborX-extract').isOneByte(s)) {
console.log(i.toString(16), s.length)
@@ -177,7 +177,7 @@ total++
object.childrenAgain = object.children
let packr = new Packr({
let encoder = new Encoder({
structuredClone: true,
})
var serialized = packr.pack(object)
var deserialized = packr.unpack(serialized)
var serialized = encoder.encode(object)
var deserialized = encoder.decode(serialized)
assert.equal(deserialized.self, deserialized)
@@ -198,7 +198,7 @@ assert.equal(deserialized.children[0].name, 'child')
}
let packr = new Packr({
let encoder = new Encoder({
structuredClone: true,
})
var serialized = packr.pack(object)
var deserialized = packr.unpack(serialized)
var serialized = encoder.encode(object)
var deserialized = encoder.decode(serialized)
assert.deepEqual(Array.from(deserialized.set), Array.from(object.set))
@@ -227,5 +227,5 @@ assert.equal(deserialized.error.message, object.error.message)
}
let packr = new Packr()
var serialized = packr.pack(data)
var deserialized = packr.unpack(serialized)
let encoder = new Encoder()
var serialized = encoder.encode(data)
var deserialized = encoder.decode(serialized)
assert.equal(deserialized.map.get(4), 'four')
@@ -247,8 +247,8 @@ assert.equal(deserialized.map.get('three'), 3)
}
let packr = new Packr({
let encoder = new Encoder({
mapsAsObjects: true,
useTimestamp32: true,
})
var serialized = packr.pack(data)
var deserialized = packr.unpack(serialized)
var serialized = encoder.encode(data)
var deserialized = encoder.decode(serialized)
assert.equal(deserialized.map[4], 'four')
@@ -265,8 +265,8 @@ assert.equal(deserialized.map.three, 3)
}
let packr = new Packr({
let encoder = new Encoder({
useFloat32: DECIMAL_FIT
})
var serialized = packr.pack(data)
assert.equal(serialized.length, 32)
var deserialized = packr.unpack(serialized)
var serialized = encoder.encode(data)
assert.equal(serialized.length, 31)
var deserialized = encoder.decode(serialized)
assert.deepEqual(deserialized, data)
@@ -288,4 +288,4 @@ })
}
var serialized = pack(data)
var deserialized = unpack(serialized)
var serialized = encode(data)
var deserialized = decode(serialized)
assert.deepEqual(deserialized, data)
@@ -297,10 +297,10 @@ })
buffer1: new Uint8Array([2,3,4]),
buffer2: new Uint8Array(pack(sampleData))
buffer2: new Uint8Array(encode(sampleData))
}
var serialized = pack(data)
var deserialized = unpack(serialized)
var serialized = encode(data)
var deserialized = decode(serialized)
assert.deepEqual(deserialized, data)
})
test('notepack test', function() {
test('noteencode test', function() {
const data = {
@@ -319,6 +319,6 @@ foo: 1,
};
var serialized = pack(data)
var deserialized = unpack(serialized)
var deserialized = unpack(serialized)
var deserialized = unpack(serialized)
var serialized = encode(data)
var deserialized = decode(serialized)
var deserialized = decode(serialized)
var deserialized = decode(serialized)
assert.deepEqual(deserialized, data)
@@ -330,11 +330,11 @@ })
let data = {fixstr: 'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ', str8:'ᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝᾐᾑᾒᾓᾔᾕᾖᾗᾘᾙᾚᾛᾜᾝ'} | ||
var serialized = pack(data)
deserialized = unpack(serialized)
var serialized = encode(data)
deserialized = decode(serialized)
assert.deepEqual(deserialized, data)
})
if (PackrStream) {
if (EncoderStream) {
test('serialize/parse stream', () => {
const serializeStream = new PackrStream({
const serializeStream = new EncoderStream({
})
const parseStream = new UnpackrStream()
const parseStream = new DecoderStream()
serializeStream.pipe(parseStream)
@@ -367,3 +367,3 @@ const received = []
})
suite('msgpackr performance tests', function(){
suite('cborX performance tests', function(){
test('performance JSON.parse', function() {
@@ -386,25 +386,25 @@ var data = sampleData
})
test('performance unpack', function() {
test('performance decode', function() {
var data = sampleData
this.timeout(10000)
let structures = []
var serialized = pack(data)
console.log('MessagePack size', serialized.length)
let packr = new Packr({ structures })
var serialized = packr.pack(data)
console.log('msgpackr w/ record ext size', serialized.length)
var serialized = encode(data)
console.log('CBOR size', serialized.length)
let encoder = new Encoder({ structures })
var serialized = encoder.encode(data)
console.log('cborX w/ record ext size', serialized.length)
for (var i = 0; i < ITERATIONS; i++) {
var deserialized = packr.unpack(serialized)
var deserialized = encoder.decode(serialized)
}
})
test('performance pack', function() {
test('performance encode', function() {
var data = sampleData
this.timeout(10000)
let structures = []
let packr = new Packr({ structures })
let encoder = new Encoder({ structures })
for (var i = 0; i < ITERATIONS; i++) {
//serialized = pack(data, { shared: sharedStructure })
var serialized = packr.pack(data)
packr.resetMemory()
//serialized = encode(data, { shared: sharedStructure })
var serialized = encoder.encode(data)
encoder.resetMemory()
//var serializedGzip = deflateSync(serialized)
@@ -414,2 +414,2 @@ }
})
})
})
License Policy Violation: this package is not allowed per your license policy. Review the package's license to ensure compliance. Found 1 instance in 1 package.
Debug access (supply chain risk): uses debug, reflection and dynamic code execution features. Found 1 instance in 1 package.