cbor-x - npm Package Compare versions

Comparing version 0.9.4 to 1.0.0


decode.d.ts

@@ -16,2 +16,3 @@ export enum FLOAT32_OPTIONS {

copyBuffers?: boolean
bundleStrings?: boolean
useTimestamp32?: boolean

@@ -25,2 +26,3 @@ largeBigIntToFloat?: boolean

saveStructures?(structures: {}[]): boolean | void
onInvalidDate?: () => any
}

@@ -27,0 +29,0 @@ interface Extension {
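Of the two options added above, onInvalidDate is described only by its signature. A minimal sketch of how it might be wired up; the semantics (returning a substitute value when a date tag cannot be decoded to a valid Date) are an assumption inferred from the name, and the Decoder import is the package's documented entry point:

import { Decoder } from 'cbor-x'

// Assumption: onInvalidDate is consulted when a date extension decodes to an invalid
// Date, and its return value is used in place of that date.
const decoder = new Decoder({
  onInvalidDate: () => null
})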


decode.js

@@ -10,3 +10,6 @@ let decoder

const EMPTY_ARRAY = []
const RECORD_TAG_ID = 0x69
const LEGACY_RECORD_INLINE_ID = 105
const RECORD_DEFINITIONS_ID = 0xdffe
const RECORD_INLINE_ID = 0xdfff // temporary first-come first-serve tag // proposed tag: 0x7265 // 're'
const BUNDLED_STRINGS_ID = 0xdff9
const PACKED_TABLE_TAG_ID = 51

@@ -22,2 +25,3 @@ const PACKED_REFERENCE_TAG_ID = 6

let srcStringEnd = 0
let bundledStrings
let referenceMap

@@ -29,3 +33,2 @@ let currentExtensions = []

let restoreMapsAsObject
let sharedValues
let defaultOptions = {

@@ -40,9 +43,66 @@ useRecords: false,

if (options) {
if ((options.keyMap || options._keyMap) && !options.useRecords) {
options.useRecords = false
options.mapsAsObjects = true
}
if (options.useRecords === false && options.mapsAsObjects === undefined)
options.mapsAsObjects = true
if (options.getStructures && !options.structures)
if (options.getStructures)
options.getShared = options.getStructures
if (options.getShared && !options.structures)
(options.structures = []).uninitialized = true // this is what we use to denote uninitialized structures
if (options.keyMap) {
this.mapKey = new Map()
for (let [k,v] of Object.entries(options.keyMap)) this.mapKey.set(v,k)
}
}
Object.assign(this, options)
}
/*
decodeKey(key) {
return this.keyMap
? Object.keys(this.keyMap)[Object.values(this.keyMap).indexOf(key)] || key
: key
}
*/
decodeKey(key) {
return this.keyMap ? this.mapKey.get(key) || key : key
}
encodeKey(key) {
return this.keyMap && this.keyMap.hasOwnProperty(key) ? this.keyMap[key] : key
}
encodeKeys(rec) {
if (!this._keyMap) return rec
let map = new Map()
for (let [k,v] of Object.entries(rec)) map.set((this._keyMap.hasOwnProperty(k) ? this._keyMap[k] : k), v)
return map
}
decodeKeys(map) {
if (!this._keyMap || map.constructor.name != 'Map') return map
if (!this._mapKey) {
this._mapKey = new Map()
for (let [k,v] of Object.entries(this._keyMap)) this._mapKey.set(v,k)
}
let res = {}
//map.forEach((v,k) => res[Object.keys(this._keyMap)[Object.values(this._keyMap).indexOf(k)] || k] = v)
map.forEach((v,k) => res[this._mapKey.has(k) ? this._mapKey.get(k) : k] = v)
return res
}
mapDecode(source, end) {
let res = this.decode(source)
if (this._keyMap) {
//Experimental support for Optimised KeyMap decoding
switch (res.constructor.name) {
case 'Array': return res.map(r => this.decodeKeys(r))
//case 'Map': return this.decodeKeys(res)
}
}
return res
}
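A minimal sketch of the keyMap option handled by the constructor and decodeKey above (the key assignments are hypothetical; per the constructor, keyMap maps property names to the keys actually stored in the encoded data, and mapKey is the reverse table consulted while reading):

import { Decoder } from 'cbor-x'

const decoder = new Decoder({
  mapsAsObjects: true,
  keyMap: { name: 1, email: 2 } // hypothetical mapping: property name -> encoded key
})
// A CBOR map written with the numeric keys 1 and 2 decodes back to
// { name: ..., email: ... }, because decodeKey() restores the original names.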
decode(source, end) {

@@ -62,2 +122,3 @@ if (src) {

strings = EMPTY_ARRAY
bundledStrings = null
src = source

@@ -136,2 +197,5 @@ // this provides cached access to the data view for a buffer if it is getting reused, which is a recommend

let result = read()
if (bundledStrings) // bundled strings to skip past
position = bundledStrings.postBundlePosition
if (position == srcEnd) {

@@ -211,2 +275,3 @@ // finished reading this source, cleanup references

case 3: // text string
throw new Error('Indefinite length not supported for byte or text strings')
case 4: // array

@@ -223,4 +288,4 @@ let array = []

let object = {}
while ((key = readKey()) != STOP_CODE)
object[key] = read()
if (currentDecoder.keyMap) while((key = read()) != STOP_CODE) object[currentDecoder.decodeKey(key)] = read()
else while ((key = read()) != STOP_CODE) object[key] = read()
return object

@@ -233,4 +298,4 @@ } else {

let map = new Map()
while ((key = read()) != STOP_CODE)
map.set(key, read())
if (currentDecoder.keyMap) while((key = read()) != STOP_CODE) map.set(currentDecoder.decodeKey(key), read())
else while ((key = read()) != STOP_CODE) map.set(key, read())
return map

@@ -267,5 +332,5 @@ }

let array = new Array(token)
for (let i = 0; i < token; i++) {
array[i] = read()
}
//if (currentDecoder.keyMap) for (let i = 0; i < token; i++) array[i] = currentDecoder.decodeKey(read())
//else
for (let i = 0; i < token; i++) array[i] = read()
return array

@@ -275,5 +340,4 @@ case 5: // map

let object = {}
for (let i = 0; i < token; i++) {
object[readKey()] = read()
}
if (currentDecoder.keyMap) for (let i = 0; i < token; i++) object[currentDecoder.decodeKey(read())] = read()
else for (let i = 0; i < token; i++) object[read()] = read()
return object

@@ -286,50 +350,53 @@ } else {

let map = new Map()
for (let i = 0; i < token; i++) {
map.set(read(), read())
}
if (currentDecoder.keyMap) for (let i = 0; i < token; i++) map.set(currentDecoder.decodeKey(read()),read())
else for (let i = 0; i < token; i++) map.set(read(), read())
return map
}
case 6: // extension
if ((token >> 8) == RECORD_TAG_ID) { // record structures
let structure = currentStructures[token & 0xff]
if (token >= BUNDLED_STRINGS_ID) {
let structure = currentStructures[token & 0x1fff] // check record structures first
// At some point we may provide an option for dynamic tag assignment with a range like token >= 8 && (token < 16 || (token > 0x80 && token < 0xc0) || (token > 0x130 && token < 0x4000))
if (structure) {
if (!structure.read)
structure.read = createStructureReader(structure)
if (!structure.read) structure.read = createStructureReader(structure)
return structure.read()
} else if (currentDecoder.getStructures) {
let updatedStructures = saveState(() => {
// save the state in case getStructures modifies our buffer
src = null
return currentDecoder.getStructures()
})
if (currentStructures === true)
currentDecoder.structures = currentStructures = updatedStructures
else
currentStructures.splice.apply(currentStructures, [0, updatedStructures.length].concat(updatedStructures))
structure = currentStructures[token & 0xff]
if (structure) {
if (!structure.read)
structure.read = createStructureReader(structure)
return structure.read()
} else
return token
} else
return token
} else {
let extension = currentExtensions[token]
if (extension) {
if (extension.handlesRead)
return extension(read)
else
return extension(read())
} else {
let input = read()
for (let i = 0; i < currentExtensionRanges.length; i++) {
let value = currentExtensionRanges[i](token, input)
if (value !== undefined)
return value
}
if (token < 0x10000) {
if (token == RECORD_INLINE_ID) // we do a special check for this so that we can keep the currentExtensions as densely stored array (v8 stores arrays densely under about 3000 elements)
return recordDefinition(read())
else if (token == RECORD_DEFINITIONS_ID) {
let length = readJustLength()
let id = read()
for (let i = 2; i < length; i++) {
recordDefinition([id++, read()])
}
return read()
} else if (token == BUNDLED_STRINGS_ID) {
return readBundleExt()
}
return new Tag(input)
if (currentDecoder.getShared) {
loadShared()
structure = currentStructures[token & 0x1fff]
if (structure) {
if (!structure.read)
structure.read = createStructureReader(structure)
return structure.read()
}
}
}
}
let extension = currentExtensions[token]
if (extension) {
if (extension.handlesRead)
return extension(read)
else
return extension(read())
} else {
let input = read()
for (let i = 0; i < currentExtensionRanges.length; i++) {
let value = currentExtensionRanges[i](token, input)
if (value !== undefined)
return value
}
return new Tag(input, token)
}
case 7: // fixed value

@@ -341,5 +408,5 @@ switch (token) {

case 0x17: return; // undefined
case 0x1f:
case 0x1f:
default:
let packedValue = packedValues[token]
let packedValue = (packedValues || getPackedValues())[token]
if (packedValue !== undefined)

@@ -360,37 +427,43 @@ return packedValue

function createStructureReader(structure) {
let l = structure.length
function readObject() {
// get the array size from the header
let length = src[position++]
//let majorType = token >> 5
length = length & 0x1f
if (length > 0x17) {
switch (length) {
case 0x18:
length = src[position++]
break
case 0x19:
length = dataView.getUint16(position)
position += 2
break
case 0x1a:
length = dataView.getUint32(position)
position += 4
break
default:
throw new Error('Expected array header, but got ' + src[position - 1])
}
}
// This initial function is quick to instantiate, but runs slower. After several iterations pay the cost to build the faster function
if (readObject.count++ > 2) {
this.read = (new Function('a', 'r', 'return function(){a();return {' + structure.map(key => validName.test(key) ? key + ':r()' : ('[' + JSON.stringify(key) + ']:r()')).join(',') + '}}'))(readArrayHeader, read)
return this.read()
if (this.objectLiteralSize === length) // we have a fast object literal reader, use it (assuming it is the right length)
return this.objectLiteral(read)
if (this.count++ == 3) { // create a fast reader
this.objectLiteralSize = length
this.objectLiteral = currentDecoder.keyMap
? new Function('r', 'return {' + this.map(k => currentDecoder.decodeKey(k)).map(k => validName.test(k) ? k + ':r()' : ('[' + JSON.stringify(k) + ']:r()')).join(',') + '}')
: new Function('r', 'return {' + this.map(key => validName.test(key) ? key + ':r()' : ('[' + JSON.stringify(key) + ']:r()')).join(',') + '}')
return this.objectLiteral(read)
}
readArrayHeader(l)
let object = {}
for (let i = 0; i < l; i++) {
let key = structure[i]
object[key] = read()
}
if (currentDecoder.keyMap) for (let i = 0; i < length; i++) object[currentDecoder.decodeKey(this[i])] = read()
else for (let i = 0; i < length; i++) object[this[i]] = read()
return object
}
readObject.count = 0
structure.count = 0
return readObject
}
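For context on what createStructureReader accelerates, a small round trip through the record mechanism (a sketch; it assumes the package's Encoder export, which extends Decoder and can therefore decode its own output):

import { Encoder } from 'cbor-x'

const enc = new Encoder({ useRecords: true })
const buf = enc.encode({ x: 1, y: 2 })
// The first reads of a given shape go through the generic loop above; once the same
// structure has been seen a few times, the compiled object-literal reader takes over.
enc.decode(buf) // => { x: 1, y: 2 }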
function readArrayHeader(expectedLength) {
// consume the array header, TODO: check expected length
let token = src[position++]
//let majorType = token >> 5
token = token & 0x1f
if (token > 0x17) {
switch (token) {
case 0x18: position++
break
case 0x19: position += 2
break
case 0x1a: position += 4
}
}
}
let readFixedString = readStringJS

@@ -412,2 +485,4 @@ let readString8 = readStringJS

if (string == null) {
if (bundledStrings)
return readStringJS(length)
let extraction = extractStrings(position, srcEnd, length, src)

@@ -739,8 +814,9 @@ if (typeof extraction == 'string') {

export class Tag {
constructor(value) {
constructor(value, tag) {
this.value = value
this.tag = tag
}
}
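With the added constructor argument, a value wrapped in an unrecognized tag now remembers which tag number it carried. A sketch (assumes Tag is re-exported from the package entry point; the bytes are hand-written CBOR for tag 999 wrapping the integer 1):

import { decode, Tag } from 'cbor-x'

const result = decode(Uint8Array.from([0xd9, 0x03, 0xe7, 0x01])) // tag(999) around 1
// result instanceof Tag, with result.tag === 999 and result.value === 1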
let glbl = typeof window == 'object' ? window : global
let glbl = typeof self == 'object' ? self : global

@@ -759,3 +835,7 @@ currentExtensions[0] = (dateString) => {

// bigint extension
return new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength).getBigUint64(0)
let value = BigInt(0)
for (let i = 0, l = buffer.byteLength; i < l; i++) {
value = BigInt(buffer[i]) + value << BigInt(8)
}
return value
}

@@ -765,23 +845,43 @@

// negative bigint extension
return BigInt(-1) - (new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength).getBigUint64(0))
return BigInt(-1) - currentExtensions[2](buffer)
}
currentExtensions[4] = (fraction) => {
// best to reparse to maintain accuracy
return +(fraction[1] + 'e' + fraction[0])
}
currentExtensions[5] = (fraction) => {
// probably not sufficiently accurate
return fraction[1] * Math.exp(fraction[0] * Math.log(2))
}
// the registration of the record definition extension (tag 105)
const recordDefinition = () => {
let definition = read()
let structure = definition[0]
let id = definition[1]
currentStructures[id & 0xff] = structure
const recordDefinition = (definition) => {
let id = definition[0] - 0xe000
let structure = definition[1]
currentStructures[id] = structure
structure.read = createStructureReader(structure)
let object = {}
for (let i = 2,l = definition.length; i < l; i++) {
let key = structure[i - 2]
object[key] = definition[i]
}
if (currentDecoder.keyMap) for (let i = 2,l = definition.length; i < l; i++) {
let key = currentDecoder.decodeKey(structure[i - 2])
object[key] = definition[i]
}
else for (let i = 2,l = definition.length; i < l; i++) {
let key = structure[i - 2]
object[key] = definition[i]
}
return object
}
currentExtensions[LEGACY_RECORD_INLINE_ID] = recordDefinition
currentExtensions[14] = (value) => {
if (bundledStrings)
return bundledStrings[0].slice(bundledStrings.position0, bundledStrings.position0 += value)
return new Tag(value, 14)
}
currentExtensions[15] = (value) => {
if (bundledStrings)
return bundledStrings[1].slice(bundledStrings.position1, bundledStrings.position1 += value)
return new Tag(value, 15)
}
recordDefinition.handlesRead = true
currentExtensions[RECORD_TAG_ID] = recordDefinition
currentExtensions[27] = (data) => { // http://cbor.schmorp.de/generic-object

@@ -792,3 +892,3 @@ return (glbl[data[0]] || Error)(data[1], data[2])

if (src[position++] != 0x84)
throw new Error('Packed values structure must be followed by 4 element array')
throw new Error('Packed values structure must be followed by a 4 element array')
let newPackedValues = read() // packed values

@@ -804,2 +904,8 @@ packedValues = packedValues ? newPackedValues.concat(packedValues.slice(newPackedValues.length)) : newPackedValues

currentExtensions[PACKED_REFERENCE_TAG_ID] = (data) => { // packed reference
if (!packedValues) {
if (currentDecoder.getShared)
loadShared()
else
return new Tag(data, PACKED_REFERENCE_TAG_ID)
}
if (typeof data == 'number')

@@ -809,7 +915,22 @@ return packedValues[16 + (data >= 0 ? 2 * data : (-2 * data - 1))]

}
currentExtensions[25] = (id) => {
return stringRefs[id]
}
currentExtensions[256] = (read) => {
stringRefs = []
try {
return read()
} finally {
stringRefs = null
}
}
currentExtensions[256].handlesRead = true
currentExtensions[40009] = (id) => {
// id extension (for structured clones)
if (!referenceMap)
currentExtensions[28] = (read) => {
// shareable http://cbor.schmorp.de/value-sharing (for structured clones)
if (!referenceMap) {
referenceMap = new Map()
referenceMap.id = 0
}
let id = referenceMap.id++
let token = src[position]

@@ -832,5 +953,6 @@ let target

}
currentExtensions[28].handlesRead = true
currentExtensions[40010] = (id) => {
// pointer extension (for structured clones)
currentExtensions[29] = (id) => {
// sharedref http://cbor.schmorp.de/value-sharing (for structured clones)
let refEntry = referenceMap.get(id)

@@ -858,15 +980,32 @@ refEntry.used = true

}
function getPackedValues() {
if (!packedValues) {
if (currentDecoder.getShared)
loadShared()
else
throw new Error('No packed values available')
}
return packedValues
}
const SHARED_DATA_TAG_ID = 0x53687264 // ascii 'Shrd'
currentExtensionRanges.push((tag, input) => {
if (tag >= 225 && tag <= 255)
return combine(packedTable.prefixes[tag - 224], input)
return combine(getPackedValues().prefixes[tag - 224], input)
if (tag >= 28704 && tag <= 32767)
return combine(packedTable.prefixes[tag - 28672], input)
return combine(getPackedValues().prefixes[tag - 28672], input)
if (tag >= 1879052288 && tag <= 2147483647)
return combine(packedTable.prefixes[tag - 1879048192], input)
return combine(getPackedValues().prefixes[tag - 1879048192], input)
if (tag >= 216 && tag <= 223)
return combine(input, packedTable.suffixes[tag - 216])
return combine(input, getPackedValues().suffixes[tag - 216])
if (tag >= 27647 && tag <= 28671)
return combine(input, packedTable.suffixes[tag - 27639])
return combine(input, getPackedValues().suffixes[tag - 27639])
if (tag >= 1811940352 && tag <= 1879048191)
return combine(input, packedTable.suffixes[tag - 1811939328])
return combine(input, getPackedValues().suffixes[tag - 1811939328])
if (tag == SHARED_DATA_TAG_ID) {// we do a special check for this so that we can keep the currentExtensions as densely stored array (v8 stores arrays densely under about 3000 elements)
return {
packedValues: packedValues,
structures: currentStructures.slice(0),
version: input,
}
}
})

@@ -888,2 +1027,57 @@

function readBundleExt() {
let length = readJustLength()
let bundlePosition = position + read()
for (let i = 2; i < length; i++) {
// skip past bundles that were already read
let bundleLength = readJustLength() // this will increment position, so must add to position afterwards
position += bundleLength
}
let dataPosition = position
position = bundlePosition
bundledStrings = [readStringJS(readJustLength()), readStringJS(readJustLength())]
bundledStrings.position0 = 0
bundledStrings.position1 = 0
bundledStrings.postBundlePosition = position
position = dataPosition
return read()
}
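readBundleExt is the decoding side of string bundling. A hedged round-trip sketch, assuming the matching bundleStrings option on the Encoder introduced in the same release:

import { Encoder } from 'cbor-x'

const enc = new Encoder({ bundleStrings: true })
const buf = enc.encode({ greeting: 'hello', subject: 'world' })
// Eligible strings are stored together in a leading bundle (tag 0xdff9); extensions
// 14 and 15 above slice them back out, so decoding restores the original object.
enc.decode(buf) // => { greeting: 'hello', subject: 'world' }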
function readJustLength() {
let token = src[position++] & 0x1f
if (token > 0x17) {
switch (token) {
case 0x18:
token = src[position++]
break
case 0x19:
token = dataView.getUint16(position)
position += 2
break
case 0x1a:
token = dataView.getUint32(position)
position += 4
break
}
}
return token
}
function loadShared() {
if (currentDecoder.getShared) {
let sharedData = saveState(() => {
// save the state in case getShared modifies our buffer
src = null
return currentDecoder.getShared()
}) || {}
let updatedStructures = sharedData.structures || []
currentDecoder.sharedVersion = sharedData.version
packedValues = currentDecoder.sharedValues = sharedData.packedValues
if (currentStructures === true)
currentDecoder.structures = currentStructures = updatedStructures
else
currentStructures.splice.apply(currentStructures, [0, updatedStructures.length].concat(updatedStructures))
}
}
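loadShared pulls previously saved shared state through the new getShared hook. A minimal sketch of providing that hook (the in-memory savedShared store is purely illustrative; per loadShared above, the returned object may carry structures, packedValues, and version):

import { Decoder } from 'cbor-x'

// Illustrative shared-state store; a real application might read this from a database.
let savedShared = { structures: [], version: 0 }

const decoder = new Decoder({
  getShared: () => savedShared
})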
function saveState(callback) {

@@ -898,2 +1092,4 @@ let savedSrcEnd = srcEnd

let savedReferenceMap = referenceMap
let savedBundledStrings = bundledStrings
// TODO: We may need to revisit this if we do more external calls to user code (since it could be slow)

@@ -913,2 +1109,3 @@ let savedSrc = new Uint8Array(src.slice(0, srcEnd)) // we copy the data in case it changes while external data is processed

referenceMap = savedReferenceMap
bundledStrings = savedBundledStrings
src = savedSrc

@@ -915,0 +1112,0 @@ sequentialMode = savedSequentialMode

(Minified UMD browser bundle: the regenerated build artifact exposing the global CBOR object, carrying the same decoder and encoder changes shown above.)
for(let e of s)m(e);// stop-code
return void(ve[Se++]=255)}// no extension found, write as object
k(s,!s.hasOwnProperty)}else if((this.mapsAsObjects?!1!==this.useTag259ForMaps:this.useTag259ForMaps)&&(ve[Se++]=217,ve[Se++]=1,ve[Se++]=3),n=s.size,24>n?ve[Se++]=160|n:256>n?(ve[Se++]=184,ve[Se++]=n):65536>n?(ve[Se++]=185,ve[Se++]=n>>8,ve[Se++]=255&n):(ve[Se++]=186,Ae.setUint32(Se,n),Se+=4),o.keyMap)for(let[e,t]of s)m(o.encodeKey(e)),m(t);else for(let[e,t]of s)m(e),m(t)}}else if("boolean"===a)ve[Se++]=s?245:244;else if("bigint"===a){if(s<BigInt(1)<<BigInt(64)&&0<=s)ve[Se++]=27,Ae.setBigUint64(Se,s);else if(s>-(BigInt(1)<<BigInt(64))&&0>s)ve[Se++]=59,Ae.setBigUint64(Se,-s-BigInt(1));else// overflow
if(this.largeBigIntToFloat)ve[Se++]=251,Ae.setFloat64(Se,+s);else throw new RangeError(s+" was too large to fit in CBOR 64-bit integer format, set largeBigIntToFloat to convert to float-64");Se+=8}else if("undefined"===a)ve[Se++]=247;else throw new Error("Unknown type: "+a)},k=!1===this.useRecords?this.variableMapSize?e=>{// this method is slightly slower, but generates "preferred serialization" (optimally small for smaller objects)
let t=Object.keys(e),s=Object.values(e),n=t.length;if(24>n?ve[Se++]=160|n:256>n?(ve[Se++]=184,ve[Se++]=n):65536>n?(ve[Se++]=185,ve[Se++]=n>>8,ve[Se++]=255&n):(ve[Se++]=186,Ae.setUint32(Se,n),Se+=4),o.keyMap)for(let e=0;e<n;e++)m(encodeKey(t[e])),m(s[e]);else for(let e=0;e<n;e++)m(t[e]),m(s[e])}:(e,s)=>{ve[Se++]=185;// always use map 16, so we can preallocate and set the length afterwards
let n=Se-t;Se+=2;let a=0;if(o.keyMap)for(let t in e)(s||e.hasOwnProperty(t))&&(m(o.encodeKey(t)),m(e[t]),a++);else for(let t in e)(s||e.hasOwnProperty(t))&&(m(t),m(e[t]),a++);ve[n++ +t]=a>>8,ve[n+t]=255&a}:/* sharedStructures ? // For highly stable structures, using for-in can a little bit faster
(object, safePrototype) => {

@@ -118,14 +146,15 @@ let nextTransition, transition = structures.transitions || (structures.transitions = Object.create(null))

target[objectOffset + start] = id
}*/a=>{let b,f=Object.keys(a),g=e.transitions||(e.transitions=Object.create(null)),h=0,k=f.length;for(let c,d=0;d<k;d++)c=f[d],b=g[c],b||(b=g[c]=Object.create(null),h++),g=b;let l=g[ua];if(void 0!==l)// tag two byte
qa[ta++]=217,qa[ta++]=na,qa[ta++]=l;else if(l=e.nextId++,l||(l=0,e.nextId=1),l>=oa&&(e.nextId=(l=j)+1),g[ua]=l,e[l]=f,c&&c.length<=j)// tag two byte
// tag number
qa[ta++]=217,qa[ta++]=na,qa[ta++]=l,d=!0;else{qa[ta++]=216,qa[ta++]=na,h&&(s+=t*h),p.length>=oa-j&&(p.shift()[ua]=void 0),p.push(g),q(k+2),v(f),qa[ta++]=25,qa[ta++]=na,qa[ta++]=l;// now write the values
for(let b=0;b<k;b++)v(a[f[b]]);return}24>k?qa[ta++]=128|k:q(k);for(let b=0;b<k;b++)v(a[f[b]])},x=a=>{var c=Math.min,d=Math.round,e=Math.max;let f;if(16777216<a){// special handling for really large buffers
if(a-b>pa)throw new Error("Encoded buffer would be larger than maximum buffer size");f=c(pa,4096*d(e((a-b)*(67108864<a?1.25:2),4194304)/4096))}else// faster handling for smaller buffers
f=(e(a-b<<2,qa.length-1)>>12)+1<<12;let g=new la(f);return ra=new DataView(g.buffer,0,f),qa.copy?qa.copy(g,0,b,a):g.set(qa.slice(b,a)),ta-=b,b=0,sa=g.length-10,qa=g}}useBuffer(a){// this means we are finished using our own buffer and we can write over it safely
qa=a,ra=new DataView(qa.buffer,qa.byteOffset,qa.byteLength),ta=0}}ja=[Date,Set,Error,RegExp,ArrayBuffer,ma,Uint8Array,Uint8ClampedArray,Uint16Array,Uint32Array,"undefined"==typeof BigUint64Array?function(){}:BigUint64Array,Int8Array,Int16Array,Int32Array,"undefined"==typeof BigInt64Array?function(){}:BigInt64Array,Float32Array,Float64Array],ia=[{tag:1,encode(a){let b=a.getTime()/1e3;(this.useTimestamp32||0===a.getMilliseconds())&&0<=b&&4294967296>b?(qa[ta++]=26,ra.setUint32(ta,b),ta+=4):(qa[ta++]=251,ra.setFloat64(ta,b),ta+=8)}},{tag:258,// https://github.com/input-output-hk/cbor-sets-spec/blob/master/CBOR_SETS.md
encode(a,b){let c=Array.from(a);b(c)}},{tag:27,// http://cbor.schmorp.de/generic-object
encode(a,b){b([a.name,a.message])}},{tag:27,// http://cbor.schmorp.de/generic-object
encode(a,b){b(["RegExp",a.source,a.flags])}},{encode(a,b,c){t(a,c)}},{encode(a,b,c){t(a,c)}},s(64),s(68),s(69),s(70),s(71),s(72),s(77),s(78),s(79),s(81),s(82)];let wa=new va({useRecords:!1});const xa=wa.encode,{NEVER:ya,ALWAYS:za,DECIMAL_ROUND:Aa,DECIMAL_FIT:Ba}=ea,Ca=1e3;a.ALWAYS=za,a.DECIMAL_FIT=Ba,a.DECIMAL_ROUND=Aa,a.Decoder=R,a.Encoder=va,a.FLOAT32_OPTIONS=ea,a.NEVER=ya,a.REUSE_BUFFER_MODE=Ca,a.Tag=W,a.addExtension=v,a.clearSource=o,a.decode=ca,a.decodeIter=function(a,b={}){if(!a||"object"!=typeof a)throw new Error("first argument must be an Iterable, Async Iterable, Iterator, Async Iterator, or a promise");const c=new R(b);let d;const e=a=>{let b;// if there's incomplete data from previous chunk, concatinate and try again
d&&(a=Buffer.concat([d,a]),d=void 0);try{b=c.decodeMultiple(a)}catch(c){if(c.incomplete)d=a.slice(c.lastPosition),b=c.values;else throw c}return b};if("function"==typeof a[Symbol.iterator])return function*(){for(const b of a)yield*e(b)}();return"function"==typeof a[Symbol.asyncIterator]?async function*(){for await(const b of a)yield*e(b)}():void 0},a.decodeMultiple=da,a.encode=xa,a.encodeIter=/**
}*/e=>{let t=Object.keys(e),r=Object.values(e);this.keyMap&&(t=t.map(e=>this.encodeKey(e)));let o,u=a.transitions||(a.transitions=Object.create(null)),c=0,l=t.length;//let parentRecordId
for(let s,n=0;n<l;n++)//if (!parentRecordId)
// parentRecordId = transition[RECORD_SYMBOL]
s=t[n],o=u[s],o||(o=u[s]=Object.create(null),c++),u=o;let p=u[Ee];if(void 0!==p)ve[Se++]=217,ve[Se++]=224|p>>8,ve[Se++]=255&p;else if(p=a.nextId++,p||(p=0,a.nextId=1),p>=Oe&&(a.nextId=(p=d)+1),u[Ee]=p,a[p]=t,s&&s.length<=d)ve[Se++]=217,ve[Se++]=224|p>>8,ve[Se++]=255&p,n=!0;else{Ae.setUint32(Se,3655335680),Se+=3,c&&(y+=b*c),h.length>=Oe-d&&(h.shift()[Ee]=void 0),h.push(u),M(l+2),m(57344+p),m(t);for(let t of Object.values(e))m(t);return}24>l?ve[Se++]=128|l:M(l);for(let t=0;t<l;t++)m(r[t])},O=e=>{var s=Math.min,n=Math.round,a=Math.max;let r;if(16777216<e){// special handling for really large buffers
if(e-t>Ue)throw new Error("Encoded buffer would be larger than maximum buffer size");r=s(Ue,4096*n(a((e-t)*(67108864<e?1.25:2),4194304)/4096))}else// faster handling for smaller buffers
r=(a(e-t<<2,ve.length-1)>>12)+1<<12;let i=new Me(r);return Ae=new DataView(i.buffer,0,r),ve.copy?ve.copy(i,0,t,e):i.set(ve.slice(t,e)),Se-=t,t=0,Re=i.length-10,ve=i}}useBuffer(e){// this means we are finished using our own buffer and we can write over it safely
ve=e,Ae=new DataView(ve.buffer,ve.byteOffset,ve.byteLength),Se=0}clearSharedData(){this.structures&&(this.structures=[]),this.sharedValues&&(this.sharedValues=void 0)}updateSharedData(){let e=this.sharedVersion||0;this.sharedVersion=e+1;let t=this.saveShared(new je(this.structures,this.sharedValues,this.sharedVersion),t=>(t&&t.version||0)==e);if(!1===t){// get updated structures and try again if the update failed
let e=this.getShared()||{};this.structures=e.structures||[],this.sharedValues=e.packedValues,this.sharedVersion=e.version}return t}}class je{constructor(e,t,s){this.structures=e,this.packedValues=t,this.version=s}}me=[Date,Set,Error,RegExp,ne,ArrayBuffer,Ie,Uint8Array,Uint8ClampedArray,Uint16Array,Uint32Array,"undefined"==typeof BigUint64Array?function(){}:BigUint64Array,Int8Array,Int16Array,Int32Array,"undefined"==typeof BigInt64Array?function(){}:BigInt64Array,Float32Array,Float64Array,je],be=[{tag:1,encode(e){let t=e.getTime()/1e3;(this.useTimestamp32||0===e.getMilliseconds())&&0<=t&&4294967296>t?(ve[Se++]=26,Ae.setUint32(Se,t),Se+=4):(ve[Se++]=251,Ae.setFloat64(Se,t),Se+=8)}},{tag:258,// https://github.com/input-output-hk/cbor-sets-spec/blob/master/CBOR_SETS.md
encode(e,t){let s=Array.from(e);t(s)}},{tag:27,// http://cbor.schmorp.de/generic-object
encode(e,t){t([e.name,e.message])}},{tag:27,// http://cbor.schmorp.de/generic-object
encode(e,t){t(["RegExp",e.source,e.flags])}},{getTag(e){return e.tag},encode(e,t){t(e.value)}},{encode(e,t,s){U(e,s)}},{encode(e,t,s){U(e,s)}},O(64),O(68),O(69),O(70),O(71),O(72),O(77),O(78),O(79),O(81),O(82),{encode(e,t){// write SharedData
let s=e.packedValues||[],n=e.structures||[];if(0<s.values.length){ve[Se++]=216,ve[Se++]=51,M(4);let e=s.values;t(e),M(0),M(0),packedObjectMap=Object.create(sharedPackedObjectMap||null);for(let t=0,s=e.length;t<s;t++)packedObjectMap[e[t]]=t}if(n){Ae.setUint32(Se,3655335424),Se+=3;let s=n.slice(0);s.unshift(57344),s.push(new ne(e.version,1399353956)),t(s)}else t(new ne(e.version,1399353956))}}];let xe=new _e({useRecords:!1});const Ce=xe.encode,{NEVER:Pe,ALWAYS:Le,DECIMAL_ROUND:Be,DECIMAL_FIT:Fe}=fe,De=512,Ke=1024;e.ALWAYS=Le,e.DECIMAL_FIT=Fe,e.DECIMAL_ROUND=Be,e.Decoder=X,e.Encoder=_e,e.FLOAT32_OPTIONS=fe,e.NEVER=Pe,e.REUSE_BUFFER_MODE=De,e.Tag=ne,e.addExtension=R,e.clearSource=m,e.decode=le,e.decodeIter=function(e,t={}){if(!e||"object"!=typeof e)throw new Error("first argument must be an Iterable, Async Iterable, Iterator, Async Iterator, or a promise");const s=new X(t);let n;const a=e=>{let t;// if there's incomplete data from previous chunk, concatinate and try again
n&&(e=Buffer.concat([n,e]),n=void 0);try{t=s.decodeMultiple(e)}catch(s){if(s.incomplete)n=e.slice(s.lastPosition),t=s.values;else throw s}return t};if("function"==typeof e[Symbol.iterator])return function*(){for(const t of e)yield*a(t)}();return"function"==typeof e[Symbol.asyncIterator]?async function*(){for await(const t of e)yield*a(t)}():void 0},e.decodeMultiple=pe,e.encode=Ce,e.encodeIter=/**
* Given an Iterable first argument, returns an Iterable where each value is encoded as a Buffer

@@ -136,2 +165,2 @@ * If the argument is only Async Iterable, the return value will be an Async Iterable.

* @returns {IterableIterator|Promise.<AsyncIterableIterator>}
*/function(a,b={}){if(!a||"object"!=typeof a)throw new Error("first argument must be an Iterable, Async Iterable, or a Promise for an Async Iterable");else{if("function"==typeof a[Symbol.iterator])return w(a,b);if("function"==typeof a.then||"function"==typeof a[Symbol.asyncIterator])return x(a,b);throw new Error("first argument must be an Iterable, Async Iterable, Iterator, Async Iterator, or a Promise")}},a.isNativeAccelerationEnabled=!1,a.mapsAsObjects=!0,a.roundFloat32=function(a){ga[0]=a;let b=aa[(127&ha[3])<<1|ha[2]>>7];return(b*a+(0<a?.5:-.5)>>0)/b},a.useRecords=!1,Object.defineProperty(a,"__esModule",{value:!0})});
*/function(e,t={}){if(!e||"object"!=typeof e)throw new Error("first argument must be an Iterable, Async Iterable, or a Promise for an Async Iterable");else{if("function"==typeof e[Symbol.iterator])return S(e,t);if("function"==typeof e.then||"function"==typeof e[Symbol.asyncIterator])return V(e,t);throw new Error("first argument must be an Iterable, Async Iterable, Iterator, Async Iterator, or a Promise")}},e.isNativeAccelerationEnabled=!1,e.roundFloat32=function(e){he[0]=e;let t=ue[(127&ye[3])<<1|ye[2]>>7];return(t*e+(0<e?.5:-.5)>>0)/t},Object.defineProperty(e,"__esModule",{value:!0})});

@@ -584,2 +584,14 @@ (function (CBOR, chai) {

const senmlData = [
{ bn: '/3303/0/5700', bt: 1278887, v: 35.5 },
{ t: 10, v: 34 },
{ t: 20, v: 33 },
{ t: 30, v: 32 },
{ t: 40, v: 31 },
{ t: 50, v: 30 }
];
const senmlKeys = { bs: -6, bv: -5, bu: -4, bt: -3, bn: -2, n: 0, u: 1, v: 2, vs: 3, t: 6, ut: 7, vd: 8 };
//import inspector from 'inspector'; inspector.open(9229, null, true); debugger

@@ -612,2 +624,23 @@ function tryRequire(module) {

suite('CBOR basic tests', function(){
test('encode/decode with keyMaps (basic)', function() {
var data = senmlData;
let cborSenml = new Encoder({ useRecords: false, keyMap: senmlKeys });
let cborBasic = new Encoder();
var serialized = cborSenml.encode(data);
var deserialized = cborSenml.decode(serialized);
assert(serialized.length < cborBasic.encode(data).length);
assert.deepEqual(deserialized, data);
});
test('encode/decode with keyMaps and Records)', function() {
var data = senmlData;
let cborSenml = new Encoder({ useRecords: true, keyMap: senmlKeys });
let cborBasic = new Encoder();
var serialized = cborSenml.encode(data);
var deserialized = cborSenml.decode(serialized);
assert(serialized.length < cborBasic.encode(data).length);
assert.deepEqual(deserialized, data);
});
test('encode/decode data', function(){

@@ -682,5 +715,6 @@ var data = {

var data = sampleData;
let structures = [];
let encoder = new Encoder({ structures, useRecords: true });
let sharedSerialized;
let encoder = new Encoder({ getStructures() { return }, saveStructures(shared) { sharedSerialized = encode(shared); }, useRecords: true });
var serialized = encoder.encode(data);
encoder = new Encoder({ getStructures() { return decode(sharedSerialized) }, saveStructures(shared) { sharedSerialized = encode(shared); }, useRecords: true });
var deserialized = encoder.decode(serialized);

@@ -698,4 +732,3 @@ assert.deepEqual(deserialized, data);

var data = sampleData;
let structures = [];
let encoder = new Encoder({ structures, pack: true, useRecords: true });
let encoder = new Encoder({ useStringRefs: true });
var serialized = encoder.encode(data);

@@ -739,2 +772,9 @@ var deserialized = encoder.decode(serialized);

});
test('pack/unpack sample data with bundled strings', function(){
var data = sampleData;
let encoder = new Encoder({ /*structures,*/ useRecords: false, bundleStrings: true });
var serialized = encoder.encode(data);
var deserialized = encoder.decode(serialized);
assert.deepEqual(deserialized, data);
});
if (typeof Buffer != 'undefined')

@@ -939,2 +979,3 @@ test('replace data', function(){

ancient: new Date(-3532219539133),
invalidDate: new Date('invalid')
};

@@ -949,2 +990,3 @@ let encoder = new Encoder();

assert.equal(deserialized.ancient.getTime(), -3532219539133);
assert.equal(deserialized.invalidDate.toString(), 'Invalid Date');
});

@@ -958,2 +1000,3 @@ test('map/date with options', function(){

date: new Date(1532219539011),
invalidDate: new Date('invalid')
};

@@ -970,2 +1013,3 @@ let encoder = new Encoder({

assert.equal(deserialized.date.getTime(), 1532219539000);
assert.isTrue(isNaN(deserialized.invalidDate.getTime()));
});

@@ -1020,3 +1064,3 @@ test('key caching', function() {

var serialized = encoder.encode(data);
assert.equal(serialized.length, 35);
assert.equal(serialized.length, 36);
var deserialized = encoder.decode(serialized);

@@ -1156,3 +1200,3 @@ assert.deepEqual(deserialized, data);

console.log('CBOR size', serialized.length);
let encoder = new Encoder({ structures });
let encoder = new Encoder({ structures, bundleStrings: true });
var serialized = encoder.encode(data);

@@ -1168,3 +1212,3 @@ console.log('CBOR w/ record ext size', serialized.length);

let structures = [];
let encoder = new Encoder({ structures });
let encoder = new Encoder({ structures, bundleStrings: true });
let buffer = typeof Buffer != 'undefined' ? Buffer.alloc(0x10000) : new Uint8Array(0x10000);

@@ -1171,0 +1215,0 @@

import { Decoder } from './decode'
export { addExtension, FLOAT32_OPTIONS } from './unpack'
export { addExtension, FLOAT32_OPTIONS } from './decode'
export class Encoder extends Decoder {

@@ -4,0 +4,0 @@ encode(value: any): Buffer

@@ -10,3 +10,3 @@ import { Decoder, mult10, Tag, typedArrays, addExtension as decodeAddExtension } from './decode.js'

const ByteArray = hasNodeBuffer ? Buffer : Uint8Array
const RECORD_STARTING_ID_PREFIX = 0x69 // tag 105/0x69
const RECORD_INLINE_ID = 0xdfff // temporary first-come first-serve tag // proposed tag: 0x7265 // 're'
const MAX_STRUCTURES = 0x100

@@ -19,2 +19,5 @@ const MAX_BUFFER_SIZE = hasNodeBuffer ? 0x100000000 : 0x7fd00000

let safeEnd
let bundledStrings = null
const MAX_BUNDLE_SIZE = 0xf000
const hasNonLatin = /[\u0080-\uFFFF]/
const RECORD_SYMBOL = Symbol('record-id')

@@ -32,3 +35,2 @@ export class Encoder extends Decoder {

options = options || {}
let lastSharedStructuresLength = 0
let encodeUtf8 = ByteArray.prototype.utf8Write ? function(string, position, maxBytes) {

@@ -42,3 +44,3 @@ return target.utf8Write(string, position, maxBytes)

let encoder = this
let maxSharedStructures = 64
let maxSharedStructures = options.maxSharedStructures || 128
let isSequential = options.sequential

@@ -49,2 +51,4 @@ if (isSequential) {

}
if (this.saveStructures)
this.saveShared = this.saveStructures
let samplingPackedValues, packedObjectMap, sharedValues = options.sharedValues

@@ -57,3 +61,2 @@ let sharedPackedObjectMap

}
}

@@ -63,4 +66,21 @@ let recordIdsToRemove = []

let serializationsSinceTransitionRebuild = 0
this.encode = function(value, encodeOptions) {
this.mapEncode = function(value, encodeOptions) {
// Experimental support for premapping keys using _keyMap instead of keyMap - not optimised yet
if (this._keyMap && !this._mapped) {
//console.log('encoding ', value)
switch (value.constructor.name) {
case 'Array':
value = value.map(r => this.encodeKeys(r))
break
//case 'Map':
// value = this.encodeKeys(value)
// break
}
//this._mapped = true
}
return this.encode(value, encodeOptions)
}
this.encode = function(value, encodeOptions) {
if (!target) {

@@ -82,6 +102,21 @@ target = new ByteArrayAllocate(8192)

referenceMap = encoder.structuredClone ? new Map() : null
if (encoder.bundleStrings && typeof value !== 'string') {
bundledStrings = []
bundledStrings.size = Infinity // force a new bundle start on first string
} else
bundledStrings = null
sharedStructures = encoder.structures
if (sharedStructures) {
if (sharedStructures.uninitialized)
encoder.structures = sharedStructures = encoder.getStructures()
if (sharedStructures.uninitialized) {
let sharedData = encoder.getShared() || {}
encoder.structures = sharedStructures = sharedData.structures || []
encoder.sharedVersion = sharedData.version
let sharedValues = encoder.sharedValues = sharedData.packedValues
if (sharedValues) {
sharedPackedObjectMap = {}
for (let i = 0, l = sharedValues.length; i < l; i++)
sharedPackedObjectMap[sharedValues[i]] = i
}
}
let sharedStructuresLength = sharedStructures.length

@@ -95,2 +130,3 @@ if (sharedStructuresLength > maxSharedStructures && !isSequential)

let keys = sharedStructures[i]
//console.log('shared struct keys:', keys)
if (!keys)

@@ -109,3 +145,2 @@ continue

}
lastSharedStructuresLength = sharedStructures.length
}

@@ -143,5 +178,8 @@ if (!isSequential)

encode(value)
if (bundledStrings) {
writeBundles(start, encode)
}
encoder.offset = position // update the offset so next serialization doesn't write over our buffer, but can continue writing to same buffer sequentially
if (referenceMap && referenceMap.idsToInsert) {
position += referenceMap.idsToInsert.length * 8
position += referenceMap.idsToInsert.length * 2
if (position > safeEnd)

@@ -154,3 +192,3 @@ makeRoom(position)

}
if (encodeOptions === REUSE_BUFFER_MODE) {
if (encodeOptions & REUSE_BUFFER_MODE) {
target.start = start

@@ -160,3 +198,3 @@ target.end = position

}
return target.subarray(start, position) // position can change if we call pack again in saveStructures, so we get the buffer now
return target.subarray(start, position) // position can change if we call pack again in saveShared, so we get the buffer now
} finally {

@@ -179,22 +217,15 @@ if (sharedStructures) {

}
if (hasSharedUpdate && encoder.saveStructures) {
if (encoder.structures.length > maxSharedStructures) {
encoder.structures = encoder.structures.slice(0, maxSharedStructures)
}
// we can't rely on start/end with REUSE_BUFFER_MODE since they will (probably) change when we save
let returnBuffer = target.subarray(start, position)
let shared = encoder.structures || []
if (sharedValues) {
shared = shared.concat(sharedValues)
}
if (encoder.saveStructures(encoder.structures, lastSharedStructuresLength) === false) {
// get updated structures and try again if the update failed
encoder.structures = encoder.getStructures() || []
return encoder.encode(value)
}
lastSharedStructuresLength = shared.length
return returnBuffer
}
if (hasSharedUpdate && encoder.saveShared) {
if (encoder.structures.length > maxSharedStructures) {
encoder.structures = encoder.structures.slice(0, maxSharedStructures)
}
// we can't rely on start/end with REUSE_BUFFER_MODE since they will (probably) change when we save
let returnBuffer = target.subarray(start, position)
if (encoder.updateSharedData() === false)
return encoder.encode(value) // re-encode if it fails
return returnBuffer
}
if (encodeOptions & RESET_BUFFER_MODE)
position = start
}

@@ -206,4 +237,4 @@ }

sharedPackedObjectMap = Object.create(null)
return ({ threshold }) => {
threshold = threshold || 4
return (options) => {
let threshold = options && options.threshold || 4
let position = this.pack ? options.maxPrivatePackedValues || 16 : 0

@@ -219,2 +250,3 @@ if (!sharedValues)

}
while (this.saveShared && this.updateSharedData() === false) {}
samplingPackedValues = null

@@ -268,2 +300,29 @@ }

let strLength = value.length
if (bundledStrings && strLength >= 4 && strLength < 0x400) {
if ((bundledStrings.size += strLength) > MAX_BUNDLE_SIZE) {
let extStart
let maxBytes = (bundledStrings[0] ? bundledStrings[0].length * 3 + bundledStrings[1].length : 0) + 10
if (position + maxBytes > safeEnd)
target = makeRoom(position + maxBytes)
target[position++] = 0xd9 // tag 16-bit
target[position++] = 0xdf // tag 0xdff9
target[position++] = 0xf9
// TODO: If we only have one bundle with any string data, only write one string bundle
target[position++] = bundledStrings.position ? 0x84 : 0x82 // array of 4 or 2 elements depending on if we write bundles
target[position++] = 0x1a // 32-bit unsigned int
extStart = position - start
position += 4 // reserve for writing bundle reference
if (bundledStrings.position) {
writeBundles(start, encode) // write the last bundles
}
bundledStrings = ['', ''] // create new ones
bundledStrings.size = 0
bundledStrings.position = extStart
}
let twoByte = hasNonLatin.test(value)
bundledStrings[twoByte ? 0 : 1] += value
target[position++] = twoByte ? 0xce : 0xcf
encode(strLength);
return
}
let headerSize

@@ -395,12 +454,12 @@ // first we estimate the header size, so we can write to the correct location

if (referee) {
if (!referee.id) {
target[position++] = 0xd8
target[position++] = 29 // http://cbor.schmorp.de/value-sharing
target[position++] = 0x19 // 16-bit uint
if (!referee.references) {
let idsToInsert = referenceMap.idsToInsert || (referenceMap.idsToInsert = [])
referee.id = idsToInsert.push(referee)
referee.references = []
idsToInsert.push(referee)
}
target[position++] = 0xd9
target[position++] = 40010 >> 8
target[position++] = 40010 & 0xff
target[position++] = 0x1a // uint32
targetView.setUint32(position, referee.id)
position += 4
referee.references.push(position - start)
position += 2 // TODO: also support 32-bit
return

@@ -445,7 +504,14 @@ } else

}
for (let [ key, entryValue ] of value) {
encode(key)
encode(entryValue)
if (encoder.keyMap) {
for (let [ key, entryValue ] of value) {
encode(encoder.encodeKey(key))
encode(entryValue)
}
} else {
for (let [ key, entryValue ] of value) {
encode(key)
encode(entryValue)
}
}
} else {
} else {
for (let i = 0, l = extensions.length; i < l; i++) {

@@ -455,3 +521,3 @@ let extensionClass = extensionClasses[i]

let extension = extensions[i]
let tag = extension.tag
let tag = extension.tag || extension.getTag && extension.getTag(value)
if (tag < 0x18) {

@@ -518,2 +584,3 @@ target[position++] = 0xc0 | tag

let keys = Object.keys(object)
let vals = Object.values(object)
let length = keys.length

@@ -535,5 +602,12 @@ if (length < 0x18) {

let key
for (let i = 0; i < length; i++) {
encode(key = keys[i])
encode(object[key])
if (encoder.keyMap) {
for (let i = 0; i < length; i++) {
encode(encodeKey(keys[i]))
encode(vals[i])
}
} else {
for (let i = 0; i < length; i++) {
encode(keys[i])
encode(vals[i])
}
}

@@ -546,8 +620,14 @@ } :

let size = 0
for (let key in object) {
if (safePrototype || object.hasOwnProperty(key)) {
encode(key)
if (encoder.keyMap) {
for (let key in object) if (safePrototype || object.hasOwnProperty(key)) {
encode(encoder.encodeKey(key))
encode(object[key])
size++
}
} else {
for (let key in object) if (safePrototype || object.hasOwnProperty(key)) {
encode(key)
encode(object[key])
size++
}
}

@@ -600,7 +680,12 @@ target[objectOffset++ + start] = size >> 8

let keys = Object.keys(object)
let vals = Object.values(object)
if (this.keyMap) keys = keys.map(k => this.encodeKey(k))
let nextTransition, transition = structures.transitions || (structures.transitions = Object.create(null))
let newTransitions = 0
let length = keys.length
//let parentRecordId
for (let i = 0; i < length; i++) {
//if (!parentRecordId)
let key = keys[i]
// parentRecordId = transition[RECORD_SYMBOL]
nextTransition = transition[key]

@@ -615,5 +700,5 @@ if (!nextTransition) {

if (recordId !== undefined) {
target[position++] = 0xd9 // tag two byte
target[position++] = RECORD_STARTING_ID_PREFIX
target[position++] = recordId
target[position++] = 0xd9
target[position++] = (recordId >> 8) | 0xe0
target[position++] = recordId & 0xff
} else {

@@ -631,9 +716,9 @@ recordId = structures.nextId++

if (sharedStructures && sharedStructures.length <= maxSharedStructures) {
target[position++] = 0xd9 // tag two byte
target[position++] = RECORD_STARTING_ID_PREFIX
target[position++] = recordId // tag number
target[position++] = 0xd9
target[position++] = (recordId >> 8) | 0xe0
target[position++] = recordId & 0xff
hasSharedUpdate = true
} else {
target[position++] = 0xd8
target[position++] = RECORD_STARTING_ID_PREFIX
targetView.setUint32(position, 0xd9dfff00) // tag two byte, then record definition id
position += 3
if (newTransitions)

@@ -646,9 +731,5 @@ transitionsCount += serializationsSinceTransitionRebuild * newTransitions

writeArrayHeader(length + 2)
encode(0xe000 + recordId)
encode(keys)
target[position++] = 0x19 // uint16
target[position++] = RECORD_STARTING_ID_PREFIX
target[position++] = recordId
// now write the values
for (let i =0; i < length; i++)
encode(object[keys[i]])
for (let v of Object.values(object)) encode(v)
return

@@ -662,4 +743,3 @@ }

}
for (let i =0; i < length; i++)
encode(object[keys[i]])
for (let i =0; i < length; i++) encode(vals[i])
}

@@ -694,7 +774,28 @@ const makeRoom = (end) => {

}
clearSharedData() {
if (this.structures)
this.structures = []
if (this.sharedValues)
this.sharedValues = undefined
}
updateSharedData() {
let lastVersion = this.sharedVersion || 0
this.sharedVersion = lastVersion + 1
let saveResults = this.saveShared(new SharedData(this.structures, this.sharedValues, this.sharedVersion),
existingShared => (existingShared && existingShared.version || 0) == lastVersion)
if (saveResults === false) {
// get updated structures and try again if the update failed
let sharedData = this.getShared() || {}
this.structures = sharedData.structures || []
this.sharedValues = sharedData.packedValues
this.sharedVersion = sharedData.version
}
return saveResults
}
}
function copyBinary(source, target, targetOffset, offset, endOffset) {
while (offset < endOffset) {
target[targetOffset++] = source[offset++]
class SharedData {
constructor(structures, values, version) {
this.structures = structures
this.packedValues = values
this.version = version
}

@@ -770,7 +871,7 @@ }

extensionClasses = [ Date, Set, Error, RegExp, ArrayBuffer, ByteArray,
extensionClasses = [ Date, Set, Error, RegExp, Tag, ArrayBuffer, ByteArray,
Uint8Array, Uint8ClampedArray, Uint16Array, Uint32Array,
typeof BigUint64Array == 'undefined' ? function() {} : BigUint64Array, Int8Array, Int16Array, Int32Array,
typeof BigInt64Array == 'undefined' ? function() {} : BigInt64Array,
Float32Array, Float64Array]
Float32Array, Float64Array, SharedData]

@@ -811,2 +912,9 @@ //Object.getPrototypeOf(Uint8Array.prototype).constructor /*TypedArray*/

}, {
getTag(tag) {
return tag.tag
},
encode(tag, encode) {
encode(tag.value)
}
}, {
encode(arrayBuffer, encode, makeRoom) {

@@ -829,3 +937,31 @@ writeBuffer(arrayBuffer, makeRoom)

typedArrayEncoder(81),
typedArrayEncoder(82)]
typedArrayEncoder(82),
{
encode(sharedData, encode) { // write SharedData
let packedValues = sharedData.packedValues || []
let sharedStructures = sharedData.structures || []
if (packedValues.values.length > 0) {
target[position++] = 0xd8 // one-byte tag
target[position++] = 51 // tag 51 for packed shared structures https://www.potaroo.net/ietf/ids/draft-ietf-cbor-packed-03.txt
writeArrayHeader(4)
let valuesArray = packedValues.values
encode(valuesArray)
writeArrayHeader(0) // prefixes
writeArrayHeader(0) // suffixes
packedObjectMap = Object.create(sharedPackedObjectMap || null)
for (let i = 0, l = valuesArray.length; i < l; i++) {
packedObjectMap[valuesArray[i]] = i
}
}
if (sharedStructures) {
targetView.setUint32(position, 0xd9dffe00)
position += 3
let definitions = sharedStructures.slice(0)
definitions.unshift(0xe000)
definitions.push(new Tag(sharedData.version, 0x53687264))
encode(definitions)
} else
encode(new Tag(sharedData.version, 0x53687264))
}
}]

@@ -870,19 +1006,20 @@ function typedArrayEncoder(tag) {

let nextId
let distanceToMove = idsToInsert.length * 8
let distanceToMove = idsToInsert.length * 2
let lastEnd = serialized.length - distanceToMove
idsToInsert.sort((a, b) => a.offset > b.offset ? 1 : -1)
for (let id = 0; id < idsToInsert.length; id++) {
let referee = idsToInsert[id]
referee.id = id
for (let position of referee.references) {
serialized[position++] = id >> 8
serialized[position] = id & 0xff
}
}
while (nextId = idsToInsert.pop()) {
let offset = nextId.offset
let id = nextId.id
serialized.copyWithin(offset + distanceToMove, offset, lastEnd)
distanceToMove -= 8
distanceToMove -= 2
let position = offset + distanceToMove
serialized[position++] = 0xd9
serialized[position++] = 40009 >> 8
serialized[position++] = 40009 & 0xff
serialized[position++] = 0x1a // uint32
serialized[position++] = id >> 24
serialized[position++] = (id >> 16) & 0xff
serialized[position++] = (id >> 8) & 0xff
serialized[position++] = id & 0xff
serialized[position++] = 0xd8
serialized[position++] = 28 // http://cbor.schmorp.de/value-sharing
lastEnd = offset

@@ -892,2 +1029,9 @@ }

}
function writeBundles(start, encode) {
targetView.setUint32(bundledStrings.position + start, position - bundledStrings.position - start + 1) // the offset to bundle
let writeStrings = bundledStrings
bundledStrings = null
encode(writeStrings[0])
encode(writeStrings[1])
}

@@ -908,2 +1052,4 @@ export function addExtension(extension) {

export const { NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT } = FLOAT32_OPTIONS
export const REUSE_BUFFER_MODE = 1000
export const REUSE_BUFFER_MODE = 512
export const RESET_BUFFER_MODE = 1024
export { Encoder, addExtension, encode, NEVER, ALWAYS, DECIMAL_ROUND, DECIMAL_FIT, REUSE_BUFFER_MODE } from './encode.js'
export { Tag, Decoder, decodeMultiple, decode, FLOAT32_OPTIONS, clearSource, roundFloat32, isNativeAccelerationEnabled } from './decode.js'
export { decodeIter, encodeIter } from './iterators.js'
export const useRecords = false
export const mapsAsObjects = true
{
"name": "cbor-x",
"author": "Kris Zyp",
"version": "0.9.4",
"description": "Ultra-fast CBOR implementation with tag extensions for records and structured cloning",
"version": "1.0.0",
"description": "Ultra-fast and conformant CBOR (RFC 8949) implementation with support for numerous tag extensions including records and structured cloning",
"license": "MIT",

@@ -55,6 +55,13 @@ "types": "./index.d.ts",

},
"files": [
"/dist",
"*.md",
"/*.js",
"/*.ts"
],
"optionalDependencies": {
"cbor-extract": "^0.3.2"
"cbor-extract": "^1.0.0"
},
"devDependencies": {
"@rollup/plugin-json": "^4.1.0",
"@types/node": "latest",

@@ -68,5 +75,7 @@ "async": "^3",

"rollup": "^1.20.3",
"@rollup/plugin-json": "^4.1.0",
"rollup-plugin-babel-minify": "^9.0.0"
},
"dependencies": {
"esbuild": "^0.13.15"
}
}

@@ -10,8 +10,10 @@ # cbor-x

The cbor-x package is an extremely fast CBOR NodeJS/JavaScript implementation. Currently, it is significantly faster than any other known implementations, faster than Avro (for JS), and generally faster than native V8 JSON.stringify/parse, on NodeJS. It implements the CBOR format as specificed in [RFC-8949](https://www.rfc-editor.org/rfc/rfc8949.html), numerous [registered IANA tag extensions](https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml) (the `x` in cbor-x), [RFC-8746](https://tools.ietf.org/html/rfc8746) and proposed optional [record extension](https://github.com/kriszyp/cbor-records), for defining record structures that makes CBOR even faster and more compact, often over twice as fast as even native JSON functions, several times faster than other JS implementations, and 15-50% more compact. See the performance section for more details. Structured cloning (with support for cyclical references) is supported through these tag extensions.
The cbor-x package is an extremely fast and conformant CBOR NodeJS/JavaScript implementation. Currently, it is 3-10x faster than any other CBOR JS implementation (including cbor-js and cborg), faster than most MessagePack encoders and Avro, and generally faster than native V8 JSON.stringify/parse, on NodeJS. It implements the CBOR format as specified in [RFC-8949](https://www.rfc-editor.org/rfc/rfc8949.html), [RFC-8746](https://tools.ietf.org/html/rfc8746), [RFC-8742](https://datatracker.ietf.org/doc/html/rfc8742), [Packed CBOR](https://www.ietf.org/id/draft-ietf-cbor-packed-03.html), numerous [registered IANA tag extensions](https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml) (the `x` in cbor-x), and the proposed optional [record extension](https://github.com/kriszyp/cbor-records), for defining record structures that make CBOR even faster and more compact, often over twice as fast as even native JSON functions, and 15-50% more compact. See the performance section for more details. Structured cloning (with support for cyclical references) is supported through these tag extensions.
<img align="right" src="./assets/performance.png" width="380"/>
## Basic Usage
Install on NodeJS with:
Install with:
```

@@ -21,3 +23,3 @@ npm i cbor-x

And `import` or `require` it for basic standard serialization/encoding (`encode`) and deserialization/decoding (`decode`) functions:
```
```JavaScript
import { decode, encode } from 'cbor-x';

@@ -27,6 +29,6 @@ let serializedAsBuffer = encode(value);

```
This `encode` function will generate standard CBOR without any extensions that should be compatible with any standard CBOR parser/decoder. It will serialize JavaScript objects as CBOR `map`s by default. The `decode` function will deserialize CBOR `map`s as an `Object` with the properties from the map.
This `encode` function will generate standard CBOR, without any extensions, and should be compatible with any standard CBOR parser/decoder. It will serialize JavaScript objects as CBOR `map`s by default. The `decode` function will deserialize CBOR `map`s as an `Object` with the properties from the map. The cbor-x package runs on any modern JS platform, but has additional optimizations for NodeJS usage (and will use a node addon for a performance boost, as an optional dependency).
## Node Usage
The cbor-x package runs on any modern JS platform, but is optimized for NodeJS usage (and will use a node addon for performance boost as an optional dependency).
## Deno Usage
Cbor-x modules are standard ESM modules and can be loaded directly from the [deno.land registry for cbor](https://deno.land/x/cbor) for use in Deno. The standard encode and decode functionality is available on Deno, like other platforms.

@@ -36,3 +38,3 @@ ### Streams

```
```JavaScript
import { EncoderStream } from 'cbor-x';

@@ -44,3 +46,3 @@ let stream = new EncoderStream();

Or for a full example of sending and receiving data on a stream:
```
```JavaScript
import { EncoderStream } from 'cbor-x';

@@ -60,7 +62,7 @@ let sendingStream = new EncoderStream();

## Deno Usage
CBOR modules are standard ESM modules and can be loaded directly from github (https://raw.githubusercontent.com/kriszyp/cbor-x/master/index.js) or downloaded and used directly in Deno. The standard encode and decode functionality is available on Deno, like other platforms.
Cbor-x modules are standard ESM modules and can be loaded directly from the [deno.land registry for cbor](https://deno.land/x/cbor) for use in Deno. The standard pack/encode and unpack/decode functionality is available on Deno, like other platforms.
## Browser Usage
Cbor-x works as standalone JavaScript as well, and runs on modern browsers. It includes a bundled script, at `dist/index.js` for ease of direct loading:
```
```HTML
<script src="node_modules/cbor-x/dist/index.js"></script>

@@ -72,3 +74,3 @@ ```

For module-based development, it is recommended that you directly import the module of interest, to minimize dependencies that get pulled into your application:
```
```JavaScript
import { decode } from 'cbor-x/decode' // if you only need to decode

@@ -79,3 +81,3 @@ ```

You can also use cbor-x for [structured cloning](https://html.spec.whatwg.org/multipage/structured-data.html). By enabling the `structuredClone` option, you can include references to other objects or cyclic references, and object identity will be preserved. Structured cloning also enables preserving certain typed objects like `Error`, `Set`, `RegExp` and TypedArray instances, using [registered CBOR tag extensions](https://www.iana.org/assignments/cbor-tags/cbor-tags.xhtml). For example:
```
```JavaScript
let obj = {

@@ -98,3 +100,3 @@ set: new Set(['a', 'b']),

There is a critical difference between maps (or dictionaries) that hold an arbitrary set of keys and values (JavaScript `Map` is designed for these), and records or object structures that have a well-defined set of fields. Typical JS objects/records often have many instances that (re)use the same structure. By using the record extension, this distinction is preserved in CBOR, and the encoding can reuse structures, which not only provides better type preservation, but yields much more compact encodings and increases decoding performance by 2-3x. Cbor-x automatically generates record definitions that are reused and referenced by objects with the same structure. Records use CBOR's tags to align well with CBOR's tag/extension mechanism. There are a number of ways to use this to our advantage. For large object structures with repeating nested objects with similar structures, simply serializing with the record extension can yield significant benefits. To use the record structures extension, we create a new `Encoder` instance. By default a new `Encoder` instance will have the record extension enabled:
```
```JavaScript
import { Encoder } from 'cbor-x';

@@ -114,3 +116,3 @@ let encoder = new Encoder();

```
```JavaScript
import { Encoder } from 'cbor-x';

@@ -122,3 +124,3 @@ let encoder = new Encoder({

If you are working with persisted data, you will need to persist the `structures` data when it is updated. Cbor-x provides an API for loading and saving the `structures` on demand (which is robust and can be used in multiple-process situations where other processes may be updating this same `structures` array); we just need to provide a way to store the generated shared structure so it is available to deserialize stored data in the future:
```
```JavaScript
import { Encoder } from 'cbor-x';

@@ -152,4 +154,31 @@ let encoder = new Encoder({

```
### KeyMaps for Senml
KeyMaps can be used to remap properties of source Objects and Maps to numerical equivalents for more efficient encoding.
The principal driver for this feature is to support the `application/senml+cbor` content encoding as defined in https://datatracker.ietf.org/doc/html/rfc8428#section-6, for use in LWM2M applications (see http://www.openmobilealliance.org/release/LightweightM2M/V1_2-20201110-A/HTML-Version/OMA-TS-LightweightM2M_Core-V1_2-20201110-A.html#7-4-7-0-747-SenML-CBOR)
Records are also supported in conjunction with keyMaps, but they are disabled by default when a keyMap is specified, since using the two features together does not add any compression efficiency unless the data arrays are quite large (> 10 items).
```JavaScript
import { Encoder } from 'cbor-x'
const data = [
{ bn: '/3303/0/5700', bt: 1278887, v: 35.5 },
{ t: 10, v: 34 },
{ t: 20, v: 33 },
{ t: 30, v: 32 },
{ t: 40, v: 31 },
{ t: 50, v: 30 }
]
let senmlKeys = { bs: -6, bv: -5, bu: -4, bt: -3, bn: -2, bver: -1, n: 0, u: 1, v: 2, vs: 3, vb: 4, s: 5, t: 6, ut: 7, vd: 8 }
let senmlCbor = new Encoder({ keyMap: senmlKeys })
let basicCbor = new Encoder()
let senmlBuff = senmlCbor.encode(data)
let basicBuff = basicCbor.encode(data)
console.log('Senml CBOR size:', senmlBuff.length) // 77
console.log('Basic CBOR size:', basicBuff.length) // 90
assert.deepEqual(senmlCbor.decode(senmlBuff), data)
```
### CBOR Packing
[Packed CBOR](https://datatracker.ietf.org/doc/html/draft-ietf-cbor-packed) is an additional specification for CBOR which allows for compact encoding of data that has repeated values. Cbor-x supports decoding packed CBOR with no flags or options needed. Cbor-x can also optionally generate packed CBOR (with the `pack` option), which will cause the encoder to look for repeated strings in a data structure that is being encoded, and store the strings in a packed table that can be referenced, to reduce encoding size. This involves extra overhead and reduces encoding performance, and generally does not yield as much compaction as standard compression tools. However, it can be much faster than encoding plus compression, while still providing some level of reduction in encoding size. In addition to size reduction, packed CBOR is also usually faster to decode (assuming that some repetitive values could be found/packed).
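As a rough illustration, here is a minimal sketch of enabling the `pack` option described above (the sample data is hypothetical; packing only pays off when values repeat):
```JavaScript
import { Encoder } from 'cbor-x';

// sketch: the `pack` option asks the encoder to collect repeated strings into a packed table
let packingEncoder = new Encoder({ pack: true });
let records = [ // hypothetical sample data with repeated string values
  { status: 'active', region: 'us-east' },
  { status: 'active', region: 'us-east' },
  { status: 'inactive', region: 'us-east' }
];
let packed = packingEncoder.encode(records);
let roundTripped = packingEncoder.decode(packed); // decoding packed CBOR needs no extra options
```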

@@ -159,5 +188,7 @@

## Options
The following options can be provided to the Encoder or Decoder constructor:
* `keyMap` - This can be set to an object which will be used to map keys in the source Object or Map to other keys, including integers. This allows for more efficient encoding, and enables support for numeric CBOR key encodings such as those used by `application/senml+cbor` (https://datatracker.ietf.org/doc/html/rfc8428#section-6)
* `useRecords` - Setting this to `false` disables the record extension and stores JavaScript objects as CBOR maps (with tag 259), and decodes maps as JavaScript `Object`s, which ensures compatibility with other decoders (see the sketch below).
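For example, a minimal sketch of disabling the record extension for maximum interoperability (the sample object is illustrative):
```JavaScript
import { Encoder } from 'cbor-x';

// sketch: with useRecords disabled, plain objects are written as standard CBOR maps
// that any conformant CBOR decoder can read
let plainEncoder = new Encoder({ useRecords: false });
let buffer = plainEncoder.encode({ name: 'sensor-1', value: 35.5 }); // hypothetical sample object
let object = plainEncoder.decode(buffer); // decoded back as a plain JavaScript Object
```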

@@ -171,2 +202,3 @@ * `structures` - Provides the array of structures that is to be used for record extension, if you want the structures saved and used again. This array will be modified in place with new record structures that are serialized (if less than 64 structures are in the array).

* `copyBuffers` - When decoding a CBOR message with binary data (Buffers are encoded as binary data), copy the buffer rather than providing a slice/view of the buffer. If you want your input data to be collected or modified while the decoded embedded buffer continues to live on, you can use this option (there is extra overhead to copying).
* `bundleStrings` - If `true`, this uses a custom extension that bundles strings together so that they can be decoded more quickly on browsers and Deno, which do not have access to the NodeJS addon. Because this is a custom extension, both the encoder and decoder need to support it. This can yield significant decoding performance increases on browsers (30%-50%); see the sketch below.
* `useTimestamp32` - Encode JS `Date`s in 32-bit format when possible by dropping the milliseconds. This is a more efficient encoding of dates. You can also cause dates to use 32-bit format by manually setting the milliseconds to zero (`date.setMilliseconds(0)`).
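A minimal sketch of the `bundleStrings` option, mirroring the bundled-strings test shown earlier (the sample data is hypothetical):
```JavaScript
import { Encoder } from 'cbor-x';

// sketch: bundle strings together for faster decoding on platforms without the node addon;
// both sides must be cbor-x, since string bundling is a custom (non-standard) extension
let bundlingEncoder = new Encoder({ useRecords: false, bundleStrings: true });
let serialized = bundlingEncoder.encode({ title: 'hello', body: 'world' }); // hypothetical sample data
let deserialized = bundlingEncoder.decode(serialized);
```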

@@ -180,3 +212,3 @@ * `sequential` - Encode structures in serialized data, and reference previously encoded structures with expectation that decoder will read the encoded structures in the same order as encoded, with `unpackMultiple`.

By default all non-integer numbers are serialized as 64-bit floats (doubles). This is fast, and ensures maximum precision. However, real-world data often doesn't need 64 bits of precision, and using 32-bit encoding can be much more space efficient. There are several options that provide more efficient encodings. Using the decimal rounding options for encoding and decoding provides lossless storage of common decimal representations like 7.99, in a more efficient 32-bit format (rather than 64-bit). The `useFloat32` property has several possible options, available from the module as constants:
```
```JavaScript
import { ALWAYS, DECIMAL_ROUND, DECIMAL_FIT } from 'cbor-x'

@@ -246,3 +278,3 @@ ```

You can add your own custom extensions, which can be used to encode specific types/classes in certain ways. This is done by using the `addExtension` function, and specifying the class, extension type code (custom extensions should be a number greater than 40500, all others are reserved for CBOR or cbor-x), and your encode and decode functions (or just the one you need). You can use cbor-x encoding and decoding within your extensions:
```
```JavaScript
import { addExtension, Encoder } from 'cbor-x';

@@ -297,3 +329,6 @@

* 3 - Negative BigInt
* 6 - Packed string reference
* 27 - Generic named objects (used for Error, RegExp)
* 28, 29 - Value sharing/object referencing
* 51 - Packed table
* 64 - Uint8Array

@@ -313,7 +348,7 @@ * 68 - Uint8ClampedArray

* 259 - Map
* 40009, 40010 - Pointers for cycles
* 57344 - 57599 - Records
## Alternate Encoding/Package
The high-performance serialization and deserialization algorithms in this package are also available in the [msgpackr](https://github.com/kriszyp/msgpackr) for the MessagePack format, with the same API and design. A quick summary of the pros and cons of using MessagePack vs CBOR are:
* MessagePack has wider adoption, and, at least with this implementation is slightly more efficient (by roughly 1%, but YMMV).
* MessagePack has wider adoption and msgpackr has broader usage.
* CBOR has an [official IETF standardization track](https://www.rfc-editor.org/rfc/rfc8949.html), and the record extension is conceptually/philosophically a better fit for CBOR tags.

@@ -320,0 +355,0 @@

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet
