Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

cbor-x

Package Overview
Dependencies
Maintainers
1
Versions
42
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

cbor-x - npm Package Compare versions

Comparing version 0.3.0 to 0.3.1

35

decode.js

@@ -20,2 +20,3 @@ "use strict"

let dataView
let needsBufferCopy
let defaultOptions = {

@@ -508,3 +509,3 @@ useRecords: false,

function readBin(length) {
return currentDecoder.copyBuffers ?
return (currentDecoder.copyBuffers || needsBufferCopy) ?
// specifically use the copying slice (not the node one)

@@ -537,3 +538,4 @@ Uint8Array.prototype.slice.call(src, position, position += length) :

// id extension (for structured clones)
let id = dataView.getUint32(position - 4)
let id = dataView.getUint32(++position)
position += 4
if (!referenceMap)

@@ -559,5 +561,6 @@ referenceMap = new Map()

currentExtensions[10] = (data) => {
currentExtensions[10] = () => {
// pointer extension (for structured clones)
let id = dataView.getUint32(position - 4)
let id = dataView.getUint32(++position)
position += 4
let refEntry = referenceMap.get(id)

@@ -572,9 +575,14 @@ refEntry.used = true

currentExtensions[12] = (data) => {
let typeCode = data[0]
let typedArrayName = typedArrays[typeCode]
if (!typedArrayName)
throw new Error('Could not find typed array for code ' + typeCode)
// we have to always slice/copy here to get a new ArrayBuffer that is word/byte aligned
return new glbl[typedArrayName](Uint8Array.prototype.slice.call(data, 1).buffer)
// Extension 12 (structured-clone typed array): payload is a CBOR pair
// [typeCode, binary]; rebuilds the typed array from a copied buffer.
currentExtensions[12] = () => {
// force the nested binary read to copy (readBin checks this flag), so the
// resulting ArrayBuffer is freshly allocated and word/byte aligned rather
// than a view into the source buffer
needsBufferCopy = true
try {
let [ typeCode, buffer ] = read()
// typedArrays maps the numeric code to a constructor name — TODO confirm mapping covers all codes emitted by the encoder
let typedArrayName = typedArrays[typeCode]
if (!typedArrayName)
throw new Error('Could not find typed array for code ' + typeCode)
// we have to always slice/copy here to get a new ArrayBuffer that is word/byte aligned
return new glbl[typedArrayName](buffer.buffer)
} finally {
// always restore the flag, even if read() throws, so later reads are not forced to copy
needsBufferCopy = false
}
}

@@ -589,6 +597,3 @@ currentExtensions[13] = () => {

return new Date(read() * 1000)
} // notepack defines extension 0 to mean undefined, so use that as the default here
// registration of bulk record definition?
// currentExtensions[0x52] = () =>
}
function saveState(callback) {

@@ -595,0 +600,0 @@ let savedSrcEnd = srcEnd

@@ -12,2 +12,9 @@ "use strict"

let extensions, extensionClasses
const hasNodeBuffer = typeof Buffer !== 'undefined'
const ByteArrayAllocate = hasNodeBuffer ? Buffer.allocUnsafeSlow : Uint8Array
const ByteArray = hasNodeBuffer ? Buffer : Uint8Array
let target
let targetView
let position = 0
let safeEnd
const RECORD_SYMBOL = Symbol('record-id')

@@ -18,8 +25,4 @@ class Encoder extends Decoder {

this.offset = 0
let target = new ByteArrayAllocate(8192) // as you might expect, allocUnsafeSlow is the fastest and safest way to allocate memory
let targetView = new DataView(target.buffer, 0, 8192)
let typeBuffer
let position = 0
let start
let safeEnd
let sharedStructures

@@ -30,3 +33,3 @@ let hasSharedUpdate

let lastSharedStructuresLength = 0
let encodeUtf8 = target.utf8Write ? function(string, position, maxBytes) {
let encodeUtf8 = ByteArray.prototype.utf8Write ? function(string, position, maxBytes) {
return target.utf8Write(string, position, maxBytes)

@@ -53,3 +56,7 @@ } : (encoder && encoder.encodeInto) ?

this.encode = function(value) {
position = encoder.offset
if (!target) {
target = new ByteArrayAllocate(8192)
targetView = new DataView(target.buffer, 0, 8192)
position = 0
}
safeEnd = target.length - 10

@@ -115,3 +122,3 @@ if (safeEnd - position < 0x800) {

serializationsSinceTransitionRebuild++
if (transitionsCount > 5000) {
if (transitionsCount > 10000) {
// force a rebuild occasionally after a lot of transitions so it can get cleaned up

@@ -285,4 +292,4 @@ sharedStructures.transitions = null

}
target[position++] = 0xd6 // fixext 4
target[position++] = 0x70 // "p" for pointer
target[position++] = 0xca // tag 10
target[position++] = 0x1a // uint32
targetView.setUint32(position, referee.id)

@@ -341,10 +348,27 @@ position += 4

let extension = extensions[i]
let result = extension.encode.call(this, value, (size) => {
position += size
if (position > safeEnd)
makeRoom(position)
return {
target, targetView, position: position - size
let currentTarget = target
let currentTargetView = targetView
let currentPosition = position
target = null
let result
try {
result = extension.encode.call(this, value, (size) => {
target = currentTarget
currentTarget = null
position += size
if (position > safeEnd)
makeRoom(position)
return {
target, targetView, position: position - size
}
}, encode)
} finally {
// restore current target information (unless already restored)
if (currentTarget) {
target = currentTarget
targetView = currentTargetView
position = currentPosition
safeEnd = target.length - 10
}
}, encode)
}
if (result) {

@@ -458,3 +482,4 @@ position = writeExtensionData(result, target, position, extension.type)

let keys = Object.keys(object)
let nextTransition, hasNewTransition, transition = structures.transitions || (structures.transitions = Object.create(null))
let nextTransition, transition = structures.transitions || (structures.transitions = Object.create(null))
let newTransitions = 0
for (let i =0, l = keys.length; i < l; i++) {

@@ -465,3 +490,3 @@ let key = keys[i]

nextTransition = transition[key] = Object.create(null)
hasNewTransition = true
newTransitions++
}

@@ -492,4 +517,4 @@ transition = nextTransition

target[position++] = recordId
if (hasNewTransition)
transitionsCount += serializationsSinceTransitionRebuild
if (newTransitions)
transitionsCount += serializationsSinceTransitionRebuild * newTransitions
// record the removal of the id, we can maintain our shared structure

@@ -520,5 +545,7 @@ if (recordIdsToRemove.length >= 0xc0 - maxSharedStructures)

}
resetMemory() {
// this means we are finished using our local buffer and we can write over it safely
this.offset = 0
useBuffer(buffer) {
// this means we are finished using our own buffer and we can write over it safely
target = buffer
targetView = new DataView(target.buffer, target.byteOffset, target.byteLength)
position = 0
}

@@ -528,5 +555,2 @@ }

const hasNodeBuffer = typeof Buffer !== 'undefined'
const ByteArrayAllocate = hasNodeBuffer ? Buffer.allocUnsafeSlow : Uint8Array
const ByteArray = hasNodeBuffer ? Buffer : Uint8Array
function copyBinary(source, target, targetOffset, offset, endOffset) {

@@ -561,3 +585,3 @@ while (offset < endOffset) {

let { target, position} = allocateForWrite(1)
target[position++] = 0xd1 // 's' for Set
target[position++] = 0xcb // tag 11
}

@@ -570,3 +594,3 @@ encode(array)

let { target, position} = allocateForWrite(1)
target[position++] = 0xce // 'e' for error
target[position++] = 0xc8 // tag 8
}

@@ -579,3 +603,3 @@ encode([ error.name, error.message ])

let { target, position} = allocateForWrite(1)
target[position++] = 0xd3
target[position++] = 0xcd
}

@@ -585,5 +609,5 @@ encode([ regex.source, regex.flags ])

}, {
encode(arrayBuffer, allocateForWrite) {
encode(arrayBuffer, allocateForWrite, encode) {
if (this.structuredClone)
writeExtBuffer(arrayBuffer, 0x10, allocateForWrite)
writeExtBuffer(arrayBuffer, 0x10, allocateForWrite, encode)
else

@@ -593,6 +617,6 @@ writeBuffer(hasNodeBuffer ? Buffer.from(arrayBuffer) : new Uint8Array(arrayBuffer), allocateForWrite)

}, {
encode(typedArray, allocateForWrite) {
encode(typedArray, allocateForWrite, encode) {
let constructor = typedArray.constructor
if (constructor !== ByteArray && this.structuredClone)
writeExtBuffer(typedArray.buffer, typedArrays.indexOf(constructor.name), allocateForWrite)
writeExtBuffer(typedArray, typedArrays.indexOf(constructor.name), allocateForWrite, encode)
else

@@ -608,5 +632,7 @@ writeBuffer(typedArray, allocateForWrite)

function writeExtBuffer(buffer, type, allocateForWrite) {
let length = buffer.byteLength
let { target, position, targetView } = allocateForWrite(7 + length)
function writeExtBuffer(typedArray, type, allocateForWrite, encode) {
let length = typedArray.byteLength
let offset = typedArray.byteOffset || 0
let buffer = typedArray.buffer || typedArray
let { target, position, targetView } = allocateForWrite(1)
/*if (length < 0x100) {

@@ -620,12 +646,5 @@ target[position++] = 0xc7

} else {*/
target[position++] = 0xc9
targetView.setUint32(position, length + 1) // plus one for the type byte
position += 4
//}
target[position++] = 0x74 // "t" for typed array
target[position++] = type
if (hasNodeBuffer)
Buffer.from(buffer).copy(target, position)
else
copyBinary(new Uint8Array(buffer), target, position, 0, length)
target[position++] = 0xcc
encode([type, hasNodeBuffer ? Buffer.from(buffer, offset, length) :
new Uint8Array(buffer, offset, length)])
}

@@ -711,4 +730,4 @@ function writeBuffer(buffer, allocateForWrite) {

let position = offset + distanceToMove
serialized[position++] = 0xd6
serialized[position++] = 0x69 // 'i'
serialized[position++] = 0xc9 // tag 9
serialized[position++] = 0x1a // uint32
serialized[position++] = id << 24

@@ -715,0 +734,0 @@ serialized[position++] = (id << 16) & 0xff

exports.Encoder = require('./encode').Encoder
exports.addExtension = require('./encode').addExtension
let decodeModule = require('./decode')

@@ -24,3 +25,3 @@ let extractor = tryRequire('cbor-extract')

if (typeof window == 'undefined')
console.warn('Native extraction module not loaded, cbor-x will still run, but with decreased performance. ' + error.message.split('\n')[0])
console.warn('Native extraction module not loaded, cbor-x will still run, but with decreased performance. ' + error.message)
else

@@ -27,0 +28,0 @@ console.warn('For browser usage, directly use msgencoder/decode or msgencoder/encode modules. ' + error.message.split('\n')[0])

{
"name": "cbor-x",
"author": "Kris Zyp",
"version": "0.3.0",
"description": "Ultra-fast CBOR implementation with extensions for records and structured cloning",
"version": "0.3.1",
"license": "MIT",

@@ -34,2 +33,3 @@ "types": "./index.d.ts",

"cbor": "^5",
"@types/node": "latest",
"chai": "^4",

@@ -36,0 +36,0 @@ "mocha": "^4",

@@ -54,4 +54,9 @@ # cbor-x

## Browser Usage
Cbor-x works as standalone JavaScript as well, and runs on modern browsers. It includes a bundled script for ease of direct loading. For module-based development, it is recommended that you directly import the module of interest, to minimize dependencies that get pulled into your application:
Cbor-x works as standalone JavaScript as well, and runs on modern browsers. It includes a bundled script, at `dist/index.js` for ease of direct loading:
```
<script src="node_modules/cbor-x/dist/index.js"></script>
```
For module-based development, it is recommended that you directly import the module of interest, to minimize dependencies that get pulled into your application:
```
import { decode } from 'cbor-x/decode' // if you only need to decode

@@ -209,11 +214,3 @@ ```

#### Arena Allocation (`resetMemory()`)
During the serialization process, data is written to buffers. Again, allocating new buffers is a relatively expensive process, and the `resetMemory` method can help allow reuse of buffers that will further improve performance. The `resetMemory` method can be called when previously created buffer(s) are no longer needed. For example, if we serialized an object, and wrote it to a database, we could indicate that we are done:
```
let buffer = encoder.encode(data);
writeToStorageSync(buffer);
// finished with buffer, we can reset the memory on our encoder now:
encoder.resetMemory()
// future serialization can now reuse memory for better performance
```
The use of `resetMemory` is never required, buffers will still be handled and cleaned up through GC if not used, it just provides a small performance boost.
During the serialization process, data is written to buffers. Again, allocating new buffers is a relatively expensive process, and the `useBuffer` method can help allow reuse of buffers that will further improve performance. With `useBuffer` method, you can provide a buffer, serialize data into it, and when it is known that you are done using that buffer, you can call `useBuffer` again to reuse it. The use of `useBuffer` is never required, buffers will still be handled and cleaned up through GC if not used, it just provides a small performance boost.

@@ -243,2 +240,7 @@ ## Record Structure Extension Definition

## Alternate Encoding/Package
The high-performance serialization and deserialization algorithms in the msgpackr package are also available in the [cbor-x](https://github.com/kriszyp/cbor-x) package for the CBOR format. A quick summary of the pros and cons of using MessagePack vs CBOR are:
* MessagePack has wider adoption, and, at least with this implementation is slightly more efficient.
* CBOR has an official IETF standardization track, and the record extensions are conceptually/philosophically a better fit for CBOR tags.
## License

@@ -245,0 +247,0 @@

@@ -7,3 +7,2 @@ var data = require('./example4.json');

} catch(error) {
return {}
}

@@ -10,0 +9,0 @@ }

@@ -125,3 +125,3 @@ var inspector = require('inspector')

Class: Extended,
type: 11,
type: 30,
decode: function(buffer) {

@@ -231,4 +231,2 @@ let e = new Extended()

map.set('three', 3)
var data = {

@@ -388,7 +386,8 @@ map: map,

let encoder = new Encoder({ structures })
let buffer = Buffer.alloc(0x10000)
for (var i = 0; i < ITERATIONS; i++) {
//serialized = encode(data, { shared: sharedStructure })
encoder.useBuffer(buffer)
var serialized = encoder.encode(data)
encoder.resetMemory()
//var serializedGzip = deflateSync(serialized)

@@ -395,0 +394,0 @@ }

Socket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc