Comparing version 0.1.2 to 0.1.3
@@ -1,3 +0,41 @@ | ||
There are the benchmarks from notepack package. The test data for these benchmarks is very heavily weighted with large binary/buffer data: | ||
Here are more comprehensive benchmarks. This is comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and data is from some clinical research data we use that has a good mix of different value types and structures). It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation): | ||
operation | op | ms | op/s | ||
---------------------------------------------------------- | ------: | ----: | -----: | ||
buf = Buffer(JSON.stringify(obj)); | 82000 | 5004 | 16386 | ||
obj = JSON.parse(buf); | 88600 | 5000 | 17720 | ||
require("msgpackr").pack(obj); | 161500 | 5002 | 32287 | ||
require("msgpackr").unpack(buf); | 94600 | 5004 | 18904 | ||
msgpackr w/ shared structures: packr.pack(obj); | 178400 | 5002 | 35665 | ||
msgpackr w/ shared structures: packr.unpack(buf); | 376700 | 5000 | 75340 | ||
buf = require("msgpack-lite").encode(obj); | 30100 | 5012 | 6005 | ||
obj = require("msgpack-lite").decode(buf); | 16200 | 5001 | 3239 | ||
buf = require("notepack").encode(obj); | 62600 | 5005 | 12507 | ||
obj = require("notepack").decode(buf); | 32400 | 5007 | 6470 | ||
require("what-the-pack")... encoder.encode(obj); | 63500 | 5002 | 12694 | ||
require("what-the-pack")... encoder.decode(buf); | 32000 | 5001 | 6398 | ||
require("avsc")...make schema/type...type.toBuffer(obj); | 84600 | 5003 | 16909 | ||
require("avsc")...make schema/type...type.toBuffer(obj); | 99300 | 5001 | 19856 | ||
(`avsc` is schema-based and more comparable in style to msgpackr with shared structures). | ||
Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where msgpackr is able to take advantage of the structured record extension and really pull away from other tools: | ||
operation (1000000 x 2) | op | ms | op/s | ||
------------------------------------------------ | ------: | ----: | -----: | ||
new PackrStream().write(obj); | 1000000 | 372 | 2688172 | ||
new UnpackrStream().write(buf); | 1000000 | 247 | 4048582 | ||
stream.write(msgpack.encode(obj)); | 1000000 | 2898 | 345065 | ||
stream.write(msgpack.decode(buf)); | 1000000 | 1969 | 507872 | ||
stream.write(notepack.encode(obj)); | 1000000 | 901 | 1109877 | ||
stream.write(notepack.decode(buf)); | 1000000 | 1012 | 988142 | ||
msgpack.Encoder().on("data",ondata).encode(obj); | 1000000 | 1763 | 567214 | ||
msgpack.createDecodeStream().write(buf); | 1000000 | 2222 | 450045 | ||
msgpack.createEncodeStream().write(obj); | 1000000 | 1577 | 634115 | ||
msgpack.Decoder().on("data",ondata).decode(buf); | 1000000 | 2246 | 445235 | ||
These are the benchmarks from the notepack package. The larger test data for these benchmarks is very heavily weighted with large binary/buffer data and objects with extreme numbers of keys (much more than I typically see with real-world data, but YMMV): | ||
node ./benchmarks/encode | ||
@@ -20,16 +58,18 @@ +------------------+-------------------+-----------------+----------------+---------------+ | ||
node ./benchmarks/decode | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
| │ tiny │ small │ medium │ large | | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
| notepack │ 2,350,498 ops/sec │ 561,550 ops/sec │ 31,475 ops/sec │ 288 ops/sec | | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
| msgpack-js │ 993,032 ops/sec │ 233,171 ops/sec │ 21,528 ops/sec │ 267 ops/sec | | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
| msgpackr │ 2,490,774 ops/sec │ 583,004 ops/sec │ 73,325 ops/sec │ 336 ops/sec | | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
| msgpackr records │ 4,398,718 ops/sec │ 1,135,979 ops/sec │ 169,718 ops/sec │ 630 ops/sec | | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
| msgpack-lite │ 571,500 ops/sec │ 132,079 ops/sec │ 12,118 ops/sec │ 177 ops/sec | | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
| @msgpack/msgpack │ 2,111,586 ops/sec │ 561,525 ops/sec │ 27,249 ops/sec │ 87.43 ops/sec | | ||
+------------------+-------------------+-------------------+-----------------+---------------+ | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
| │ tiny │ small │ medium │ large | | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
| notepack │ 2,220,904 ops/sec │ 560,630 ops/sec │ 28,177 ops/sec │ 275 ops/sec | | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
| msgpack-js │ 965,719 ops/sec │ 222,047 ops/sec │ 21,431 ops/sec │ 257 ops/sec | | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
| msgpackr │ 2,320,046 ops/sec │ 589,167 ops/sec │ 70,299 ops/sec │ 329 ops/sec | | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
| msgpackr records │ 3,750,547 ops/sec │ 912,419 ops/sec │ 136,853 ops/sec │ 733 ops/sec | | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
| msgpack-lite │ 569,222 ops/sec │ 129,008 ops/sec │ 12,424 ops/sec │ 180 ops/sec | | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
| @msgpack/msgpack │ 2,089,697 ops/sec │ 557,507 ops/sec │ 20,256 ops/sec │ 85.03 ops/sec | | ||
+------------------+-------------------+-----------------+-----------------+---------------+ | ||
This was run by adding the msgpackr to the benchmarks for notepack. |
@@ -273,3 +273,10 @@ "use strict" | ||
} else if (constructor === Date) { | ||
throw new Error('Date not implemented yet') | ||
// using the 32 timestamp for now, TODO: implement support for 64-bit and 128-bit | ||
length = value.getTime() / 1000 | ||
target[position++] = 0xd6 | ||
target[position++] = 0xff | ||
target[position++] = length >> 24 | ||
target[position++] = (length >> 16) & 0xff | ||
target[position++] = (length >> 8) & 0xff | ||
target[position++] = length & 0xff | ||
} else if (constructor === Buffer) { | ||
@@ -276,0 +283,0 @@ length = value.length |
{ | ||
"name": "msgpackr", | ||
"author": "Kris Zyp", | ||
"version": "0.1.2", | ||
"version": "0.1.3", | ||
"description": "Fast MessagePack implementation with extension for record structures", | ||
@@ -6,0 +6,0 @@ "license": "MIT", |
@@ -5,3 +5,3 @@ # msgpackr | ||
# Basic Usage | ||
## Basic Usage | ||
@@ -21,3 +21,3 @@ Install with: | ||
## Streams | ||
### Streams | ||
We can use the included streaming functionality (which further improves performance). The `PackrStream` is a transform stream that can be used to serialize objects to a binary stream (writing to network/socket, IPC, etc.), and the `UnpackrStream` can be used to deserialize objects from a binary stream (reading from network/socket, etc.): | ||
@@ -46,3 +46,3 @@ | ||
# Record / Object Structures | ||
## Record / Object Structures | ||
There is a critical difference between maps (or dictionaries) that hold an arbitrary set of keys and values (JavaScript `Map`s are best for these), and records or object structures that have a well-defined set of fields which may have many instances using that same structure (most objects in JS). By using the record extension, this distinction is preserved in MessagePack and the encoding can reuse structures and not only provides better type preservation, but yield much more compact encodings and increase parsing/deserialization performance by 2-3x. Msgpackr automatically generates record definitions that are reused and referenced by objects with the same structure. There are a number of ways to use this to our advantage. For large object structures with repeating nested objects with similar structures, simply serializing with the record extension can yield benefits. To use the record structures extension, we create a new `Packr` instance. By default a new `Packr` instance will have the record extension enabled: | ||
@@ -60,3 +60,3 @@ ``` | ||
## Shared Record Structures | ||
### Shared Record Structures | ||
Another useful way of using msgpackr, and the record extension, is for storing data in a databases, files, or other storage systems. If a number of objects with common data structures are being stored, a shared structure can be used to greatly improve data storage and deserialization efficiency. We just need to provide a way to store the generated shared structure so it is available to deserialize stored data in the future: | ||
@@ -79,3 +79,3 @@ | ||
### resetMemory | ||
#### resetMemory | ||
During the serialization process, data is written to buffers. Allocating new buffers is a relatively expensive process, and the `resetMemory` method can help allow reuse of buffers that will further improve performance. The `resetMemory` method can be called when previously created buffer(s) are no longer needed. For example, if we serialized an object, and wrote it to a database, we could indicate that we are done: | ||
@@ -91,4 +91,4 @@ ``` | ||
# Performance | ||
msgpackr is fast. Really fast. Here is comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and data is from some clinical research data we use that has a good mix of different value types and structures). It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation): | ||
## Performance | ||
Msgpackr is fast. Really fast. Here is a comparison with the next fastest JS projects using the benchmark tool from `msgpack-lite` (and the sample data is from some clinical research data we use that has a good mix of different value types and structures). It also includes comparison to V8 native JSON functionality, and JavaScript Avro (`avsc`, a very optimized Avro implementation): | ||
@@ -114,3 +114,3 @@ operation | op | ms | op/s | ||
Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where msgpackr is able to take advantage of the structured record extension and really pull away from other tools: | ||
Here is a benchmark of streaming data (again borrowed from `msgpack-lite`'s benchmarking), where msgpackr is able to take advantage of the structured record extension and really demonstrate its performance capabilities: | ||
@@ -130,5 +130,4 @@ operation (1000000 x 2) | op | ms | op/s | ||
# Extensions | ||
## Record Structure Extension Definition | ||
The record struction extension uses extension id 0x72 ("r") to declare the use of this functionality. The extension "data" byte (or bytes) identifies the byte or bytes used to identify the start of a record in the subsequent MessagePack block or stream. The identifier byte (or the first byte in a sequence) must be from 0x40 - 0x7f (and therefore replaces one byte representations of positive integers 64 - 127). The extension decaration must be immediately follow by an MessagePack array that defines the field names of the record structure. | ||
The record structure extension uses extension id 0x72 ("r") to declare the use of this functionality. The extension "data" byte (or bytes) identifies the byte or bytes used to identify the start of a record in the subsequent MessagePack block or stream. The identifier byte (or the first byte in a sequence) must be from 0x40 - 0x7f (and therefore replaces one byte representations of positive integers 64 - 127, which can alternately be represented with int or uint types). The extension declaration must be immediately followed by a MessagePack array that defines the field names of the record structure. | ||
@@ -149,4 +148,4 @@ Once a record identifier and record field names have been defined, the parser/decoder should proceed to read the next value. Any subsequent use of the record identifier as a value in the block or stream should be parsed as a record instance, and the next n values, where n is the number of fields (as defined in the array of field names), should be read as the values of the fields. For example, here we have defined a structure with fields "foo" and "bar", with the record identifier 0x40, and then read a record instance that defines the field values of 4 and 2, respectively: | ||
# License | ||
## License | ||
MIT |
@@ -28,11 +28,7 @@ "use strict" | ||
constructor(options) { | ||
if (options) { | ||
options.objectMode = true | ||
} else { | ||
options = { | ||
objectMode: true, | ||
structures: [], | ||
} | ||
} | ||
if (!options) | ||
options = {} | ||
options.objectMode = true | ||
super(options) | ||
options.structures = [] | ||
this.unpackr = new Unpackr(options) | ||
@@ -39,0 +35,0 @@ } |
"use strict" | ||
let { setSource, extractStrings } = require('./build/Release/msgpackr.node') | ||
let { setSource, extractStrings } = tryRequire('./build/Release/msgpackr.node') | ||
let src | ||
@@ -23,2 +23,7 @@ let srcEnd | ||
currentExtensions[0] = (data) => {} // notepack defines extension 0 to mean undefined, so use that as the default here | ||
currentExtensions[0xd6] = (data) => { | ||
// 32-bit date extension | ||
return new Date(((data[0] << 24) + (data[1] << 16) + (data[2] << 8) + data[3]) * 1000) | ||
} // notepack defines extension 0 to mean undefined, so use that as the default here | ||
// registration of bulk record definition? | ||
@@ -442,2 +447,10 @@ // currentExtensions[0x52] = () => | ||
throw new Error('Unknown extension type ' + type) | ||
} | ||
// Best-effort module loader: returns the module's exports when the native
// addon (or any module) can be loaded, and an empty object otherwise so
// callers can feature-detect via destructuring. Load failures are logged
// rather than thrown.
function tryRequire(moduleId) {
	let loadedExports = {}
	try {
		loadedExports = require(moduleId)
	} catch (error) {
		console.error(error)
	}
	return loadedExports
}
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance. | ||
Found 1 instance in 1 package
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution. | ||
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
87127
1674
142
5