Comparing version 1.17.0 to 1.18.0
@@ -208,1 +208,25 @@ # Using the ProtoDef compiler | ||
``` | ||
### Skip Checks (optional) | ||
ProtoDef Compiler allows an optional `noArraySizeCheck` to be set. By default this value is `false`. | ||
If set to `true`, the compiler will skip array checks that apply safety limits to avoid out of memory crashes. Sometimes these checks can be too restrictive, and the `noArraySizeCheck` parameter allows you to disable them.
```javascript | ||
const { ProtoDefCompiler } = require('protodef').Compiler | ||
// Create a ProtoDefCompiler instance | ||
const compiler = new ProtoDefCompiler() | ||
compiler.addTypesToCompile(require('./protocol.json')) | ||
// Compile a ProtoDef instance | ||
const compiledProto = await compiler.compileProtoDef() | ||
// Set the `noArraySizeCheck` variable to skip array checks. | ||
compiledProto.setVariable('noArraySizeCheck', true); | ||
// Use it as if it were a normal ProtoDef | ||
const buffer = compiledProto.createPacketBuffer('mainType', result) | ||
const result = compiledProto.parsePacketBuffer('mainType', buffer) | ||
``` |
# History | ||
## 1.18.0 | ||
* [Update commands.yml to use GITHUB_TOKEN (#164)](https://github.com/ProtoDef-io/node-protodef/commit/ed625528ef109c7443b56528447f2cd84194e23a) (thanks @extremeheat) | ||
* [Add additional varint types and bitflags (#163)](https://github.com/ProtoDef-io/node-protodef/commit/89c2588fdd26f6100c406f57e8ca42c11c57af84) (thanks @extremeheat) | ||
* [Create commands.yml](https://github.com/ProtoDef-io/node-protodef/commit/67b411aacbf9ad2ccf0aaeac0bb4d9d568b568b3) (thanks @rom1504) | ||
* [Add Optional `skipChecks` to Disable Array Size Check (#154)](https://github.com/ProtoDef-io/node-protodef/commit/1173604de64d4c32f3ff9a2a789230885115870c) (thanks @bdkopen) | ||
* [add benchmark to ci](https://github.com/ProtoDef-io/node-protodef/commit/55e6c631cd14ccbb4d684e9f4e9c50e6fbbf809f) (thanks @rom1504) | ||
## 1.17.0 | ||
@@ -4,0 +11,0 @@ |
@@ -0,1 +1,2 @@ | ||
const assert = require('assert') | ||
const ProtoDef = require('protodef').ProtoDef | ||
@@ -5,2 +6,6 @@ const Serializer = require('protodef').Serializer | ||
// JSON.stringify has no native BigInt support and would throw; patching toJSON
// here makes BigInt values serialize as decimal strings for the example output.
BigInt.prototype.toJSON = function () { // eslint-disable-line -- Allow serializing BigIntegers
  return this.toString()
}
// the protocol can be in a separate json file | ||
@@ -13,2 +18,3 @@ const exampleProtocol = { | ||
switch: 'native', | ||
bitflags: 'native', | ||
entity_look: [ | ||
@@ -29,6 +35,7 @@ 'container', | ||
}, | ||
{ | ||
name: 'onGround', | ||
type: 'bool' | ||
} | ||
{ name: 'flags', type: ['bitflags', { type: 'u8', flags: ['onGround'] }] }, | ||
{ name: 'longId', type: 'varint64' }, | ||
{ name: 'longerId', type: 'varint128' }, | ||
{ name: 'zigzagId', type: 'zigzag32' }, | ||
{ name: 'zigzagBig', type: 'zigzag64' } | ||
] | ||
@@ -77,4 +84,10 @@ ], | ||
yaw: 1, | ||
pitch: 1, | ||
onGround: true | ||
pitch: 6, | ||
flags: { | ||
onGround: true | ||
}, | ||
longId: 13n, | ||
longerId: 2n ** 68n, // 9 bytes integer, 10 over wire | ||
zigzagId: -3, | ||
zigzagBig: 4294967296n | ||
} | ||
@@ -85,3 +98,4 @@ }) | ||
parser.on('data', function (chunk) { | ||
console.log(JSON.stringify(chunk, null, 2)) | ||
console.dir(chunk, { depth: null }) | ||
assert.deepEqual([...chunk.buffer], [22, 1, 1, 6, 1, 13, 128, 128, 128, 128, 128, 128, 128, 128, 128, 32, 5, 128, 128, 128, 128, 32]) | ||
}) |
{ | ||
"name": "protodef", | ||
"version": "1.17.0", | ||
"version": "1.18.0", | ||
"description": "A simple yet powerful way to define binary protocols", | ||
@@ -5,0 +5,0 @@ "main": "index.js", |
@@ -18,3 +18,7 @@ ## Numeric | ||
| u64 | 8 | 1 | unsigned long | | ||
| varint | (varies) | 300 | int var | | ||
| varint | 1-4 | 300 | int var | | ||
| varint64 | 1-8 | 300n | unsigned long | | ||
| varint128 | 1-16 | 2 ^ 68 | unsigned __int128 | | ||
| zigzag32 | 1-4 | -100 | signed int var | | ||
| zigzag64 | 1-8 | -680n | signed long | | ||
@@ -39,4 +43,24 @@ ### **int** ({ size: Integer }) | ||
[Protobuf](https://developers.google.com/protocol-buffers/docs/encoding#varints)-compatible representation for variable-length integers using one or more bytes. | ||
[Protobuf](https://developers.google.com/protocol-buffers/docs/encoding#varints)-compatible representation for variable-length integers using one or more bytes. Intended for 32-bit unsigned integers, or signed 32-bit integers that have been directly cast to an integer (where the MSB is the sign bit) before encoding. | ||
Example of value: `300` (size is 2 bytes) | ||
### **varint64** () | ||
Arguments: None | ||
Same as **varint**, but for 64-bit unsigned integers, or signed 64-bit integers that have been directly cast to an integer (where the MSB is the sign bit) before encoding. | ||
### **varint128** () | ||
Arguments: None | ||
Same as **varint**, but for 128-bit unsigned integers, or signed 128-bit integers that have been directly cast to an integer (where the MSB is the sign bit) before encoding. | ||
### **zigzag32** () | ||
Arguments: None | ||
Similar to **varint**, except using [ZigZag encoding](https://protobuf.dev/programming-guides/encoding/#signed-ints) for signed integers. Intended for 32-bit signed numbers. | ||
### **zigzag64** () | ||
Arguments: None | ||
Same as **zigzag32**, but for 64-bit signed integers in ZigZag encoding. |
@@ -73,2 +73,41 @@ ## Utils | ||
### **bitflags** ([ { type: string, flags: object | array, big?: boolean, shift?: number } ]) | ||
Arguments: | ||
* type : The underlying integer type (eg varint, lu32). | ||
* flags : Either an array of flag values from LSB to MSB, or an object containing a mapping of valueName => bitMask.
* big : 64+ bits. In languages like JavaScript (where all numbers are 64-bit floating points), special data types may have to be used for integers greater than 32 bits, so this must be set to true if the `type` is using the special data type.
* shift : Specify if flags is an object and holds bit positions as values, as opposed to a bitmask.
Represents boolean flags packed into an integer. Similar to bitfields, but only intended for enumerated boolean flags (each flag occupies 1 bit), and supports arbitrary underlying integer types. | ||
Example: | ||
```json | ||
[ | ||
"bitflags", | ||
{ | ||
"type": "lu32", | ||
"flags": ["onGround", "inAir"] | ||
} | ||
] | ||
``` | ||
or | ||
```yaml | ||
[ | ||
"bitflags", | ||
{ | ||
"type": "lu32", | ||
"big": true, | ||
"flags": { | ||
"onGround": 0b1, | ||
"inAir": 0b10 | ||
} | ||
} | ||
] | ||
``` | ||
Example of value to pass when writing: `{"flags": { "onGround": true, "inAir": false } }`. Likewise when reading you will get a similar object back, with an extra `_value` field holding the raw integer.
### **pstring** ({ countType: Type, ?count: Countable }) | ||
@@ -75,0 +114,0 @@ Arguments: |
@@ -30,2 +30,6 @@ { | ||
{ "$ref": "varint" }, | ||
{ "$ref": "varint64" }, | ||
{ "$ref": "varint128" }, | ||
{ "$ref": "zigzag32" }, | ||
{ "$ref": "zigzag64" }, | ||
{ "$ref": "lint" }, | ||
@@ -44,4 +48,5 @@ | ||
{ "$ref": "bitfield" }, | ||
{ "$ref": "bitflags" }, | ||
{ "$ref": "mapper" } | ||
] | ||
} |
@@ -65,2 +65,14 @@ { | ||
}, | ||
"varint64": { | ||
"enum": ["varint64"] | ||
}, | ||
"varint128": { | ||
"enum": ["varint128"] | ||
}, | ||
"zigzag32": { | ||
"enum": ["zigzag32"] | ||
}, | ||
"zigzag64": { | ||
"enum": ["zigzag64"] | ||
}, | ||
"int": { | ||
@@ -67,0 +79,0 @@ "title": "int", |
@@ -105,2 +105,16 @@ { | ||
}, | ||
"bitflags": { | ||
"title": "bitflags", | ||
"type": "array", | ||
"items": [ | ||
{ | ||
"enum": ["bitflags"] | ||
}, | ||
{ | ||
"type": "object", | ||
"additionalItems": false | ||
} | ||
], | ||
"additionalItems": false | ||
}, | ||
"mapper": { | ||
@@ -107,0 +121,0 @@ "title": "mapper", |
@@ -55,3 +55,3 @@ [ | ||
{ | ||
"description":"maximum varint", | ||
"description":"maximum varint (32bit)", | ||
"value":2147483647, | ||
@@ -61,3 +61,3 @@ "buffer":["0xff", "0xff", "0xff", "0xff", "0x07"] | ||
{ | ||
"description":"minimum varint", | ||
"description":"minimum varint (32bit)", | ||
"value":-2147483648, | ||
@@ -69,2 +69,82 @@ "buffer":["0x80", "0x80", "0x80", "0x80", "0x08"] | ||
{ | ||
"type":"varint64", | ||
"values":[ | ||
{ | ||
"description":"8-bit integer", | ||
"value":1, | ||
"buffer":["0x01"] | ||
}, | ||
{ | ||
"description":"8-bit maximum integer", | ||
"value":127, | ||
"buffer":["0x7f"] | ||
}, | ||
{ | ||
"description":"16-bit integer", | ||
"value":300, | ||
"buffer":["0xac", "0x02"] | ||
}, | ||
{ | ||
"description":"24-bit integer", | ||
"value":100000, | ||
"buffer":["0xa0", "0x8d", "0x06"] | ||
}, | ||
{ | ||
"description":"32-bit integer", | ||
"value":16909060, | ||
"buffer":["0x84", "0x86", "0x88", "0x08"] | ||
} | ||
] | ||
}, | ||
{ | ||
"type":"varint128", | ||
"values":[ | ||
{ | ||
"description":"8-bit integer", | ||
"value":1, | ||
"buffer":["0x01"] | ||
}, | ||
{ | ||
"description":"8-bit maximum integer", | ||
"value":127, | ||
"buffer":["0x7f"] | ||
}, | ||
{ | ||
"description":"16-bit integer", | ||
"value":300, | ||
"buffer":["0xac", "0x02"] | ||
}, | ||
{ | ||
"description":"24-bit integer", | ||
"value":100000, | ||
"buffer":["0xa0", "0x8d", "0x06"] | ||
}, | ||
{ | ||
"description":"32-bit integer", | ||
"value":16909060, | ||
"buffer":["0x84", "0x86", "0x88", "0x08"] | ||
} | ||
] | ||
}, | ||
{ | ||
"type":"zigzag32", | ||
"values":[ | ||
{ | ||
"description":"8-bit integer", | ||
"value":1, | ||
"buffer":["0x02"] | ||
} | ||
] | ||
}, | ||
{ | ||
"type":"zigzag64", | ||
"values":[ | ||
{ | ||
"description":"8-bit integer", | ||
"value":1, | ||
"buffer":["0x02"] | ||
} | ||
] | ||
}, | ||
{ | ||
"type":"buffer", | ||
@@ -345,2 +425,55 @@ "subtypes":[ | ||
{ | ||
"type":"bitflags", | ||
"subtypes":[ | ||
{ | ||
"description":"8bit bitset flag array", | ||
"type":[ | ||
"bitflags", | ||
{ "type": "u8", "flags": ["onGround"] } | ||
], | ||
"values":[ | ||
{ | ||
"value":{ | ||
"_value": 1, | ||
"onGround": true | ||
}, | ||
"buffer":["0x01"] | ||
} | ||
] | ||
}, | ||
{ | ||
"description":"8bit bitset flag object", | ||
"type":[ | ||
"bitflags", | ||
{ "type": "u8", "flags": { "onGround": 1 } } | ||
], | ||
"values":[ | ||
{ | ||
"value":{ | ||
"_value": 1, | ||
"onGround": true | ||
}, | ||
"buffer":["0x01"] | ||
} | ||
] | ||
}, | ||
{ | ||
"description":"8bit bitset flag big object", | ||
"type":[ | ||
"bitflags", | ||
{ "type": "u8", "big": true, "flags": { "onGround": 1 } } | ||
], | ||
"values":[ | ||
{ | ||
"value":{ | ||
"_value": 1, | ||
"onGround": true | ||
}, | ||
"buffer":["0x01"] | ||
} | ||
] | ||
} | ||
] | ||
}, | ||
{ | ||
"type":"mapper", | ||
@@ -347,0 +480,0 @@ "subtypes":[ |
@@ -13,3 +13,3 @@ module.exports = { | ||
} | ||
code += 'if (count > 0xffffff) throw new Error("array size is abnormally large, not reading: " + count)\n' | ||
code += 'if (count > 0xffffff && !ctx.noArraySizeCheck) throw new Error("array size is abnormally large, not reading: " + count)\n' | ||
code += 'const data = []\n' | ||
@@ -16,0 +16,0 @@ code += 'let size = countSize\n' |
@@ -61,2 +61,23 @@ module.exports = { | ||
}], | ||
bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => { | ||
let fstr = JSON.stringify(flags) | ||
if (Array.isArray(flags)) { | ||
fstr = '{' | ||
for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',') | ||
fstr += '}' | ||
} else if (shift) { | ||
fstr = '{' | ||
for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}` | ||
fstr += '}' | ||
} | ||
return compiler.wrapCode(` | ||
const { value: _value, size } = ${compiler.callType(type, 'offset')} | ||
const value = { _value } | ||
const flags = ${fstr} | ||
for (const key in flags) { | ||
value[key] = (_value & flags[key]) == flags[key] | ||
} | ||
return { value, size } | ||
`.trim()) | ||
}], | ||
mapper: ['parametrizable', (compiler, mapper) => { | ||
@@ -120,2 +141,22 @@ let code = 'const { value, size } = ' + compiler.callType(mapper.type) + '\n' | ||
}], | ||
bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => { | ||
let fstr = JSON.stringify(flags) | ||
if (Array.isArray(flags)) { | ||
fstr = '{' | ||
for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',') | ||
fstr += '}' | ||
} else if (shift) { | ||
fstr = '{' | ||
for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}` | ||
fstr += '}' | ||
} | ||
return compiler.wrapCode(` | ||
const flags = ${fstr} | ||
let val = value._value ${big ? '|| 0n' : ''} | ||
for (const key in flags) { | ||
if (value[key]) val |= flags[key] | ||
} | ||
return (ctx.${type})(val, buffer, offset) | ||
`.trim()) | ||
}], | ||
mapper: ['parametrizable', (compiler, mapper) => { | ||
@@ -153,2 +194,22 @@ const mappings = JSON.stringify(swapMappings(mapper.mappings)) | ||
}], | ||
bitflags: ['parametrizable', (compiler, { type, flags, shift, big }) => { | ||
let fstr = JSON.stringify(flags) | ||
if (Array.isArray(flags)) { | ||
fstr = '{' | ||
for (const [k, v] of Object.entries(flags)) fstr += `"${v}": ${big ? (1n << BigInt(k)) : (1 << k)}` + (big ? 'n,' : ',') | ||
fstr += '}' | ||
} else if (shift) { | ||
fstr = '{' | ||
for (const key in flags) fstr += `"${key}": ${1 << flags[key]}${big ? 'n,' : ','}` | ||
fstr += '}' | ||
} | ||
return compiler.wrapCode(` | ||
const flags = ${fstr} | ||
let val = value._value ${big ? '|| 0n' : ''} | ||
for (const key in flags) { | ||
if (value[key]) val |= flags[key] | ||
} | ||
return (ctx.${type})(val) | ||
`.trim()) | ||
}], | ||
mapper: ['parametrizable', (compiler, mapper) => { | ||
@@ -155,0 +216,0 @@ const mappings = JSON.stringify(swapMappings(mapper.mappings)) |
const { getCount, sendCount, calcCount, PartialReadError } = require('../utils') | ||
module.exports = { | ||
varint: [readVarInt, writeVarInt, sizeOfVarInt, require('../../ProtoDef/schemas/utils.json').varint], | ||
bool: [readBool, writeBool, 1, require('../../ProtoDef/schemas/utils.json').bool], | ||
@@ -10,4 +9,6 @@ pstring: [readPString, writePString, sizeOfPString, require('../../ProtoDef/schemas/utils.json').pstring], | ||
bitfield: [readBitField, writeBitField, sizeOfBitField, require('../../ProtoDef/schemas/utils.json').bitfield], | ||
bitflags: [readBitflags, writeBitflags, sizeOfBitflags, require('../../ProtoDef/schemas/utils.json').bitflags], | ||
cstring: [readCString, writeCString, sizeOfCString, require('../../ProtoDef/schemas/utils.json').cstring], | ||
mapper: [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json').mapper] | ||
mapper: [readMapper, writeMapper, sizeOfMapper, require('../../ProtoDef/schemas/utils.json').mapper], | ||
...require('./varint') | ||
} | ||
@@ -62,43 +63,2 @@ | ||
// Decode a protobuf-style varint: 7 payload bits per byte, MSB = continuation.
// Returns { value, size } where size is the number of bytes consumed.
function readVarInt (buffer, offset) {
  let value = 0
  let bits = 0
  for (let cursor = offset; ; cursor++) {
    if (cursor + 1 > buffer.length) { throw new PartialReadError() }
    const byte = buffer.readUInt8(cursor)
    value |= (byte & 0x7f) << bits // fold the 7 payload bits into the result
    if ((byte & 0x80) === 0) { // continuation bit clear: the number is complete
      return { value, size: cursor - offset + 1 }
    }
    bits += 7
    if (bits > 64) throw new PartialReadError(`varint is too big: ${bits}`) // guard against runaway input
  }
}
// Number of bytes a varint encoding of `value` will occupy
// (each byte carries 7 payload bits).
function sizeOfVarInt (value) {
  let bytes = 1
  while ((value & ~0x7f) !== 0) {
    value >>>= 7
    bytes++
  }
  return bytes
}
// Encode `value` as a varint into `buffer` starting at `offset`:
// 7 bits per byte, MSB set on every byte except the last.
// Returns the offset just past the written bytes.
function writeVarInt (value, buffer, offset) {
  let cursor = offset
  while (value & ~0x7F) {
    buffer.writeUInt8((value & 0xFF) | 0x80, cursor)
    cursor++
    value >>>= 7
  }
  buffer.writeUInt8(value, cursor)
  return cursor + 1
}
function readPString (buffer, offset, typeArgs, rootNode) { | ||
@@ -263,1 +223,63 @@ const { size, count } = getCount.call(this, buffer, offset, typeArgs, rootNode) | ||
} | ||
// Read the underlying integer `type`, then expose each named flag as a boolean
// alongside the raw integer in `_value`.
function readBitflags (buffer, offset, { type, flags, shift, big }, rootNode) {
  const { size, value } = this.read(buffer, offset, type, rootNode)
  // Normalize `flags` into a name -> bitmask map. BigInt shifts are required
  // when `big` is set, since Number's (1 << n) wraps past bit 31.
  let masks
  if (Array.isArray(flags)) {
    masks = {}
    flags.forEach((name, bit) => { masks[name] = big ? (1n << BigInt(bit)) : (1 << bit) })
  } else if (shift) {
    masks = {}
    for (const name of Object.keys(flags)) masks[name] = big ? (1n << BigInt(flags[name])) : (1 << flags[name])
  } else {
    masks = flags // already a name -> bitmask map
  }
  const decoded = { _value: value }
  for (const name of Object.keys(masks)) {
    decoded[name] = (value & masks[name]) === masks[name]
  }
  return { value: decoded, size }
}
// Pack the boolean flag fields of `value` (plus any raw bits in `_value`)
// into a single integer and write it with the underlying `type`.
function writeBitflags (value, buffer, offset, { type, flags, shift, big }, rootNode) {
  // Normalize `flags` into a name -> bitmask map. BigInt shifts are required
  // when `big` is set, since Number's (1 << n) wraps past bit 31.
  let masks
  if (Array.isArray(flags)) {
    masks = {}
    flags.forEach((name, bit) => { masks[name] = big ? (1n << BigInt(bit)) : (1 << bit) })
  } else if (shift) {
    masks = {}
    for (const name of Object.keys(flags)) masks[name] = big ? (1n << BigInt(flags[name])) : (1 << flags[name])
  } else {
    masks = flags // already a name -> bitmask map
  }
  let packed = value._value || (big ? 0n : 0) // start from the caller's raw bits, if any
  for (const name of Object.keys(masks)) {
    if (value[name]) packed |= masks[name]
  }
  return this.write(packed, buffer, offset, type, rootNode)
}
// Compute the serialized size of a bitflags value by packing it into the
// underlying integer and delegating to that type's sizeOf.
function sizeOfBitflags (value, { type, flags, shift, big }, rootNode) {
  if (!value) throw new Error('Missing field')
  // Normalize `flags` into a name -> bitmask map. BigInt shifts are required
  // when `big` is set, since Number's (1 << n) wraps past bit 31.
  let masks
  if (Array.isArray(flags)) {
    masks = {}
    flags.forEach((name, bit) => { masks[name] = big ? (1n << BigInt(bit)) : (1 << bit) })
  } else if (shift) {
    masks = {}
    for (const name of Object.keys(flags)) masks[name] = big ? (1n << BigInt(flags[name])) : (1 << flags[name])
  } else {
    masks = flags // already a name -> bitmask map
  }
  let packed = value._value || (big ? 0n : 0) // start from the caller's raw bits, if any
  for (const name of Object.keys(masks)) {
    if (value[name]) packed |= masks[name]
  }
  return this.sizeOf(packed, type, rootNode)
}
@@ -16,3 +16,8 @@ /* eslint-env mocha */ | ||
const actualResult = proto.parsePacketBuffer(type, buffer) | ||
if (value === null) { assert.ok(actualResult.data === undefined) } else { expect(actualResult.data).to.deep.equal(value) } | ||
if (typeof actualResult.data === 'bigint') value = BigInt(value) | ||
if (value === null) { | ||
assert.ok(actualResult.data === undefined) | ||
} else { | ||
expect(actualResult.data).to.deep.equal(value) | ||
} | ||
expect(actualResult.metadata.size).to.deep.equal(buffer.length) | ||
@@ -19,0 +24,0 @@ }) |
Sorry, the diff of this file is not supported yet
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
LicenseThis package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
189601
66
5125