protobuf-codec
Advanced tools
Comparing version 1.0.3 to 1.0.4
import { decoders, tag } from './wire-types.js' | ||
import assert from 'nanoassert' | ||
export default function* reader( | ||
export default function * reader ( | ||
buf, | ||
byteOffset = 0, | ||
byteLength = buf.byteLength) { | ||
byteLength = buf.byteLength | ||
) { | ||
let o = byteOffset | ||
@@ -9,0 +10,0 @@ const end = byteOffset + byteLength |
@@ -0,1 +1,73 @@ | ||
import assert from 'nanoassert' | ||
// Protobuf wire-type discriminators, as stored in the low 3 bits of each
// field's tag varint. Wire types 3/4 (deprecated group start/end) are
// intentionally omitted — this codec does not support groups.
export const wireTypes = {
  VARINT: 0, // variable-length integer (int32/64, uint, sint, bool, enum)
  BYTES: 2, // length-delimited payload (string, bytes, sub-message, packed)
  FIXED64: 1, // 8-byte little-endian scalar (fixed64, sfixed64, double)
  FIXED32: 5 // 4-byte little-endian scalar (fixed32, sfixed32, float)
}
export const varint = {
  /**
   * Encode an unsigned integer as a protobuf varint: 7 payload bits per
   * byte, least-significant group first, continuation bit (0x80) set on
   * every byte except the last. Sets `varint.encode.bytes` to the number
   * of bytes written.
   * @param {number|bigint} int - value to encode, must be <= MAX_VALUE
   * @param {Uint8Array} [buf] - destination; freshly allocated if omitted
   * @param {number} [byteOffset] - write position within buf
   * @returns {Uint8Array} subarray of buf covering the encoded bytes
   */
  encode (
    int,
    buf = alloc(this, int),
    byteOffset = 0
  ) {
    assert(int <= varint.MAX_VALUE, 'int exceeds MAX_VALUE')
    let offset = byteOffset
    let value = BigInt(int)
    while (value >= 128n) {
      // low 7 bits plus the continuation flag
      buf[offset] = Number((value & 0x7fn) | 0x80n)
      offset++
      value >>= 7n
    }
    buf[offset] = Number(value)
    offset++
    this.encode.bytes = offset - byteOffset
    return buf.subarray(byteOffset, offset)
  },
  /**
   * Encode `int` padded out to exactly `len` bytes by emitting redundant
   * continuation bytes (each contributing 0 payload bits once `int` is
   * exhausted). Useful for reserving a fixed-width length prefix before
   * the final size is known. Sets `varint.encodeOversize.bytes`.
   * @param {number|bigint} int - value to encode
   * @param {number} len - exact encoded width; must fit int and buf
   * @param {Uint8Array} buf - destination buffer
   * @param {number} [byteOffset] - write position within buf
   * @returns {Uint8Array} subarray of buf covering the `len` bytes written
   */
  encodeOversize (int, len, buf, byteOffset = 0) {
    assert(int <= varint.MAX_VALUE, 'int exceeds MAX_VALUE')
    assert(len >= this.encodingLength(int), 'len does not fit int')
    assert(buf.byteLength - byteOffset >= len, 'buf does not fit len')
    const last = byteOffset + len - 1
    let offset = byteOffset
    let value = BigInt(int)
    // All bytes before the last carry the continuation flag, even when
    // value has already been reduced to zero (that is the "oversize" part).
    while (offset < last) {
      buf[offset] = Number((value & 0x7fn) | 0x80n)
      offset++
      value >>= 7n
    }
    buf[offset] = Number(value)
    offset++
    this.encodeOversize.bytes = offset - byteOffset
    return buf.subarray(byteOffset, offset)
  },
  /**
   * Number of bytes `encode` will produce for `int`: ceil(bitlength / 7),
   * minimum 1. Computed branch-free per 32-bit half via Math.clz32.
   * @param {number|bigint} int - value to measure
   * @returns {number} encoded byte count (1..10)
   */
  encodingLength (int) {
    assert(int <= varint.MAX_VALUE, 'int exceeds MAX_VALUE')
    if (int > 0xffff_ffff) {
      // Only the high 32 bits determine the length once we exceed 2^32 - 1
      const high = Number(BigInt(int) >> 32n)
      return (9 * (32 - Math.clz32(high) + 32) + 64) / 64 | 0
    }
    return (9 * (32 - Math.clz32(Number(int))) + 64) / 64 | 0
  },
  // Largest encodable value: unsigned 64-bit maximum (2^64 - 1)
  MAX_VALUE: (1n << 64n) - 1n
}
export const bytes = {
  /**
   * Encode a byte payload as a length-delimited field: a varint length
   * prefix immediately followed by the raw bytes of `src`. Sets
   * `bytes.encode.bytes` to the total number of bytes written.
   * @param {Uint8Array} src - payload to encode
   * @param {Uint8Array} [buf] - destination; freshly allocated if omitted
   * @param {number} [byteOffset] - write position within buf
   * @returns {Uint8Array} subarray of buf covering prefix + payload
   */
  encode (src, buf = alloc(this, src), byteOffset = 0) {
    varint.encode(src.byteLength, buf, byteOffset)
    let offset = byteOffset + varint.encode.bytes
    buf.set(src, offset)
    offset += src.byteLength
    this.encode.bytes = offset - byteOffset
    return buf.subarray(byteOffset, offset)
  },
  /**
   * Encoded size of `src`: the varint length prefix plus the payload.
   * @param {Uint8Array} src
   * @returns {number}
   */
  encodingLength (src) {
    return src.byteLength + varint.encodingLength(src.byteLength)
  }
}
export const tag = { | ||
@@ -25,3 +97,3 @@ encode ( | ||
export const string = { | ||
encode(str, buf = alloc(this, str), byteOffset = 0) { | ||
encode (str, buf = alloc(this, str), byteOffset = 0) { | ||
assert(typeof str === 'string') | ||
@@ -150,3 +222,3 @@ const src = utf8.decode(str) | ||
export const enumerable = { | ||
encode(en, buf = alloc(this, en), byteOffset = 0) { | ||
encode (en, buf = alloc(this, en), byteOffset = 0) { | ||
assert(en <= enumerable.MAX_VALUE, 'enum value exceeds MAX_VALUE') | ||
@@ -153,0 +225,0 @@ varint.encode(en, buf, byteOffset) |
@@ -1,2 +0,2 @@ | ||
import { varint, tag, bytes, uint64, uint32, wireTypes } from './wire-types.js' | ||
import { varint, tag, uint64, bytes, uint32, wireTypes } from './wire-types.js' | ||
@@ -18,3 +18,3 @@ const PAGE_SIZE = 256 | ||
*/ | ||
get pages() { | ||
get pages () { | ||
return this.buf.length | ||
@@ -30,3 +30,3 @@ } | ||
*/ | ||
alloc(bytes) { | ||
alloc (bytes) { | ||
const tail = this.buf.at(-1) | ||
@@ -49,7 +49,6 @@ if (tail.byteLength - this.offset >= bytes) { | ||
*/ | ||
_trim() { | ||
_trim () { | ||
if (this.offset === 0) { | ||
this.buf.pop() // remove the item that is unused | ||
} | ||
else this.buf.push(this.buf.pop().subarray(0, this.offset)) | ||
} else this.buf.push(this.buf.pop().subarray(0, this.offset)) | ||
} | ||
@@ -64,6 +63,6 @@ | ||
*/ | ||
append(...bufs) { | ||
append (...bufs) { | ||
this._trim() | ||
this.buf.push(...bufs) | ||
let tail = this.buf.at(-1) | ||
const tail = this.buf.at(-1) | ||
this.offset = tail.byteLength | ||
@@ -81,3 +80,3 @@ | ||
*/ | ||
varint(fieldNumber, value, codec = varint) { | ||
varint (fieldNumber, value, codec = varint) { | ||
if (!value) return | ||
@@ -94,3 +93,3 @@ | ||
bytes(fieldNumber, value, codec = bytes) { | ||
bytes (fieldNumber, value, codec = bytes) { | ||
if (!value) return | ||
@@ -107,3 +106,3 @@ | ||
fixed64(fieldNumber, value, codec = uint64) { | ||
fixed64 (fieldNumber, value, codec = uint64) { | ||
if (!value) return | ||
@@ -119,3 +118,3 @@ const buf = this.alloc( | ||
fixed32(fieldNumber, value, codec = uint32) { | ||
fixed32 (fieldNumber, value, codec = uint32) { | ||
if (!value) return | ||
@@ -151,5 +150,5 @@ const buf = this.alloc( | ||
*/ | ||
concat(buf, byteOffset = 0) { | ||
concat (buf, byteOffset = 0) { | ||
this._trim() | ||
let size = this.encodingLength() | ||
const size = this.encodingLength() | ||
@@ -167,5 +166,1 @@ if (buf == null) buf = new Uint8Array(size) | ||
} | ||
/**
 * Create a DataView covering exactly the region of the underlying
 * ArrayBuffer that the given typed array spans (honors subarray offsets).
 * @param {Uint8Array} bytes - typed array to view
 * @returns {DataView} view over bytes.buffer at the same offset/length
 */
function _view (bytes) {
  const { buffer, byteOffset, byteLength } = bytes
  return new DataView(buffer, byteOffset, byteLength)
}
{ | ||
"name": "protobuf-codec", | ||
"version": "1.0.3", | ||
"version": "1.0.4", | ||
"description": "Minimal Protocol Buffers wire encoding/decoding", | ||
@@ -11,3 +11,4 @@ "type": "module", | ||
"scripts": { | ||
"test": "tape 'test/**/*.mjs'" | ||
"test": "tape 'test/**/*.mjs'", | ||
"posttest": "standard ." | ||
}, | ||
@@ -34,2 +35,3 @@ "repository": { | ||
"devDependencies": { | ||
"standard": "^17.0.0", | ||
"tape": "^5.5.3" | ||
@@ -36,0 +38,0 @@ }, |
@@ -1,2 +0,2 @@ | ||
import Writer from "../encode/writer.mjs" | ||
import Writer from '../encode/writer.js' | ||
import test from 'tape' | ||
@@ -3,0 +3,0 @@ |
Major refactor
Supply chain risk: The package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but they do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
20413
494
0
2