@solana/codecs-data-structures
Comparing version 2.0.0-experimental.f7d1af1 to 2.0.0-experimental.fb88a79
@@ -1,2 +0,2 @@ | ||
import { mergeBytes, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertFixedSizeCodec, assertByteArrayIsNotEmptyForCodec, fixEncoder, fixDecoder, fixBytes } from '@solana/codecs-core'; | ||
import { assertIsFixedSize, createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core'; | ||
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers'; | ||
@@ -6,3 +6,8 @@ | ||
// src/utils.ts | ||
// src/assertions.ts | ||
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) { | ||
if (expected !== actual) { | ||
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`); | ||
} | ||
} | ||
function maxCodecSizes(sizes) { | ||
@@ -17,5 +22,69 @@ return sizes.reduce( | ||
} | ||
function getFixedSize(codec) { | ||
return isFixedSize(codec) ? codec.fixedSize : null; | ||
} | ||
function getMaxSize(codec) { | ||
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null; | ||
} | ||
// src/array-like-codec-size.ts | ||
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) { | ||
// src/array.ts | ||
function getArrayEncoder(item, config = {}) { | ||
const size = config.size ?? getU32Encoder(); | ||
if (size === "remainder") { | ||
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item)); | ||
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0; | ||
return createEncoder({ | ||
...fixedSize !== null ? { fixedSize } : { | ||
getSizeFromValue: (array) => { | ||
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0; | ||
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0); | ||
}, | ||
maxSize | ||
}, | ||
write: (array, bytes, offset) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("array", size, array.length); | ||
} | ||
if (typeof size === "object") { | ||
offset = size.write(array.length, bytes, offset); | ||
} | ||
array.forEach((value) => { | ||
offset = item.write(value, bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}); | ||
} | ||
function getArrayDecoder(item, config = {}) { | ||
const size = config.size ?? getU32Decoder(); | ||
if (size === "remainder") { | ||
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const itemSize = getFixedSize(item); | ||
const fixedSize = computeArrayLikeCodecSize(size, itemSize); | ||
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0; | ||
return createDecoder({ | ||
...fixedSize !== null ? { fixedSize } : { maxSize }, | ||
read: (bytes, offset) => { | ||
const array = []; | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [array, offset]; | ||
} | ||
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.read(bytes, offset); | ||
offset = newOffset2; | ||
array.push(value); | ||
} | ||
return [array, offset]; | ||
} | ||
}); | ||
} | ||
function getArrayCodec(item, config = {}) { | ||
return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config)); | ||
} | ||
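// Usage sketch (illustrative only, not part of the published bundle; `example*` names are placeholders,
// all other functions are the ones defined above). In the new API, `encode` returns a Uint8Array and
// `decode` returns the value itself.
const exampleU8 = combineCodec(getU8Encoder(), getU8Decoder());
const exampleFixedArray = getArrayCodec(exampleU8, { size: 3 }); // fixed item count, no length prefix
exampleFixedArray.encode([1, 2, 3]); // => Uint8Array [1, 2, 3]
const examplePrefixedArray = getArrayCodec(exampleU8); // default: u32 length prefix
examplePrefixedArray.decode(new Uint8Array([2, 0, 0, 0, 7, 8])); // => [7, 8]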
function readArrayLikeCodecSize(size, itemSize, bytes, offset) { | ||
if (typeof size === "number") { | ||
@@ -25,23 +94,19 @@ return [size, offset]; | ||
if (typeof size === "object") { | ||
return size.decode(bytes, offset); | ||
return size.read(bytes, offset); | ||
} | ||
if (size === "remainder") { | ||
const childrenSize = sumCodecSizes(childrenSizes); | ||
if (childrenSize === null) { | ||
if (itemSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const remainder = bytes.slice(offset).length; | ||
if (remainder % childrenSize !== 0) { | ||
const remainder = Math.max(0, bytes.length - offset); | ||
if (remainder % itemSize !== 0) { | ||
throw new Error( | ||
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.` | ||
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.` | ||
); | ||
} | ||
return [remainder / childrenSize, offset]; | ||
return [remainder / itemSize, offset]; | ||
} | ||
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`); | ||
} | ||
function getArrayLikeCodecSizeDescription(size) { | ||
return typeof size === "object" ? size.description : `${size}`; | ||
} | ||
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) { | ||
function computeArrayLikeCodecSize(size, itemSize) { | ||
if (typeof size !== "number") | ||
@@ -51,70 +116,11 @@ return null; | ||
return 0; | ||
const childrenSize = sumCodecSizes(childrenSizes); | ||
return childrenSize === null ? null : childrenSize * size; | ||
return itemSize === null ? null : itemSize * size; | ||
} | ||
function getArrayLikeCodecSizePrefix(size, realSize) { | ||
return typeof size === "object" ? size.encode(realSize) : new Uint8Array(); | ||
} | ||
// src/assertions.ts | ||
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) { | ||
if (expected !== actual) { | ||
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`); | ||
} | ||
} | ||
// src/array.ts | ||
function arrayCodecHelper(item, size, description) { | ||
if (size === "remainder" && item.fixedSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
return { | ||
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]) | ||
}; | ||
} | ||
function getArrayEncoder(item, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...arrayCodecHelper(item, size, options.description), | ||
encode: (value) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("array", size, value.length); | ||
} | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]); | ||
} | ||
}; | ||
} | ||
function getArrayDecoder(item, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...arrayCodecHelper(item, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [[], offset]; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset); | ||
offset = newOffset; | ||
const values = []; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.decode(bytes, offset); | ||
values.push(value); | ||
offset = newOffset2; | ||
} | ||
return [values, offset]; | ||
} | ||
}; | ||
} | ||
function getArrayCodec(item, options = {}) { | ||
return combineCodec(getArrayEncoder(item, options), getArrayDecoder(item, options)); | ||
} | ||
var getBitArrayEncoder = (size, options = {}) => { | ||
const parsedOptions = typeof options === "boolean" ? { backward: options } : options; | ||
const backward = parsedOptions.backward ?? false; | ||
const backwardSuffix = backward ? "; backward" : ""; | ||
return { | ||
description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`, | ||
encode(value) { | ||
const bytes = []; | ||
function getBitArrayEncoder(size, config = {}) { | ||
const parsedConfig = typeof config === "boolean" ? { backward: config } : config; | ||
const backward = parsedConfig.backward ?? false; | ||
return createEncoder({ | ||
fixedSize: size, | ||
write(value, bytes, offset) { | ||
const bytesToAdd = []; | ||
for (let i = 0; i < size; i += 1) { | ||
@@ -127,19 +133,18 @@ let byte = 0; | ||
if (backward) { | ||
bytes.unshift(byte); | ||
bytesToAdd.unshift(byte); | ||
} else { | ||
bytes.push(byte); | ||
bytesToAdd.push(byte); | ||
} | ||
} | ||
return new Uint8Array(bytes); | ||
}, | ||
bytes.set(bytesToAdd, offset); | ||
return size; | ||
} | ||
}); | ||
} | ||
function getBitArrayDecoder(size, config = {}) { | ||
const parsedConfig = typeof config === "boolean" ? { backward: config } : config; | ||
const backward = parsedConfig.backward ?? false; | ||
return createDecoder({ | ||
fixedSize: size, | ||
maxSize: size | ||
}; | ||
}; | ||
var getBitArrayDecoder = (size, options = {}) => { | ||
const parsedOptions = typeof options === "boolean" ? { backward: options } : options; | ||
const backward = parsedOptions.backward ?? false; | ||
const backwardSuffix = backward ? "; backward" : ""; | ||
return { | ||
decode(bytes, offset = 0) { | ||
read(bytes, offset) { | ||
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset); | ||
@@ -161,46 +166,30 @@ const booleans = []; | ||
return [booleans, offset + size]; | ||
}, | ||
description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`, | ||
fixedSize: size, | ||
maxSize: size | ||
}; | ||
}; | ||
var getBitArrayCodec = (size, options = {}) => combineCodec(getBitArrayEncoder(size, options), getBitArrayDecoder(size, options)); | ||
function getBooleanEncoder(options = {}) { | ||
const size = options.size ?? getU8Encoder(); | ||
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size."); | ||
return { | ||
description: options.description ?? `bool(${size.description})`, | ||
encode: (value) => size.encode(value ? 1 : 0), | ||
fixedSize: size.fixedSize, | ||
maxSize: size.fixedSize | ||
}; | ||
} | ||
}); | ||
} | ||
function getBooleanDecoder(options = {}) { | ||
const size = options.size ?? getU8Decoder(); | ||
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size."); | ||
return { | ||
decode: (bytes, offset = 0) => { | ||
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset); | ||
const [value, vOffset] = size.decode(bytes, offset); | ||
return [value === 1, vOffset]; | ||
}, | ||
description: options.description ?? `bool(${size.description})`, | ||
fixedSize: size.fixedSize, | ||
maxSize: size.fixedSize | ||
}; | ||
function getBitArrayCodec(size, config = {}) { | ||
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config)); | ||
} | ||
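// Usage sketch (illustrative only; `example*` names are placeholders): `size` is a number of bytes and
// each byte packs eight booleans, most-significant bit first unless `backward` is set.
const exampleBits = getBitArrayCodec(1);
exampleBits.encode([true, false, true]); // => Uint8Array [0b10100000] — missing booleans default to false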
function getBooleanCodec(options = {}) { | ||
return combineCodec(getBooleanEncoder(options), getBooleanDecoder(options)); | ||
function getBooleanEncoder(config = {}) { | ||
const size = config.size ?? getU8Encoder(); | ||
assertIsFixedSize(size, "Codec [bool] requires a fixed size."); | ||
return mapEncoder(size, (value) => value ? 1 : 0); | ||
} | ||
function getBytesEncoder(options = {}) { | ||
const size = options.size ?? "variable"; | ||
const sizeDescription = typeof size === "object" ? size.description : `${size}`; | ||
const description = options.description ?? `bytes(${sizeDescription})`; | ||
const byteEncoder = { | ||
description, | ||
encode: (value) => value, | ||
fixedSize: null, | ||
maxSize: null | ||
}; | ||
function getBooleanDecoder(config = {}) { | ||
const size = config.size ?? getU8Decoder(); | ||
assertIsFixedSize(size, "Codec [bool] requires a fixed size."); | ||
return mapDecoder(size, (value) => Number(value) === 1); | ||
} | ||
function getBooleanCodec(config = {}) { | ||
return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config)); | ||
} | ||
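// Usage sketch (illustrative only; `example*` names are placeholders): a boolean is stored in a
// number prefix (u8 by default), which must be fixed-size.
const exampleBool = getBooleanCodec();
exampleBool.encode(true);                // => Uint8Array [1]
exampleBool.decode(new Uint8Array([0])); // => false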
function getBytesEncoder(config = {}) { | ||
const size = config.size ?? "variable"; | ||
const byteEncoder = createEncoder({ | ||
getSizeFromValue: (value) => value.length, | ||
write: (value, bytes, offset) => { | ||
bytes.set(value, offset); | ||
return offset + value.length; | ||
} | ||
}); | ||
if (size === "variable") { | ||
@@ -210,26 +199,20 @@ return byteEncoder; | ||
if (typeof size === "number") { | ||
return fixEncoder(byteEncoder, size, description); | ||
return fixEncoder(byteEncoder, size); | ||
} | ||
return { | ||
...byteEncoder, | ||
encode: (value) => { | ||
const contentBytes = byteEncoder.encode(value); | ||
const lengthBytes = size.encode(contentBytes.length); | ||
return mergeBytes([lengthBytes, contentBytes]); | ||
return createEncoder({ | ||
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length, | ||
write: (value, bytes, offset) => { | ||
offset = size.write(value.length, bytes, offset); | ||
return byteEncoder.write(value, bytes, offset); | ||
} | ||
}; | ||
}); | ||
} | ||
function getBytesDecoder(options = {}) { | ||
const size = options.size ?? "variable"; | ||
const sizeDescription = typeof size === "object" ? size.description : `${size}`; | ||
const description = options.description ?? `bytes(${sizeDescription})`; | ||
const byteDecoder = { | ||
decode: (bytes, offset = 0) => { | ||
function getBytesDecoder(config = {}) { | ||
const size = config.size ?? "variable"; | ||
const byteDecoder = createDecoder({ | ||
read: (bytes, offset) => { | ||
const slice = bytes.slice(offset); | ||
return [slice, offset + slice.length]; | ||
}, | ||
description, | ||
fixedSize: null, | ||
maxSize: null | ||
}; | ||
} | ||
}); | ||
if (size === "variable") { | ||
@@ -239,9 +222,8 @@ return byteDecoder; | ||
if (typeof size === "number") { | ||
return fixDecoder(byteDecoder, size, description); | ||
return fixDecoder(byteDecoder, size); | ||
} | ||
return { | ||
...byteDecoder, | ||
decode: (bytes, offset = 0) => { | ||
return createDecoder({ | ||
read: (bytes, offset) => { | ||
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset); | ||
const [lengthBigInt, lengthOffset] = size.decode(bytes, offset); | ||
const [lengthBigInt, lengthOffset] = size.read(bytes, offset); | ||
const length = Number(lengthBigInt); | ||
@@ -251,47 +233,39 @@ offset = lengthOffset; | ||
assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes); | ||
const [value, contentOffset] = byteDecoder.decode(contentBytes); | ||
const [value, contentOffset] = byteDecoder.read(contentBytes, 0); | ||
offset += contentOffset; | ||
return [value, offset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getBytesCodec(options = {}) { | ||
return combineCodec(getBytesEncoder(options), getBytesDecoder(options)); | ||
function getBytesCodec(config = {}) { | ||
return combineCodec(getBytesEncoder(config), getBytesDecoder(config)); | ||
} | ||
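// Usage sketch (illustrative only; `example*` names are placeholders): `size` may be "variable"
// (the default), a fixed byte length, or a number codec used as a length prefix.
const exampleU32 = combineCodec(getU32Encoder(), getU32Decoder());
const examplePrefixedBytes = getBytesCodec({ size: exampleU32 });
examplePrefixedBytes.encode(new Uint8Array([1, 2])); // => Uint8Array [2, 0, 0, 0, 1, 2]
const examplePaddedBytes = getBytesCodec({ size: 5 });
examplePaddedBytes.encode(new Uint8Array([1, 2, 3])); // => Uint8Array [1, 2, 3, 0, 0]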
function dataEnumCodecHelper(variants, prefix, description) { | ||
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", "); | ||
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize); | ||
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null; | ||
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize)); | ||
return { | ||
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`, | ||
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]), | ||
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize]) | ||
}; | ||
} | ||
function getDataEnumEncoder(variants, options = {}) { | ||
const prefix = options.size ?? getU8Encoder(); | ||
return { | ||
...dataEnumCodecHelper(variants, prefix, options.description), | ||
encode: (variant) => { | ||
const discriminator = variants.findIndex(([key]) => variant.__kind === key); | ||
if (discriminator < 0) { | ||
throw new Error( | ||
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".` | ||
); | ||
} | ||
const variantPrefix = prefix.encode(discriminator); | ||
const variantSerializer = variants[discriminator][1]; | ||
const variantBytes = variantSerializer.encode(variant); | ||
return mergeBytes([variantPrefix, variantBytes]); | ||
function getDataEnumEncoder(variants, config = {}) { | ||
const prefix = config.size ?? getU8Encoder(); | ||
const fixedSize = getDataEnumFixedSize(variants, prefix); | ||
return createEncoder({ | ||
...fixedSize !== null ? { fixedSize } : { | ||
getSizeFromValue: (variant) => { | ||
const discriminator = getVariantDiscriminator(variants, variant); | ||
const variantEncoder = variants[discriminator][1]; | ||
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder); | ||
}, | ||
maxSize: getDataEnumMaxSize(variants, prefix) | ||
}, | ||
write: (variant, bytes, offset) => { | ||
const discriminator = getVariantDiscriminator(variants, variant); | ||
offset = prefix.write(discriminator, bytes, offset); | ||
const variantEncoder = variants[discriminator][1]; | ||
return variantEncoder.write(variant, bytes, offset); | ||
} | ||
}; | ||
}); | ||
} | ||
function getDataEnumDecoder(variants, options = {}) { | ||
const prefix = options.size ?? getU8Decoder(); | ||
return { | ||
...dataEnumCodecHelper(variants, prefix, options.description), | ||
decode: (bytes, offset = 0) => { | ||
function getDataEnumDecoder(variants, config = {}) { | ||
const prefix = config.size ?? getU8Decoder(); | ||
const fixedSize = getDataEnumFixedSize(variants, prefix); | ||
return createDecoder({ | ||
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) }, | ||
read: (bytes, offset) => { | ||
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset); | ||
const [discriminator, dOffset] = prefix.decode(bytes, offset); | ||
const [discriminator, dOffset] = prefix.read(bytes, offset); | ||
offset = dOffset; | ||
@@ -304,121 +278,192 @@ const variantField = variants[Number(discriminator)] ?? null; | ||
} | ||
const [variant, vOffset] = variantField[1].decode(bytes, offset); | ||
const [variant, vOffset] = variantField[1].read(bytes, offset); | ||
offset = vOffset; | ||
return [{ __kind: variantField[0], ...variant ?? {} }, offset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getDataEnumCodec(variants, options = {}) { | ||
return combineCodec(getDataEnumEncoder(variants, options), getDataEnumDecoder(variants, options)); | ||
function getDataEnumCodec(variants, config = {}) { | ||
return combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config)); | ||
} | ||
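// Usage sketch (illustrative only; `example*` names are placeholders): each variant pairs a name with
// a codec for its fields; the discriminator index is written first using the `size` prefix (u8 by default).
const exampleMessage = getDataEnumCodec([
  ['quit', getUnitCodec()],
  ['move', getStructCodec([['x', combineCodec(getU8Encoder(), getU8Decoder())]])],
]);
exampleMessage.encode({ __kind: 'move', x: 5 }); // => Uint8Array [1, 5]
exampleMessage.decode(new Uint8Array([0]));      // => { __kind: 'quit' }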
function mapCodecHelper(key, value, size, description) { | ||
if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
function getDataEnumFixedSize(variants, prefix) { | ||
if (variants.length === 0) | ||
return isFixedSize(prefix) ? prefix.fixedSize : null; | ||
if (!isFixedSize(variants[0][1])) | ||
return null; | ||
const variantSize = variants[0][1].fixedSize; | ||
const sameSizedVariants = variants.every( | ||
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize | ||
); | ||
if (!sameSizedVariants) | ||
return null; | ||
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null; | ||
} | ||
function getDataEnumMaxSize(variants, prefix) { | ||
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec))); | ||
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0; | ||
} | ||
function getVariantDiscriminator(variants, variant) { | ||
const discriminator = variants.findIndex(([key]) => variant.__kind === key); | ||
if (discriminator < 0) { | ||
throw new Error( | ||
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".` | ||
); | ||
} | ||
return { | ||
description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize]) | ||
}; | ||
return discriminator; | ||
} | ||
function getMapEncoder(key, value, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...mapCodecHelper(key, value, size, options.description), | ||
encode: (map) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("map", size, map.size); | ||
} | ||
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)])); | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]); | ||
function getTupleEncoder(items) { | ||
const fixedSize = sumCodecSizes(items.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0; | ||
return createEncoder({ | ||
...fixedSize === null ? { | ||
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0), | ||
maxSize | ||
} : { fixedSize }, | ||
write: (value, bytes, offset) => { | ||
assertValidNumberOfItemsForCodec("tuple", items.length, value.length); | ||
items.forEach((item, index) => { | ||
offset = item.write(value[index], bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getMapDecoder(key, value, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...mapCodecHelper(key, value, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const map = /* @__PURE__ */ new Map(); | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [map, offset]; | ||
function getTupleDecoder(items) { | ||
const fixedSize = sumCodecSizes(items.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0; | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
const values = []; | ||
items.forEach((item) => { | ||
const [newValue, newOffset] = item.read(bytes, offset); | ||
values.push(newValue); | ||
offset = newOffset; | ||
}); | ||
return [values, offset]; | ||
} | ||
}); | ||
} | ||
function getTupleCodec(items) { | ||
return combineCodec( | ||
getTupleEncoder(items), | ||
getTupleDecoder(items) | ||
); | ||
} | ||
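// Usage sketch (illustrative only; `example*` names are placeholders): a tuple concatenates
// heterogeneous items in declaration order.
const examplePair = getTupleCodec([combineCodec(getU8Encoder(), getU8Decoder()), getBooleanCodec()]);
examplePair.encode([42, true]);             // => Uint8Array [42, 1]
examplePair.decode(new Uint8Array([7, 0])); // => [7, false]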
// src/map.ts | ||
function getMapEncoder(key, value, config = {}) { | ||
return mapEncoder( | ||
getArrayEncoder(getTupleEncoder([key, value]), config), | ||
(map) => [...map.entries()] | ||
); | ||
} | ||
function getMapDecoder(key, value, config = {}) { | ||
return mapDecoder( | ||
getArrayDecoder(getTupleDecoder([key, value]), config), | ||
(entries) => new Map(entries) | ||
); | ||
} | ||
function getMapCodec(key, value, config = {}) { | ||
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config)); | ||
} | ||
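// Usage sketch (illustrative only; `example*` names are placeholders): a map is now just an array of
// [key, value] tuples, so it accepts the same `size` config as the array codec.
const exampleMap = getMapCodec(combineCodec(getU8Encoder(), getU8Decoder()), getBooleanCodec());
exampleMap.encode(new Map([[1, true]]));         // => Uint8Array [1, 0, 0, 0, 1, 1]
exampleMap.decode(new Uint8Array([0, 0, 0, 0])); // => Map(0) {}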
function getNullableEncoder(item, config = {}) { | ||
const prefix = config.prefix ?? getU8Encoder(); | ||
const fixed = config.fixed ?? false; | ||
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0; | ||
if (fixed || isZeroSizeItem) { | ||
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
const fixedSize = prefix.fixedSize + item.fixedSize; | ||
return createEncoder({ | ||
fixedSize, | ||
write: (option, bytes, offset) => { | ||
const prefixOffset = prefix.write(Number(option !== null), bytes, offset); | ||
if (option !== null) { | ||
item.write(option, bytes, prefixOffset); | ||
} | ||
return offset + fixedSize; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize( | ||
size, | ||
[key.fixedSize, value.fixedSize], | ||
bytes, | ||
offset | ||
); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [decodedKey, kOffset] = key.decode(bytes, offset); | ||
offset = kOffset; | ||
const [decodedValue, vOffset] = value.decode(bytes, offset); | ||
offset = vOffset; | ||
map.set(decodedKey, decodedValue); | ||
}); | ||
} | ||
return createEncoder({ | ||
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0), | ||
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0, | ||
write: (option, bytes, offset) => { | ||
offset = prefix.write(Number(option !== null), bytes, offset); | ||
if (option !== null) { | ||
offset = item.write(option, bytes, offset); | ||
} | ||
return [map, offset]; | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getMapCodec(key, value, options = {}) { | ||
return combineCodec(getMapEncoder(key, value, options), getMapDecoder(key, value, options)); | ||
} | ||
function nullableCodecHelper(item, prefix, fixed, description) { | ||
let descriptionSuffix = `; ${prefix.description}`; | ||
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null; | ||
if (fixed) { | ||
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
descriptionSuffix += "; fixed"; | ||
function getNullableDecoder(item, config = {}) { | ||
const prefix = config.prefix ?? getU8Decoder(); | ||
const fixed = config.fixed ?? false; | ||
let fixedSize = null; | ||
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0; | ||
if (fixed || isZeroSizeItem) { | ||
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
fixedSize = prefix.fixedSize + item.fixedSize; | ||
} | ||
return { | ||
description: description ?? `nullable(${item.description + descriptionSuffix})`, | ||
fixedSize, | ||
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize]) | ||
}; | ||
} | ||
function getNullableEncoder(item, options = {}) { | ||
const prefix = options.prefix ?? getU8Encoder(); | ||
const fixed = options.fixed ?? false; | ||
return { | ||
...nullableCodecHelper(item, prefix, fixed, options.description), | ||
encode: (option) => { | ||
const prefixByte = prefix.encode(Number(option !== null)); | ||
let itemBytes = option !== null ? item.encode(option) : new Uint8Array(); | ||
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes; | ||
return mergeBytes([prefixByte, itemBytes]); | ||
} | ||
}; | ||
} | ||
function getNullableDecoder(item, options = {}) { | ||
const prefix = options.prefix ?? getU8Decoder(); | ||
const fixed = options.fixed ?? false; | ||
return { | ||
...nullableCodecHelper(item, prefix, fixed, options.description), | ||
decode: (bytes, offset = 0) => { | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
if (bytes.length - offset <= 0) { | ||
return [null, offset]; | ||
} | ||
const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0); | ||
const [isSome, prefixOffset] = prefix.decode(bytes, offset); | ||
offset = prefixOffset; | ||
const [isSome, prefixOffset] = prefix.read(bytes, offset); | ||
if (isSome === 0) { | ||
return [null, fixed ? fixedOffset : offset]; | ||
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset]; | ||
} | ||
const [value, newOffset] = item.decode(bytes, offset); | ||
offset = newOffset; | ||
return [value, fixed ? fixedOffset : offset]; | ||
const [value, newOffset] = item.read(bytes, prefixOffset); | ||
return [value, fixedSize !== null ? offset + fixedSize : newOffset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getNullableCodec(item, options = {}) { | ||
return combineCodec(getNullableEncoder(item, options), getNullableDecoder(item, options)); | ||
function getNullableCodec(item, config = {}) { | ||
const configCast = config; | ||
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast)); | ||
} | ||
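// Usage sketch (illustrative only; `example*` names are placeholders): a u8 "is some" prefix is followed
// by the item when non-null; set `fixed: true` to always reserve the item's fixed size after the prefix.
const exampleNullable = getNullableCodec(combineCodec(getU8Encoder(), getU8Decoder()));
exampleNullable.encode(42);                  // => Uint8Array [1, 42]
exampleNullable.encode(null);                // => Uint8Array [0]
exampleNullable.decode(new Uint8Array([0])); // => null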
function scalarEnumCoderHelper(constructor, prefix, description) { | ||
function getScalarEnumEncoder(constructor, config = {}) { | ||
const prefix = config.size ?? getU8Encoder(); | ||
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor); | ||
return mapEncoder(prefix, (value) => { | ||
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange); | ||
const isInvalidString = typeof value === "string" && !stringValues.includes(value); | ||
if (isInvalidNumber || isInvalidString) { | ||
throw new Error( | ||
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".` | ||
); | ||
} | ||
if (typeof value === "number") | ||
return value; | ||
const valueIndex = enumValues.indexOf(value); | ||
if (valueIndex >= 0) | ||
return valueIndex; | ||
return enumKeys.indexOf(value); | ||
}); | ||
} | ||
function getScalarEnumDecoder(constructor, config = {}) { | ||
const prefix = config.size ?? getU8Decoder(); | ||
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor); | ||
return mapDecoder(prefix, (value) => { | ||
const valueAsNumber = Number(value); | ||
if (valueAsNumber < minRange || valueAsNumber > maxRange) { | ||
throw new Error( | ||
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.` | ||
); | ||
} | ||
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber]; | ||
}); | ||
} | ||
function getScalarEnumCodec(constructor, config = {}) { | ||
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config)); | ||
} | ||
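// Usage sketch (illustrative only; `Example*` names are placeholders): accepts a TypeScript-style enum
// object and encodes the variant index into the number prefix (u8 by default).
const ExampleDirection = { Left: 'LEFT', Right: 'RIGHT' }; // runtime shape of a string enum
const exampleDirection = getScalarEnumCodec(ExampleDirection);
exampleDirection.encode(ExampleDirection.Right); // => Uint8Array [1]
exampleDirection.decode(new Uint8Array([0]));    // => 'LEFT'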
function getScalarEnumStats(constructor) { | ||
const enumKeys = Object.keys(constructor); | ||
const enumValues = Object.values(constructor); | ||
const isNumericEnum = enumValues.some((v) => typeof v === "number"); | ||
const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", "); | ||
const minRange = 0; | ||
@@ -428,9 +473,6 @@ const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1; | ||
return { | ||
description: description ?? `enum(${valueDescriptions}; ${prefix.description})`, | ||
enumKeys, | ||
enumValues, | ||
fixedSize: prefix.fixedSize, | ||
isNumericEnum, | ||
maxRange, | ||
maxSize: prefix.maxSize, | ||
minRange, | ||
@@ -440,124 +482,38 @@ stringValues | ||
} | ||
function getScalarEnumEncoder(constructor, options = {}) { | ||
const prefix = options.size ?? getU8Encoder(); | ||
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, options.description); | ||
return { | ||
description, | ||
encode: (value) => { | ||
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange); | ||
const isInvalidString = typeof value === "string" && !stringValues.includes(value); | ||
if (isInvalidNumber || isInvalidString) { | ||
throw new Error( | ||
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".` | ||
); | ||
} | ||
if (typeof value === "number") | ||
return prefix.encode(value); | ||
const valueIndex = enumValues.indexOf(value); | ||
if (valueIndex >= 0) | ||
return prefix.encode(valueIndex); | ||
return prefix.encode(enumKeys.indexOf(value)); | ||
}, | ||
fixedSize, | ||
maxSize | ||
}; | ||
function getSetEncoder(item, config = {}) { | ||
return mapEncoder(getArrayEncoder(item, config), (set) => [...set]); | ||
} | ||
function getScalarEnumDecoder(constructor, options = {}) { | ||
const prefix = options.size ?? getU8Decoder(); | ||
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper( | ||
constructor, | ||
prefix, | ||
options.description | ||
); | ||
return { | ||
decode: (bytes, offset = 0) => { | ||
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset); | ||
const [value, newOffset] = prefix.decode(bytes, offset); | ||
const valueAsNumber = Number(value); | ||
offset = newOffset; | ||
if (valueAsNumber < minRange || valueAsNumber > maxRange) { | ||
throw new Error( | ||
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.` | ||
); | ||
} | ||
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset]; | ||
}, | ||
description, | ||
fixedSize, | ||
maxSize | ||
}; | ||
function getSetDecoder(item, config = {}) { | ||
return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries)); | ||
} | ||
function getScalarEnumCodec(constructor, options = {}) { | ||
return combineCodec(getScalarEnumEncoder(constructor, options), getScalarEnumDecoder(constructor, options)); | ||
function getSetCodec(item, config = {}) { | ||
return combineCodec(getSetEncoder(item, config), getSetDecoder(item, config)); | ||
} | ||
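// Usage sketch (illustrative only; `example*` names are placeholders): a set reuses the array codec
// and converts to and from `Set`.
const exampleSet = getSetCodec(combineCodec(getU8Encoder(), getU8Decoder()), { size: 'remainder' });
exampleSet.encode(new Set([1, 2, 3]));     // => Uint8Array [1, 2, 3] (no length prefix)
exampleSet.decode(new Uint8Array([4, 5])); // => Set(2) { 4, 5 }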
function setCodecHelper(item, size, description) { | ||
if (size === "remainder" && item.fixedSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
return { | ||
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]) | ||
}; | ||
} | ||
function getSetEncoder(item, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...setCodecHelper(item, size, options.description), | ||
encode: (set) => { | ||
if (typeof size === "number" && set.size !== size) { | ||
assertValidNumberOfItemsForCodec("set", size, set.size); | ||
} | ||
const itemBytes = Array.from(set, (value) => item.encode(value)); | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]); | ||
function getStructEncoder(fields) { | ||
const fieldCodecs = fields.map(([, codec]) => codec); | ||
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0; | ||
return createEncoder({ | ||
...fixedSize === null ? { | ||
getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0), | ||
maxSize | ||
} : { fixedSize }, | ||
write: (struct, bytes, offset) => { | ||
fields.forEach(([key, codec]) => { | ||
offset = codec.write(struct[key], bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getSetDecoder(item, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...setCodecHelper(item, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const set = /* @__PURE__ */ new Set(); | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [set, offset]; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.decode(bytes, offset); | ||
offset = newOffset2; | ||
set.add(value); | ||
} | ||
return [set, offset]; | ||
} | ||
}; | ||
} | ||
function getSetCodec(item, options = {}) { | ||
return combineCodec(getSetEncoder(item, options), getSetDecoder(item, options)); | ||
} | ||
function structCodecHelper(fields, description) { | ||
const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", "); | ||
return { | ||
description: description ?? `struct(${fieldDescriptions})`, | ||
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)), | ||
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize)) | ||
}; | ||
} | ||
function getStructEncoder(fields, options = {}) { | ||
return { | ||
...structCodecHelper(fields, options.description), | ||
encode: (struct) => { | ||
const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key])); | ||
return mergeBytes(fieldBytes); | ||
} | ||
}; | ||
} | ||
function getStructDecoder(fields, options = {}) { | ||
return { | ||
...structCodecHelper(fields, options.description), | ||
decode: (bytes, offset = 0) => { | ||
function getStructDecoder(fields) { | ||
const fieldCodecs = fields.map(([, codec]) => codec); | ||
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0; | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
const struct = {}; | ||
fields.forEach(([key, codec]) => { | ||
const [value, newOffset] = codec.decode(bytes, offset); | ||
const [value, newOffset] = codec.read(bytes, offset); | ||
offset = newOffset; | ||
@@ -568,66 +524,25 @@ struct[key] = value; | ||
} | ||
}; | ||
}); | ||
} | ||
function getStructCodec(fields, options = {}) { | ||
return combineCodec(getStructEncoder(fields, options), getStructDecoder(fields, options)); | ||
function getStructCodec(fields) { | ||
return combineCodec(getStructEncoder(fields), getStructDecoder(fields)); | ||
} | ||
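// Usage sketch (illustrative only; `example*` names are placeholders): fields are encoded one after
// the other in declaration order.
const examplePerson = getStructCodec([
  ['age', combineCodec(getU8Encoder(), getU8Decoder())],
  ['registered', getBooleanCodec()],
]);
examplePerson.encode({ age: 42, registered: true }); // => Uint8Array [42, 1]
examplePerson.decode(new Uint8Array([30, 0]));       // => { age: 30, registered: false }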
function tupleCodecHelper(items, description) { | ||
const itemDescriptions = items.map((item) => item.description).join(", "); | ||
return { | ||
description: description ?? `tuple(${itemDescriptions})`, | ||
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)), | ||
maxSize: sumCodecSizes(items.map((item) => item.maxSize)) | ||
}; | ||
} | ||
function getTupleEncoder(items, options = {}) { | ||
return { | ||
...tupleCodecHelper(items, options.description), | ||
encode: (value) => { | ||
assertValidNumberOfItemsForCodec("tuple", items.length, value.length); | ||
return mergeBytes(items.map((item, index) => item.encode(value[index]))); | ||
} | ||
}; | ||
} | ||
function getTupleDecoder(items, options = {}) { | ||
return { | ||
...tupleCodecHelper(items, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const values = []; | ||
items.forEach((codec) => { | ||
const [newValue, newOffset] = codec.decode(bytes, offset); | ||
values.push(newValue); | ||
offset = newOffset; | ||
}); | ||
return [values, offset]; | ||
} | ||
}; | ||
} | ||
function getTupleCodec(items, options = {}) { | ||
return combineCodec( | ||
getTupleEncoder(items, options), | ||
getTupleDecoder(items, options) | ||
); | ||
} | ||
function getUnitEncoder(options = {}) { | ||
return { | ||
description: options.description ?? "unit", | ||
encode: () => new Uint8Array(), | ||
function getUnitEncoder() { | ||
return createEncoder({ | ||
fixedSize: 0, | ||
maxSize: 0 | ||
}; | ||
write: (_value, _bytes, offset) => offset | ||
}); | ||
} | ||
function getUnitDecoder(options = {}) { | ||
return { | ||
decode: (_bytes, offset = 0) => [void 0, offset], | ||
description: options.description ?? "unit", | ||
function getUnitDecoder() { | ||
return createDecoder({ | ||
fixedSize: 0, | ||
maxSize: 0 | ||
}; | ||
read: (_bytes, offset) => [void 0, offset] | ||
}); | ||
} | ||
function getUnitCodec(options = {}) { | ||
return combineCodec(getUnitEncoder(options), getUnitDecoder(options)); | ||
function getUnitCodec() { | ||
return combineCodec(getUnitEncoder(), getUnitDecoder()); | ||
} | ||
export { assertValidNumberOfItemsForCodec, decodeArrayLikeCodecSize, getArrayCodec, getArrayDecoder, getArrayEncoder, getArrayLikeCodecSizeDescription, getArrayLikeCodecSizeFromChildren, getArrayLikeCodecSizePrefix, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder }; | ||
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder }; | ||
//# sourceMappingURL=out.js.map | ||
//# sourceMappingURL=index.browser.js.map | ||
@@ -17,24 +17,2 @@ this.globalThis = this.globalThis || {}; | ||
} | ||
function assertFixedSizeCodec(data, message) { | ||
if (data.fixedSize === null) { | ||
throw new Error(message ?? "Expected a fixed-size codec, got a variable-size one."); | ||
} | ||
} | ||
var mergeBytes = (byteArrays) => { | ||
const nonEmptyByteArrays = byteArrays.filter((arr) => arr.length); | ||
if (nonEmptyByteArrays.length === 0) { | ||
return byteArrays.length ? byteArrays[0] : new Uint8Array(); | ||
} | ||
if (nonEmptyByteArrays.length === 1) { | ||
return nonEmptyByteArrays[0]; | ||
} | ||
const totalLength = nonEmptyByteArrays.reduce((total, arr) => total + arr.length, 0); | ||
const result = new Uint8Array(totalLength); | ||
let offset = 0; | ||
nonEmptyByteArrays.forEach((arr) => { | ||
result.set(arr, offset); | ||
offset += arr.length; | ||
}); | ||
return result; | ||
}; | ||
var padBytes = (bytes, length) => { | ||
@@ -48,4 +26,37 @@ if (bytes.length >= length) | ||
var fixBytes = (bytes, length) => padBytes(bytes.length <= length ? bytes : bytes.slice(0, length), length); | ||
function combineCodec(encoder, decoder, description) { | ||
if (encoder.fixedSize !== decoder.fixedSize) { | ||
function getEncodedSize(value, encoder) { | ||
return "fixedSize" in encoder ? encoder.fixedSize : encoder.getSizeFromValue(value); | ||
} | ||
function createEncoder(encoder) { | ||
return Object.freeze({ | ||
...encoder, | ||
encode: (value) => { | ||
const bytes = new Uint8Array(getEncodedSize(value, encoder)); | ||
encoder.write(value, bytes, 0); | ||
return bytes; | ||
} | ||
}); | ||
} | ||
function createDecoder(decoder) { | ||
return Object.freeze({ | ||
...decoder, | ||
decode: (bytes, offset = 0) => decoder.read(bytes, offset)[0] | ||
}); | ||
} | ||
function isFixedSize(codec) { | ||
return "fixedSize" in codec && typeof codec.fixedSize === "number"; | ||
} | ||
function assertIsFixedSize(codec, message) { | ||
if (!isFixedSize(codec)) { | ||
throw new Error(message != null ? message : "Expected a fixed-size codec, got a variable-size one."); | ||
} | ||
} | ||
function isVariableSize(codec) { | ||
return !isFixedSize(codec); | ||
} | ||
function combineCodec(encoder, decoder) { | ||
if (isFixedSize(encoder) !== isFixedSize(decoder)) { | ||
throw new Error(`Encoder and decoder must either both be fixed-size or variable-size.`); | ||
} | ||
if (isFixedSize(encoder) && isFixedSize(decoder) && encoder.fixedSize !== decoder.fixedSize) { | ||
throw new Error( | ||
@@ -55,3 +66,3 @@ `Encoder and decoder must have the same fixed size, got [${encoder.fixedSize}] and [${decoder.fixedSize}].` | ||
} | ||
if (encoder.maxSize !== decoder.maxSize) { | ||
if (!isFixedSize(encoder) && !isFixedSize(decoder) && encoder.maxSize !== decoder.maxSize) { | ||
throw new Error( | ||
@@ -61,32 +72,26 @@ `Encoder and decoder must have the same max size, got [${encoder.maxSize}] and [${decoder.maxSize}].` | ||
} | ||
if (description === void 0 && encoder.description !== decoder.description) { | ||
throw new Error( | ||
`Encoder and decoder must have the same description, got [${encoder.description}] and [${decoder.description}]. Pass a custom description as a third argument if you want to override the description and bypass this error.` | ||
); | ||
} | ||
return { | ||
...decoder, | ||
...encoder, | ||
decode: decoder.decode, | ||
description: description ?? encoder.description, | ||
encode: encoder.encode, | ||
fixedSize: encoder.fixedSize, | ||
maxSize: encoder.maxSize | ||
read: decoder.read, | ||
write: encoder.write | ||
}; | ||
} | ||
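// Usage sketch (illustrative only; `example*` names are placeholders): a custom fixed-size codec
// built directly from the new `createEncoder`/`createDecoder`/`combineCodec` primitives.
const exampleCharEncoder = createEncoder({
  fixedSize: 1,
  write: (char, bytes, offset) => { bytes[offset] = char.charCodeAt(0); return offset + 1; },
});
const exampleCharDecoder = createDecoder({
  fixedSize: 1,
  read: (bytes, offset) => [String.fromCharCode(bytes[offset]), offset + 1],
});
const exampleCharCodec = combineCodec(exampleCharEncoder, exampleCharDecoder);
exampleCharCodec.encode('A');                  // => Uint8Array [65]
exampleCharCodec.decode(new Uint8Array([66])); // => 'B'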
function fixCodecHelper(data, fixedBytes, description) { | ||
return { | ||
description: description ?? `fixed(${fixedBytes}, ${data.description})`, | ||
function fixEncoder(encoder, fixedBytes) { | ||
return createEncoder({ | ||
fixedSize: fixedBytes, | ||
maxSize: fixedBytes | ||
}; | ||
write: (value, bytes, offset) => { | ||
const variableByteArray = encoder.encode(value); | ||
const fixedByteArray = variableByteArray.length > fixedBytes ? variableByteArray.slice(0, fixedBytes) : variableByteArray; | ||
bytes.set(fixedByteArray, offset); | ||
return offset + fixedBytes; | ||
} | ||
}); | ||
} | ||
function fixEncoder(encoder, fixedBytes, description) { | ||
return { | ||
...fixCodecHelper(encoder, fixedBytes, description), | ||
encode: (value) => fixBytes(encoder.encode(value), fixedBytes) | ||
}; | ||
} | ||
function fixDecoder(decoder, fixedBytes, description) { | ||
return { | ||
...fixCodecHelper(decoder, fixedBytes, description), | ||
decode: (bytes, offset = 0) => { | ||
function fixDecoder(decoder, fixedBytes) { | ||
return createDecoder({ | ||
fixedSize: fixedBytes, | ||
read: (bytes, offset) => { | ||
assertByteArrayHasEnoughBytesForCodec("fixCodec", fixedBytes, bytes, offset); | ||
@@ -96,10 +101,25 @@ if (offset > 0 || bytes.length > fixedBytes) { | ||
} | ||
if (decoder.fixedSize !== null) { | ||
if (isFixedSize(decoder)) { | ||
bytes = fixBytes(bytes, decoder.fixedSize); | ||
} | ||
const [value] = decoder.decode(bytes, 0); | ||
const [value] = decoder.read(bytes, 0); | ||
return [value, offset + fixedBytes]; | ||
} | ||
}; | ||
}); | ||
} | ||
function mapEncoder(encoder, unmap) { | ||
return createEncoder({ | ||
...isVariableSize(encoder) ? { ...encoder, getSizeFromValue: (value) => encoder.getSizeFromValue(unmap(value)) } : encoder, | ||
write: (value, bytes, offset) => encoder.write(unmap(value), bytes, offset) | ||
}); | ||
} | ||
function mapDecoder(decoder, map) { | ||
return createDecoder({ | ||
...decoder, | ||
read: (bytes, offset) => { | ||
const [value, newOffset] = decoder.read(bytes, offset); | ||
return [map(value, bytes, offset), newOffset]; | ||
} | ||
}); | ||
} | ||
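// Usage sketch (illustrative only; `example*` names are placeholders): `mapEncoder`/`mapDecoder`
// transform values without changing the byte layout, and `fixEncoder`/`fixDecoder` force a fixed byte length.
const exampleLengthEncoder = mapEncoder(getU32Encoder(), (text) => text.length);
exampleLengthEncoder.encode('hello'); // => Uint8Array [5, 0, 0, 0]
const exampleFixedU32 = fixDecoder(getU32Decoder(), 4);
exampleFixedU32.decode(new Uint8Array([9, 0, 0, 0])); // => 9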
@@ -114,21 +134,9 @@ // ../codecs-numbers/dist/index.browser.js | ||
} | ||
function sharedNumberFactory(input) { | ||
let littleEndian; | ||
let defaultDescription = input.name; | ||
if (input.size > 1) { | ||
littleEndian = !("endian" in input.options) || input.options.endian === 0; | ||
defaultDescription += littleEndian ? "(le)" : "(be)"; | ||
} | ||
return { | ||
description: input.options.description ?? defaultDescription, | ||
fixedSize: input.size, | ||
littleEndian, | ||
maxSize: input.size | ||
}; | ||
function isLittleEndian(config) { | ||
return (config == null ? void 0 : config.endian) === 1 ? false : true; | ||
} | ||
function numberEncoderFactory(input) { | ||
const codecData = sharedNumberFactory(input); | ||
return { | ||
description: codecData.description, | ||
encode(value) { | ||
return createEncoder({ | ||
fixedSize: input.size, | ||
write(value, bytes, offset) { | ||
if (input.range) { | ||
@@ -138,31 +146,27 @@ assertNumberIsBetweenForCodec(input.name, input.range[0], input.range[1], value); | ||
const arrayBuffer = new ArrayBuffer(input.size); | ||
input.set(new DataView(arrayBuffer), value, codecData.littleEndian); | ||
return new Uint8Array(arrayBuffer); | ||
}, | ||
fixedSize: codecData.fixedSize, | ||
maxSize: codecData.maxSize | ||
}; | ||
input.set(new DataView(arrayBuffer), value, isLittleEndian(input.config)); | ||
bytes.set(new Uint8Array(arrayBuffer), offset); | ||
return offset + input.size; | ||
} | ||
}); | ||
} | ||
function numberDecoderFactory(input) { | ||
const codecData = sharedNumberFactory(input); | ||
return { | ||
decode(bytes, offset = 0) { | ||
assertByteArrayIsNotEmptyForCodec(codecData.description, bytes, offset); | ||
assertByteArrayHasEnoughBytesForCodec(codecData.description, input.size, bytes, offset); | ||
return createDecoder({ | ||
fixedSize: input.size, | ||
read(bytes, offset = 0) { | ||
assertByteArrayIsNotEmptyForCodec(input.name, bytes, offset); | ||
assertByteArrayHasEnoughBytesForCodec(input.name, input.size, bytes, offset); | ||
const view = new DataView(toArrayBuffer(bytes, offset, input.size)); | ||
return [input.get(view, codecData.littleEndian), offset + input.size]; | ||
}, | ||
description: codecData.description, | ||
fixedSize: codecData.fixedSize, | ||
maxSize: codecData.maxSize | ||
}; | ||
return [input.get(view, isLittleEndian(input.config)), offset + input.size]; | ||
} | ||
}); | ||
} | ||
function toArrayBuffer(bytes, offset, length) { | ||
const bytesOffset = bytes.byteOffset + (offset ?? 0); | ||
const bytesLength = length ?? bytes.byteLength; | ||
const bytesOffset = bytes.byteOffset + (offset != null ? offset : 0); | ||
const bytesLength = length != null ? length : bytes.byteLength; | ||
return bytes.buffer.slice(bytesOffset, bytesOffset + bytesLength); | ||
} | ||
var getU32Encoder = (options = {}) => numberEncoderFactory({ | ||
var getU32Encoder = (config = {}) => numberEncoderFactory({ | ||
config, | ||
name: "u32", | ||
options, | ||
range: [0, Number("0xffffffff")], | ||
@@ -172,11 +176,10 @@ set: (view, value, le) => view.setUint32(0, value, le), | ||
}); | ||
var getU32Decoder = (options = {}) => numberDecoderFactory({ | ||
var getU32Decoder = (config = {}) => numberDecoderFactory({ | ||
config, | ||
get: (view, le) => view.getUint32(0, le), | ||
name: "u32", | ||
options, | ||
size: 4 | ||
}); | ||
var getU8Encoder = (options = {}) => numberEncoderFactory({ | ||
var getU8Encoder = () => numberEncoderFactory({ | ||
name: "u8", | ||
options, | ||
range: [0, Number("0xff")], | ||
@@ -186,9 +189,15 @@ set: (view, value) => view.setUint8(0, value), | ||
}); | ||
var getU8Decoder = (options = {}) => numberDecoderFactory({ | ||
var getU8Decoder = () => numberDecoderFactory({ | ||
get: (view) => view.getUint8(0), | ||
name: "u8", | ||
options, | ||
size: 1 | ||
}); | ||
// src/assertions.ts | ||
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) { | ||
if (expected !== actual) { | ||
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`); | ||
} | ||
} | ||
// src/utils.ts | ||
@@ -204,5 +213,72 @@ function maxCodecSizes(sizes) { | ||
} | ||
function getFixedSize(codec) { | ||
return isFixedSize(codec) ? codec.fixedSize : null; | ||
} | ||
function getMaxSize(codec) { | ||
var _a; | ||
return isFixedSize(codec) ? codec.fixedSize : (_a = codec.maxSize) != null ? _a : null; | ||
} | ||
// src/array-like-codec-size.ts | ||
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) { | ||
// src/array.ts | ||
function getArrayEncoder(item, config = {}) { | ||
var _a, _b; | ||
const size = (_a = config.size) != null ? _a : getU32Encoder(); | ||
if (size === "remainder") { | ||
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item)); | ||
const maxSize = (_b = computeArrayLikeCodecSize(size, getMaxSize(item))) != null ? _b : void 0; | ||
return createEncoder({ | ||
...fixedSize !== null ? { fixedSize } : { | ||
getSizeFromValue: (array) => { | ||
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0; | ||
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0); | ||
}, | ||
maxSize | ||
}, | ||
write: (array, bytes, offset) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("array", size, array.length); | ||
} | ||
if (typeof size === "object") { | ||
offset = size.write(array.length, bytes, offset); | ||
} | ||
array.forEach((value) => { | ||
offset = item.write(value, bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}); | ||
} | ||
function getArrayDecoder(item, config = {}) { | ||
var _a, _b; | ||
const size = (_a = config.size) != null ? _a : getU32Decoder(); | ||
if (size === "remainder") { | ||
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const itemSize = getFixedSize(item); | ||
const fixedSize = computeArrayLikeCodecSize(size, itemSize); | ||
const maxSize = (_b = computeArrayLikeCodecSize(size, getMaxSize(item))) != null ? _b : void 0; | ||
return createDecoder({ | ||
...fixedSize !== null ? { fixedSize } : { maxSize }, | ||
read: (bytes, offset) => { | ||
const array = []; | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [array, offset]; | ||
} | ||
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.read(bytes, offset); | ||
offset = newOffset2; | ||
array.push(value); | ||
} | ||
return [array, offset]; | ||
} | ||
}); | ||
} | ||
function getArrayCodec(item, config = {}) { | ||
return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config)); | ||
} | ||
function readArrayLikeCodecSize(size, itemSize, bytes, offset) { | ||
if (typeof size === "number") { | ||
@@ -212,23 +288,19 @@ return [size, offset]; | ||
if (typeof size === "object") { | ||
return size.decode(bytes, offset); | ||
return size.read(bytes, offset); | ||
} | ||
if (size === "remainder") { | ||
const childrenSize = sumCodecSizes(childrenSizes); | ||
if (childrenSize === null) { | ||
if (itemSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const remainder = bytes.slice(offset).length; | ||
if (remainder % childrenSize !== 0) { | ||
const remainder = Math.max(0, bytes.length - offset); | ||
if (remainder % itemSize !== 0) { | ||
throw new Error( | ||
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.` | ||
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.` | ||
); | ||
} | ||
return [remainder / childrenSize, offset]; | ||
return [remainder / itemSize, offset]; | ||
} | ||
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`); | ||
} | ||
function getArrayLikeCodecSizeDescription(size) { | ||
return typeof size === "object" ? size.description : `${size}`; | ||
} | ||
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) { | ||
function computeArrayLikeCodecSize(size, itemSize) { | ||
if (typeof size !== "number") | ||
@@ -238,96 +310,39 @@ return null; | ||
return 0; | ||
const childrenSize = sumCodecSizes(childrenSizes); | ||
return childrenSize === null ? null : childrenSize * size; | ||
return itemSize === null ? null : itemSize * size; | ||
} | ||
function getArrayLikeCodecSizePrefix(size, realSize) { | ||
return typeof size === "object" ? size.encode(realSize) : new Uint8Array(); | ||
} | ||
// src/assertions.ts | ||
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) { | ||
if (expected !== actual) { | ||
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`); | ||
} | ||
} | ||
// src/array.ts | ||
function arrayCodecHelper(item, size, description) { | ||
if (size === "remainder" && item.fixedSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
return { | ||
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]) | ||
}; | ||
} | ||
function getArrayEncoder(item, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...arrayCodecHelper(item, size, options.description), | ||
encode: (value) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("array", size, value.length); | ||
} | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]); | ||
} | ||
}; | ||
} | ||
function getArrayDecoder(item, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...arrayCodecHelper(item, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [[], offset]; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset); | ||
offset = newOffset; | ||
const values = []; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.decode(bytes, offset); | ||
values.push(value); | ||
offset = newOffset2; | ||
} | ||
return [values, offset]; | ||
} | ||
}; | ||
} | ||
function getArrayCodec(item, options = {}) { | ||
return combineCodec(getArrayEncoder(item, options), getArrayDecoder(item, options)); | ||
} | ||
// src/bit-array.ts | ||
var getBitArrayEncoder = (size, options = {}) => { | ||
const parsedOptions = typeof options === "boolean" ? { backward: options } : options; | ||
const backward = parsedOptions.backward ?? false; | ||
const backwardSuffix = backward ? "; backward" : ""; | ||
return { | ||
description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`, | ||
encode(value) { | ||
const bytes = []; | ||
function getBitArrayEncoder(size, config = {}) { | ||
var _a; | ||
const parsedConfig = typeof config === "boolean" ? { backward: config } : config; | ||
const backward = (_a = parsedConfig.backward) != null ? _a : false; | ||
return createEncoder({ | ||
fixedSize: size, | ||
write(value, bytes, offset) { | ||
var _a2; | ||
const bytesToAdd = []; | ||
for (let i = 0; i < size; i += 1) { | ||
let byte = 0; | ||
for (let j = 0; j < 8; j += 1) { | ||
const feature = Number(value[i * 8 + j] ?? 0); | ||
const feature = Number((_a2 = value[i * 8 + j]) != null ? _a2 : 0); | ||
byte |= feature << (backward ? j : 7 - j); | ||
} | ||
if (backward) { | ||
bytes.unshift(byte); | ||
bytesToAdd.unshift(byte); | ||
} else { | ||
bytes.push(byte); | ||
bytesToAdd.push(byte); | ||
} | ||
} | ||
return new Uint8Array(bytes); | ||
}, | ||
bytes.set(bytesToAdd, offset); | ||
return size; | ||
} | ||
}); | ||
} | ||
function getBitArrayDecoder(size, config = {}) { | ||
var _a; | ||
const parsedConfig = typeof config === "boolean" ? { backward: config } : config; | ||
const backward = (_a = parsedConfig.backward) != null ? _a : false; | ||
return createDecoder({ | ||
fixedSize: size, | ||
maxSize: size | ||
}; | ||
}; | ||
var getBitArrayDecoder = (size, options = {}) => { | ||
const parsedOptions = typeof options === "boolean" ? { backward: options } : options; | ||
const backward = parsedOptions.backward ?? false; | ||
const backwardSuffix = backward ? "; backward" : ""; | ||
return { | ||
decode(bytes, offset = 0) { | ||
read(bytes, offset) { | ||
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset); | ||
@@ -349,50 +364,37 @@ const booleans = []; | ||
return [booleans, offset + size]; | ||
}, | ||
description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`, | ||
fixedSize: size, | ||
maxSize: size | ||
}; | ||
}; | ||
var getBitArrayCodec = (size, options = {}) => combineCodec(getBitArrayEncoder(size, options), getBitArrayDecoder(size, options)); | ||
} | ||
}); | ||
} | ||
function getBitArrayCodec(size, config = {}) { | ||
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config)); | ||
} | ||
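// Editorial sketch: the bit-array codec packs booleans into `size` bytes, most
// significant bit first unless `backward` is set (assuming the encode() helper).
// getBitArrayCodec(1).encode([true, false, true]); // -> Uint8Array [0b10100000] = [160]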
// src/boolean.ts | ||
function getBooleanEncoder(options = {}) { | ||
const size = options.size ?? getU8Encoder(); | ||
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size."); | ||
return { | ||
description: options.description ?? `bool(${size.description})`, | ||
encode: (value) => size.encode(value ? 1 : 0), | ||
fixedSize: size.fixedSize, | ||
maxSize: size.fixedSize | ||
}; | ||
function getBooleanEncoder(config = {}) { | ||
var _a; | ||
const size = (_a = config.size) != null ? _a : getU8Encoder(); | ||
assertIsFixedSize(size, "Codec [bool] requires a fixed size."); | ||
return mapEncoder(size, (value) => value ? 1 : 0); | ||
} | ||
function getBooleanDecoder(options = {}) { | ||
const size = options.size ?? getU8Decoder(); | ||
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size."); | ||
return { | ||
decode: (bytes, offset = 0) => { | ||
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset); | ||
const [value, vOffset] = size.decode(bytes, offset); | ||
return [value === 1, vOffset]; | ||
}, | ||
description: options.description ?? `bool(${size.description})`, | ||
fixedSize: size.fixedSize, | ||
maxSize: size.fixedSize | ||
}; | ||
function getBooleanDecoder(config = {}) { | ||
var _a; | ||
const size = (_a = config.size) != null ? _a : getU8Decoder(); | ||
assertIsFixedSize(size, "Codec [bool] requires a fixed size."); | ||
return mapDecoder(size, (value) => Number(value) === 1); | ||
} | ||
function getBooleanCodec(options = {}) { | ||
return combineCodec(getBooleanEncoder(options), getBooleanDecoder(options)); | ||
function getBooleanCodec(config = {}) { | ||
return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config)); | ||
} | ||
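// Editorial sketch: booleans now simply map over the size codec (u8 by default).
// getBooleanCodec().encode(true);                  // -> Uint8Array [1]
// getBooleanCodec().read(new Uint8Array([0]), 0);  // -> [false, 1]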
// src/bytes.ts | ||
function getBytesEncoder(options = {}) { | ||
const size = options.size ?? "variable"; | ||
const sizeDescription = typeof size === "object" ? size.description : `${size}`; | ||
const description = options.description ?? `bytes(${sizeDescription})`; | ||
const byteEncoder = { | ||
description, | ||
encode: (value) => value, | ||
fixedSize: null, | ||
maxSize: null | ||
}; | ||
function getBytesEncoder(config = {}) { | ||
var _a; | ||
const size = (_a = config.size) != null ? _a : "variable"; | ||
const byteEncoder = createEncoder({ | ||
getSizeFromValue: (value) => value.length, | ||
write: (value, bytes, offset) => { | ||
bytes.set(value, offset); | ||
return offset + value.length; | ||
} | ||
}); | ||
if (size === "variable") { | ||
@@ -402,26 +404,21 @@ return byteEncoder; | ||
if (typeof size === "number") { | ||
return fixEncoder(byteEncoder, size, description); | ||
return fixEncoder(byteEncoder, size); | ||
} | ||
return { | ||
...byteEncoder, | ||
encode: (value) => { | ||
const contentBytes = byteEncoder.encode(value); | ||
const lengthBytes = size.encode(contentBytes.length); | ||
return mergeBytes([lengthBytes, contentBytes]); | ||
return createEncoder({ | ||
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length, | ||
write: (value, bytes, offset) => { | ||
offset = size.write(value.length, bytes, offset); | ||
return byteEncoder.write(value, bytes, offset); | ||
} | ||
}; | ||
}); | ||
} | ||
function getBytesDecoder(options = {}) { | ||
const size = options.size ?? "variable"; | ||
const sizeDescription = typeof size === "object" ? size.description : `${size}`; | ||
const description = options.description ?? `bytes(${sizeDescription})`; | ||
const byteDecoder = { | ||
decode: (bytes, offset = 0) => { | ||
function getBytesDecoder(config = {}) { | ||
var _a; | ||
const size = (_a = config.size) != null ? _a : "variable"; | ||
const byteDecoder = createDecoder({ | ||
read: (bytes, offset) => { | ||
const slice = bytes.slice(offset); | ||
return [slice, offset + slice.length]; | ||
}, | ||
description, | ||
fixedSize: null, | ||
maxSize: null | ||
}; | ||
} | ||
}); | ||
if (size === "variable") { | ||
@@ -431,9 +428,8 @@ return byteDecoder; | ||
if (typeof size === "number") { | ||
return fixDecoder(byteDecoder, size, description); | ||
return fixDecoder(byteDecoder, size); | ||
} | ||
return { | ||
...byteDecoder, | ||
decode: (bytes, offset = 0) => { | ||
return createDecoder({ | ||
read: (bytes, offset) => { | ||
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset); | ||
const [lengthBigInt, lengthOffset] = size.decode(bytes, offset); | ||
const [lengthBigInt, lengthOffset] = size.read(bytes, offset); | ||
const length = Number(lengthBigInt); | ||
@@ -443,51 +439,46 @@ offset = lengthOffset; | ||
assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes); | ||
const [value, contentOffset] = byteDecoder.decode(contentBytes); | ||
const [value, contentOffset] = byteDecoder.read(contentBytes, 0); | ||
offset += contentOffset; | ||
return [value, offset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getBytesCodec(options = {}) { | ||
return combineCodec(getBytesEncoder(options), getBytesDecoder(options)); | ||
function getBytesCodec(config = {}) { | ||
return combineCodec(getBytesEncoder(config), getBytesDecoder(config)); | ||
} | ||
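// Editorial sketch: the three size modes of the bytes codec (getU32Codec is an
// assumed helper from '@solana/codecs-numbers').
// getBytesCodec().encode(new Uint8Array([1, 2]));                        // variable: raw bytes as-is
// getBytesCodec({ size: 4 }).encode(new Uint8Array([1, 2]));             // fixed: padded to 4 bytes
// getBytesCodec({ size: getU32Codec() }).encode(new Uint8Array([1, 2])); // u32 length prefix + bytes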
// src/data-enum.ts | ||
function dataEnumCodecHelper(variants, prefix, description) { | ||
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", "); | ||
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize); | ||
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null; | ||
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize)); | ||
return { | ||
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`, | ||
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]), | ||
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize]) | ||
}; | ||
} | ||
function getDataEnumEncoder(variants, options = {}) { | ||
const prefix = options.size ?? getU8Encoder(); | ||
return { | ||
...dataEnumCodecHelper(variants, prefix, options.description), | ||
encode: (variant) => { | ||
const discriminator = variants.findIndex(([key]) => variant.__kind === key); | ||
if (discriminator < 0) { | ||
throw new Error( | ||
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".` | ||
); | ||
} | ||
const variantPrefix = prefix.encode(discriminator); | ||
const variantSerializer = variants[discriminator][1]; | ||
const variantBytes = variantSerializer.encode(variant); | ||
return mergeBytes([variantPrefix, variantBytes]); | ||
function getDataEnumEncoder(variants, config = {}) { | ||
var _a; | ||
const prefix = (_a = config.size) != null ? _a : getU8Encoder(); | ||
const fixedSize = getDataEnumFixedSize(variants, prefix); | ||
return createEncoder({ | ||
...fixedSize !== null ? { fixedSize } : { | ||
getSizeFromValue: (variant) => { | ||
const discriminator = getVariantDiscriminator(variants, variant); | ||
const variantEncoder = variants[discriminator][1]; | ||
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder); | ||
}, | ||
maxSize: getDataEnumMaxSize(variants, prefix) | ||
}, | ||
write: (variant, bytes, offset) => { | ||
const discriminator = getVariantDiscriminator(variants, variant); | ||
offset = prefix.write(discriminator, bytes, offset); | ||
const variantEncoder = variants[discriminator][1]; | ||
return variantEncoder.write(variant, bytes, offset); | ||
} | ||
}; | ||
}); | ||
} | ||
function getDataEnumDecoder(variants, options = {}) { | ||
const prefix = options.size ?? getU8Decoder(); | ||
return { | ||
...dataEnumCodecHelper(variants, prefix, options.description), | ||
decode: (bytes, offset = 0) => { | ||
function getDataEnumDecoder(variants, config = {}) { | ||
var _a; | ||
const prefix = (_a = config.size) != null ? _a : getU8Decoder(); | ||
const fixedSize = getDataEnumFixedSize(variants, prefix); | ||
return createDecoder({ | ||
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) }, | ||
read: (bytes, offset) => { | ||
var _a2; | ||
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset); | ||
const [discriminator, dOffset] = prefix.decode(bytes, offset); | ||
const [discriminator, dOffset] = prefix.read(bytes, offset); | ||
offset = dOffset; | ||
const variantField = variants[Number(discriminator)] ?? null; | ||
const variantField = (_a2 = variants[Number(discriminator)]) != null ? _a2 : null; | ||
if (!variantField) { | ||
@@ -498,127 +489,205 @@ throw new Error( | ||
} | ||
const [variant, vOffset] = variantField[1].decode(bytes, offset); | ||
const [variant, vOffset] = variantField[1].read(bytes, offset); | ||
offset = vOffset; | ||
return [{ __kind: variantField[0], ...variant ?? {} }, offset]; | ||
return [{ __kind: variantField[0], ...variant != null ? variant : {} }, offset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getDataEnumCodec(variants, options = {}) { | ||
return combineCodec(getDataEnumEncoder(variants, options), getDataEnumDecoder(variants, options)); | ||
function getDataEnumCodec(variants, config = {}) { | ||
return combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config)); | ||
} | ||
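// Editorial sketch: a data enum is a discriminator prefix (u8 by default) followed by
// the selected variant's data. getUnitCodec/getStructCodec are defined later in this
// file; getU8Codec is an assumed helper from '@solana/codecs-numbers'.
// const event = getDataEnumCodec([
//   ['Quit', getUnitCodec()],
//   ['Move', getStructCodec([['x', getU8Codec()], ['y', getU8Codec()]])],
// ]);
// event.encode({ __kind: 'Move', x: 1, y: 2 }); // -> Uint8Array [1, 1, 2]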
// src/map.ts | ||
function mapCodecHelper(key, value, size, description) { | ||
if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
function getDataEnumFixedSize(variants, prefix) { | ||
if (variants.length === 0) | ||
return isFixedSize(prefix) ? prefix.fixedSize : null; | ||
if (!isFixedSize(variants[0][1])) | ||
return null; | ||
const variantSize = variants[0][1].fixedSize; | ||
const sameSizedVariants = variants.every( | ||
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize | ||
); | ||
if (!sameSizedVariants) | ||
return null; | ||
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null; | ||
} | ||
function getDataEnumMaxSize(variants, prefix) { | ||
var _a; | ||
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec))); | ||
return (_a = sumCodecSizes([getMaxSize(prefix), maxVariantSize])) != null ? _a : void 0; | ||
} | ||
function getVariantDiscriminator(variants, variant) { | ||
const discriminator = variants.findIndex(([key]) => variant.__kind === key); | ||
if (discriminator < 0) { | ||
throw new Error( | ||
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".` | ||
); | ||
} | ||
return { | ||
description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize]) | ||
}; | ||
return discriminator; | ||
} | ||
function getMapEncoder(key, value, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...mapCodecHelper(key, value, size, options.description), | ||
encode: (map) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("map", size, map.size); | ||
} | ||
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)])); | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]); | ||
// src/tuple.ts | ||
function getTupleEncoder(items) { | ||
var _a; | ||
const fixedSize = sumCodecSizes(items.map(getFixedSize)); | ||
const maxSize = (_a = sumCodecSizes(items.map(getMaxSize))) != null ? _a : void 0; | ||
return createEncoder({ | ||
...fixedSize === null ? { | ||
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0), | ||
maxSize | ||
} : { fixedSize }, | ||
write: (value, bytes, offset) => { | ||
assertValidNumberOfItemsForCodec("tuple", items.length, value.length); | ||
items.forEach((item, index) => { | ||
offset = item.write(value[index], bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getMapDecoder(key, value, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...mapCodecHelper(key, value, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const map = /* @__PURE__ */ new Map(); | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [map, offset]; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize( | ||
size, | ||
[key.fixedSize, value.fixedSize], | ||
bytes, | ||
offset | ||
); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [decodedKey, kOffset] = key.decode(bytes, offset); | ||
offset = kOffset; | ||
const [decodedValue, vOffset] = value.decode(bytes, offset); | ||
offset = vOffset; | ||
map.set(decodedKey, decodedValue); | ||
} | ||
return [map, offset]; | ||
function getTupleDecoder(items) { | ||
var _a; | ||
const fixedSize = sumCodecSizes(items.map(getFixedSize)); | ||
const maxSize = (_a = sumCodecSizes(items.map(getMaxSize))) != null ? _a : void 0; | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
const values = []; | ||
items.forEach((item) => { | ||
const [newValue, newOffset] = item.read(bytes, offset); | ||
values.push(newValue); | ||
offset = newOffset; | ||
}); | ||
return [values, offset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getMapCodec(key, value, options = {}) { | ||
return combineCodec(getMapEncoder(key, value, options), getMapDecoder(key, value, options)); | ||
function getTupleCodec(items) { | ||
return combineCodec( | ||
getTupleEncoder(items), | ||
getTupleDecoder(items) | ||
); | ||
} | ||
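// Editorial sketch: tuples write their items back to back and assert the item count
// (getU8Codec/getU32Codec assumed from '@solana/codecs-numbers').
// getTupleCodec([getU8Codec(), getU32Codec()]).encode([255, 1]); // -> [255, 1, 0, 0, 0]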
// src/map.ts | ||
function getMapEncoder(key, value, config = {}) { | ||
return mapEncoder( | ||
getArrayEncoder(getTupleEncoder([key, value]), config), | ||
(map) => [...map.entries()] | ||
); | ||
} | ||
function getMapDecoder(key, value, config = {}) { | ||
return mapDecoder( | ||
getArrayDecoder(getTupleDecoder([key, value]), config), | ||
(entries) => new Map(entries) | ||
); | ||
} | ||
function getMapCodec(key, value, config = {}) { | ||
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config)); | ||
} | ||
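// Editorial sketch: maps are now encoded as an array of [key, value] tuples, so they
// inherit every array size strategy (u32 prefix, fixed count, 'remainder').
// const balances = getMapCodec(getU8Codec(), getU8Codec(), { size: 'remainder' });
// balances.encode(new Map([[1, 10], [2, 20]])); // -> Uint8Array [1, 10, 2, 20]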
// src/nullable.ts | ||
function nullableCodecHelper(item, prefix, fixed, description) { | ||
let descriptionSuffix = `; ${prefix.description}`; | ||
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null; | ||
if (fixed) { | ||
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
descriptionSuffix += "; fixed"; | ||
fixedSize = prefix.fixedSize + item.fixedSize; | ||
function getNullableEncoder(item, config = {}) { | ||
var _a, _b, _c; | ||
const prefix = (_a = config.prefix) != null ? _a : getU8Encoder(); | ||
const fixed = (_b = config.fixed) != null ? _b : false; | ||
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0; | ||
if (fixed || isZeroSizeItem) { | ||
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
const fixedSize = prefix.fixedSize + item.fixedSize; | ||
return createEncoder({ | ||
fixedSize, | ||
write: (option, bytes, offset) => { | ||
const prefixOffset = prefix.write(Number(option !== null), bytes, offset); | ||
if (option !== null) { | ||
item.write(option, bytes, prefixOffset); | ||
} | ||
return offset + fixedSize; | ||
} | ||
}); | ||
} | ||
return { | ||
description: description ?? `nullable(${item.description + descriptionSuffix})`, | ||
fixedSize, | ||
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize]) | ||
}; | ||
} | ||
function getNullableEncoder(item, options = {}) { | ||
const prefix = options.prefix ?? getU8Encoder(); | ||
const fixed = options.fixed ?? false; | ||
return { | ||
...nullableCodecHelper(item, prefix, fixed, options.description), | ||
encode: (option) => { | ||
const prefixByte = prefix.encode(Number(option !== null)); | ||
let itemBytes = option !== null ? item.encode(option) : new Uint8Array(); | ||
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes; | ||
return mergeBytes([prefixByte, itemBytes]); | ||
return createEncoder({ | ||
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0), | ||
maxSize: (_c = sumCodecSizes([prefix, item].map(getMaxSize))) != null ? _c : void 0, | ||
write: (option, bytes, offset) => { | ||
offset = prefix.write(Number(option !== null), bytes, offset); | ||
if (option !== null) { | ||
offset = item.write(option, bytes, offset); | ||
} | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getNullableDecoder(item, options = {}) { | ||
const prefix = options.prefix ?? getU8Decoder(); | ||
const fixed = options.fixed ?? false; | ||
return { | ||
...nullableCodecHelper(item, prefix, fixed, options.description), | ||
decode: (bytes, offset = 0) => { | ||
function getNullableDecoder(item, config = {}) { | ||
var _a, _b, _c; | ||
const prefix = (_a = config.prefix) != null ? _a : getU8Decoder(); | ||
const fixed = (_b = config.fixed) != null ? _b : false; | ||
let fixedSize = null; | ||
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0; | ||
if (fixed || isZeroSizeItem) { | ||
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
fixedSize = prefix.fixedSize + item.fixedSize; | ||
} | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize: (_c = sumCodecSizes([prefix, item].map(getMaxSize))) != null ? _c : void 0 } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
if (bytes.length - offset <= 0) { | ||
return [null, offset]; | ||
} | ||
const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0); | ||
const [isSome, prefixOffset] = prefix.decode(bytes, offset); | ||
offset = prefixOffset; | ||
const [isSome, prefixOffset] = prefix.read(bytes, offset); | ||
if (isSome === 0) { | ||
return [null, fixed ? fixedOffset : offset]; | ||
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset]; | ||
} | ||
const [value, newOffset] = item.decode(bytes, offset); | ||
offset = newOffset; | ||
return [value, fixed ? fixedOffset : offset]; | ||
const [value, newOffset] = item.read(bytes, prefixOffset); | ||
return [value, fixedSize !== null ? offset + fixedSize : newOffset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getNullableCodec(item, options = {}) { | ||
return combineCodec(getNullableEncoder(item, options), getNullableDecoder(item, options)); | ||
function getNullableCodec(item, config = {}) { | ||
const configCast = config; | ||
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast)); | ||
} | ||
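// Editorial sketch: nullables prepend a presence byte; with `fixed: true` the item slot
// is always reserved so the total size stays constant (both codecs must be fixed-size).
// const maybeU8 = getNullableCodec(getU8Codec(), { fixed: true });
// maybeU8.encode(42);   // -> Uint8Array [1, 42]
// maybeU8.encode(null); // -> Uint8Array [0, 0]  (item slot left zero-filled)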
// src/scalar-enum.ts | ||
function scalarEnumCoderHelper(constructor, prefix, description) { | ||
function getScalarEnumEncoder(constructor, config = {}) { | ||
var _a; | ||
const prefix = (_a = config.size) != null ? _a : getU8Encoder(); | ||
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor); | ||
return mapEncoder(prefix, (value) => { | ||
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange); | ||
const isInvalidString = typeof value === "string" && !stringValues.includes(value); | ||
if (isInvalidNumber || isInvalidString) { | ||
throw new Error( | ||
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".` | ||
); | ||
} | ||
if (typeof value === "number") | ||
return value; | ||
const valueIndex = enumValues.indexOf(value); | ||
if (valueIndex >= 0) | ||
return valueIndex; | ||
return enumKeys.indexOf(value); | ||
}); | ||
} | ||
function getScalarEnumDecoder(constructor, config = {}) { | ||
var _a; | ||
const prefix = (_a = config.size) != null ? _a : getU8Decoder(); | ||
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor); | ||
return mapDecoder(prefix, (value) => { | ||
const valueAsNumber = Number(value); | ||
if (valueAsNumber < minRange || valueAsNumber > maxRange) { | ||
throw new Error( | ||
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.` | ||
); | ||
} | ||
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber]; | ||
}); | ||
} | ||
function getScalarEnumCodec(constructor, config = {}) { | ||
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config)); | ||
} | ||
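// Editorial sketch: scalar enums accept a variant key, its value, or its numeric index
// and decode back to the value (or to the index for numeric enums).
// enum Direction { Up, Down }  // TypeScript-style numeric enum
// getScalarEnumCodec(Direction).encode(Direction.Down);        // -> Uint8Array [1]
// getScalarEnumCodec(Direction).read(new Uint8Array([0]), 0);  // -> [0 /* Direction.Up */, 1]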
function getScalarEnumStats(constructor) { | ||
const enumKeys = Object.keys(constructor); | ||
const enumValues = Object.values(constructor); | ||
const isNumericEnum = enumValues.some((v) => typeof v === "number"); | ||
const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", "); | ||
const minRange = 0; | ||
@@ -628,9 +697,6 @@ const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1; | ||
return { | ||
description: description ?? `enum(${valueDescriptions}; ${prefix.description})`, | ||
enumKeys, | ||
enumValues, | ||
fixedSize: prefix.fixedSize, | ||
isNumericEnum, | ||
maxRange, | ||
maxSize: prefix.maxSize, | ||
minRange, | ||
@@ -640,128 +706,44 @@ stringValues | ||
} | ||
function getScalarEnumEncoder(constructor, options = {}) { | ||
const prefix = options.size ?? getU8Encoder(); | ||
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, options.description); | ||
return { | ||
description, | ||
encode: (value) => { | ||
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange); | ||
const isInvalidString = typeof value === "string" && !stringValues.includes(value); | ||
if (isInvalidNumber || isInvalidString) { | ||
throw new Error( | ||
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".` | ||
); | ||
} | ||
if (typeof value === "number") | ||
return prefix.encode(value); | ||
const valueIndex = enumValues.indexOf(value); | ||
if (valueIndex >= 0) | ||
return prefix.encode(valueIndex); | ||
return prefix.encode(enumKeys.indexOf(value)); | ||
}, | ||
fixedSize, | ||
maxSize | ||
}; | ||
} | ||
function getScalarEnumDecoder(constructor, options = {}) { | ||
const prefix = options.size ?? getU8Decoder(); | ||
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper( | ||
constructor, | ||
prefix, | ||
options.description | ||
); | ||
return { | ||
decode: (bytes, offset = 0) => { | ||
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset); | ||
const [value, newOffset] = prefix.decode(bytes, offset); | ||
const valueAsNumber = Number(value); | ||
offset = newOffset; | ||
if (valueAsNumber < minRange || valueAsNumber > maxRange) { | ||
throw new Error( | ||
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.` | ||
); | ||
} | ||
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset]; | ||
}, | ||
description, | ||
fixedSize, | ||
maxSize | ||
}; | ||
} | ||
function getScalarEnumCodec(constructor, options = {}) { | ||
return combineCodec(getScalarEnumEncoder(constructor, options), getScalarEnumDecoder(constructor, options)); | ||
} | ||
// src/set.ts | ||
function setCodecHelper(item, size, description) { | ||
if (size === "remainder" && item.fixedSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
return { | ||
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]) | ||
}; | ||
function getSetEncoder(item, config = {}) { | ||
return mapEncoder(getArrayEncoder(item, config), (set) => [...set]); | ||
} | ||
function getSetEncoder(item, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...setCodecHelper(item, size, options.description), | ||
encode: (set) => { | ||
if (typeof size === "number" && set.size !== size) { | ||
assertValidNumberOfItemsForCodec("set", size, set.size); | ||
} | ||
const itemBytes = Array.from(set, (value) => item.encode(value)); | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]); | ||
} | ||
}; | ||
function getSetDecoder(item, config = {}) { | ||
return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries)); | ||
} | ||
function getSetDecoder(item, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...setCodecHelper(item, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const set = /* @__PURE__ */ new Set(); | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [set, offset]; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.decode(bytes, offset); | ||
offset = newOffset2; | ||
set.add(value); | ||
} | ||
return [set, offset]; | ||
} | ||
}; | ||
function getSetCodec(item, config = {}) { | ||
return combineCodec(getSetEncoder(item, config), getSetDecoder(item, config)); | ||
} | ||
function getSetCodec(item, options = {}) { | ||
return combineCodec(getSetEncoder(item, options), getSetDecoder(item, options)); | ||
} | ||
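// Editorial sketch: sets now reuse the array codec and simply convert to and from Set.
// getSetCodec(getU8Codec()).encode(new Set([1, 2, 3])); // -> [3, 0, 0, 0, 1, 2, 3]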
// src/struct.ts | ||
function structCodecHelper(fields, description) { | ||
const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", "); | ||
return { | ||
description: description ?? `struct(${fieldDescriptions})`, | ||
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)), | ||
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize)) | ||
}; | ||
} | ||
function getStructEncoder(fields, options = {}) { | ||
return { | ||
...structCodecHelper(fields, options.description), | ||
encode: (struct) => { | ||
const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key])); | ||
return mergeBytes(fieldBytes); | ||
function getStructEncoder(fields) { | ||
var _a; | ||
const fieldCodecs = fields.map(([, codec]) => codec); | ||
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize)); | ||
const maxSize = (_a = sumCodecSizes(fieldCodecs.map(getMaxSize))) != null ? _a : void 0; | ||
return createEncoder({ | ||
...fixedSize === null ? { | ||
getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0), | ||
maxSize | ||
} : { fixedSize }, | ||
write: (struct, bytes, offset) => { | ||
fields.forEach(([key, codec]) => { | ||
offset = codec.write(struct[key], bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getStructDecoder(fields, options = {}) { | ||
return { | ||
...structCodecHelper(fields, options.description), | ||
decode: (bytes, offset = 0) => { | ||
function getStructDecoder(fields) { | ||
var _a; | ||
const fieldCodecs = fields.map(([, codec]) => codec); | ||
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize)); | ||
const maxSize = (_a = sumCodecSizes(fieldCodecs.map(getMaxSize))) != null ? _a : void 0; | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
const struct = {}; | ||
fields.forEach(([key, codec]) => { | ||
const [value, newOffset] = codec.decode(bytes, offset); | ||
const [value, newOffset] = codec.read(bytes, offset); | ||
offset = newOffset; | ||
@@ -772,76 +754,29 @@ struct[key] = value; | ||
} | ||
}; | ||
}); | ||
} | ||
function getStructCodec(fields, options = {}) { | ||
return combineCodec(getStructEncoder(fields, options), getStructDecoder(fields, options)); | ||
function getStructCodec(fields) { | ||
return combineCodec(getStructEncoder(fields), getStructDecoder(fields)); | ||
} | ||
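// Editorial sketch: structs write each named field in declaration order
// (getU8Codec/getU32Codec assumed from '@solana/codecs-numbers').
// const person = getStructCodec([['age', getU8Codec()], ['score', getU32Codec()]]);
// person.encode({ age: 30, score: 7 }); // -> Uint8Array [30, 7, 0, 0, 0]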
// src/tuple.ts | ||
function tupleCodecHelper(items, description) { | ||
const itemDescriptions = items.map((item) => item.description).join(", "); | ||
return { | ||
description: description ?? `tuple(${itemDescriptions})`, | ||
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)), | ||
maxSize: sumCodecSizes(items.map((item) => item.maxSize)) | ||
}; | ||
} | ||
function getTupleEncoder(items, options = {}) { | ||
return { | ||
...tupleCodecHelper(items, options.description), | ||
encode: (value) => { | ||
assertValidNumberOfItemsForCodec("tuple", items.length, value.length); | ||
return mergeBytes(items.map((item, index) => item.encode(value[index]))); | ||
} | ||
}; | ||
} | ||
function getTupleDecoder(items, options = {}) { | ||
return { | ||
...tupleCodecHelper(items, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const values = []; | ||
items.forEach((codec) => { | ||
const [newValue, newOffset] = codec.decode(bytes, offset); | ||
values.push(newValue); | ||
offset = newOffset; | ||
}); | ||
return [values, offset]; | ||
} | ||
}; | ||
} | ||
function getTupleCodec(items, options = {}) { | ||
return combineCodec( | ||
getTupleEncoder(items, options), | ||
getTupleDecoder(items, options) | ||
); | ||
} | ||
// src/unit.ts | ||
function getUnitEncoder(options = {}) { | ||
return { | ||
description: options.description ?? "unit", | ||
encode: () => new Uint8Array(), | ||
function getUnitEncoder() { | ||
return createEncoder({ | ||
fixedSize: 0, | ||
maxSize: 0 | ||
}; | ||
write: (_value, _bytes, offset) => offset | ||
}); | ||
} | ||
function getUnitDecoder(options = {}) { | ||
return { | ||
decode: (_bytes, offset = 0) => [void 0, offset], | ||
description: options.description ?? "unit", | ||
function getUnitDecoder() { | ||
return createDecoder({ | ||
fixedSize: 0, | ||
maxSize: 0 | ||
}; | ||
read: (_bytes, offset) => [void 0, offset] | ||
}); | ||
} | ||
function getUnitCodec(options = {}) { | ||
return combineCodec(getUnitEncoder(options), getUnitDecoder(options)); | ||
function getUnitCodec() { | ||
return combineCodec(getUnitEncoder(), getUnitDecoder()); | ||
} | ||
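// Editorial sketch: the unit codec reads and writes zero bytes.
// getUnitCodec().encode(undefined);             // -> Uint8Array []
// getUnitCodec().read(new Uint8Array([9]), 0);  // -> [undefined, 0]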
exports.assertValidNumberOfItemsForCodec = assertValidNumberOfItemsForCodec; | ||
exports.decodeArrayLikeCodecSize = decodeArrayLikeCodecSize; | ||
exports.getArrayCodec = getArrayCodec; | ||
exports.getArrayDecoder = getArrayDecoder; | ||
exports.getArrayEncoder = getArrayEncoder; | ||
exports.getArrayLikeCodecSizeDescription = getArrayLikeCodecSizeDescription; | ||
exports.getArrayLikeCodecSizeFromChildren = getArrayLikeCodecSizeFromChildren; | ||
exports.getArrayLikeCodecSizePrefix = getArrayLikeCodecSizePrefix; | ||
exports.getBitArrayCodec = getBitArrayCodec; | ||
@@ -848,0 +783,0 @@ exports.getBitArrayDecoder = getBitArrayDecoder; | ||
@@ -1,2 +0,2 @@ | ||
import { mergeBytes, combineCodec, assertByteArrayHasEnoughBytesForCodec, assertFixedSizeCodec, assertByteArrayIsNotEmptyForCodec, fixEncoder, fixDecoder, fixBytes } from '@solana/codecs-core'; | ||
import { assertIsFixedSize, createEncoder, getEncodedSize, createDecoder, combineCodec, assertByteArrayHasEnoughBytesForCodec, mapEncoder, mapDecoder, fixEncoder, fixDecoder, assertByteArrayIsNotEmptyForCodec, isFixedSize } from '@solana/codecs-core'; | ||
import { getU32Encoder, getU32Decoder, getU8Encoder, getU8Decoder } from '@solana/codecs-numbers'; | ||
@@ -6,3 +6,8 @@ | ||
// src/utils.ts | ||
// src/assertions.ts | ||
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) { | ||
if (expected !== actual) { | ||
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`); | ||
} | ||
} | ||
function maxCodecSizes(sizes) { | ||
@@ -17,5 +22,69 @@ return sizes.reduce( | ||
} | ||
function getFixedSize(codec) { | ||
return isFixedSize(codec) ? codec.fixedSize : null; | ||
} | ||
function getMaxSize(codec) { | ||
return isFixedSize(codec) ? codec.fixedSize : codec.maxSize ?? null; | ||
} | ||
// src/array-like-codec-size.ts | ||
function decodeArrayLikeCodecSize(size, childrenSizes, bytes, offset) { | ||
// src/array.ts | ||
function getArrayEncoder(item, config = {}) { | ||
const size = config.size ?? getU32Encoder(); | ||
if (size === "remainder") { | ||
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const fixedSize = computeArrayLikeCodecSize(size, getFixedSize(item)); | ||
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0; | ||
return createEncoder({ | ||
...fixedSize !== null ? { fixedSize } : { | ||
getSizeFromValue: (array) => { | ||
const prefixSize = typeof size === "object" ? getEncodedSize(array.length, size) : 0; | ||
return prefixSize + [...array].reduce((all, value) => all + getEncodedSize(value, item), 0); | ||
}, | ||
maxSize | ||
}, | ||
write: (array, bytes, offset) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("array", size, array.length); | ||
} | ||
if (typeof size === "object") { | ||
offset = size.write(array.length, bytes, offset); | ||
} | ||
array.forEach((value) => { | ||
offset = item.write(value, bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}); | ||
} | ||
function getArrayDecoder(item, config = {}) { | ||
const size = config.size ?? getU32Decoder(); | ||
if (size === "remainder") { | ||
assertIsFixedSize(item, 'Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const itemSize = getFixedSize(item); | ||
const fixedSize = computeArrayLikeCodecSize(size, itemSize); | ||
const maxSize = computeArrayLikeCodecSize(size, getMaxSize(item)) ?? void 0; | ||
return createDecoder({ | ||
...fixedSize !== null ? { fixedSize } : { maxSize }, | ||
read: (bytes, offset) => { | ||
const array = []; | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [array, offset]; | ||
} | ||
const [resolvedSize, newOffset] = readArrayLikeCodecSize(size, itemSize, bytes, offset); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.read(bytes, offset); | ||
offset = newOffset2; | ||
array.push(value); | ||
} | ||
return [array, offset]; | ||
} | ||
}); | ||
} | ||
function getArrayCodec(item, config = {}) { | ||
return combineCodec(getArrayEncoder(item, config), getArrayDecoder(item, config)); | ||
} | ||
function readArrayLikeCodecSize(size, itemSize, bytes, offset) { | ||
if (typeof size === "number") { | ||
@@ -25,23 +94,19 @@ return [size, offset]; | ||
if (typeof size === "object") { | ||
return size.decode(bytes, offset); | ||
return size.read(bytes, offset); | ||
} | ||
if (size === "remainder") { | ||
const childrenSize = sumCodecSizes(childrenSizes); | ||
if (childrenSize === null) { | ||
if (itemSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
const remainder = bytes.slice(offset).length; | ||
if (remainder % childrenSize !== 0) { | ||
const remainder = Math.max(0, bytes.length - offset); | ||
if (remainder % itemSize !== 0) { | ||
throw new Error( | ||
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${childrenSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${childrenSize} should be equal to zero.` | ||
`The remainder of the byte array (${remainder} bytes) cannot be split into chunks of ${itemSize} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${remainder} modulo ${itemSize} should be equal to zero.` | ||
); | ||
} | ||
return [remainder / childrenSize, offset]; | ||
return [remainder / itemSize, offset]; | ||
} | ||
throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(size)}`); | ||
} | ||
function getArrayLikeCodecSizeDescription(size) { | ||
return typeof size === "object" ? size.description : `${size}`; | ||
} | ||
function getArrayLikeCodecSizeFromChildren(size, childrenSizes) { | ||
function computeArrayLikeCodecSize(size, itemSize) { | ||
if (typeof size !== "number") | ||
@@ -51,70 +116,11 @@ return null; | ||
return 0; | ||
const childrenSize = sumCodecSizes(childrenSizes); | ||
return childrenSize === null ? null : childrenSize * size; | ||
return itemSize === null ? null : itemSize * size; | ||
} | ||
function getArrayLikeCodecSizePrefix(size, realSize) { | ||
return typeof size === "object" ? size.encode(realSize) : new Uint8Array(); | ||
} | ||
// src/assertions.ts | ||
function assertValidNumberOfItemsForCodec(codecDescription, expected, actual) { | ||
if (expected !== actual) { | ||
throw new Error(`Expected [${codecDescription}] to have ${expected} items, got ${actual}.`); | ||
} | ||
} | ||
// src/array.ts | ||
function arrayCodecHelper(item, size, description) { | ||
if (size === "remainder" && item.fixedSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
return { | ||
description: description ?? `array(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]) | ||
}; | ||
} | ||
function getArrayEncoder(item, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...arrayCodecHelper(item, size, options.description), | ||
encode: (value) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("array", size, value.length); | ||
} | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, value.length), ...value.map((v) => item.encode(v))]); | ||
} | ||
}; | ||
} | ||
function getArrayDecoder(item, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...arrayCodecHelper(item, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [[], offset]; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset); | ||
offset = newOffset; | ||
const values = []; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.decode(bytes, offset); | ||
values.push(value); | ||
offset = newOffset2; | ||
} | ||
return [values, offset]; | ||
} | ||
}; | ||
} | ||
function getArrayCodec(item, options = {}) { | ||
return combineCodec(getArrayEncoder(item, options), getArrayDecoder(item, options)); | ||
} | ||
var getBitArrayEncoder = (size, options = {}) => { | ||
const parsedOptions = typeof options === "boolean" ? { backward: options } : options; | ||
const backward = parsedOptions.backward ?? false; | ||
const backwardSuffix = backward ? "; backward" : ""; | ||
return { | ||
description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`, | ||
encode(value) { | ||
const bytes = []; | ||
function getBitArrayEncoder(size, config = {}) { | ||
const parsedConfig = typeof config === "boolean" ? { backward: config } : config; | ||
const backward = parsedConfig.backward ?? false; | ||
return createEncoder({ | ||
fixedSize: size, | ||
write(value, bytes, offset) { | ||
const bytesToAdd = []; | ||
for (let i = 0; i < size; i += 1) { | ||
@@ -127,19 +133,18 @@ let byte = 0; | ||
if (backward) { | ||
bytes.unshift(byte); | ||
bytesToAdd.unshift(byte); | ||
} else { | ||
bytes.push(byte); | ||
bytesToAdd.push(byte); | ||
} | ||
} | ||
return new Uint8Array(bytes); | ||
}, | ||
bytes.set(bytesToAdd, offset); | ||
return size; | ||
} | ||
}); | ||
} | ||
function getBitArrayDecoder(size, config = {}) { | ||
const parsedConfig = typeof config === "boolean" ? { backward: config } : config; | ||
const backward = parsedConfig.backward ?? false; | ||
return createDecoder({ | ||
fixedSize: size, | ||
maxSize: size | ||
}; | ||
}; | ||
var getBitArrayDecoder = (size, options = {}) => { | ||
const parsedOptions = typeof options === "boolean" ? { backward: options } : options; | ||
const backward = parsedOptions.backward ?? false; | ||
const backwardSuffix = backward ? "; backward" : ""; | ||
return { | ||
decode(bytes, offset = 0) { | ||
read(bytes, offset) { | ||
assertByteArrayHasEnoughBytesForCodec("bitArray", size, bytes, offset); | ||
@@ -161,46 +166,30 @@ const booleans = []; | ||
return [booleans, offset + size]; | ||
}, | ||
description: parsedOptions.description ?? `bitArray(${size}${backwardSuffix})`, | ||
fixedSize: size, | ||
maxSize: size | ||
}; | ||
}; | ||
var getBitArrayCodec = (size, options = {}) => combineCodec(getBitArrayEncoder(size, options), getBitArrayDecoder(size, options)); | ||
function getBooleanEncoder(options = {}) { | ||
const size = options.size ?? getU8Encoder(); | ||
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size."); | ||
return { | ||
description: options.description ?? `bool(${size.description})`, | ||
encode: (value) => size.encode(value ? 1 : 0), | ||
fixedSize: size.fixedSize, | ||
maxSize: size.fixedSize | ||
}; | ||
} | ||
}); | ||
} | ||
function getBooleanDecoder(options = {}) { | ||
const size = options.size ?? getU8Decoder(); | ||
assertFixedSizeCodec(size, "Codec [bool] requires a fixed size."); | ||
return { | ||
decode: (bytes, offset = 0) => { | ||
assertByteArrayIsNotEmptyForCodec("bool", bytes, offset); | ||
const [value, vOffset] = size.decode(bytes, offset); | ||
return [value === 1, vOffset]; | ||
}, | ||
description: options.description ?? `bool(${size.description})`, | ||
fixedSize: size.fixedSize, | ||
maxSize: size.fixedSize | ||
}; | ||
function getBitArrayCodec(size, config = {}) { | ||
return combineCodec(getBitArrayEncoder(size, config), getBitArrayDecoder(size, config)); | ||
} | ||
function getBooleanCodec(options = {}) { | ||
return combineCodec(getBooleanEncoder(options), getBooleanDecoder(options)); | ||
function getBooleanEncoder(config = {}) { | ||
const size = config.size ?? getU8Encoder(); | ||
assertIsFixedSize(size, "Codec [bool] requires a fixed size."); | ||
return mapEncoder(size, (value) => value ? 1 : 0); | ||
} | ||
function getBytesEncoder(options = {}) { | ||
const size = options.size ?? "variable"; | ||
const sizeDescription = typeof size === "object" ? size.description : `${size}`; | ||
const description = options.description ?? `bytes(${sizeDescription})`; | ||
const byteEncoder = { | ||
description, | ||
encode: (value) => value, | ||
fixedSize: null, | ||
maxSize: null | ||
}; | ||
function getBooleanDecoder(config = {}) { | ||
const size = config.size ?? getU8Decoder(); | ||
assertIsFixedSize(size, "Codec [bool] requires a fixed size."); | ||
return mapDecoder(size, (value) => Number(value) === 1); | ||
} | ||
function getBooleanCodec(config = {}) { | ||
return combineCodec(getBooleanEncoder(config), getBooleanDecoder(config)); | ||
} | ||
function getBytesEncoder(config = {}) { | ||
const size = config.size ?? "variable"; | ||
const byteEncoder = createEncoder({ | ||
getSizeFromValue: (value) => value.length, | ||
write: (value, bytes, offset) => { | ||
bytes.set(value, offset); | ||
return offset + value.length; | ||
} | ||
}); | ||
if (size === "variable") { | ||
@@ -210,26 +199,20 @@ return byteEncoder; | ||
if (typeof size === "number") { | ||
return fixEncoder(byteEncoder, size, description); | ||
return fixEncoder(byteEncoder, size); | ||
} | ||
return { | ||
...byteEncoder, | ||
encode: (value) => { | ||
const contentBytes = byteEncoder.encode(value); | ||
const lengthBytes = size.encode(contentBytes.length); | ||
return mergeBytes([lengthBytes, contentBytes]); | ||
return createEncoder({ | ||
getSizeFromValue: (value) => getEncodedSize(value.length, size) + value.length, | ||
write: (value, bytes, offset) => { | ||
offset = size.write(value.length, bytes, offset); | ||
return byteEncoder.write(value, bytes, offset); | ||
} | ||
}; | ||
}); | ||
} | ||
function getBytesDecoder(options = {}) { | ||
const size = options.size ?? "variable"; | ||
const sizeDescription = typeof size === "object" ? size.description : `${size}`; | ||
const description = options.description ?? `bytes(${sizeDescription})`; | ||
const byteDecoder = { | ||
decode: (bytes, offset = 0) => { | ||
function getBytesDecoder(config = {}) { | ||
const size = config.size ?? "variable"; | ||
const byteDecoder = createDecoder({ | ||
read: (bytes, offset) => { | ||
const slice = bytes.slice(offset); | ||
return [slice, offset + slice.length]; | ||
}, | ||
description, | ||
fixedSize: null, | ||
maxSize: null | ||
}; | ||
} | ||
}); | ||
if (size === "variable") { | ||
@@ -239,9 +222,8 @@ return byteDecoder; | ||
if (typeof size === "number") { | ||
return fixDecoder(byteDecoder, size, description); | ||
return fixDecoder(byteDecoder, size); | ||
} | ||
return { | ||
...byteDecoder, | ||
decode: (bytes, offset = 0) => { | ||
return createDecoder({ | ||
read: (bytes, offset) => { | ||
assertByteArrayIsNotEmptyForCodec("bytes", bytes, offset); | ||
const [lengthBigInt, lengthOffset] = size.decode(bytes, offset); | ||
const [lengthBigInt, lengthOffset] = size.read(bytes, offset); | ||
const length = Number(lengthBigInt); | ||
@@ -251,47 +233,39 @@ offset = lengthOffset; | ||
assertByteArrayHasEnoughBytesForCodec("bytes", length, contentBytes); | ||
const [value, contentOffset] = byteDecoder.decode(contentBytes); | ||
const [value, contentOffset] = byteDecoder.read(contentBytes, 0); | ||
offset += contentOffset; | ||
return [value, offset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getBytesCodec(options = {}) { | ||
return combineCodec(getBytesEncoder(options), getBytesDecoder(options)); | ||
function getBytesCodec(config = {}) { | ||
return combineCodec(getBytesEncoder(config), getBytesDecoder(config)); | ||
} | ||
function dataEnumCodecHelper(variants, prefix, description) { | ||
const fieldDescriptions = variants.map(([name, codec]) => `${String(name)}${codec ? `: ${codec.description}` : ""}`).join(", "); | ||
const allVariantHaveTheSameFixedSize = variants.every((one, _i, all) => one[1].fixedSize === all[0][1].fixedSize); | ||
const fixedVariantSize = allVariantHaveTheSameFixedSize ? variants[0][1].fixedSize : null; | ||
const maxVariantSize = maxCodecSizes(variants.map(([, field]) => field.maxSize)); | ||
return { | ||
description: description ?? `dataEnum(${fieldDescriptions}; ${prefix.description})`, | ||
fixedSize: variants.length === 0 ? prefix.fixedSize : sumCodecSizes([prefix.fixedSize, fixedVariantSize]), | ||
maxSize: variants.length === 0 ? prefix.maxSize : sumCodecSizes([prefix.maxSize, maxVariantSize]) | ||
}; | ||
} | ||
function getDataEnumEncoder(variants, options = {}) { | ||
const prefix = options.size ?? getU8Encoder(); | ||
return { | ||
...dataEnumCodecHelper(variants, prefix, options.description), | ||
encode: (variant) => { | ||
const discriminator = variants.findIndex(([key]) => variant.__kind === key); | ||
if (discriminator < 0) { | ||
throw new Error( | ||
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".` | ||
); | ||
} | ||
const variantPrefix = prefix.encode(discriminator); | ||
const variantSerializer = variants[discriminator][1]; | ||
const variantBytes = variantSerializer.encode(variant); | ||
return mergeBytes([variantPrefix, variantBytes]); | ||
function getDataEnumEncoder(variants, config = {}) { | ||
const prefix = config.size ?? getU8Encoder(); | ||
const fixedSize = getDataEnumFixedSize(variants, prefix); | ||
return createEncoder({ | ||
...fixedSize !== null ? { fixedSize } : { | ||
getSizeFromValue: (variant) => { | ||
const discriminator = getVariantDiscriminator(variants, variant); | ||
const variantEncoder = variants[discriminator][1]; | ||
return getEncodedSize(discriminator, prefix) + getEncodedSize(variant, variantEncoder); | ||
}, | ||
maxSize: getDataEnumMaxSize(variants, prefix) | ||
}, | ||
write: (variant, bytes, offset) => { | ||
const discriminator = getVariantDiscriminator(variants, variant); | ||
offset = prefix.write(discriminator, bytes, offset); | ||
const variantEncoder = variants[discriminator][1]; | ||
return variantEncoder.write(variant, bytes, offset); | ||
} | ||
}; | ||
}); | ||
} | ||
function getDataEnumDecoder(variants, options = {}) { | ||
const prefix = options.size ?? getU8Decoder(); | ||
return { | ||
...dataEnumCodecHelper(variants, prefix, options.description), | ||
decode: (bytes, offset = 0) => { | ||
function getDataEnumDecoder(variants, config = {}) { | ||
const prefix = config.size ?? getU8Decoder(); | ||
const fixedSize = getDataEnumFixedSize(variants, prefix); | ||
return createDecoder({ | ||
...fixedSize !== null ? { fixedSize } : { maxSize: getDataEnumMaxSize(variants, prefix) }, | ||
read: (bytes, offset) => { | ||
assertByteArrayIsNotEmptyForCodec("dataEnum", bytes, offset); | ||
const [discriminator, dOffset] = prefix.decode(bytes, offset); | ||
const [discriminator, dOffset] = prefix.read(bytes, offset); | ||
offset = dOffset; | ||
@@ -304,121 +278,192 @@ const variantField = variants[Number(discriminator)] ?? null; | ||
} | ||
const [variant, vOffset] = variantField[1].decode(bytes, offset); | ||
const [variant, vOffset] = variantField[1].read(bytes, offset); | ||
offset = vOffset; | ||
return [{ __kind: variantField[0], ...variant ?? {} }, offset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getDataEnumCodec(variants, options = {}) { | ||
return combineCodec(getDataEnumEncoder(variants, options), getDataEnumDecoder(variants, options)); | ||
function getDataEnumCodec(variants, config = {}) { | ||
return combineCodec(getDataEnumEncoder(variants, config), getDataEnumDecoder(variants, config)); | ||
} | ||
function mapCodecHelper(key, value, size, description) { | ||
if (size === "remainder" && (key.fixedSize === null || value.fixedSize === null)) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
function getDataEnumFixedSize(variants, prefix) { | ||
if (variants.length === 0) | ||
return isFixedSize(prefix) ? prefix.fixedSize : null; | ||
if (!isFixedSize(variants[0][1])) | ||
return null; | ||
const variantSize = variants[0][1].fixedSize; | ||
const sameSizedVariants = variants.every( | ||
(variant) => isFixedSize(variant[1]) && variant[1].fixedSize === variantSize | ||
); | ||
if (!sameSizedVariants) | ||
return null; | ||
return isFixedSize(prefix) ? prefix.fixedSize + variantSize : null; | ||
} | ||
function getDataEnumMaxSize(variants, prefix) { | ||
const maxVariantSize = maxCodecSizes(variants.map(([, codec]) => getMaxSize(codec))); | ||
return sumCodecSizes([getMaxSize(prefix), maxVariantSize]) ?? void 0; | ||
} | ||
function getVariantDiscriminator(variants, variant) { | ||
const discriminator = variants.findIndex(([key]) => variant.__kind === key); | ||
if (discriminator < 0) { | ||
throw new Error( | ||
`Invalid data enum variant. Expected one of [${variants.map(([key]) => key).join(", ")}], got "${variant.__kind}".` | ||
); | ||
} | ||
return { | ||
description: description ?? `map(${key.description}, ${value.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [key.fixedSize, value.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [key.maxSize, value.maxSize]) | ||
}; | ||
return discriminator; | ||
} | ||
function getMapEncoder(key, value, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...mapCodecHelper(key, value, size, options.description), | ||
encode: (map) => { | ||
if (typeof size === "number") { | ||
assertValidNumberOfItemsForCodec("map", size, map.size); | ||
} | ||
const itemBytes = Array.from(map, ([k, v]) => mergeBytes([key.encode(k), value.encode(v)])); | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, map.size), ...itemBytes]); | ||
function getTupleEncoder(items) { | ||
const fixedSize = sumCodecSizes(items.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0; | ||
return createEncoder({ | ||
...fixedSize === null ? { | ||
getSizeFromValue: (value) => items.map((item, index) => getEncodedSize(value[index], item)).reduce((all, one) => all + one, 0), | ||
maxSize | ||
} : { fixedSize }, | ||
write: (value, bytes, offset) => { | ||
assertValidNumberOfItemsForCodec("tuple", items.length, value.length); | ||
items.forEach((item, index) => { | ||
offset = item.write(value[index], bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getMapDecoder(key, value, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...mapCodecHelper(key, value, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const map = /* @__PURE__ */ new Map(); | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [map, offset]; | ||
function getTupleDecoder(items) { | ||
const fixedSize = sumCodecSizes(items.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(items.map(getMaxSize)) ?? void 0; | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
const values = []; | ||
items.forEach((item) => { | ||
const [newValue, newOffset] = item.read(bytes, offset); | ||
values.push(newValue); | ||
offset = newOffset; | ||
}); | ||
return [values, offset]; | ||
} | ||
}); | ||
} | ||
function getTupleCodec(items) { | ||
return combineCodec( | ||
getTupleEncoder(items), | ||
getTupleDecoder(items) | ||
); | ||
} | ||
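// Minimal usage sketch for the reworked tuple codec: items are written back to
// back, and a tuple of fixed-size items is itself fixed-size.
import { combineCodec } from '@solana/codecs-core';
import { getU32Decoder, getU32Encoder } from '@solana/codecs-numbers';
import { getBooleanCodec, getTupleCodec } from '@solana/codecs-data-structures';

const pair = getTupleCodec([getBooleanCodec(), combineCodec(getU32Encoder(), getU32Decoder())]);
const bytes = new Uint8Array(5);             // 1-byte boolean + 4-byte u32
pair.write([true, 7], bytes, 0);
const [[flag, count]] = pair.read(bytes, 0); // → [true, 7]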
// src/map.ts | ||
function getMapEncoder(key, value, config = {}) { | ||
return mapEncoder( | ||
getArrayEncoder(getTupleEncoder([key, value]), config), | ||
(map) => [...map.entries()] | ||
); | ||
} | ||
function getMapDecoder(key, value, config = {}) { | ||
return mapDecoder( | ||
getArrayDecoder(getTupleDecoder([key, value]), config), | ||
(entries) => new Map(entries) | ||
); | ||
} | ||
function getMapCodec(key, value, config = {}) { | ||
return combineCodec(getMapEncoder(key, value, config), getMapDecoder(key, value, config)); | ||
} | ||
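// Minimal usage sketch: maps are now delegated to the array codec over
// (key, value) tuples, so the default layout is a u32 entry count followed by
// each entry in insertion order.
import { combineCodec } from '@solana/codecs-core';
import { getU32Decoder, getU32Encoder } from '@solana/codecs-numbers';
import { getBooleanCodec, getMapCodec } from '@solana/codecs-data-structures';

const u32 = combineCodec(getU32Encoder(), getU32Decoder());
const flags = getMapCodec(u32, getBooleanCodec());
const bytes = new Uint8Array(4 + 2 * 5);     // u32 count + two (u32, boolean) entries
flags.write(new Map([[1, true], [2, false]]), bytes, 0);
const [decoded] = flags.read(bytes, 0);      // → Map(2) { 1 => true, 2 => false }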
function getNullableEncoder(item, config = {}) { | ||
const prefix = config.prefix ?? getU8Encoder(); | ||
const fixed = config.fixed ?? false; | ||
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0; | ||
if (fixed || isZeroSizeItem) { | ||
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
const fixedSize = prefix.fixedSize + item.fixedSize; | ||
return createEncoder({ | ||
fixedSize, | ||
write: (option, bytes, offset) => { | ||
const prefixOffset = prefix.write(Number(option !== null), bytes, offset); | ||
if (option !== null) { | ||
item.write(option, bytes, prefixOffset); | ||
} | ||
return offset + fixedSize; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize( | ||
size, | ||
[key.fixedSize, value.fixedSize], | ||
bytes, | ||
offset | ||
); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [decodedKey, kOffset] = key.decode(bytes, offset); | ||
offset = kOffset; | ||
const [decodedValue, vOffset] = value.decode(bytes, offset); | ||
offset = vOffset; | ||
map.set(decodedKey, decodedValue); | ||
}); | ||
} | ||
return createEncoder({ | ||
getSizeFromValue: (option) => getEncodedSize(Number(option !== null), prefix) + (option !== null ? getEncodedSize(option, item) : 0), | ||
maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0, | ||
write: (option, bytes, offset) => { | ||
offset = prefix.write(Number(option !== null), bytes, offset); | ||
if (option !== null) { | ||
offset = item.write(option, bytes, offset); | ||
} | ||
return [map, offset]; | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getMapCodec(key, value, options = {}) { | ||
return combineCodec(getMapEncoder(key, value, options), getMapDecoder(key, value, options)); | ||
} | ||
function nullableCodecHelper(item, prefix, fixed, description) { | ||
let descriptionSuffix = `; ${prefix.description}`; | ||
let fixedSize = item.fixedSize === 0 ? prefix.fixedSize : null; | ||
if (fixed) { | ||
assertFixedSizeCodec(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertFixedSizeCodec(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
descriptionSuffix += "; fixed"; | ||
function getNullableDecoder(item, config = {}) { | ||
const prefix = config.prefix ?? getU8Decoder(); | ||
const fixed = config.fixed ?? false; | ||
let fixedSize = null; | ||
const isZeroSizeItem = isFixedSize(item) && isFixedSize(prefix) && item.fixedSize === 0; | ||
if (fixed || isZeroSizeItem) { | ||
assertIsFixedSize(item, "Fixed nullables can only be used with fixed-size codecs."); | ||
assertIsFixedSize(prefix, "Fixed nullables can only be used with fixed-size prefix."); | ||
fixedSize = prefix.fixedSize + item.fixedSize; | ||
} | ||
return { | ||
description: description ?? `nullable(${item.description + descriptionSuffix})`, | ||
fixedSize, | ||
maxSize: sumCodecSizes([prefix.maxSize, item.maxSize]) | ||
}; | ||
} | ||
function getNullableEncoder(item, options = {}) { | ||
const prefix = options.prefix ?? getU8Encoder(); | ||
const fixed = options.fixed ?? false; | ||
return { | ||
...nullableCodecHelper(item, prefix, fixed, options.description), | ||
encode: (option) => { | ||
const prefixByte = prefix.encode(Number(option !== null)); | ||
let itemBytes = option !== null ? item.encode(option) : new Uint8Array(); | ||
itemBytes = fixed ? fixBytes(itemBytes, item.fixedSize) : itemBytes; | ||
return mergeBytes([prefixByte, itemBytes]); | ||
} | ||
}; | ||
} | ||
function getNullableDecoder(item, options = {}) { | ||
const prefix = options.prefix ?? getU8Decoder(); | ||
const fixed = options.fixed ?? false; | ||
return { | ||
...nullableCodecHelper(item, prefix, fixed, options.description), | ||
decode: (bytes, offset = 0) => { | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize: sumCodecSizes([prefix, item].map(getMaxSize)) ?? void 0 } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
if (bytes.length - offset <= 0) { | ||
return [null, offset]; | ||
} | ||
const fixedOffset = offset + (prefix.fixedSize ?? 0) + (item.fixedSize ?? 0); | ||
const [isSome, prefixOffset] = prefix.decode(bytes, offset); | ||
offset = prefixOffset; | ||
const [isSome, prefixOffset] = prefix.read(bytes, offset); | ||
if (isSome === 0) { | ||
return [null, fixed ? fixedOffset : offset]; | ||
return [null, fixedSize !== null ? offset + fixedSize : prefixOffset]; | ||
} | ||
const [value, newOffset] = item.decode(bytes, offset); | ||
offset = newOffset; | ||
return [value, fixed ? fixedOffset : offset]; | ||
const [value, newOffset] = item.read(bytes, prefixOffset); | ||
return [value, fixedSize !== null ? offset + fixedSize : newOffset]; | ||
} | ||
}; | ||
}); | ||
} | ||
function getNullableCodec(item, options = {}) { | ||
return combineCodec(getNullableEncoder(item, options), getNullableDecoder(item, options)); | ||
function getNullableCodec(item, config = {}) { | ||
const configCast = config; | ||
return combineCodec(getNullableEncoder(item, configCast), getNullableDecoder(item, configCast)); | ||
} | ||
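// Minimal usage sketch: a nullable writes a u8 presence prefix, then the item
// when the value is not null; with `fixed: true` it always spans
// prefix size + item size bytes so the overall layout stays fixed.
import { combineCodec } from '@solana/codecs-core';
import { getU32Decoder, getU32Encoder } from '@solana/codecs-numbers';
import { getNullableCodec } from '@solana/codecs-data-structures';

const maybeU32 = getNullableCodec(combineCodec(getU32Encoder(), getU32Decoder()), { fixed: true });
const bytes = new Uint8Array(5);             // always 1 (prefix) + 4 (item)
maybeU32.write(7, bytes, 0);                 // maybeU32.write(null, ...) would only set the prefix
const [value] = maybeU32.read(bytes, 0);     // → 7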
function scalarEnumCoderHelper(constructor, prefix, description) { | ||
function getScalarEnumEncoder(constructor, config = {}) { | ||
const prefix = config.size ?? getU8Encoder(); | ||
const { minRange, maxRange, stringValues, enumKeys, enumValues } = getScalarEnumStats(constructor); | ||
return mapEncoder(prefix, (value) => { | ||
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange); | ||
const isInvalidString = typeof value === "string" && !stringValues.includes(value); | ||
if (isInvalidNumber || isInvalidString) { | ||
throw new Error( | ||
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".` | ||
); | ||
} | ||
if (typeof value === "number") | ||
return value; | ||
const valueIndex = enumValues.indexOf(value); | ||
if (valueIndex >= 0) | ||
return valueIndex; | ||
return enumKeys.indexOf(value); | ||
}); | ||
} | ||
function getScalarEnumDecoder(constructor, config = {}) { | ||
const prefix = config.size ?? getU8Decoder(); | ||
const { minRange, maxRange, isNumericEnum, enumValues } = getScalarEnumStats(constructor); | ||
return mapDecoder(prefix, (value) => { | ||
const valueAsNumber = Number(value); | ||
if (valueAsNumber < minRange || valueAsNumber > maxRange) { | ||
throw new Error( | ||
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.` | ||
); | ||
} | ||
return isNumericEnum ? valueAsNumber : enumValues[valueAsNumber]; | ||
}); | ||
} | ||
function getScalarEnumCodec(constructor, config = {}) { | ||
return combineCodec(getScalarEnumEncoder(constructor, config), getScalarEnumDecoder(constructor, config)); | ||
} | ||
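// Minimal usage sketch: a scalar enum is stored purely as its discriminator
// (u8 by default). The plain object below stands in for a compiled TypeScript
// string enum and is illustrative only.
import { getScalarEnumCodec } from '@solana/codecs-data-structures';

const Direction = { Left: 'Left', Right: 'Right' };
const direction = getScalarEnumCodec(Direction);
const bytes = new Uint8Array(1);             // one-byte discriminator, no payload
direction.write('Right', bytes, 0);          // writes discriminator 1
const [value] = direction.read(bytes, 0);    // → 'Right'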
function getScalarEnumStats(constructor) { | ||
const enumKeys = Object.keys(constructor); | ||
const enumValues = Object.values(constructor); | ||
const isNumericEnum = enumValues.some((v) => typeof v === "number"); | ||
const valueDescriptions = enumValues.filter((v) => typeof v === "string").join(", "); | ||
const minRange = 0; | ||
@@ -428,9 +473,6 @@ const maxRange = isNumericEnum ? enumValues.length / 2 - 1 : enumValues.length - 1; | ||
return { | ||
description: description ?? `enum(${valueDescriptions}; ${prefix.description})`, | ||
enumKeys, | ||
enumValues, | ||
fixedSize: prefix.fixedSize, | ||
isNumericEnum, | ||
maxRange, | ||
maxSize: prefix.maxSize, | ||
minRange, | ||
@@ -440,124 +482,38 @@ stringValues | ||
} | ||
function getScalarEnumEncoder(constructor, options = {}) { | ||
const prefix = options.size ?? getU8Encoder(); | ||
const { description, fixedSize, maxSize, minRange, maxRange, stringValues, enumKeys, enumValues } = scalarEnumCoderHelper(constructor, prefix, options.description); | ||
return { | ||
description, | ||
encode: (value) => { | ||
const isInvalidNumber = typeof value === "number" && (value < minRange || value > maxRange); | ||
const isInvalidString = typeof value === "string" && !stringValues.includes(value); | ||
if (isInvalidNumber || isInvalidString) { | ||
throw new Error( | ||
`Invalid scalar enum variant. Expected one of [${stringValues.join(", ")}] or a number between ${minRange} and ${maxRange}, got "${value}".` | ||
); | ||
} | ||
if (typeof value === "number") | ||
return prefix.encode(value); | ||
const valueIndex = enumValues.indexOf(value); | ||
if (valueIndex >= 0) | ||
return prefix.encode(valueIndex); | ||
return prefix.encode(enumKeys.indexOf(value)); | ||
}, | ||
fixedSize, | ||
maxSize | ||
}; | ||
function getSetEncoder(item, config = {}) { | ||
return mapEncoder(getArrayEncoder(item, config), (set) => [...set]); | ||
} | ||
function getScalarEnumDecoder(constructor, options = {}) { | ||
const prefix = options.size ?? getU8Decoder(); | ||
const { description, fixedSize, maxSize, minRange, maxRange, isNumericEnum, enumValues } = scalarEnumCoderHelper( | ||
constructor, | ||
prefix, | ||
options.description | ||
); | ||
return { | ||
decode: (bytes, offset = 0) => { | ||
assertByteArrayIsNotEmptyForCodec("enum", bytes, offset); | ||
const [value, newOffset] = prefix.decode(bytes, offset); | ||
const valueAsNumber = Number(value); | ||
offset = newOffset; | ||
if (valueAsNumber < minRange || valueAsNumber > maxRange) { | ||
throw new Error( | ||
`Enum discriminator out of range. Expected a number between ${minRange} and ${maxRange}, got ${valueAsNumber}.` | ||
); | ||
} | ||
return [isNumericEnum ? valueAsNumber : enumValues[valueAsNumber], offset]; | ||
}, | ||
description, | ||
fixedSize, | ||
maxSize | ||
}; | ||
function getSetDecoder(item, config = {}) { | ||
return mapDecoder(getArrayDecoder(item, config), (entries) => new Set(entries)); | ||
} | ||
function getScalarEnumCodec(constructor, options = {}) { | ||
return combineCodec(getScalarEnumEncoder(constructor, options), getScalarEnumDecoder(constructor, options)); | ||
function getSetCodec(item, config = {}) { | ||
return combineCodec(getSetEncoder(item, config), getSetDecoder(item, config)); | ||
} | ||
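// Minimal usage sketch: sets now reuse the array codec, so the default layout
// is a u32 item count followed by each element.
import { combineCodec } from '@solana/codecs-core';
import { getU32Decoder, getU32Encoder } from '@solana/codecs-numbers';
import { getSetCodec } from '@solana/codecs-data-structures';

const u32Set = getSetCodec(combineCodec(getU32Encoder(), getU32Decoder()));
const bytes = new Uint8Array(4 + 3 * 4);     // u32 count + three u32 items
u32Set.write(new Set([1, 2, 3]), bytes, 0);
const [decoded] = u32Set.read(bytes, 0);     // → Set(3) { 1, 2, 3 }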
function setCodecHelper(item, size, description) { | ||
if (size === "remainder" && item.fixedSize === null) { | ||
throw new Error('Codecs of "remainder" size must have fixed-size items.'); | ||
} | ||
return { | ||
description: description ?? `set(${item.description}; ${getArrayLikeCodecSizeDescription(size)})`, | ||
fixedSize: getArrayLikeCodecSizeFromChildren(size, [item.fixedSize]), | ||
maxSize: getArrayLikeCodecSizeFromChildren(size, [item.maxSize]) | ||
}; | ||
} | ||
function getSetEncoder(item, options = {}) { | ||
const size = options.size ?? getU32Encoder(); | ||
return { | ||
...setCodecHelper(item, size, options.description), | ||
encode: (set) => { | ||
if (typeof size === "number" && set.size !== size) { | ||
assertValidNumberOfItemsForCodec("set", size, set.size); | ||
} | ||
const itemBytes = Array.from(set, (value) => item.encode(value)); | ||
return mergeBytes([getArrayLikeCodecSizePrefix(size, set.size), ...itemBytes]); | ||
function getStructEncoder(fields) { | ||
const fieldCodecs = fields.map(([, codec]) => codec); | ||
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0; | ||
return createEncoder({ | ||
...fixedSize === null ? { | ||
getSizeFromValue: (value) => fields.map(([key, codec]) => getEncodedSize(value[key], codec)).reduce((all, one) => all + one, 0), | ||
maxSize | ||
} : { fixedSize }, | ||
write: (struct, bytes, offset) => { | ||
fields.forEach(([key, codec]) => { | ||
offset = codec.write(struct[key], bytes, offset); | ||
}); | ||
return offset; | ||
} | ||
}; | ||
}); | ||
} | ||
function getSetDecoder(item, options = {}) { | ||
const size = options.size ?? getU32Decoder(); | ||
return { | ||
...setCodecHelper(item, size, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const set = /* @__PURE__ */ new Set(); | ||
if (typeof size === "object" && bytes.slice(offset).length === 0) { | ||
return [set, offset]; | ||
} | ||
const [resolvedSize, newOffset] = decodeArrayLikeCodecSize(size, [item.fixedSize], bytes, offset); | ||
offset = newOffset; | ||
for (let i = 0; i < resolvedSize; i += 1) { | ||
const [value, newOffset2] = item.decode(bytes, offset); | ||
offset = newOffset2; | ||
set.add(value); | ||
} | ||
return [set, offset]; | ||
} | ||
}; | ||
} | ||
function getSetCodec(item, options = {}) { | ||
return combineCodec(getSetEncoder(item, options), getSetDecoder(item, options)); | ||
} | ||
function structCodecHelper(fields, description) { | ||
const fieldDescriptions = fields.map(([name, codec]) => `${String(name)}: ${codec.description}`).join(", "); | ||
return { | ||
description: description ?? `struct(${fieldDescriptions})`, | ||
fixedSize: sumCodecSizes(fields.map(([, field]) => field.fixedSize)), | ||
maxSize: sumCodecSizes(fields.map(([, field]) => field.maxSize)) | ||
}; | ||
} | ||
function getStructEncoder(fields, options = {}) { | ||
return { | ||
...structCodecHelper(fields, options.description), | ||
encode: (struct) => { | ||
const fieldBytes = fields.map(([key, codec]) => codec.encode(struct[key])); | ||
return mergeBytes(fieldBytes); | ||
} | ||
}; | ||
} | ||
function getStructDecoder(fields, options = {}) { | ||
return { | ||
...structCodecHelper(fields, options.description), | ||
decode: (bytes, offset = 0) => { | ||
function getStructDecoder(fields) { | ||
const fieldCodecs = fields.map(([, codec]) => codec); | ||
const fixedSize = sumCodecSizes(fieldCodecs.map(getFixedSize)); | ||
const maxSize = sumCodecSizes(fieldCodecs.map(getMaxSize)) ?? void 0; | ||
return createDecoder({ | ||
...fixedSize === null ? { maxSize } : { fixedSize }, | ||
read: (bytes, offset) => { | ||
const struct = {}; | ||
fields.forEach(([key, codec]) => { | ||
const [value, newOffset] = codec.decode(bytes, offset); | ||
const [value, newOffset] = codec.read(bytes, offset); | ||
offset = newOffset; | ||
@@ -568,66 +524,25 @@ struct[key] = value; | ||
} | ||
}; | ||
}); | ||
} | ||
function getStructCodec(fields, options = {}) { | ||
return combineCodec(getStructEncoder(fields, options), getStructDecoder(fields, options)); | ||
function getStructCodec(fields) { | ||
return combineCodec(getStructEncoder(fields), getStructDecoder(fields)); | ||
} | ||
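// Minimal usage sketch: struct fields are written back to back in declaration
// order; the field names below ('age', 'registered') are illustrative only.
import { combineCodec } from '@solana/codecs-core';
import { getU32Decoder, getU32Encoder } from '@solana/codecs-numbers';
import { getBooleanCodec, getStructCodec } from '@solana/codecs-data-structures';

const person = getStructCodec([
  ['age', combineCodec(getU32Encoder(), getU32Decoder())],
  ['registered', getBooleanCodec()],
]);
const bytes = new Uint8Array(5);             // 4-byte u32 + 1-byte boolean
person.write({ age: 30, registered: true }, bytes, 0);
const [decoded] = person.read(bytes, 0);     // → { age: 30, registered: true }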
function tupleCodecHelper(items, description) { | ||
const itemDescriptions = items.map((item) => item.description).join(", "); | ||
return { | ||
description: description ?? `tuple(${itemDescriptions})`, | ||
fixedSize: sumCodecSizes(items.map((item) => item.fixedSize)), | ||
maxSize: sumCodecSizes(items.map((item) => item.maxSize)) | ||
}; | ||
} | ||
function getTupleEncoder(items, options = {}) { | ||
return { | ||
...tupleCodecHelper(items, options.description), | ||
encode: (value) => { | ||
assertValidNumberOfItemsForCodec("tuple", items.length, value.length); | ||
return mergeBytes(items.map((item, index) => item.encode(value[index]))); | ||
} | ||
}; | ||
} | ||
function getTupleDecoder(items, options = {}) { | ||
return { | ||
...tupleCodecHelper(items, options.description), | ||
decode: (bytes, offset = 0) => { | ||
const values = []; | ||
items.forEach((codec) => { | ||
const [newValue, newOffset] = codec.decode(bytes, offset); | ||
values.push(newValue); | ||
offset = newOffset; | ||
}); | ||
return [values, offset]; | ||
} | ||
}; | ||
} | ||
function getTupleCodec(items, options = {}) { | ||
return combineCodec( | ||
getTupleEncoder(items, options), | ||
getTupleDecoder(items, options) | ||
); | ||
} | ||
function getUnitEncoder(options = {}) { | ||
return { | ||
description: options.description ?? "unit", | ||
encode: () => new Uint8Array(), | ||
function getUnitEncoder() { | ||
return createEncoder({ | ||
fixedSize: 0, | ||
maxSize: 0 | ||
}; | ||
write: (_value, _bytes, offset) => offset | ||
}); | ||
} | ||
function getUnitDecoder(options = {}) { | ||
return { | ||
decode: (_bytes, offset = 0) => [void 0, offset], | ||
description: options.description ?? "unit", | ||
function getUnitDecoder() { | ||
return createDecoder({ | ||
fixedSize: 0, | ||
maxSize: 0 | ||
}; | ||
read: (_bytes, offset) => [void 0, offset] | ||
}); | ||
} | ||
function getUnitCodec(options = {}) { | ||
return combineCodec(getUnitEncoder(options), getUnitDecoder(options)); | ||
function getUnitCodec() { | ||
return combineCodec(getUnitEncoder(), getUnitDecoder()); | ||
} | ||
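// Minimal usage sketch: the unit codec reads and writes nothing (fixed size 0),
// which is what makes it suitable for empty data enum variants.
import { getUnitCodec } from '@solana/codecs-data-structures';

const unit = getUnitCodec();
const bytes = new Uint8Array(0);
unit.write(undefined, bytes, 0);             // offset stays at 0
const [value, offset] = unit.read(bytes, 0); // → [undefined, 0]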
export { assertValidNumberOfItemsForCodec, decodeArrayLikeCodecSize, getArrayCodec, getArrayDecoder, getArrayEncoder, getArrayLikeCodecSizeDescription, getArrayLikeCodecSizeFromChildren, getArrayLikeCodecSizePrefix, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder }; | ||
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder }; | ||
//# sourceMappingURL=out.js.map | ||
//# sourceMappingURL=index.native.js.map |
//# sourceMappingURL=out.js.map | ||
//# sourceMappingURL=index.node.js.map | ||
export { assertValidNumberOfItemsForCodec, getArrayCodec, getArrayDecoder, getArrayEncoder, getBitArrayCodec, getBitArrayDecoder, getBitArrayEncoder, getBooleanCodec, getBooleanDecoder, getBooleanEncoder, getBytesCodec, getBytesDecoder, getBytesEncoder, getDataEnumCodec, getDataEnumDecoder, getDataEnumEncoder, getMapCodec, getMapDecoder, getMapEncoder, getNullableCodec, getNullableDecoder, getNullableEncoder, getScalarEnumCodec, getScalarEnumDecoder, getScalarEnumEncoder, getSetCodec, getSetDecoder, getSetEncoder, getStructCodec, getStructDecoder, getStructEncoder, getTupleCodec, getTupleDecoder, getTupleEncoder, getUnitCodec, getUnitDecoder, getUnitEncoder }; |
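Editor's note: the readable module above shows the shape of the new API — encoders come from `createEncoder` and expose `write(value, bytes, offset)` plus either `fixedSize` or `getSizeFromValue`/`maxSize`, while the old `encode`/`decode` + `description` objects disappear. A minimal sketch of consuming the new struct codec, assuming the default u32 and u8 number codecs (field names are illustrative):

    import { getBooleanCodec, getStructCodec } from '@solana/codecs-data-structures';
    import { getU32Codec } from '@solana/codecs-numbers';

    // A struct of a u32 and a u8-backed boolean: 4 + 1 = 5 bytes, fixed-size.
    const personCodec = getStructCodec([
        ['age', getU32Codec()],
        ['isAlive', getBooleanCodec()],
    ]);

    const bytes = personCodec.encode({ age: 42, isAlive: true }); // Uint8Array(5)
    const person = personCodec.decode(bytes);                     // { age: 42, isAlive: true }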
@@ -5,44 +5,40 @@ this.globalThis = this.globalThis || {}; | ||
function C(e,r,n=0){if(r.length-n<=0)throw new Error(`Codec [${e}] cannot decode empty byte arrays.`)}function E(e,r,n,o=0){let t=n.length-o;if(t<r)throw new Error(`Codec [${e}] expected ${r} bytes, got ${t}.`)}function D(e,r){if(e.fixedSize===null)throw new Error(r??"Expected a fixed-size codec, got a variable-size one.")}var x=e=>{let r=e.filter(i=>i.length);if(r.length===0)return e.length?e[0]:new Uint8Array;if(r.length===1)return r[0];let n=r.reduce((i,c)=>i+c.length,0),o=new Uint8Array(n),t=0;return r.forEach(i=>{o.set(i,t),t+=i.length;}),o},G=(e,r)=>{if(e.length>=r)return e;let n=new Uint8Array(r).fill(0);return n.set(e),n},v=(e,r)=>G(e.length<=r?e:e.slice(0,r),r);function u(e,r,n){if(e.fixedSize!==r.fixedSize)throw new Error(`Encoder and decoder must have the same fixed size, got [${e.fixedSize}] and [${r.fixedSize}].`);if(e.maxSize!==r.maxSize)throw new Error(`Encoder and decoder must have the same max size, got [${e.maxSize}] and [${r.maxSize}].`);if(n===void 0&&e.description!==r.description)throw new Error(`Encoder and decoder must have the same description, got [${e.description}] and [${r.description}]. Pass a custom description as a third argument if you want to override the description and bypass this error.`);return {decode:r.decode,description:n??e.description,encode:e.encode,fixedSize:e.fixedSize,maxSize:e.maxSize}}function w(e,r,n){return {description:n??`fixed(${r}, ${e.description})`,fixedSize:r,maxSize:r}}function k(e,r,n){return {...w(e,r,n),encode:o=>v(e.encode(o),r)}}function I(e,r,n){return {...w(e,r,n),decode:(o,t=0)=>{E("fixCodec",r,o,t),(t>0||o.length>r)&&(o=o.slice(t,t+r)),e.fixedSize!==null&&(o=v(o,e.fixedSize));let[i]=e.decode(o,0);return [i,t+r]}}}function q(e,r,n,o){if(o<r||o>n)throw new Error(`Codec [${e}] expected number to be in the range [${r}, ${n}], got ${o}.`)}function $(e){let r,n=e.name;return e.size>1&&(r=!("endian"in e.options)||e.options.endian===0,n+=r?"(le)":"(be)"),{description:e.options.description??n,fixedSize:e.size,littleEndian:r,maxSize:e.size}}function _(e){let r=$(e);return {description:r.description,encode(n){e.range&&q(e.name,e.range[0],e.range[1],n);let o=new ArrayBuffer(e.size);return e.set(new DataView(o),n,r.littleEndian),new Uint8Array(o)},fixedSize:r.fixedSize,maxSize:r.maxSize}}function F(e){let r=$(e);return {decode(n,o=0){C(r.description,n,o),E(r.description,e.size,n,o);let t=new DataView(J(n,o,e.size));return [e.get(t,r.littleEndian),o+e.size]},description:r.description,fixedSize:r.fixedSize,maxSize:r.maxSize}}function J(e,r,n){let o=e.byteOffset+(r??0),t=n??e.byteLength;return e.buffer.slice(o,o+t)}var O=(e={})=>_({name:"u32",options:e,range:[0,+"0xffffffff"],set:(r,n,o)=>r.setUint32(0,n,o),size:4}),N=(e={})=>F({get:(r,n)=>r.getUint32(0,n),name:"u32",options:e,size:4});var S=(e={})=>_({name:"u8",options:e,range:[0,+"0xff"],set:(r,n)=>r.setUint8(0,n),size:1}),b=(e={})=>F({get:r=>r.getUint8(0),name:"u8",options:e,size:1});function V(e){return e.reduce((r,n)=>r===null||n===null?null:Math.max(r,n),0)}function l(e){return e.reduce((r,n)=>r===null||n===null?null:r+n,0)}function U(e,r,n,o){if(typeof e=="number")return [e,o];if(typeof e=="object")return e.decode(n,o);if(e==="remainder"){let t=l(r);if(t===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');let i=n.slice(o).length;if(i%t!==0)throw new Error(`The remainder of the byte array (${i} bytes) cannot be split into chunks of ${t} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. 
In other words, ${i} modulo ${t} should be equal to zero.`);return [i/t,o]}throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(e)}`)}function h(e){return typeof e=="object"?e.description:`${e}`}function g(e,r){if(typeof e!="number")return null;if(e===0)return 0;let n=l(r);return n===null?null:n*e}function B(e,r){return typeof e=="object"?e.encode(r):new Uint8Array}function y(e,r,n){if(r!==n)throw new Error(`Expected [${e}] to have ${r} items, got ${n}.`)}function L(e,r,n){if(r==="remainder"&&e.fixedSize===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');return {description:n??`array(${e.description}; ${h(r)})`,fixedSize:g(r,[e.fixedSize]),maxSize:g(r,[e.maxSize])}}function Q(e,r={}){let n=r.size??O();return {...L(e,n,r.description),encode:o=>(typeof n=="number"&&y("array",n,o.length),x([B(n,o.length),...o.map(t=>e.encode(t))]))}}function X(e,r={}){let n=r.size??N();return {...L(e,n,r.description),decode:(o,t=0)=>{if(typeof n=="object"&&o.slice(t).length===0)return [[],t];let[i,c]=U(n,[e.fixedSize],o,t);t=c;let d=[];for(let a=0;a<i;a+=1){let[s,f]=e.decode(o,t);d.push(s),t=f;}return [d,t]}}}function We(e,r={}){return u(Q(e,r),X(e,r))}var Y=(e,r={})=>{let n=typeof r=="boolean"?{backward:r}:r,o=n.backward??!1,t=o?"; backward":"";return {description:n.description??`bitArray(${e}${t})`,encode(i){let c=[];for(let d=0;d<e;d+=1){let a=0;for(let s=0;s<8;s+=1){let f=Number(i[d*8+s]??0);a|=f<<(o?s:7-s);}o?c.unshift(a):c.push(a);}return new Uint8Array(c)},fixedSize:e,maxSize:e}},Z=(e,r={})=>{let n=typeof r=="boolean"?{backward:r}:r,o=n.backward??!1,t=o?"; backward":"";return {decode(i,c=0){E("bitArray",e,i,c);let d=[],a=i.slice(c,c+e);return a=o?a.reverse():a,a.forEach(s=>{for(let f=0;f<8;f+=1)o?(d.push(!!(s&1)),s>>=1):(d.push(!!(s&128)),s<<=1);}),[d,c+e]},description:n.description??`bitArray(${e}${t})`,fixedSize:e,maxSize:e}},er=(e,r={})=>u(Y(e,r),Z(e,r));function ee(e={}){let r=e.size??S();return D(r,"Codec [bool] requires a fixed size."),{description:e.description??`bool(${r.description})`,encode:n=>r.encode(n?1:0),fixedSize:r.fixedSize,maxSize:r.fixedSize}}function re(e={}){let r=e.size??b();return D(r,"Codec [bool] requires a fixed size."),{decode:(n,o=0)=>{C("bool",n,o);let[t,i]=r.decode(n,o);return [t===1,i]},description:e.description??`bool(${r.description})`,fixedSize:r.fixedSize,maxSize:r.fixedSize}}function mr(e={}){return u(ee(e),re(e))}function ne(e={}){let r=e.size??"variable",n=typeof r=="object"?r.description:`${r}`,o=e.description??`bytes(${n})`,t={description:o,encode:i=>i,fixedSize:null,maxSize:null};return r==="variable"?t:typeof r=="number"?k(t,r,o):{...t,encode:i=>{let c=t.encode(i),d=r.encode(c.length);return x([d,c])}}}function oe(e={}){let r=e.size??"variable",n=typeof r=="object"?r.description:`${r}`,o=e.description??`bytes(${n})`,t={decode:(i,c=0)=>{let d=i.slice(c);return [d,c+d.length]},description:o,fixedSize:null,maxSize:null};return r==="variable"?t:typeof r=="number"?I(t,r,o):{...t,decode:(i,c=0)=>{C("bytes",i,c);let[d,a]=r.decode(i,c),s=Number(d);c=a;let f=i.slice(c,c+s);E("bytes",s,f);let[m,T]=t.decode(f);return c+=T,[m,c]}}}function br(e={}){return u(ne(e),oe(e))}function K(e,r,n){let o=e.map(([d,a])=>`${String(d)}${a?`: ${a.description}`:""}`).join(", "),i=e.every((d,a,s)=>d[1].fixedSize===s[0][1].fixedSize)?e[0][1].fixedSize:null,c=V(e.map(([,d])=>d.maxSize));return {description:n??`dataEnum(${o}; 
${r.description})`,fixedSize:e.length===0?r.fixedSize:l([r.fixedSize,i]),maxSize:e.length===0?r.maxSize:l([r.maxSize,c])}}function te(e,r={}){let n=r.size??S();return {...K(e,n,r.description),encode:o=>{let t=e.findIndex(([a])=>o.__kind===a);if(t<0)throw new Error(`Invalid data enum variant. Expected one of [${e.map(([a])=>a).join(", ")}], got "${o.__kind}".`);let i=n.encode(t),d=e[t][1].encode(o);return x([i,d])}}}function ie(e,r={}){let n=r.size??b();return {...K(e,n,r.description),decode:(o,t=0)=>{C("dataEnum",o,t);let[i,c]=n.decode(o,t);t=c;let d=e[Number(i)]??null;if(!d)throw new Error(`Enum discriminator out of range. Expected a number between 0 and ${e.length-1}, got ${i}.`);let[a,s]=d[1].decode(o,t);return t=s,[{__kind:d[0],...a??{}},t]}}}function Ir(e,r={}){return u(te(e,r),ie(e,r))}function P(e,r,n,o){if(n==="remainder"&&(e.fixedSize===null||r.fixedSize===null))throw new Error('Codecs of "remainder" size must have fixed-size items.');return {description:o??`map(${e.description}, ${r.description}; ${h(n)})`,fixedSize:g(n,[e.fixedSize,r.fixedSize]),maxSize:g(n,[e.maxSize,r.maxSize])}}function ce(e,r,n={}){let o=n.size??O();return {...P(e,r,o,n.description),encode:t=>{typeof o=="number"&&y("map",o,t.size);let i=Array.from(t,([c,d])=>x([e.encode(c),r.encode(d)]));return x([B(o,t.size),...i])}}}function de(e,r,n={}){let o=n.size??N();return {...P(e,r,o,n.description),decode:(t,i=0)=>{let c=new Map;if(typeof o=="object"&&t.slice(i).length===0)return [c,i];let[d,a]=U(o,[e.fixedSize,r.fixedSize],t,i);i=a;for(let s=0;s<d;s+=1){let[f,m]=e.decode(t,i);i=m;let[T,A]=r.decode(t,i);i=A,c.set(f,T);}return [c,i]}}}function Qr(e,r,n={}){return u(ce(e,r,n),de(e,r,n))}function j(e,r,n,o){let t=`; ${r.description}`,i=e.fixedSize===0?r.fixedSize:null;return n&&(D(e,"Fixed nullables can only be used with fixed-size codecs."),D(r,"Fixed nullables can only be used with fixed-size prefix."),t+="; fixed",i=r.fixedSize+e.fixedSize),{description:o??`nullable(${e.description+t})`,fixedSize:i,maxSize:l([r.maxSize,e.maxSize])}}function ae(e,r={}){let n=r.prefix??S(),o=r.fixed??!1;return {...j(e,n,o,r.description),encode:t=>{let i=n.encode(+(t!==null)),c=t!==null?e.encode(t):new Uint8Array;return c=o?v(c,e.fixedSize):c,x([i,c])}}}function se(e,r={}){let n=r.prefix??b(),o=r.fixed??!1;return {...j(e,n,o,r.description),decode:(t,i=0)=>{if(t.length-i<=0)return [null,i];let c=i+(n.fixedSize??0)+(e.fixedSize??0),[d,a]=n.decode(t,i);if(i=a,d===0)return [null,o?c:i];let[s,f]=e.decode(t,i);return i=f,[s,o?c:i]}}}function fn(e,r={}){return u(ae(e,r),se(e,r))}function M(e,r,n){let o=Object.keys(e),t=Object.values(e),i=t.some(f=>typeof f=="number"),c=t.filter(f=>typeof f=="string").join(", "),d=0,a=i?t.length/2-1:t.length-1,s=i?[...o]:[...new Set([...o,...t])];return {description:n??`enum(${c}; ${r.description})`,enumKeys:o,enumValues:t,fixedSize:r.fixedSize,isNumericEnum:i,maxRange:a,maxSize:r.maxSize,minRange:d,stringValues:s}}function ue(e,r={}){let n=r.size??S(),{description:o,fixedSize:t,maxSize:i,minRange:c,maxRange:d,stringValues:a,enumKeys:s,enumValues:f}=M(e,n,r.description);return {description:o,encode:m=>{let T=typeof m=="number"&&(m<c||m>d),A=typeof m=="string"&&!a.includes(m);if(T||A)throw new Error(`Invalid scalar enum variant. 
Expected one of [${a.join(", ")}] or a number between ${c} and ${d}, got "${m}".`);if(typeof m=="number")return n.encode(m);let z=f.indexOf(m);return z>=0?n.encode(z):n.encode(s.indexOf(m))},fixedSize:t,maxSize:i}}function fe(e,r={}){let n=r.size??b(),{description:o,fixedSize:t,maxSize:i,minRange:c,maxRange:d,isNumericEnum:a,enumValues:s}=M(e,n,r.description);return {decode:(f,m=0)=>{C("enum",f,m);let[T,A]=n.decode(f,m),z=Number(T);if(m=A,z<c||z>d)throw new Error(`Enum discriminator out of range. Expected a number between ${c} and ${d}, got ${z}.`);return [a?z:s[z],m]},description:o,fixedSize:t,maxSize:i}}function Dn(e,r={}){return u(ue(e,r),fe(e,r))}function H(e,r,n){if(r==="remainder"&&e.fixedSize===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');return {description:n??`set(${e.description}; ${h(r)})`,fixedSize:g(r,[e.fixedSize]),maxSize:g(r,[e.maxSize])}}function me(e,r={}){let n=r.size??O();return {...H(e,n,r.description),encode:o=>{typeof n=="number"&&o.size!==n&&y("set",n,o.size);let t=Array.from(o,i=>e.encode(i));return x([B(n,o.size),...t])}}}function pe(e,r={}){let n=r.size??N();return {...H(e,n,r.description),decode:(o,t=0)=>{let i=new Set;if(typeof n=="object"&&o.slice(t).length===0)return [i,t];let[c,d]=U(n,[e.fixedSize],o,t);t=d;for(let a=0;a<c;a+=1){let[s,f]=e.decode(o,t);t=f,i.add(s);}return [i,t]}}}function Kn(e,r={}){return u(me(e,r),pe(e,r))}function R(e,r){let n=e.map(([o,t])=>`${String(o)}: ${t.description}`).join(", ");return {description:r??`struct(${n})`,fixedSize:l(e.map(([,o])=>o.fixedSize)),maxSize:l(e.map(([,o])=>o.maxSize))}}function xe(e,r={}){return {...R(e,r.description),encode:n=>{let o=e.map(([t,i])=>i.encode(n[t]));return x(o)}}}function le(e,r={}){return {...R(e,r.description),decode:(n,o=0)=>{let t={};return e.forEach(([i,c])=>{let[d,a]=c.decode(n,o);o=a,t[i]=d;}),[t,o]}}}function Qn(e,r={}){return u(xe(e,r),le(e,r))}function W(e,r){let n=e.map(o=>o.description).join(", ");return {description:r??`tuple(${n})`,fixedSize:l(e.map(o=>o.fixedSize)),maxSize:l(e.map(o=>o.maxSize))}}function Ce(e,r={}){return {...W(e,r.description),encode:n=>(y("tuple",e.length,n.length),x(e.map((o,t)=>o.encode(n[t]))))}}function ge(e,r={}){return {...W(e,r.description),decode:(n,o=0)=>{let t=[];return e.forEach(i=>{let[c,d]=i.decode(n,o);t.push(c),o=d;}),[t,o]}}}function ao(e,r={}){return u(Ce(e,r),ge(e,r))}function ze(e={}){return {description:e.description??"unit",encode:()=>new Uint8Array,fixedSize:0,maxSize:0}}function Se(e={}){return {decode:(r,n=0)=>[void 0,n],description:e.description??"unit",fixedSize:0,maxSize:0}}function Co(e={}){return u(ze(e),Se(e))} | ||
function h(e,o,r=0){if(o.length-r<=0)throw new Error(`Codec [${e}] cannot decode empty byte arrays.`)}function V(e,o,r,n=0){let t=r.length-n;if(t<o)throw new Error(`Codec [${e}] expected ${o} bytes, got ${t}.`)}var J=(e,o)=>{if(e.length>=o)return e;let r=new Uint8Array(o).fill(0);return r.set(e),r},Q=(e,o)=>J(e.length<=o?e:e.slice(0,o),o);function S(e,o){return "fixedSize"in o?o.fixedSize:o.getSizeFromValue(e)}function l(e){return Object.freeze({...e,encode:o=>{let r=new Uint8Array(S(o,e));return e.write(o,r,0),r}})}function x(e){return Object.freeze({...e,decode:(o,r=0)=>e.read(o,r)[0]})}function f(e){return "fixedSize"in e&&typeof e.fixedSize=="number"}function b(e,o){if(!f(e))throw new Error(o!=null?o:"Expected a fixed-size codec, got a variable-size one.")}function X(e){return !f(e)}function m(e,o){if(f(e)!==f(o))throw new Error("Encoder and decoder must either both be fixed-size or variable-size.");if(f(e)&&f(o)&&e.fixedSize!==o.fixedSize)throw new Error(`Encoder and decoder must have the same fixed size, got [${e.fixedSize}] and [${o.fixedSize}].`);if(!f(e)&&!f(o)&&e.maxSize!==o.maxSize)throw new Error(`Encoder and decoder must have the same max size, got [${e.maxSize}] and [${o.maxSize}].`);return {...o,...e,decode:o.decode,encode:e.encode,read:o.read,write:e.write}}function M(e,o){return l({fixedSize:o,write:(r,n,t)=>{let i=e.encode(r),c=i.length>o?i.slice(0,o):i;return n.set(c,t),t+o}})}function _(e,o){return x({fixedSize:o,read:(r,n)=>{V("fixCodec",o,r,n),(n>0||r.length>o)&&(r=r.slice(n,n+o)),f(e)&&(r=Q(r,e.fixedSize));let[t]=e.read(r,0);return [t,n+o]}})}function E(e,o){return l({...X(e)?{...e,getSizeFromValue:r=>e.getSizeFromValue(o(r))}:e,write:(r,n,t)=>e.write(o(r),n,t)})}function y(e,o){return x({...e,read:(r,n)=>{let[t,i]=e.read(r,n);return [o(t,r,n),i]}})}function Y(e,o,r,n){if(n<o||n>r)throw new Error(`Codec [${e}] expected number to be in the range [${o}, ${r}], got ${n}.`)}function O(e){return (e==null?void 0:e.endian)!==1}function j(e){return l({fixedSize:e.size,write(o,r,n){e.range&&Y(e.name,e.range[0],e.range[1],o);let t=new ArrayBuffer(e.size);return e.set(new DataView(t),o,O(e.config)),r.set(new Uint8Array(t),n),n+e.size}})}function $(e){return x({fixedSize:e.size,read(o,r=0){h(e.name,o,r),V(e.name,e.size,o,r);let n=new DataView(ee(o,r,e.size));return [e.get(n,O(e.config)),r+e.size]}})}function ee(e,o,r){let n=e.byteOffset+(o!=null?o:0),t=r!=null?r:e.byteLength;return e.buffer.slice(n,n+t)}var P=(e={})=>j({config:e,name:"u32",range:[0,+"0xffffffff"],set:(o,r,n)=>o.setUint32(0,r,n),size:4}),L=(e={})=>$({config:e,get:(o,r)=>o.getUint32(0,r),name:"u32",size:4});var D=()=>j({name:"u8",range:[0,+"0xff"],set:(e,o)=>e.setUint8(0,o),size:1}),N=()=>$({get:e=>e.getUint8(0),name:"u8",size:1});function B(e,o,r){if(o!==r)throw new Error(`Expected [${e}] to have ${o} items, got ${r}.`)}function W(e){return e.reduce((o,r)=>o===null||r===null?null:Math.max(o,r),0)}function g(e){return e.reduce((o,r)=>o===null||r===null?null:o+r,0)}function p(e){return f(e)?e.fixedSize:null}function z(e){var o;return f(e)?e.fixedSize:(o=e.maxSize)!=null?o:null}function K(e,o={}){var i,c;let r=(i=o.size)!=null?i:P();r==="remainder"&&b(e,'Codecs of "remainder" size must have fixed-size items.');let n=v(r,p(e)),t=(c=v(r,z(e)))!=null?c:void 0;return l({...n!==null?{fixedSize:n}:{getSizeFromValue:a=>(typeof r=="object"?S(a.length,r):0)+[...a].reduce((d,u)=>d+S(u,e),0),maxSize:t},write:(a,T,d)=>(typeof r=="number"&&B("array",r,a.length),typeof 
r=="object"&&(d=r.write(a.length,T,d)),a.forEach(u=>{d=e.write(u,T,d);}),d)})}function w(e,o={}){var c,a;let r=(c=o.size)!=null?c:L();r==="remainder"&&b(e,'Codecs of "remainder" size must have fixed-size items.');let n=p(e),t=v(r,n),i=(a=v(r,z(e)))!=null?a:void 0;return x({...t!==null?{fixedSize:t}:{maxSize:i},read:(T,d)=>{let u=[];if(typeof r=="object"&&T.slice(d).length===0)return [u,d];let[s,F]=oe(r,n,T,d);d=F;for(let A=0;A<s;A+=1){let[U,Z]=e.read(T,d);d=Z,u.push(U);}return [u,d]}})}function Je(e,o={}){return m(K(e,o),w(e,o))}function oe(e,o,r,n){if(typeof e=="number")return [e,n];if(typeof e=="object")return e.read(r,n);if(e==="remainder"){if(o===null)throw new Error('Codecs of "remainder" size must have fixed-size items.');let t=Math.max(0,r.length-n);if(t%o!==0)throw new Error(`The remainder of the byte array (${t} bytes) cannot be split into chunks of ${o} bytes. Codecs of "remainder" size must have a remainder that is a multiple of its item size. In other words, ${t} modulo ${o} should be equal to zero.`);return [t/o,n]}throw new Error(`Unrecognized array-like codec size: ${JSON.stringify(e)}`)}function v(e,o){return typeof e!="number"?null:e===0?0:o===null?null:o*e}function re(e,o={}){var t;let n=(t=(typeof o=="boolean"?{backward:o}:o).backward)!=null?t:!1;return l({fixedSize:e,write(i,c,a){var d;let T=[];for(let u=0;u<e;u+=1){let s=0;for(let F=0;F<8;F+=1){let A=Number((d=i[u*8+F])!=null?d:0);s|=A<<(n?F:7-F);}n?T.unshift(s):T.push(s);}return c.set(T,a),e}})}function ne(e,o={}){var t;let n=(t=(typeof o=="boolean"?{backward:o}:o).backward)!=null?t:!1;return x({fixedSize:e,read(i,c){V("bitArray",e,i,c);let a=[],T=i.slice(c,c+e);return T=n?T.reverse():T,T.forEach(d=>{for(let u=0;u<8;u+=1)n?(a.push(!!(d&1)),d>>=1):(a.push(!!(d&128)),d<<=1);}),[a,c+e]}})}function no(e,o={}){return m(re(e,o),ne(e,o))}function te(e={}){var r;let o=(r=e.size)!=null?r:D();return b(o,"Codec [bool] requires a fixed size."),E(o,n=>n?1:0)}function ie(e={}){var r;let o=(r=e.size)!=null?r:N();return b(o,"Codec [bool] requires a fixed size."),y(o,n=>Number(n)===1)}function bo(e={}){return m(te(e),ie(e))}function ce(e={}){var n;let o=(n=e.size)!=null?n:"variable",r=l({getSizeFromValue:t=>t.length,write:(t,i,c)=>(i.set(t,c),c+t.length)});return o==="variable"?r:typeof o=="number"?M(r,o):l({getSizeFromValue:t=>S(t.length,o)+t.length,write:(t,i,c)=>(c=o.write(t.length,i,c),r.write(t,i,c))})}function de(e={}){var n;let o=(n=e.size)!=null?n:"variable",r=x({read:(t,i)=>{let c=t.slice(i);return [c,i+c.length]}});return o==="variable"?r:typeof o=="number"?_(r,o):x({read:(t,i)=>{h("bytes",t,i);let[c,a]=o.read(t,i),T=Number(c);i=a;let d=t.slice(i,i+T);V("bytes",T,d);let[u,s]=r.read(d,0);return i+=s,[u,i]}})}function Uo(e={}){return m(ce(e),de(e))}function ae(e,o={}){var t;let r=(t=o.size)!=null?t:D(),n=G(e,r);return l({...n!==null?{fixedSize:n}:{getSizeFromValue:i=>{let c=R(e,i),a=e[c][1];return S(c,r)+S(i,a)},maxSize:H(e,r)},write:(i,c,a)=>{let T=R(e,i);return a=r.write(T,c,a),e[T][1].write(i,c,a)}})}function Te(e,o={}){var t;let r=(t=o.size)!=null?t:N(),n=G(e,r);return x({...n!==null?{fixedSize:n}:{maxSize:H(e,r)},read:(i,c)=>{var F;h("dataEnum",i,c);let[a,T]=r.read(i,c);c=T;let d=(F=e[Number(a)])!=null?F:null;if(!d)throw new Error(`Enum discriminator out of range. 
Expected a number between 0 and ${e.length-1}, got ${a}.`);let[u,s]=d[1].read(i,c);return c=s,[{__kind:d[0],...u!=null?u:{}},c]}})}function Go(e,o={}){return m(ae(e,o),Te(e,o))}function G(e,o){if(e.length===0)return f(o)?o.fixedSize:null;if(!f(e[0][1]))return null;let r=e[0][1].fixedSize;return e.every(t=>f(t[1])&&t[1].fixedSize===r)&&f(o)?o.fixedSize+r:null}function H(e,o){var n;let r=W(e.map(([,t])=>z(t)));return (n=g([z(o),r]))!=null?n:void 0}function R(e,o){let r=e.findIndex(([n])=>o.__kind===n);if(r<0)throw new Error(`Invalid data enum variant. Expected one of [${e.map(([n])=>n).join(", ")}], got "${o.__kind}".`);return r}function I(e){var n;let o=g(e.map(p)),r=(n=g(e.map(z)))!=null?n:void 0;return l({...o===null?{getSizeFromValue:t=>e.map((i,c)=>S(t[c],i)).reduce((i,c)=>i+c,0),maxSize:r}:{fixedSize:o},write:(t,i,c)=>(B("tuple",e.length,t.length),e.forEach((a,T)=>{c=a.write(t[T],i,c);}),c)})}function k(e){var n;let o=g(e.map(p)),r=(n=g(e.map(z)))!=null?n:void 0;return x({...o===null?{maxSize:r}:{fixedSize:o},read:(t,i)=>{let c=[];return e.forEach(a=>{let[T,d]=a.read(t,i);c.push(T),i=d;}),[c,i]}})}function dr(e){return m(I(e),k(e))}function ue(e,o,r={}){return E(K(I([e,o]),r),n=>[...n.entries()])}function me(e,o,r={}){return y(w(k([e,o]),r),n=>new Map(n))}function Er(e,o,r={}){return m(ue(e,o,r),me(e,o,r))}function fe(e,o={}){var i,c,a;let r=(i=o.prefix)!=null?i:D(),n=(c=o.fixed)!=null?c:!1,t=f(e)&&f(r)&&e.fixedSize===0;if(n||t){b(e,"Fixed nullables can only be used with fixed-size codecs."),b(r,"Fixed nullables can only be used with fixed-size prefix.");let T=r.fixedSize+e.fixedSize;return l({fixedSize:T,write:(d,u,s)=>{let F=r.write(+(d!==null),u,s);return d!==null&&e.write(d,u,F),s+T}})}return l({getSizeFromValue:T=>S(+(T!==null),r)+(T!==null?S(T,e):0),maxSize:(a=g([r,e].map(z)))!=null?a:void 0,write:(T,d,u)=>(u=r.write(+(T!==null),d,u),T!==null&&(u=e.write(T,d,u)),u)})}function le(e,o={}){var c,a,T;let r=(c=o.prefix)!=null?c:N(),n=(a=o.fixed)!=null?a:!1,t=null,i=f(e)&&f(r)&&e.fixedSize===0;return (n||i)&&(b(e,"Fixed nullables can only be used with fixed-size codecs."),b(r,"Fixed nullables can only be used with fixed-size prefix."),t=r.fixedSize+e.fixedSize),x({...t===null?{maxSize:(T=g([r,e].map(z)))!=null?T:void 0}:{fixedSize:t},read:(d,u)=>{if(d.length-u<=0)return [null,u];let[s,F]=r.read(d,u);if(s===0)return [null,t!==null?u+t:F];let[A,U]=e.read(d,F);return [A,t!==null?u+t:U]}})}function Wr(e,o={}){let r=o;return m(fe(e,r),le(e,r))}function xe(e,o={}){var T;let r=(T=o.size)!=null?T:D(),{minRange:n,maxRange:t,stringValues:i,enumKeys:c,enumValues:a}=q(e);return E(r,d=>{let u=typeof d=="number"&&(d<n||d>t),s=typeof d=="string"&&!i.includes(d);if(u||s)throw new Error(`Invalid scalar enum variant. Expected one of [${i.join(", ")}] or a number between ${n} and ${t}, got "${d}".`);if(typeof d=="number")return d;let F=a.indexOf(d);return F>=0?F:c.indexOf(d)})}function se(e,o={}){var a;let r=(a=o.size)!=null?a:N(),{minRange:n,maxRange:t,isNumericEnum:i,enumValues:c}=q(e);return y(r,T=>{let d=Number(T);if(d<n||d>t)throw new Error(`Enum discriminator out of range. 
Expected a number between ${n} and ${t}, got ${d}.`);return i?d:c[d]})}function mn(e,o={}){return m(xe(e,o),se(e,o))}function q(e){let o=Object.keys(e),r=Object.values(e),n=r.some(a=>typeof a=="number"),t=0,i=n?r.length/2-1:r.length-1,c=n?[...o]:[...new Set([...o,...r])];return {enumKeys:o,enumValues:r,isNumericEnum:n,maxRange:i,minRange:t,stringValues:c}}function Fe(e,o={}){return E(K(e,o),r=>[...r])}function Ce(e,o={}){return y(w(e,o),r=>new Set(r))}function Nn(e,o={}){return m(Fe(e,o),Ce(e,o))}function Se(e){var t;let o=e.map(([,i])=>i),r=g(o.map(p)),n=(t=g(o.map(z)))!=null?t:void 0;return l({...r===null?{getSizeFromValue:i=>e.map(([c,a])=>S(i[c],a)).reduce((c,a)=>c+a,0),maxSize:n}:{fixedSize:r},write:(i,c,a)=>(e.forEach(([T,d])=>{a=d.write(i[T],c,a);}),a)})}function ge(e){var t;let o=e.map(([,i])=>i),r=g(o.map(p)),n=(t=g(o.map(z)))!=null?t:void 0;return x({...r===null?{maxSize:n}:{fixedSize:r},read:(i,c)=>{let a={};return e.forEach(([T,d])=>{let[u,s]=d.read(i,c);c=s,a[T]=u;}),[a,c]}})}function jn(e){return m(Se(e),ge(e))}function ze(){return l({fixedSize:0,write:(e,o,r)=>r})}function be(){return x({fixedSize:0,read:(e,o)=>[void 0,o]})}function Hn(){return m(ze(),be())} | ||
exports.assertValidNumberOfItemsForCodec = y; | ||
exports.decodeArrayLikeCodecSize = U; | ||
exports.getArrayCodec = We; | ||
exports.getArrayDecoder = X; | ||
exports.getArrayEncoder = Q; | ||
exports.getArrayLikeCodecSizeDescription = h; | ||
exports.getArrayLikeCodecSizeFromChildren = g; | ||
exports.getArrayLikeCodecSizePrefix = B; | ||
exports.getBitArrayCodec = er; | ||
exports.getBitArrayDecoder = Z; | ||
exports.getBitArrayEncoder = Y; | ||
exports.getBooleanCodec = mr; | ||
exports.getBooleanDecoder = re; | ||
exports.getBooleanEncoder = ee; | ||
exports.getBytesCodec = br; | ||
exports.getBytesDecoder = oe; | ||
exports.getBytesEncoder = ne; | ||
exports.getDataEnumCodec = Ir; | ||
exports.getDataEnumDecoder = ie; | ||
exports.getDataEnumEncoder = te; | ||
exports.getMapCodec = Qr; | ||
exports.getMapDecoder = de; | ||
exports.getMapEncoder = ce; | ||
exports.getNullableCodec = fn; | ||
exports.getNullableDecoder = se; | ||
exports.getNullableEncoder = ae; | ||
exports.getScalarEnumCodec = Dn; | ||
exports.getScalarEnumDecoder = fe; | ||
exports.getScalarEnumEncoder = ue; | ||
exports.getSetCodec = Kn; | ||
exports.getSetDecoder = pe; | ||
exports.getSetEncoder = me; | ||
exports.getStructCodec = Qn; | ||
exports.getStructDecoder = le; | ||
exports.getStructEncoder = xe; | ||
exports.getTupleCodec = ao; | ||
exports.getTupleDecoder = ge; | ||
exports.getTupleEncoder = Ce; | ||
exports.getUnitCodec = Co; | ||
exports.getUnitDecoder = Se; | ||
exports.assertValidNumberOfItemsForCodec = B; | ||
exports.getArrayCodec = Je; | ||
exports.getArrayDecoder = w; | ||
exports.getArrayEncoder = K; | ||
exports.getBitArrayCodec = no; | ||
exports.getBitArrayDecoder = ne; | ||
exports.getBitArrayEncoder = re; | ||
exports.getBooleanCodec = bo; | ||
exports.getBooleanDecoder = ie; | ||
exports.getBooleanEncoder = te; | ||
exports.getBytesCodec = Uo; | ||
exports.getBytesDecoder = de; | ||
exports.getBytesEncoder = ce; | ||
exports.getDataEnumCodec = Go; | ||
exports.getDataEnumDecoder = Te; | ||
exports.getDataEnumEncoder = ae; | ||
exports.getMapCodec = Er; | ||
exports.getMapDecoder = me; | ||
exports.getMapEncoder = ue; | ||
exports.getNullableCodec = Wr; | ||
exports.getNullableDecoder = le; | ||
exports.getNullableEncoder = fe; | ||
exports.getScalarEnumCodec = mn; | ||
exports.getScalarEnumDecoder = se; | ||
exports.getScalarEnumEncoder = xe; | ||
exports.getSetCodec = Nn; | ||
exports.getSetDecoder = Ce; | ||
exports.getSetEncoder = Fe; | ||
exports.getStructCodec = jn; | ||
exports.getStructDecoder = ge; | ||
exports.getStructEncoder = Se; | ||
exports.getTupleCodec = dr; | ||
exports.getTupleDecoder = k; | ||
exports.getTupleEncoder = I; | ||
exports.getUnitCodec = Hn; | ||
exports.getUnitDecoder = be; | ||
exports.getUnitEncoder = ze; | ||
@@ -49,0 +45,0 @@ |
@@ -1,6 +0,17 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
import { ArrayLikeCodecSize } from './array-like-codec-size'; | ||
/** Defines the options for array codecs. */ | ||
export type ArrayCodecOptions<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
/** | ||
* Represents all the size options for array-like codecs | ||
* — i.e. `array`, `map` and `set`. | ||
* | ||
* It can be one of the following: | ||
* - a {@link NumberCodec} that prefixes its content with its size. | ||
* - a fixed number of items. | ||
* - or `'remainder'` to infer the number of items by dividing | ||
* the rest of the byte array by the fixed size of its item. | ||
* Note that this option is only available for fixed-size items. | ||
*/ | ||
export type ArrayLikeCodecSize<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = TPrefix | number | 'remainder'; | ||
/** Defines the configs for array codecs. */ | ||
export type ArrayCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -16,5 +27,16 @@ * The size of the array. | ||
* @param item - The encoder to use for the array's items. | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getArrayEncoder<T>(item: Encoder<T>, options?: ArrayCodecOptions<NumberEncoder>): Encoder<T[]>; | ||
export declare function getArrayEncoder<TFrom>(item: Encoder<TFrom>, config: ArrayCodecConfig<NumberEncoder> & { | ||
size: 0; | ||
}): FixedSizeEncoder<TFrom[], 0>; | ||
export declare function getArrayEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: ArrayCodecConfig<NumberEncoder> & { | ||
size: number; | ||
}): FixedSizeEncoder<TFrom[]>; | ||
export declare function getArrayEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: ArrayCodecConfig<NumberEncoder> & { | ||
size: 'remainder'; | ||
}): VariableSizeEncoder<TFrom[]>; | ||
export declare function getArrayEncoder<TFrom>(item: Encoder<TFrom>, config?: ArrayCodecConfig<NumberEncoder> & { | ||
size?: number | NumberEncoder; | ||
}): VariableSizeEncoder<TFrom[]>; | ||
/** | ||
@@ -24,5 +46,16 @@ * Decodes an array of items. | ||
* @param item - The decoder to use for the array's items. | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getArrayDecoder<T>(item: Decoder<T>, options?: ArrayCodecOptions<NumberDecoder>): Decoder<T[]>; | ||
export declare function getArrayDecoder<TTo>(item: Decoder<TTo>, config: ArrayCodecConfig<NumberDecoder> & { | ||
size: 0; | ||
}): FixedSizeDecoder<TTo[], 0>; | ||
export declare function getArrayDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: ArrayCodecConfig<NumberDecoder> & { | ||
size: number; | ||
}): FixedSizeDecoder<TTo[]>; | ||
export declare function getArrayDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: ArrayCodecConfig<NumberDecoder> & { | ||
size: 'remainder'; | ||
}): VariableSizeDecoder<TTo[]>; | ||
export declare function getArrayDecoder<TTo>(item: Decoder<TTo>, config?: ArrayCodecConfig<NumberDecoder> & { | ||
size?: number | NumberDecoder; | ||
}): VariableSizeDecoder<TTo[]>; | ||
/** | ||
@@ -32,5 +65,16 @@ * Creates a codec for an array of items. | ||
* @param item - The codec to use for the array's items. | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getArrayCodec<T, U extends T = T>(item: Codec<T, U>, options?: ArrayCodecOptions<NumberCodec>): Codec<T[], U[]>; | ||
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config: ArrayCodecConfig<NumberCodec> & { | ||
size: 0; | ||
}): FixedSizeCodec<TFrom[], TTo[], 0>; | ||
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: ArrayCodecConfig<NumberCodec> & { | ||
size: number; | ||
}): FixedSizeCodec<TFrom[], TTo[]>; | ||
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: ArrayCodecConfig<NumberCodec> & { | ||
size: 'remainder'; | ||
}): VariableSizeCodec<TFrom[], TTo[]>; | ||
export declare function getArrayCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config?: ArrayCodecConfig<NumberCodec> & { | ||
size?: number | NumberCodec; | ||
}): VariableSizeCodec<TFrom[], TTo[]>; | ||
//# sourceMappingURL=array.d.ts.map |
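Editor's note: the overloads above encode the size rules in the types — a numeric `size` on a fixed-size item yields a `FixedSizeCodec`, the default u32 prefix (or any `NumberCodec`) yields a `VariableSizeCodec`, and `'remainder'` requires fixed-size items. A brief sketch, with the item codecs chosen for illustration:

    import { getArrayCodec } from '@solana/codecs-data-structures';
    import { getU8Codec, getU16Codec } from '@solana/codecs-numbers';

    // Default: u32 length prefix, variable size.
    const prefixed = getArrayCodec(getU8Codec());

    // Fixed item count: 3 items * 2 bytes = 6 bytes, fixed size.
    const fixedCount = getArrayCodec(getU16Codec(), { size: 3 });

    // 'remainder': no prefix, item count inferred from the remaining bytes.
    const remainder = getArrayCodec(getU16Codec(), { size: 'remainder' });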
@@ -1,4 +0,4 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
/** Defines the options for bitArray codecs. */ | ||
export type BitArrayCodecOptions = BaseCodecOptions & { | ||
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder } from '@solana/codecs-core'; | ||
/** Defines the config for bitArray codecs. */ | ||
export type BitArrayCodecConfig = { | ||
/** | ||
@@ -14,5 +14,5 @@ * Whether to read the bits in reverse order. | ||
* @param size - The amount of bytes to use for the bit array. | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare const getBitArrayEncoder: (size: number, options?: BitArrayCodecOptions | boolean) => Encoder<boolean[]>; | ||
export declare function getBitArrayEncoder<TSize extends number>(size: TSize, config?: BitArrayCodecConfig | boolean): FixedSizeEncoder<boolean[], TSize>; | ||
/** | ||
@@ -22,5 +22,5 @@ * Decodes bits into an array of booleans. | ||
* @param size - The amount of bytes to use for the bit array. | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare const getBitArrayDecoder: (size: number, options?: BitArrayCodecOptions | boolean) => Decoder<boolean[]>; | ||
export declare function getBitArrayDecoder<TSize extends number>(size: TSize, config?: BitArrayCodecConfig | boolean): FixedSizeDecoder<boolean[], TSize>; | ||
/** | ||
@@ -30,5 +30,5 @@ * An array of boolean codec that converts booleans to bits and vice versa. | ||
* @param size - The amount of bytes to use for the bit array. | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare const getBitArrayCodec: (size: number, options?: BitArrayCodecOptions | boolean) => Codec<boolean[]>; | ||
export declare function getBitArrayCodec<TSize extends number>(size: TSize, config?: BitArrayCodecConfig | boolean): FixedSizeCodec<boolean[], boolean[], TSize>; | ||
//# sourceMappingURL=bit-array.d.ts.map |
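Editor's note: as the signatures above suggest, the codec's fixed size is the byte count passed in, with eight booleans packed per byte; `backward` (or the boolean shorthand) reverses the bit order. A small sketch:

    import { getBitArrayCodec } from '@solana/codecs-data-structures';

    const flags = getBitArrayCodec(1); // FixedSizeCodec<boolean[], boolean[], 1>
    flags.encode([true, false, true, false, false, false, false, false]); // 0b10100000

    const reversed = getBitArrayCodec(1, { backward: true }); // same bits, written in reverse order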
@@ -1,5 +0,5 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
/** Defines the options for boolean codecs. */ | ||
export type BooleanCodecOptions<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder } from '@solana/codecs-core'; | ||
import { FixedSizeNumberCodec, FixedSizeNumberDecoder, FixedSizeNumberEncoder, NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
/** Defines the config for boolean codecs. */ | ||
export type BooleanCodecConfig<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -14,17 +14,29 @@ * The number codec to delegate to. | ||
* | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getBooleanEncoder(options?: BooleanCodecOptions<NumberEncoder>): Encoder<boolean>; | ||
export declare function getBooleanEncoder(): FixedSizeEncoder<boolean, 1>; | ||
export declare function getBooleanEncoder<TSize extends number>(config: BooleanCodecConfig<NumberEncoder> & { | ||
size: FixedSizeNumberEncoder<TSize>; | ||
}): FixedSizeEncoder<boolean, TSize>; | ||
export declare function getBooleanEncoder(config: BooleanCodecConfig<NumberEncoder>): Encoder<boolean>; | ||
/** | ||
* Decodes booleans. | ||
* | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getBooleanDecoder(options?: BooleanCodecOptions<NumberDecoder>): Decoder<boolean>; | ||
export declare function getBooleanDecoder(): FixedSizeDecoder<boolean, 1>; | ||
export declare function getBooleanDecoder<TSize extends number>(config: BooleanCodecConfig<NumberDecoder> & { | ||
size: FixedSizeNumberDecoder<TSize>; | ||
}): FixedSizeDecoder<boolean, TSize>; | ||
export declare function getBooleanDecoder(config: BooleanCodecConfig<NumberDecoder>): Decoder<boolean>; | ||
/** | ||
* Creates a boolean codec. | ||
* | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getBooleanCodec(options?: BooleanCodecOptions<NumberCodec>): Codec<boolean>; | ||
export declare function getBooleanCodec(): FixedSizeCodec<boolean, boolean, 1>; | ||
export declare function getBooleanCodec<TSize extends number>(config: BooleanCodecConfig<NumberCodec> & { | ||
size: FixedSizeNumberCodec<TSize>; | ||
}): FixedSizeCodec<boolean, boolean, TSize>; | ||
export declare function getBooleanCodec(config: BooleanCodecConfig<NumberCodec>): Codec<boolean>; | ||
//# sourceMappingURL=boolean.d.ts.map |
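Editor's note: the boolean codec stores `true`/`false` as 1/0 using the `size` number codec, defaulting to a u8; passing a fixed-size number codec carries its byte size through to the type. A sketch under those assumptions:

    import { getBooleanCodec } from '@solana/codecs-data-structures';
    import { getU32Codec } from '@solana/codecs-numbers';

    const bool8 = getBooleanCodec();                         // FixedSizeCodec<boolean, boolean, 1>
    const bool32 = getBooleanCodec({ size: getU32Codec() }); // FixedSizeCodec<boolean, boolean, 4>
    bool32.encode(true); // 0x01 0x00 0x00 0x00 (little-endian by default)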
@@ -1,5 +0,5 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
/** Defines the options for bytes codecs. */ | ||
export type BytesCodecOptions<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
/** Defines the config for bytes codecs. */ | ||
export type BytesCodecConfig<TSize extends NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -17,17 +17,26 @@ * The size of the byte array. It can be one of the following: | ||
* | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getBytesEncoder(options?: BytesCodecOptions<NumberEncoder>): Encoder<Uint8Array>; | ||
export declare function getBytesEncoder<TSize extends number>(config: BytesCodecConfig<NumberEncoder> & { | ||
size: TSize; | ||
}): FixedSizeEncoder<Uint8Array, TSize>; | ||
export declare function getBytesEncoder(config?: BytesCodecConfig<NumberEncoder>): VariableSizeEncoder<Uint8Array>; | ||
/** | ||
* Decodes sized bytes. | ||
* | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getBytesDecoder(options?: BytesCodecOptions<NumberDecoder>): Decoder<Uint8Array>; | ||
export declare function getBytesDecoder<TSize extends number>(config: BytesCodecConfig<NumberDecoder> & { | ||
size: TSize; | ||
}): FixedSizeDecoder<Uint8Array, TSize>; | ||
export declare function getBytesDecoder(config?: BytesCodecConfig<NumberDecoder>): VariableSizeDecoder<Uint8Array>; | ||
/** | ||
* Creates a sized bytes codec. | ||
* | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getBytesCodec(options?: BytesCodecOptions<NumberCodec>): Codec<Uint8Array>; | ||
export declare function getBytesCodec<TSize extends number>(config: BytesCodecConfig<NumberCodec> & { | ||
size: TSize; | ||
}): FixedSizeCodec<Uint8Array, Uint8Array, TSize>; | ||
export declare function getBytesCodec(config?: BytesCodecConfig<NumberCodec>): VariableSizeCodec<Uint8Array>; | ||
//# sourceMappingURL=bytes.d.ts.map |
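Editor's note: the `size` config selects between three behaviours, mirrored in the return types above. A quick sketch (the 32-byte size is illustrative):

    import { getBytesCodec } from '@solana/codecs-data-structures';
    import { getU32Codec } from '@solana/codecs-numbers';

    const passthrough = getBytesCodec();                     // 'variable': bytes written as-is
    const sized = getBytesCodec({ size: 32 });               // fixed: padded or truncated to 32 bytes
    const prefixed = getBytesCodec({ size: getU32Codec() }); // u32 length prefix, then the bytes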
@@ -1,2 +0,2 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
@@ -46,19 +46,19 @@ /** | ||
}>, '__kind'>; | ||
/** Get the name and codec of each variant in a data enum. */ | ||
export type DataEnumToCodecTuple<T extends DataEnum, U extends T = T> = Array<T extends never ? never : [ | ||
T['__kind'], | ||
keyof Omit<T, '__kind'> extends never ? Codec<Omit<T, '__kind'>, Omit<U, '__kind'>> | Codec<void> : Codec<Omit<T, '__kind'>, Omit<U, '__kind'>> | ||
]>; | ||
/** Get the name and encoder of each variant in a data enum. */ | ||
export type DataEnumToEncoderTuple<T extends DataEnum> = Array<T extends never ? never : [ | ||
T['__kind'], | ||
keyof Omit<T, '__kind'> extends never ? Encoder<Omit<T, '__kind'>> | Encoder<void> : Encoder<Omit<T, '__kind'>> | ||
export type DataEnumToEncoderTuple<TFrom extends DataEnum> = Array<TFrom extends never ? never : [ | ||
TFrom['__kind'], | ||
keyof Omit<TFrom, '__kind'> extends never ? Encoder<Omit<TFrom, '__kind'>> | Encoder<void> : Encoder<Omit<TFrom, '__kind'>> | ||
]>; | ||
/** Get the name and decoder of each variant in a data enum. */ | ||
export type DataEnumToDecoderTuple<T extends DataEnum> = Array<T extends never ? never : [ | ||
T['__kind'], | ||
keyof Omit<T, '__kind'> extends never ? Decoder<Omit<T, '__kind'>> | Decoder<void> : Decoder<Omit<T, '__kind'>> | ||
export type DataEnumToDecoderTuple<TTo extends DataEnum> = Array<TTo extends never ? never : [ | ||
TTo['__kind'], | ||
keyof Omit<TTo, '__kind'> extends never ? Decoder<Omit<TTo, '__kind'>> | Decoder<void> : Decoder<Omit<TTo, '__kind'>> | ||
]>; | ||
/** Defines the options for data enum codecs. */ | ||
export type DataEnumCodecOptions<TDiscriminator = NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
/** Get the name and codec of each variant in a data enum. */ | ||
export type DataEnumToCodecTuple<TFrom extends DataEnum, TTo extends TFrom = TFrom> = Array<TFrom extends never ? never : [ | ||
TFrom['__kind'], | ||
keyof Omit<TFrom, '__kind'> extends never ? Codec<Omit<TFrom, '__kind'>, Omit<TTo, '__kind'>> | Codec<void> : Codec<Omit<TFrom, '__kind'>, Omit<TTo, '__kind'>> | ||
]>; | ||
/** Defines the config for data enum codecs. */ | ||
export type DataEnumCodecConfig<TDiscriminator = NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -74,5 +74,5 @@ * The codec to use for the enum discriminator prefixing the variant. | ||
* @param variants - The variant encoders of the data enum. | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getDataEnumEncoder<T extends DataEnum>(variants: DataEnumToEncoderTuple<T>, options?: DataEnumCodecOptions<NumberEncoder>): Encoder<T>; | ||
export declare function getDataEnumEncoder<TFrom extends DataEnum>(variants: DataEnumToEncoderTuple<TFrom>, config?: DataEnumCodecConfig<NumberEncoder>): Encoder<TFrom>; | ||
/** | ||
@@ -82,5 +82,5 @@ * Creates a data enum decoder. | ||
* @param variants - The variant decoders of the data enum. | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getDataEnumDecoder<T extends DataEnum>(variants: DataEnumToDecoderTuple<T>, options?: DataEnumCodecOptions<NumberDecoder>): Decoder<T>; | ||
export declare function getDataEnumDecoder<T extends DataEnum>(variants: DataEnumToDecoderTuple<T>, config?: DataEnumCodecConfig<NumberDecoder>): Decoder<T>; | ||
/** | ||
@@ -90,5 +90,5 @@ * Creates a data enum codec. | ||
* @param variants - The variant codecs of the data enum. | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getDataEnumCodec<T extends DataEnum, U extends T = T>(variants: DataEnumToCodecTuple<T, U>, options?: DataEnumCodecOptions<NumberCodec>): Codec<T, U>; | ||
export declare function getDataEnumCodec<T extends DataEnum, U extends T = T>(variants: DataEnumToCodecTuple<T, U>, config?: DataEnumCodecConfig<NumberCodec>): Codec<T, U>; | ||
//# sourceMappingURL=data-enum.d.ts.map |
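Editor's note: a data enum pairs each `__kind` with a codec for that variant's fields, prefixed by a u8 discriminator by default. A rough sketch, with the variant shapes and field names invented for illustration:

    import { getDataEnumCodec, getStructCodec, getUnitCodec } from '@solana/codecs-data-structures';
    import { getU32Codec } from '@solana/codecs-numbers';

    type Message = { __kind: 'Quit' } | { __kind: 'Move'; x: number; y: number };

    const messageCodec = getDataEnumCodec<Message>([
        ['Quit', getUnitCodec()],
        ['Move', getStructCodec([['x', getU32Codec()], ['y', getU32Codec()]])],
    ]);

    messageCodec.encode({ __kind: 'Move', x: 1, y: 2 }); // 1-byte discriminator + two u32s = 9 bytes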
export * from './array'; | ||
export * from './array-like-codec-size'; | ||
export * from './assertions'; | ||
@@ -4,0 +3,0 @@ export * from './bit-array'; |
@@ -1,6 +0,6 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
import { ArrayLikeCodecSize } from './array-like-codec-size'; | ||
/** Defines the options for Map codecs. */ | ||
export type MapCodecOptions<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
import { ArrayLikeCodecSize } from './array'; | ||
/** Defines the config for Map codecs. */ | ||
export type MapCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -17,5 +17,16 @@ * The size of the array. | ||
* @param value - The encoder to use for the map's values. | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getMapEncoder<K, V>(key: Encoder<K>, value: Encoder<V>, options?: MapCodecOptions<NumberEncoder>): Encoder<Map<K, V>>; | ||
export declare function getMapEncoder<TFromKey, TFromValue>(key: Encoder<TFromKey>, value: Encoder<TFromValue>, config: MapCodecConfig<NumberEncoder> & { | ||
size: 0; | ||
}): FixedSizeEncoder<Map<TFromKey, TFromValue>, 0>; | ||
export declare function getMapEncoder<TFromKey, TFromValue>(key: FixedSizeEncoder<TFromKey>, value: FixedSizeEncoder<TFromValue>, config: MapCodecConfig<NumberEncoder> & { | ||
size: number; | ||
}): FixedSizeEncoder<Map<TFromKey, TFromValue>>; | ||
export declare function getMapEncoder<TFromKey, TFromValue>(key: FixedSizeEncoder<TFromKey>, value: FixedSizeEncoder<TFromValue>, config: MapCodecConfig<NumberEncoder> & { | ||
size: 'remainder'; | ||
}): VariableSizeEncoder<Map<TFromKey, TFromValue>>; | ||
export declare function getMapEncoder<TFromKey, TFromValue>(key: Encoder<TFromKey>, value: Encoder<TFromValue>, config?: MapCodecConfig<NumberEncoder> & { | ||
size?: number | NumberEncoder; | ||
}): VariableSizeEncoder<Map<TFromKey, TFromValue>>; | ||
/** | ||
@@ -26,5 +37,16 @@ * Creates a decoder for a map. | ||
* @param value - The decoder to use for the map's values. | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getMapDecoder<K, V>(key: Decoder<K>, value: Decoder<V>, options?: MapCodecOptions<NumberDecoder>): Decoder<Map<K, V>>; | ||
export declare function getMapDecoder<TToKey, TToValue>(key: Decoder<TToKey>, value: Decoder<TToValue>, config: MapCodecConfig<NumberDecoder> & { | ||
size: 0; | ||
}): FixedSizeDecoder<Map<TToKey, TToValue>, 0>; | ||
export declare function getMapDecoder<TToKey, TToValue>(key: FixedSizeDecoder<TToKey>, value: FixedSizeDecoder<TToValue>, config: MapCodecConfig<NumberDecoder> & { | ||
size: number; | ||
}): FixedSizeDecoder<Map<TToKey, TToValue>>; | ||
export declare function getMapDecoder<TToKey, TToValue>(key: FixedSizeDecoder<TToKey>, value: FixedSizeDecoder<TToValue>, config: MapCodecConfig<NumberDecoder> & { | ||
size: 'remainder'; | ||
}): VariableSizeDecoder<Map<TToKey, TToValue>>; | ||
export declare function getMapDecoder<TToKey, TToValue>(key: Decoder<TToKey>, value: Decoder<TToValue>, config?: MapCodecConfig<NumberDecoder> & { | ||
size?: number | NumberDecoder; | ||
}): VariableSizeDecoder<Map<TToKey, TToValue>>; | ||
/** | ||
@@ -35,5 +57,16 @@ * Creates a codec for a map. | ||
* @param value - The codec to use for the map's values. | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getMapCodec<TK, TV, UK extends TK = TK, UV extends TV = TV>(key: Codec<TK, UK>, value: Codec<TV, UV>, options?: MapCodecOptions<NumberCodec>): Codec<Map<TK, TV>, Map<UK, UV>>; | ||
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: Codec<TFromKey, TToKey>, value: Codec<TFromValue, TToValue>, config: MapCodecConfig<NumberCodec> & { | ||
size: 0; | ||
}): FixedSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>, 0>; | ||
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: FixedSizeCodec<TFromKey, TToKey>, value: FixedSizeCodec<TFromValue, TToValue>, config: MapCodecConfig<NumberCodec> & { | ||
size: number; | ||
}): FixedSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>>; | ||
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: FixedSizeCodec<TFromKey, TToKey>, value: FixedSizeCodec<TFromValue, TToValue>, config: MapCodecConfig<NumberCodec> & { | ||
size: 'remainder'; | ||
}): VariableSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>>; | ||
export declare function getMapCodec<TFromKey, TFromValue, TToKey extends TFromKey = TFromKey, TToValue extends TFromValue = TFromValue>(key: Codec<TFromKey, TToKey>, value: Codec<TFromValue, TToValue>, config?: MapCodecConfig<NumberCodec> & { | ||
size?: number | NumberCodec; | ||
}): VariableSizeCodec<Map<TFromKey, TFromValue>, Map<TToKey, TToValue>>; | ||
//# sourceMappingURL=map.d.ts.map |
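Editor's note: map codecs take a key codec and a value codec plus the same size options as arrays. A sketch with number keys and values (the byte counts follow from the u8/u32 codecs used here):

    import { getMapCodec } from '@solana/codecs-data-structures';
    import { getU8Codec, getU32Codec } from '@solana/codecs-numbers';

    // Fixed entry count: 3 entries * (1 + 4) bytes = 15 bytes, fixed size.
    const scores = getMapCodec(getU8Codec(), getU32Codec(), { size: 3 });
    const bytes = scores.encode(new Map([[1, 100], [2, 200], [3, 300]]));
    const decoded = scores.decode(bytes); // Map(3) { 1 => 100, 2 => 200, 3 => 300 }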
@@ -1,5 +0,5 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
/** Defines the options for nullable codecs. */ | ||
export type NullableCodecOptions<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
import { FixedSizeNumberCodec, FixedSizeNumberDecoder, FixedSizeNumberEncoder, NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
/** Defines the config for nullable codecs. */ | ||
export type NullableCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -24,5 +24,11 @@ * The codec to use for the boolean prefix. | ||
* @param item - The encoder to use for the value that may be present. | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getNullableEncoder<T>(item: Encoder<T>, options?: NullableCodecOptions<NumberEncoder>): Encoder<T | null>; | ||
export declare function getNullableEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: NullableCodecConfig<FixedSizeNumberEncoder> & { | ||
fixed: true; | ||
}): FixedSizeEncoder<TFrom | null>; | ||
export declare function getNullableEncoder<TFrom>(item: FixedSizeEncoder<TFrom, 0>, config?: NullableCodecConfig<FixedSizeNumberEncoder>): FixedSizeEncoder<TFrom | null>; | ||
export declare function getNullableEncoder<TFrom>(item: Encoder<TFrom>, config?: NullableCodecConfig<NumberEncoder> & { | ||
fixed?: false; | ||
}): VariableSizeEncoder<TFrom | null>; | ||
/** | ||
@@ -32,5 +38,11 @@ * Creates a decoder for an optional value using `null` as the `None` value. | ||
* @param item - The decoder to use for the value that may be present. | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getNullableDecoder<T>(item: Decoder<T>, options?: NullableCodecOptions<NumberDecoder>): Decoder<T | null>; | ||
export declare function getNullableDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: NullableCodecConfig<FixedSizeNumberDecoder> & { | ||
fixed: true; | ||
}): FixedSizeDecoder<TTo | null>; | ||
export declare function getNullableDecoder<TTo>(item: FixedSizeDecoder<TTo, 0>, config?: NullableCodecConfig<FixedSizeNumberDecoder>): FixedSizeDecoder<TTo | null>; | ||
export declare function getNullableDecoder<TTo>(item: Decoder<TTo>, config?: NullableCodecConfig<NumberDecoder> & { | ||
fixed?: false; | ||
}): VariableSizeDecoder<TTo | null>; | ||
/** | ||
@@ -40,5 +52,11 @@ * Creates a codec for an optional value using `null` as the `None` value. | ||
* @param item - The codec to use for the value that may be present. | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getNullableCodec<T, U extends T = T>(item: Codec<T, U>, options?: NullableCodecOptions<NumberCodec>): Codec<T | null, U | null>; | ||
export declare function getNullableCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: NullableCodecConfig<FixedSizeNumberCodec> & { | ||
fixed: true; | ||
}): FixedSizeCodec<TFrom | null, TTo | null>; | ||
export declare function getNullableCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo, 0>, config?: NullableCodecConfig<FixedSizeNumberCodec>): FixedSizeCodec<TFrom | null, TTo | null>; | ||
export declare function getNullableCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config?: NullableCodecConfig<NumberCodec> & { | ||
fixed?: false; | ||
}): VariableSizeCodec<TFrom | null, TTo | null>; | ||
//# sourceMappingURL=nullable.d.ts.map |
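To illustrate the new fixed/variable split for nullables, here is a short hedged sketch (not part of the diff; the u64 codec import is an assumption based on @solana/codecs-numbers):

import { getNullableCodec } from '@solana/codecs-data-structures';
import { getU64Codec } from '@solana/codecs-numbers';

// Default behaviour: a boolean prefix followed by the value only when present,
// so the overall codec is variable-size.
const variableNullable = getNullableCodec(getU64Codec());

// With `fixed: true`, the value slot is always reserved, which requires a
// fixed-size item and a fixed-size prefix, and yields a FixedSizeCodec.
const fixedNullable = getNullableCodec(getU64Codec(), { fixed: true });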
@@ -1,3 +0,3 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
import { FixedSizeNumberCodec, FixedSizeNumberDecoder, FixedSizeNumberEncoder, NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
/** | ||
@@ -15,4 +15,4 @@ * Defines a scalar enum as a type from its constructor. | ||
} | number | T) & NonNullable<unknown>; | ||
/** Defines the options for scalar enum codecs. */ | ||
export type ScalarEnumCodecOptions<TDiscriminator extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
/** Defines the config for scalar enum codecs. */ | ||
export type ScalarEnumCodecConfig<TDiscriminator extends NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -28,5 +28,9 @@ * The codec to use for the enum discriminator. | ||
* @param constructor - The constructor of the scalar enum. | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getScalarEnumEncoder<T>(constructor: ScalarEnum<T>, options?: ScalarEnumCodecOptions<NumberEncoder>): Encoder<T>; | ||
export declare function getScalarEnumEncoder<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor): FixedSizeEncoder<TFrom, 1>; | ||
export declare function getScalarEnumEncoder<TFrom, TFromConstructor extends ScalarEnum<TFrom>, TSize extends number>(constructor: TFromConstructor, config: ScalarEnumCodecConfig<NumberEncoder> & { | ||
size: FixedSizeNumberEncoder<TSize>; | ||
}): FixedSizeEncoder<TFrom, TSize>; | ||
export declare function getScalarEnumEncoder<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor, config?: ScalarEnumCodecConfig<NumberEncoder>): VariableSizeEncoder<TFrom>; | ||
/** | ||
@@ -36,5 +40,9 @@ * Creates a scalar enum decoder. | ||
* @param constructor - The constructor of the scalar enum. | ||
* @param options - A set of options for the decoder. | ||
* @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getScalarEnumDecoder<T>(constructor: ScalarEnum<T>, options?: ScalarEnumCodecOptions<NumberDecoder>): Decoder<T>; | ||
export declare function getScalarEnumDecoder<TTo, TToConstructor extends ScalarEnum<TTo>>(constructor: TToConstructor): FixedSizeDecoder<TTo, 1>; | ||
export declare function getScalarEnumDecoder<TTo, TToConstructor extends ScalarEnum<TTo>, TSize extends number>(constructor: TToConstructor, config: ScalarEnumCodecConfig<NumberDecoder> & { | ||
size: FixedSizeNumberDecoder<TSize>; | ||
}): FixedSizeDecoder<TTo, TSize>; | ||
export declare function getScalarEnumDecoder<TTo, TToConstructor extends ScalarEnum<TTo>>(constructor: TToConstructor, config?: ScalarEnumCodecConfig<NumberDecoder>): VariableSizeDecoder<TTo>; | ||
/** | ||
@@ -44,5 +52,9 @@ * Creates a scalar enum codec. | ||
* @param constructor - The constructor of the scalar enum. | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getScalarEnumCodec<T>(constructor: ScalarEnum<T>, options?: ScalarEnumCodecOptions<NumberCodec>): Codec<T>; | ||
export declare function getScalarEnumCodec<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor): FixedSizeCodec<TFrom, TFrom, 1>; | ||
export declare function getScalarEnumCodec<TFrom, TFromConstructor extends ScalarEnum<TFrom>, TSize extends number>(constructor: TFromConstructor, config: ScalarEnumCodecConfig<NumberCodec> & { | ||
size: FixedSizeNumberCodec<TSize>; | ||
}): FixedSizeCodec<TFrom, TFrom, TSize>; | ||
export declare function getScalarEnumCodec<TFrom, TFromConstructor extends ScalarEnum<TFrom>>(constructor: TFromConstructor, config?: ScalarEnumCodecConfig<NumberCodec>): VariableSizeCodec<TFrom>; | ||
//# sourceMappingURL=scalar-enum.d.ts.map |
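A hedged sketch of how the new scalar-enum overloads resolve (illustrative only; the enum and the u32 codec are example inputs, not from the diff):

import { getScalarEnumCodec } from '@solana/codecs-data-structures';
import { getU32Codec } from '@solana/codecs-numbers';

enum Direction { Left, Right, Up, Down }

// With no config, the discriminator is a single byte, matching the new
// FixedSizeCodec<..., 1> overload.
const u8Direction = getScalarEnumCodec(Direction);

// Passing a fixed-size number codec as `size` widens the discriminator,
// here to 4 bytes, matching the FixedSizeCodec<..., TSize> overload.
const u32Direction = getScalarEnumCodec(Direction, { size: getU32Codec() });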
@@ -1,6 +0,6 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
import { NumberCodec, NumberDecoder, NumberEncoder } from '@solana/codecs-numbers'; | ||
import { ArrayLikeCodecSize } from './array-like-codec-size'; | ||
/** Defines the options for set codecs. */ | ||
export type SetCodecOptions<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = BaseCodecOptions & { | ||
import { ArrayLikeCodecSize } from './array'; | ||
/** Defines the config for set codecs. */ | ||
export type SetCodecConfig<TPrefix extends NumberCodec | NumberEncoder | NumberDecoder> = { | ||
/** | ||
@@ -16,5 +16,16 @@ * The size of the set. | ||
* @param item - The encoder to use for the set's items. | ||
* @param options - A set of options for the encoder. | ||
* @param config - A set of config for the encoder. | ||
*/ | ||
export declare function getSetEncoder<T>(item: Encoder<T>, options?: SetCodecOptions<NumberEncoder>): Encoder<Set<T>>; | ||
export declare function getSetEncoder<TFrom>(item: Encoder<TFrom>, config: SetCodecConfig<NumberEncoder> & { | ||
size: 0; | ||
}): FixedSizeEncoder<Set<TFrom>, 0>; | ||
export declare function getSetEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: SetCodecConfig<NumberEncoder> & { | ||
size: number; | ||
}): FixedSizeEncoder<Set<TFrom>>; | ||
export declare function getSetEncoder<TFrom>(item: FixedSizeEncoder<TFrom>, config: SetCodecConfig<NumberEncoder> & { | ||
size: 'remainder'; | ||
}): VariableSizeEncoder<Set<TFrom>>; | ||
export declare function getSetEncoder<TFrom>(item: Encoder<TFrom>, config?: SetCodecConfig<NumberEncoder> & { | ||
size?: number | NumberEncoder; | ||
}): VariableSizeEncoder<Set<TFrom>>; | ||
/** | ||
@@ -24,5 +35,16 @@ * Decodes a set of items. | ||
 * @param item - The decoder to use for the set's items. | ||
 * @param options - A set of options for the decoder. | ||
 * @param config - A set of config for the decoder. | ||
*/ | ||
export declare function getSetDecoder<T>(item: Decoder<T>, options?: SetCodecOptions<NumberDecoder>): Decoder<Set<T>>; | ||
export declare function getSetDecoder<TTo>(item: Decoder<TTo>, config: SetCodecConfig<NumberDecoder> & { | ||
size: 0; | ||
}): FixedSizeDecoder<Set<TTo>, 0>; | ||
export declare function getSetDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: SetCodecConfig<NumberDecoder> & { | ||
size: number; | ||
}): FixedSizeDecoder<Set<TTo>>; | ||
export declare function getSetDecoder<TTo>(item: FixedSizeDecoder<TTo>, config: SetCodecConfig<NumberDecoder> & { | ||
size: 'remainder'; | ||
}): VariableSizeDecoder<Set<TTo>>; | ||
export declare function getSetDecoder<TTo>(item: Decoder<TTo>, config?: SetCodecConfig<NumberDecoder> & { | ||
size?: number | NumberDecoder; | ||
}): VariableSizeDecoder<Set<TTo>>; | ||
/** | ||
@@ -32,5 +54,16 @@ * Creates a codec for a set of items. | ||
* @param item - The codec to use for the set's items. | ||
* @param options - A set of options for the codec. | ||
* @param config - A set of config for the codec. | ||
*/ | ||
export declare function getSetCodec<T, U extends T = T>(item: Codec<T, U>, options?: SetCodecOptions<NumberCodec>): Codec<Set<T>, Set<U>>; | ||
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config: SetCodecConfig<NumberCodec> & { | ||
size: 0; | ||
}): FixedSizeCodec<Set<TFrom>, Set<TTo>, 0>; | ||
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: SetCodecConfig<NumberCodec> & { | ||
size: number; | ||
}): FixedSizeCodec<Set<TFrom>, Set<TTo>>; | ||
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: FixedSizeCodec<TFrom, TTo>, config: SetCodecConfig<NumberCodec> & { | ||
size: 'remainder'; | ||
}): VariableSizeCodec<Set<TFrom>, Set<TTo>>; | ||
export declare function getSetCodec<TFrom, TTo extends TFrom = TFrom>(item: Codec<TFrom, TTo>, config?: SetCodecConfig<NumberCodec> & { | ||
size?: number | NumberCodec; | ||
}): VariableSizeCodec<Set<TFrom>, Set<TTo>>; | ||
//# sourceMappingURL=set.d.ts.map |
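A short hedged usage sketch of the new set codec overloads (not part of the diff; the item codec is assumed from @solana/codecs-numbers):

import { getSetCodec } from '@solana/codecs-data-structures';
import { getU8Codec } from '@solana/codecs-numbers';

// Default: a length prefix followed by the items -> VariableSizeCodec<Set<number>>.
const prefixedSet = getSetCodec(getU8Codec());

// A numeric `size` with a fixed-size item codec resolves to the FixedSizeCodec overload.
const threeItemSet = getSetCodec(getU8Codec(), { size: 3 });

// `size: 'remainder'` reads fixed-size items until the bytes run out.
const remainderSet = getSetCodec(getU8Codec(), { size: 'remainder' });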
@@ -1,16 +0,26 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
/** Get the name and encoder of each field in a struct. */ | ||
export type StructToEncoderTuple<T extends object> = Array<{ | ||
[K in keyof T]: [K, Encoder<T[K]>]; | ||
}[keyof T]>; | ||
export type StructToEncoderTuple<TFrom extends object> = Array<{ | ||
[K in keyof TFrom]: [K, Encoder<TFrom[K]>]; | ||
}[keyof TFrom]>; | ||
/** Get the name and fixed-size encoder of each field in a struct. */ | ||
export type StructToFixedSizeEncoderTuple<TFrom extends object> = Array<{ | ||
[K in keyof TFrom]: [K, FixedSizeEncoder<TFrom[K]>]; | ||
}[keyof TFrom]>; | ||
/** Get the name and decoder of each field in a struct. */ | ||
export type StructToDecoderTuple<T extends object> = Array<{ | ||
[K in keyof T]: [K, Decoder<T[K]>]; | ||
}[keyof T]>; | ||
export type StructToDecoderTuple<TTo extends object> = Array<{ | ||
[K in keyof TTo]: [K, Decoder<TTo[K]>]; | ||
}[keyof TTo]>; | ||
/** Get the name and fixed-size decoder of each field in a struct. */ | ||
export type StructToFixedSizeDecoderTuple<TTo extends object> = Array<{ | ||
[K in keyof TTo]: [K, FixedSizeDecoder<TTo[K]>]; | ||
}[keyof TTo]>; | ||
/** Get the name and codec of each field in a struct. */ | ||
export type StructToCodecTuple<T extends object, U extends T> = Array<{ | ||
[K in keyof T]: [K, Codec<T[K], U[K]>]; | ||
}[keyof T]>; | ||
/** Defines the options for struct codecs. */ | ||
export type StructCodecOptions = BaseCodecOptions; | ||
export type StructToCodecTuple<TFrom extends object, TTo extends TFrom> = Array<{ | ||
[K in keyof TFrom]: [K, Codec<TFrom[K], TTo[K]>]; | ||
}[keyof TFrom]>; | ||
/** Get the name and fixed-size codec of each field in a struct. */ | ||
export type StructToFixedSizeCodecTuple<TFrom extends object, TTo extends TFrom> = Array<{ | ||
[K in keyof TFrom]: [K, FixedSizeCodec<TFrom[K], TTo[K]>]; | ||
}[keyof TFrom]>; | ||
/** | ||
@@ -20,5 +30,5 @@ * Creates an encoder for a custom object. | ||
* @param fields - The name and encoder of each field. | ||
* @param options - A set of options for the encoder. | ||
*/ | ||
export declare function getStructEncoder<T extends object>(fields: StructToEncoderTuple<T>, options?: StructCodecOptions): Encoder<T>; | ||
export declare function getStructEncoder<TFrom extends object>(fields: StructToFixedSizeEncoderTuple<TFrom>): FixedSizeEncoder<TFrom>; | ||
export declare function getStructEncoder<TFrom extends object>(fields: StructToEncoderTuple<TFrom>): VariableSizeEncoder<TFrom>; | ||
/** | ||
@@ -28,5 +38,5 @@ * Creates a decoder for a custom object. | ||
* @param fields - The name and decoder of each field. | ||
* @param options - A set of options for the decoder. | ||
*/ | ||
export declare function getStructDecoder<T extends object>(fields: StructToDecoderTuple<T>, options?: StructCodecOptions): Decoder<T>; | ||
export declare function getStructDecoder<TTo extends object>(fields: StructToFixedSizeDecoderTuple<TTo>): FixedSizeDecoder<TTo>; | ||
export declare function getStructDecoder<TTo extends object>(fields: StructToDecoderTuple<TTo>): VariableSizeDecoder<TTo>; | ||
/** | ||
@@ -36,5 +46,5 @@ * Creates a codec for a custom object. | ||
* @param fields - The name and codec of each field. | ||
* @param options - A set of options for the codec. | ||
*/ | ||
export declare function getStructCodec<T extends object, U extends T = T>(fields: StructToCodecTuple<T, U>, options?: StructCodecOptions): Codec<T, U>; | ||
export declare function getStructCodec<TFrom extends object, TTo extends TFrom = TFrom>(fields: StructToFixedSizeCodecTuple<TFrom, TTo>): FixedSizeCodec<TFrom, TTo>; | ||
export declare function getStructCodec<TFrom extends object, TTo extends TFrom = TFrom>(fields: StructToCodecTuple<TFrom, TTo>): VariableSizeCodec<TFrom, TTo>; | ||
//# sourceMappingURL=struct.d.ts.map |
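As a hedged illustration of the struct overloads above: when every field codec is fixed-size, the FixedSizeCodec overload applies; otherwise the result is variable-size. The field codecs below are example inputs, not from the diff.

import { getStructCodec } from '@solana/codecs-data-structures';
import { getU32Codec, getU8Codec } from '@solana/codecs-numbers';

// Every field is fixed-size, so this resolves to a FixedSizeCodec for
// { x: number; y: number; flag: number }.
const pointCodec = getStructCodec([
    ['x', getU32Codec()],
    ['y', getU32Codec()],
    ['flag', getU8Codec()],
]);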
@@ -1,13 +0,20 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
/** Defines the options for tuple codecs. */ | ||
export type TupleCodecOptions = BaseCodecOptions; | ||
type WrapInEncoder<T> = { | ||
[P in keyof T]: Encoder<T[P]>; | ||
import { Codec, Decoder, Encoder, FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder, VariableSizeCodec, VariableSizeDecoder, VariableSizeEncoder } from '@solana/codecs-core'; | ||
type WrapInFixedSizeEncoder<TFrom> = { | ||
[P in keyof TFrom]: FixedSizeEncoder<TFrom[P]>; | ||
}; | ||
type WrapInDecoder<T> = { | ||
[P in keyof T]: Decoder<T[P]>; | ||
type WrapInEncoder<TFrom> = { | ||
[P in keyof TFrom]: Encoder<TFrom[P]>; | ||
}; | ||
type WrapInCodec<T, U extends T = T> = { | ||
[P in keyof T]: Codec<T[P], U[P]>; | ||
type WrapInFixedSizeDecoder<TTo> = { | ||
[P in keyof TTo]: FixedSizeDecoder<TTo[P]>; | ||
}; | ||
type WrapInDecoder<TTo> = { | ||
[P in keyof TTo]: Decoder<TTo[P]>; | ||
}; | ||
type WrapInCodec<TFrom, TTo extends TFrom> = { | ||
[P in keyof TFrom]: Codec<TFrom[P], TTo[P]>; | ||
}; | ||
type WrapInFixedSizeCodec<TFrom, TTo extends TFrom> = { | ||
[P in keyof TFrom]: FixedSizeCodec<TFrom[P], TTo[P]>; | ||
}; | ||
type AnyArray = any[]; | ||
@@ -18,5 +25,5 @@ /** | ||
* @param items - The encoders to use for each item in the tuple. | ||
* @param options - A set of options for the encoder. | ||
*/ | ||
export declare function getTupleEncoder<T extends AnyArray>(items: WrapInEncoder<[...T]>, options?: TupleCodecOptions): Encoder<T>; | ||
export declare function getTupleEncoder<TFrom extends AnyArray>(items: WrapInFixedSizeEncoder<[...TFrom]>): FixedSizeEncoder<TFrom>; | ||
export declare function getTupleEncoder<TFrom extends AnyArray>(items: WrapInEncoder<[...TFrom]>): VariableSizeEncoder<TFrom>; | ||
/** | ||
@@ -26,5 +33,5 @@ * Creates a decoder for a tuple-like array. | ||
* @param items - The decoders to use for each item in the tuple. | ||
* @param options - A set of options for the decoder. | ||
*/ | ||
export declare function getTupleDecoder<T extends AnyArray>(items: WrapInDecoder<[...T]>, options?: TupleCodecOptions): Decoder<T>; | ||
export declare function getTupleDecoder<TTo extends AnyArray>(items: WrapInFixedSizeDecoder<[...TTo]>): FixedSizeDecoder<TTo>; | ||
export declare function getTupleDecoder<TTo extends AnyArray>(items: WrapInDecoder<[...TTo]>): VariableSizeDecoder<TTo>; | ||
/** | ||
@@ -34,6 +41,6 @@ * Creates a codec for a tuple-like array. | ||
* @param items - The codecs to use for each item in the tuple. | ||
* @param options - A set of options for the codec. | ||
*/ | ||
export declare function getTupleCodec<T extends AnyArray, U extends T = T>(items: WrapInCodec<[...T], [...U]>, options?: TupleCodecOptions): Codec<T, U>; | ||
export declare function getTupleCodec<TFrom extends AnyArray, TTo extends TFrom = TFrom>(items: WrapInFixedSizeCodec<[...TFrom], [...TTo]>): FixedSizeCodec<TFrom, TTo>; | ||
export declare function getTupleCodec<TFrom extends AnyArray, TTo extends TFrom = TFrom>(items: WrapInCodec<[...TFrom], [...TTo]>): VariableSizeCodec<TFrom, TTo>; | ||
export {}; | ||
//# sourceMappingURL=tuple.d.ts.map |
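The tuple overloads follow the same pattern as structs; a minimal hedged sketch (item codecs are example inputs):

import { getTupleCodec } from '@solana/codecs-data-structures';
import { getU8Codec, getU64Codec } from '@solana/codecs-numbers';

// Both items are fixed-size codecs, so the FixedSizeCodec overload applies;
// mixing in any variable-size item would fall through to the VariableSizeCodec overload.
const pairCodec = getTupleCodec([getU8Codec(), getU64Codec()]);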
@@ -1,22 +0,14 @@ | ||
import { BaseCodecOptions, Codec, Decoder, Encoder } from '@solana/codecs-core'; | ||
/** Defines the options for unit codecs. */ | ||
export type UnitSerializerOptions = BaseCodecOptions; | ||
import { FixedSizeCodec, FixedSizeDecoder, FixedSizeEncoder } from '@solana/codecs-core'; | ||
/** | ||
* Creates a void encoder. | ||
* | ||
* @param options - A set of options for the encoder. | ||
*/ | ||
export declare function getUnitEncoder(options?: UnitSerializerOptions): Encoder<void>; | ||
export declare function getUnitEncoder(): FixedSizeEncoder<void, 0>; | ||
/** | ||
* Creates a void decoder. | ||
* | ||
* @param options - A set of options for the decoder. | ||
*/ | ||
export declare function getUnitDecoder(options?: UnitSerializerOptions): Decoder<void>; | ||
export declare function getUnitDecoder(): FixedSizeDecoder<void, 0>; | ||
/** | ||
* Creates a void codec. | ||
* | ||
* @param options - A set of options for the codec. | ||
*/ | ||
export declare function getUnitCodec(options?: UnitSerializerOptions): Codec<void>; | ||
export declare function getUnitCodec(): FixedSizeCodec<void, void, 0>; | ||
//# sourceMappingURL=unit.d.ts.map |
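The unit codec now advertises its zero-byte size in the type system; a trivial hedged sketch:

import { getUnitCodec } from '@solana/codecs-data-structures';

// FixedSizeCodec<void, void, 0>: encodes to an empty byte array and consumes no bytes.
const unitCodec = getUnitCodec();
const unitSize = unitCodec.fixedSize; // 0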
@@ -1,5 +0,13 @@ | ||
/** Returns the max size or null if at least one size is null. */ | ||
export declare function maxCodecSizes(sizes: (number | null)[]): number | null; | ||
/** Returns the sum of all sizes or null if at least one size is null. */ | ||
export declare function sumCodecSizes(sizes: (number | null)[]): number | null; | ||
export declare function getFixedSize(codec: { | ||
fixedSize: number; | ||
} | { | ||
maxSize?: number; | ||
}): number | null; | ||
export declare function getMaxSize(codec: { | ||
fixedSize: number; | ||
} | { | ||
maxSize?: number; | ||
}): number | null; | ||
//# sourceMappingURL=utils.d.ts.map |
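Based purely on the signatures above, a minimal sketch of what these size helpers compute (the real implementations live inside the package, and these names may not be re-exported from the package root):

type MaybeSize = number | null;

// The sum is unknown (null) as soon as any individual size is unknown.
function sumCodecSizes(sizes: MaybeSize[]): MaybeSize {
    return sizes.reduce<MaybeSize>((total, size) => (total === null || size === null ? null : total + size), 0);
}

// Same rule for the maximum of a list of sizes.
function maxCodecSizes(sizes: MaybeSize[]): MaybeSize {
    return sizes.reduce<MaybeSize>((max, size) => (max === null || size === null ? null : Math.max(max, size)), 0);
}

// A codec's fixed size, or null when it is variable-size.
function getFixedSize(codec: { fixedSize: number } | { maxSize?: number }): MaybeSize {
    return 'fixedSize' in codec ? codec.fixedSize : null;
}

// The best known upper bound: the fixed size, else the optional maxSize, else null.
function getMaxSize(codec: { fixedSize: number } | { maxSize?: number }): MaybeSize {
    return 'fixedSize' in codec ? codec.fixedSize : codec.maxSize ?? null;
}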
{ | ||
"name": "@solana/codecs-data-structures", | ||
"version": "2.0.0-experimental.f7d1af1", | ||
"version": "2.0.0-experimental.fb88a79", | ||
"description": "Codecs for various data structures", | ||
@@ -52,4 +52,4 @@ "exports": { | ||
"dependencies": { | ||
"@solana/codecs-core": "2.0.0-experimental.f7d1af1", | ||
"@solana/codecs-numbers": "2.0.0-experimental.f7d1af1" | ||
"@solana/codecs-core": "2.0.0-experimental.fb88a79", | ||
"@solana/codecs-numbers": "2.0.0-experimental.fb88a79" | ||
}, | ||
@@ -70,7 +70,7 @@ "devDependencies": { | ||
"jest-runner-prettier": "^1.0.0", | ||
"prettier": "^2.8", | ||
"tsup": "7.2.0", | ||
"prettier": "^3.1", | ||
"tsup": "^8.0.1", | ||
"typescript": "^5.2.2", | ||
"version-from-git": "^1.1.1", | ||
"@solana/codecs-strings": "2.0.0-experimental.f7d1af1", | ||
"@solana/codecs-strings": "2.0.0-experimental.fb88a79", | ||
"build-scripts": "0.0.0", | ||
@@ -77,0 +77,0 @@ "test-config": "0.0.0", |
+ Added @solana/codecs-core@2.0.0-experimental.fb88a79 (transitive)
+ Added @solana/codecs-numbers@2.0.0-experimental.fb88a79 (transitive)
- Removed @solana/codecs-core@2.0.0-experimental.f7d1af1 (transitive)
- Removed @solana/codecs-numbers@2.0.0-experimental.f7d1af1 (transitive)